@e-llm-studio/instant-learning 0.0.202 → 0.0.203

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (579)
  1. package/dist/cjs/node_modules/@e-llm-studio/streaming-response/node_modules/ws/browser.js +2 -0
  2. package/dist/cjs/node_modules/@e-llm-studio/streaming-response/node_modules/ws/browser.js.map +1 -0
  3. package/dist/cjs/node_modules/@emotion/serialize/node_modules/@emotion/hash/dist/emotion-hash.esm.js +2 -0
  4. package/dist/cjs/node_modules/@emotion/serialize/node_modules/@emotion/hash/dist/emotion-hash.esm.js.map +1 -0
  5. package/dist/cjs/node_modules/hast-util-from-parse5/node_modules/comma-separated-tokens/index.js +2 -0
  6. package/dist/cjs/node_modules/hast-util-from-parse5/node_modules/comma-separated-tokens/index.js.map +1 -0
  7. package/dist/cjs/node_modules/hast-util-from-parse5/node_modules/space-separated-tokens/index.js +2 -0
  8. package/dist/cjs/node_modules/hast-util-from-parse5/node_modules/space-separated-tokens/index.js.map +1 -0
  9. package/dist/cjs/node_modules/hast-util-raw/node_modules/unist-util-position/lib/index.js +2 -0
  10. package/dist/cjs/node_modules/hast-util-raw/node_modules/unist-util-position/lib/index.js.map +1 -0
  11. package/dist/cjs/node_modules/hast-util-to-parse5/node_modules/comma-separated-tokens/index.js +2 -0
  12. package/dist/cjs/node_modules/hast-util-to-parse5/node_modules/comma-separated-tokens/index.js.map +1 -0
  13. package/dist/cjs/node_modules/hast-util-to-parse5/node_modules/space-separated-tokens/index.js +2 -0
  14. package/dist/cjs/node_modules/hast-util-to-parse5/node_modules/space-separated-tokens/index.js.map +1 -0
  15. package/dist/cjs/node_modules/mdast-util-definitions/node_modules/unist-util-is/lib/index.js +2 -0
  16. package/dist/cjs/node_modules/mdast-util-definitions/node_modules/unist-util-is/lib/index.js.map +1 -0
  17. package/dist/cjs/node_modules/mdast-util-definitions/node_modules/unist-util-visit/lib/index.js +2 -0
  18. package/dist/cjs/node_modules/mdast-util-definitions/node_modules/unist-util-visit/lib/index.js.map +1 -0
  19. package/dist/cjs/node_modules/mdast-util-definitions/node_modules/unist-util-visit-parents/lib/color.browser.js +2 -0
  20. package/dist/cjs/node_modules/mdast-util-definitions/node_modules/unist-util-visit-parents/lib/color.browser.js.map +1 -0
  21. package/dist/cjs/node_modules/mdast-util-definitions/node_modules/unist-util-visit-parents/lib/index.js +2 -0
  22. package/dist/cjs/node_modules/mdast-util-definitions/node_modules/unist-util-visit-parents/lib/index.js.map +1 -0
  23. package/dist/cjs/node_modules/mdast-util-to-hast/lib/footer.js +2 -0
  24. package/dist/cjs/node_modules/mdast-util-to-hast/lib/footer.js.map +1 -0
  25. package/dist/cjs/node_modules/mdast-util-to-hast/lib/handlers/blockquote.js +2 -0
  26. package/dist/cjs/node_modules/mdast-util-to-hast/lib/handlers/blockquote.js.map +1 -0
  27. package/dist/cjs/node_modules/mdast-util-to-hast/lib/handlers/break.js +2 -0
  28. package/dist/cjs/node_modules/mdast-util-to-hast/lib/handlers/break.js.map +1 -0
  29. package/dist/cjs/node_modules/mdast-util-to-hast/lib/handlers/code.js +2 -0
  30. package/dist/cjs/node_modules/mdast-util-to-hast/lib/handlers/code.js.map +1 -0
  31. package/dist/cjs/node_modules/mdast-util-to-hast/lib/handlers/delete.js +2 -0
  32. package/dist/cjs/node_modules/mdast-util-to-hast/lib/handlers/delete.js.map +1 -0
  33. package/dist/cjs/node_modules/mdast-util-to-hast/lib/handlers/emphasis.js +2 -0
  34. package/dist/cjs/node_modules/mdast-util-to-hast/lib/handlers/emphasis.js.map +1 -0
  35. package/dist/cjs/node_modules/mdast-util-to-hast/lib/handlers/footnote-reference.js +2 -0
  36. package/dist/cjs/node_modules/mdast-util-to-hast/lib/handlers/footnote-reference.js.map +1 -0
  37. package/dist/cjs/node_modules/mdast-util-to-hast/lib/handlers/footnote.js +2 -0
  38. package/dist/cjs/node_modules/mdast-util-to-hast/lib/handlers/footnote.js.map +1 -0
  39. package/dist/cjs/node_modules/mdast-util-to-hast/lib/handlers/heading.js +2 -0
  40. package/dist/cjs/node_modules/mdast-util-to-hast/lib/handlers/heading.js.map +1 -0
  41. package/dist/cjs/node_modules/mdast-util-to-hast/lib/handlers/html.js +2 -0
  42. package/dist/cjs/node_modules/mdast-util-to-hast/lib/handlers/html.js.map +1 -0
  43. package/dist/cjs/node_modules/mdast-util-to-hast/lib/handlers/image-reference.js +2 -0
  44. package/dist/cjs/node_modules/mdast-util-to-hast/lib/handlers/image-reference.js.map +1 -0
  45. package/dist/cjs/node_modules/mdast-util-to-hast/lib/handlers/image.js +2 -0
  46. package/dist/cjs/node_modules/mdast-util-to-hast/lib/handlers/image.js.map +1 -0
  47. package/dist/cjs/node_modules/mdast-util-to-hast/lib/handlers/index.js +2 -0
  48. package/dist/cjs/node_modules/mdast-util-to-hast/lib/handlers/index.js.map +1 -0
  49. package/dist/cjs/node_modules/mdast-util-to-hast/lib/handlers/inline-code.js +2 -0
  50. package/dist/cjs/node_modules/mdast-util-to-hast/lib/handlers/inline-code.js.map +1 -0
  51. package/dist/cjs/node_modules/mdast-util-to-hast/lib/handlers/link-reference.js +2 -0
  52. package/dist/cjs/node_modules/mdast-util-to-hast/lib/handlers/link-reference.js.map +1 -0
  53. package/dist/cjs/node_modules/mdast-util-to-hast/lib/handlers/link.js +2 -0
  54. package/dist/cjs/node_modules/mdast-util-to-hast/lib/handlers/link.js.map +1 -0
  55. package/dist/cjs/node_modules/mdast-util-to-hast/lib/handlers/list-item.js +2 -0
  56. package/dist/cjs/node_modules/mdast-util-to-hast/lib/handlers/list-item.js.map +1 -0
  57. package/dist/cjs/node_modules/mdast-util-to-hast/lib/handlers/list.js +2 -0
  58. package/dist/cjs/node_modules/mdast-util-to-hast/lib/handlers/list.js.map +1 -0
  59. package/dist/cjs/node_modules/mdast-util-to-hast/lib/handlers/paragraph.js +2 -0
  60. package/dist/cjs/node_modules/mdast-util-to-hast/lib/handlers/paragraph.js.map +1 -0
  61. package/dist/cjs/node_modules/mdast-util-to-hast/lib/handlers/root.js +2 -0
  62. package/dist/cjs/node_modules/mdast-util-to-hast/lib/handlers/root.js.map +1 -0
  63. package/dist/cjs/node_modules/mdast-util-to-hast/lib/handlers/strong.js +2 -0
  64. package/dist/cjs/node_modules/mdast-util-to-hast/lib/handlers/strong.js.map +1 -0
  65. package/dist/cjs/node_modules/mdast-util-to-hast/lib/handlers/table-cell.js +2 -0
  66. package/dist/cjs/node_modules/mdast-util-to-hast/lib/handlers/table-cell.js.map +1 -0
  67. package/dist/cjs/node_modules/mdast-util-to-hast/lib/handlers/table-row.js +2 -0
  68. package/dist/cjs/node_modules/mdast-util-to-hast/lib/handlers/table-row.js.map +1 -0
  69. package/dist/cjs/node_modules/mdast-util-to-hast/lib/handlers/table.js +2 -0
  70. package/dist/cjs/node_modules/mdast-util-to-hast/lib/handlers/table.js.map +1 -0
  71. package/dist/cjs/node_modules/mdast-util-to-hast/lib/handlers/text.js +2 -0
  72. package/dist/cjs/node_modules/mdast-util-to-hast/lib/handlers/text.js.map +1 -0
  73. package/dist/cjs/node_modules/mdast-util-to-hast/lib/handlers/thematic-break.js +2 -0
  74. package/dist/cjs/node_modules/mdast-util-to-hast/lib/handlers/thematic-break.js.map +1 -0
  75. package/dist/cjs/node_modules/mdast-util-to-hast/lib/index.js +2 -0
  76. package/dist/cjs/node_modules/mdast-util-to-hast/lib/index.js.map +1 -0
  77. package/dist/cjs/node_modules/mdast-util-to-hast/lib/revert.js +2 -0
  78. package/dist/cjs/node_modules/mdast-util-to-hast/lib/revert.js.map +1 -0
  79. package/dist/cjs/node_modules/mdast-util-to-hast/lib/state.js +2 -0
  80. package/dist/cjs/node_modules/mdast-util-to-hast/lib/state.js.map +1 -0
  81. package/dist/cjs/node_modules/mdast-util-to-hast/node_modules/micromark-util-character/index.js +2 -0
  82. package/dist/cjs/node_modules/mdast-util-to-hast/node_modules/micromark-util-character/index.js.map +1 -0
  83. package/dist/cjs/node_modules/mdast-util-to-hast/node_modules/micromark-util-sanitize-uri/index.js +2 -0
  84. package/dist/cjs/node_modules/mdast-util-to-hast/node_modules/micromark-util-sanitize-uri/index.js.map +1 -0
  85. package/dist/cjs/node_modules/mdast-util-to-hast/node_modules/unist-util-is/lib/index.js +2 -0
  86. package/dist/cjs/node_modules/mdast-util-to-hast/node_modules/unist-util-is/lib/index.js.map +1 -0
  87. package/dist/cjs/node_modules/mdast-util-to-hast/node_modules/unist-util-visit/lib/index.js +2 -0
  88. package/dist/cjs/node_modules/mdast-util-to-hast/node_modules/unist-util-visit/lib/index.js.map +1 -0
  89. package/dist/cjs/node_modules/mdast-util-to-hast/node_modules/unist-util-visit-parents/lib/color.browser.js +2 -0
  90. package/dist/cjs/node_modules/mdast-util-to-hast/node_modules/unist-util-visit-parents/lib/color.browser.js.map +1 -0
  91. package/dist/cjs/node_modules/mdast-util-to-hast/node_modules/unist-util-visit-parents/lib/index.js +2 -0
  92. package/dist/cjs/node_modules/mdast-util-to-hast/node_modules/unist-util-visit-parents/lib/index.js.map +1 -0
  93. package/dist/cjs/node_modules/react-markdown/node_modules/comma-separated-tokens/index.js +2 -0
  94. package/dist/cjs/node_modules/react-markdown/node_modules/comma-separated-tokens/index.js.map +1 -0
  95. package/dist/cjs/node_modules/react-markdown/node_modules/is-plain-obj/index.js +2 -0
  96. package/dist/cjs/node_modules/react-markdown/node_modules/is-plain-obj/index.js.map +1 -0
  97. package/dist/cjs/node_modules/react-markdown/node_modules/mdast-util-from-markdown/lib/index.js +2 -0
  98. package/dist/cjs/node_modules/react-markdown/node_modules/mdast-util-from-markdown/lib/index.js.map +1 -0
  99. package/dist/cjs/node_modules/react-markdown/node_modules/mdast-util-to-string/lib/index.js +2 -0
  100. package/dist/cjs/node_modules/react-markdown/node_modules/mdast-util-to-string/lib/index.js.map +1 -0
  101. package/dist/cjs/node_modules/react-markdown/node_modules/micromark/lib/constructs.js +2 -0
  102. package/dist/cjs/node_modules/react-markdown/node_modules/micromark/lib/constructs.js.map +1 -0
  103. package/dist/cjs/node_modules/react-markdown/node_modules/micromark/lib/create-tokenizer.js +2 -0
  104. package/dist/cjs/node_modules/react-markdown/node_modules/micromark/lib/create-tokenizer.js.map +1 -0
  105. package/dist/cjs/node_modules/react-markdown/node_modules/micromark/lib/initialize/content.js +2 -0
  106. package/dist/cjs/node_modules/react-markdown/node_modules/micromark/lib/initialize/content.js.map +1 -0
  107. package/dist/cjs/node_modules/react-markdown/node_modules/micromark/lib/initialize/document.js +2 -0
  108. package/dist/cjs/node_modules/react-markdown/node_modules/micromark/lib/initialize/document.js.map +1 -0
  109. package/dist/cjs/node_modules/react-markdown/node_modules/micromark/lib/initialize/flow.js +2 -0
  110. package/dist/cjs/node_modules/react-markdown/node_modules/micromark/lib/initialize/flow.js.map +1 -0
  111. package/dist/cjs/node_modules/react-markdown/node_modules/micromark/lib/initialize/text.js +2 -0
  112. package/dist/cjs/node_modules/react-markdown/node_modules/micromark/lib/initialize/text.js.map +1 -0
  113. package/dist/cjs/node_modules/react-markdown/node_modules/micromark/lib/parse.js +2 -0
  114. package/dist/cjs/node_modules/react-markdown/node_modules/micromark/lib/parse.js.map +1 -0
  115. package/dist/cjs/node_modules/react-markdown/node_modules/micromark/lib/postprocess.js +2 -0
  116. package/dist/cjs/node_modules/react-markdown/node_modules/micromark/lib/postprocess.js.map +1 -0
  117. package/dist/cjs/node_modules/react-markdown/node_modules/micromark/lib/preprocess.js +2 -0
  118. package/dist/cjs/node_modules/react-markdown/node_modules/micromark/lib/preprocess.js.map +1 -0
  119. package/dist/cjs/node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/attention.js +2 -0
  120. package/dist/cjs/node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/attention.js.map +1 -0
  121. package/dist/cjs/node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/autolink.js +2 -0
  122. package/dist/cjs/node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/autolink.js.map +1 -0
  123. package/dist/cjs/node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/blank-line.js +2 -0
  124. package/dist/cjs/node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/blank-line.js.map +1 -0
  125. package/dist/cjs/node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/block-quote.js +2 -0
  126. package/dist/cjs/node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/block-quote.js.map +1 -0
  127. package/dist/cjs/node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/character-escape.js +2 -0
  128. package/dist/cjs/node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/character-escape.js.map +1 -0
  129. package/dist/cjs/node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/character-reference.js +2 -0
  130. package/dist/cjs/node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/character-reference.js.map +1 -0
  131. package/dist/cjs/node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/code-fenced.js +2 -0
  132. package/dist/cjs/node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/code-fenced.js.map +1 -0
  133. package/dist/cjs/node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/code-indented.js +2 -0
  134. package/dist/cjs/node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/code-indented.js.map +1 -0
  135. package/dist/cjs/node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/code-text.js +2 -0
  136. package/dist/cjs/node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/code-text.js.map +1 -0
  137. package/dist/cjs/node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/content.js +2 -0
  138. package/dist/cjs/node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/content.js.map +1 -0
  139. package/dist/cjs/node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/definition.js +2 -0
  140. package/dist/cjs/node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/definition.js.map +1 -0
  141. package/dist/cjs/node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/hard-break-escape.js +2 -0
  142. package/dist/cjs/node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/hard-break-escape.js.map +1 -0
  143. package/dist/cjs/node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/heading-atx.js +2 -0
  144. package/dist/cjs/node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/heading-atx.js.map +1 -0
  145. package/dist/cjs/node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/html-flow.js +2 -0
  146. package/dist/cjs/node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/html-flow.js.map +1 -0
  147. package/dist/cjs/node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/html-text.js +2 -0
  148. package/dist/cjs/node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/html-text.js.map +1 -0
  149. package/dist/cjs/node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/label-end.js +2 -0
  150. package/dist/cjs/node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/label-end.js.map +1 -0
  151. package/dist/cjs/node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/label-start-image.js +2 -0
  152. package/dist/cjs/node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/label-start-image.js.map +1 -0
  153. package/dist/cjs/node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/label-start-link.js +2 -0
  154. package/dist/cjs/node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/label-start-link.js.map +1 -0
  155. package/dist/cjs/node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/line-ending.js +2 -0
  156. package/dist/cjs/node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/line-ending.js.map +1 -0
  157. package/dist/cjs/node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/list.js +2 -0
  158. package/dist/cjs/node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/list.js.map +1 -0
  159. package/dist/cjs/node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/setext-underline.js +2 -0
  160. package/dist/cjs/node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/setext-underline.js.map +1 -0
  161. package/dist/cjs/node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/thematic-break.js +2 -0
  162. package/dist/cjs/node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/thematic-break.js.map +1 -0
  163. package/dist/cjs/node_modules/react-markdown/node_modules/micromark-factory-destination/index.js +2 -0
  164. package/dist/cjs/node_modules/react-markdown/node_modules/micromark-factory-destination/index.js.map +1 -0
  165. package/dist/cjs/node_modules/react-markdown/node_modules/micromark-factory-label/index.js +2 -0
  166. package/dist/cjs/node_modules/react-markdown/node_modules/micromark-factory-label/index.js.map +1 -0
  167. package/dist/cjs/node_modules/react-markdown/node_modules/micromark-factory-space/index.js +2 -0
  168. package/dist/cjs/node_modules/react-markdown/node_modules/micromark-factory-space/index.js.map +1 -0
  169. package/dist/cjs/node_modules/react-markdown/node_modules/micromark-factory-title/index.js +2 -0
  170. package/dist/cjs/node_modules/react-markdown/node_modules/micromark-factory-title/index.js.map +1 -0
  171. package/dist/cjs/node_modules/react-markdown/node_modules/micromark-factory-whitespace/index.js +2 -0
  172. package/dist/cjs/node_modules/react-markdown/node_modules/micromark-factory-whitespace/index.js.map +1 -0
  173. package/dist/cjs/node_modules/react-markdown/node_modules/micromark-util-character/index.js +2 -0
  174. package/dist/cjs/node_modules/react-markdown/node_modules/micromark-util-character/index.js.map +1 -0
  175. package/dist/cjs/node_modules/react-markdown/node_modules/micromark-util-character/lib/unicode-punctuation-regex.js +2 -0
  176. package/dist/cjs/node_modules/react-markdown/node_modules/micromark-util-character/lib/unicode-punctuation-regex.js.map +1 -0
  177. package/dist/cjs/node_modules/react-markdown/node_modules/micromark-util-chunked/index.js +2 -0
  178. package/dist/cjs/node_modules/react-markdown/node_modules/micromark-util-chunked/index.js.map +1 -0
  179. package/dist/cjs/node_modules/react-markdown/node_modules/micromark-util-classify-character/index.js +2 -0
  180. package/dist/cjs/node_modules/react-markdown/node_modules/micromark-util-classify-character/index.js.map +1 -0
  181. package/dist/cjs/node_modules/react-markdown/node_modules/micromark-util-combine-extensions/index.js +2 -0
  182. package/dist/cjs/node_modules/react-markdown/node_modules/micromark-util-combine-extensions/index.js.map +1 -0
  183. package/dist/cjs/node_modules/react-markdown/node_modules/micromark-util-decode-numeric-character-reference/index.js +2 -0
  184. package/dist/cjs/node_modules/react-markdown/node_modules/micromark-util-decode-numeric-character-reference/index.js.map +1 -0
  185. package/dist/cjs/node_modules/react-markdown/node_modules/micromark-util-decode-string/index.js +2 -0
  186. package/dist/cjs/node_modules/react-markdown/node_modules/micromark-util-decode-string/index.js.map +1 -0
  187. package/dist/cjs/node_modules/react-markdown/node_modules/micromark-util-html-tag-name/index.js +2 -0
  188. package/dist/cjs/node_modules/react-markdown/node_modules/micromark-util-html-tag-name/index.js.map +1 -0
  189. package/dist/cjs/node_modules/react-markdown/node_modules/micromark-util-normalize-identifier/index.js +2 -0
  190. package/dist/cjs/node_modules/react-markdown/node_modules/micromark-util-normalize-identifier/index.js.map +1 -0
  191. package/dist/cjs/node_modules/react-markdown/node_modules/micromark-util-resolve-all/index.js +2 -0
  192. package/dist/cjs/node_modules/react-markdown/node_modules/micromark-util-resolve-all/index.js.map +1 -0
  193. package/dist/cjs/node_modules/react-markdown/node_modules/micromark-util-subtokenize/index.js +2 -0
  194. package/dist/cjs/node_modules/react-markdown/node_modules/micromark-util-subtokenize/index.js.map +1 -0
  195. package/dist/cjs/node_modules/react-markdown/node_modules/property-information/index.js +2 -0
  196. package/dist/cjs/node_modules/react-markdown/node_modules/property-information/index.js.map +1 -0
  197. package/dist/cjs/node_modules/react-markdown/node_modules/property-information/lib/aria.js +2 -0
  198. package/dist/cjs/node_modules/react-markdown/node_modules/property-information/lib/aria.js.map +1 -0
  199. package/dist/cjs/node_modules/react-markdown/node_modules/property-information/lib/find.js +2 -0
  200. package/dist/cjs/node_modules/react-markdown/node_modules/property-information/lib/find.js.map +1 -0
  201. package/dist/cjs/node_modules/react-markdown/node_modules/property-information/lib/hast-to-react.js +2 -0
  202. package/dist/cjs/node_modules/react-markdown/node_modules/property-information/lib/hast-to-react.js.map +1 -0
  203. package/dist/cjs/node_modules/react-markdown/node_modules/property-information/lib/html.js +2 -0
  204. package/dist/cjs/node_modules/react-markdown/node_modules/property-information/lib/html.js.map +1 -0
  205. package/dist/cjs/node_modules/react-markdown/node_modules/property-information/lib/normalize.js +2 -0
  206. package/dist/cjs/node_modules/react-markdown/node_modules/property-information/lib/normalize.js.map +1 -0
  207. package/dist/cjs/node_modules/react-markdown/node_modules/property-information/lib/svg.js +2 -0
  208. package/dist/cjs/node_modules/react-markdown/node_modules/property-information/lib/svg.js.map +1 -0
  209. package/dist/cjs/node_modules/react-markdown/node_modules/property-information/lib/util/case-insensitive-transform.js +2 -0
  210. package/dist/cjs/node_modules/react-markdown/node_modules/property-information/lib/util/case-insensitive-transform.js.map +1 -0
  211. package/dist/cjs/node_modules/react-markdown/node_modules/property-information/lib/util/case-sensitive-transform.js +2 -0
  212. package/dist/cjs/node_modules/react-markdown/node_modules/property-information/lib/util/case-sensitive-transform.js.map +1 -0
  213. package/dist/cjs/node_modules/react-markdown/node_modules/property-information/lib/util/create.js +2 -0
  214. package/dist/cjs/node_modules/react-markdown/node_modules/property-information/lib/util/create.js.map +1 -0
  215. package/dist/cjs/node_modules/react-markdown/node_modules/property-information/lib/util/defined-info.js +2 -0
  216. package/dist/cjs/node_modules/react-markdown/node_modules/property-information/lib/util/defined-info.js.map +1 -0
  217. package/dist/cjs/node_modules/react-markdown/node_modules/property-information/lib/util/info.js +2 -0
  218. package/dist/cjs/node_modules/react-markdown/node_modules/property-information/lib/util/info.js.map +1 -0
  219. package/dist/cjs/node_modules/react-markdown/node_modules/property-information/lib/util/merge.js +2 -0
  220. package/dist/cjs/node_modules/react-markdown/node_modules/property-information/lib/util/merge.js.map +1 -0
  221. package/dist/cjs/node_modules/react-markdown/node_modules/property-information/lib/util/schema.js +2 -0
  222. package/dist/cjs/node_modules/react-markdown/node_modules/property-information/lib/util/schema.js.map +1 -0
  223. package/dist/cjs/node_modules/react-markdown/node_modules/property-information/lib/util/types.js +2 -0
  224. package/dist/cjs/node_modules/react-markdown/node_modules/property-information/lib/util/types.js.map +1 -0
  225. package/dist/cjs/node_modules/react-markdown/node_modules/property-information/lib/xlink.js +2 -0
  226. package/dist/cjs/node_modules/react-markdown/node_modules/property-information/lib/xlink.js.map +1 -0
  227. package/dist/cjs/node_modules/react-markdown/node_modules/property-information/lib/xml.js +2 -0
  228. package/dist/cjs/node_modules/react-markdown/node_modules/property-information/lib/xml.js.map +1 -0
  229. package/dist/cjs/node_modules/react-markdown/node_modules/property-information/lib/xmlns.js +2 -0
  230. package/dist/cjs/node_modules/react-markdown/node_modules/property-information/lib/xmlns.js.map +1 -0
  231. package/dist/cjs/node_modules/react-markdown/node_modules/remark-parse/lib/index.js +2 -0
  232. package/dist/cjs/node_modules/react-markdown/node_modules/remark-parse/lib/index.js.map +1 -0
  233. package/dist/cjs/node_modules/react-markdown/node_modules/space-separated-tokens/index.js +2 -0
  234. package/dist/cjs/node_modules/react-markdown/node_modules/space-separated-tokens/index.js.map +1 -0
  235. package/dist/cjs/node_modules/react-markdown/node_modules/unified/lib/index.js +2 -0
  236. package/dist/cjs/node_modules/react-markdown/node_modules/unified/lib/index.js.map +1 -0
  237. package/dist/cjs/node_modules/react-markdown/node_modules/unist-util-is/lib/index.js +2 -0
  238. package/dist/cjs/node_modules/react-markdown/node_modules/unist-util-is/lib/index.js.map +1 -0
  239. package/dist/cjs/node_modules/react-markdown/node_modules/unist-util-stringify-position/lib/index.js +2 -0
  240. package/dist/cjs/node_modules/react-markdown/node_modules/unist-util-stringify-position/lib/index.js.map +1 -0
  241. package/dist/cjs/node_modules/react-markdown/node_modules/unist-util-visit/lib/index.js +2 -0
  242. package/dist/cjs/node_modules/react-markdown/node_modules/unist-util-visit/lib/index.js.map +1 -0
  243. package/dist/cjs/node_modules/react-markdown/node_modules/unist-util-visit-parents/lib/color.browser.js +2 -0
  244. package/dist/cjs/node_modules/react-markdown/node_modules/unist-util-visit-parents/lib/color.browser.js.map +1 -0
  245. package/dist/cjs/node_modules/react-markdown/node_modules/unist-util-visit-parents/lib/index.js +2 -0
  246. package/dist/cjs/node_modules/react-markdown/node_modules/unist-util-visit-parents/lib/index.js.map +1 -0
  247. package/dist/cjs/node_modules/react-markdown/node_modules/vfile/lib/index.js +2 -0
  248. package/dist/cjs/node_modules/react-markdown/node_modules/vfile/lib/index.js.map +1 -0
  249. package/dist/cjs/node_modules/react-markdown/node_modules/vfile/lib/minpath.browser.js +2 -0
  250. package/dist/cjs/node_modules/react-markdown/node_modules/vfile/lib/minpath.browser.js.map +1 -0
  251. package/dist/cjs/node_modules/react-markdown/node_modules/vfile/lib/minproc.browser.js +2 -0
  252. package/dist/cjs/node_modules/react-markdown/node_modules/vfile/lib/minproc.browser.js.map +1 -0
  253. package/dist/cjs/node_modules/react-markdown/node_modules/vfile/lib/minurl.browser.js +2 -0
  254. package/dist/cjs/node_modules/react-markdown/node_modules/vfile/lib/minurl.browser.js.map +1 -0
  255. package/dist/cjs/node_modules/react-markdown/node_modules/vfile/lib/minurl.shared.js +2 -0
  256. package/dist/cjs/node_modules/react-markdown/node_modules/vfile/lib/minurl.shared.js.map +1 -0
  257. package/dist/cjs/node_modules/react-markdown/node_modules/vfile-message/lib/index.js +2 -0
  258. package/dist/cjs/node_modules/react-markdown/node_modules/vfile-message/lib/index.js.map +1 -0
  259. package/dist/cjs/node_modules/unist-util-visit-parents/lib/color.js +2 -0
  260. package/dist/cjs/node_modules/unist-util-visit-parents/lib/color.js.map +1 -0
  261. package/dist/node_modules/@e-llm-studio/streaming-response/node_modules/ws/browser.js +2 -0
  262. package/dist/node_modules/@e-llm-studio/streaming-response/node_modules/ws/browser.js.map +1 -0
  263. package/dist/node_modules/@emotion/serialize/node_modules/@emotion/hash/dist/emotion-hash.esm.js +2 -0
  264. package/dist/node_modules/@emotion/serialize/node_modules/@emotion/hash/dist/emotion-hash.esm.js.map +1 -0
  265. package/dist/node_modules/hast-util-from-parse5/node_modules/comma-separated-tokens/index.js +2 -0
  266. package/dist/node_modules/hast-util-from-parse5/node_modules/comma-separated-tokens/index.js.map +1 -0
  267. package/dist/node_modules/hast-util-from-parse5/node_modules/space-separated-tokens/index.js +2 -0
  268. package/dist/node_modules/hast-util-from-parse5/node_modules/space-separated-tokens/index.js.map +1 -0
  269. package/dist/node_modules/hast-util-raw/node_modules/unist-util-position/lib/index.js +2 -0
  270. package/dist/node_modules/hast-util-raw/node_modules/unist-util-position/lib/index.js.map +1 -0
  271. package/dist/node_modules/hast-util-to-parse5/node_modules/comma-separated-tokens/index.js +2 -0
  272. package/dist/node_modules/hast-util-to-parse5/node_modules/comma-separated-tokens/index.js.map +1 -0
  273. package/dist/node_modules/hast-util-to-parse5/node_modules/space-separated-tokens/index.js +2 -0
  274. package/dist/node_modules/hast-util-to-parse5/node_modules/space-separated-tokens/index.js.map +1 -0
  275. package/dist/node_modules/mdast-util-definitions/node_modules/unist-util-is/lib/index.js +2 -0
  276. package/dist/node_modules/mdast-util-definitions/node_modules/unist-util-is/lib/index.js.map +1 -0
  277. package/dist/node_modules/mdast-util-definitions/node_modules/unist-util-visit/lib/index.js +2 -0
  278. package/dist/node_modules/mdast-util-definitions/node_modules/unist-util-visit/lib/index.js.map +1 -0
  279. package/dist/node_modules/mdast-util-definitions/node_modules/unist-util-visit-parents/lib/color.browser.js +2 -0
  280. package/dist/node_modules/mdast-util-definitions/node_modules/unist-util-visit-parents/lib/color.browser.js.map +1 -0
  281. package/dist/node_modules/mdast-util-definitions/node_modules/unist-util-visit-parents/lib/index.js +2 -0
  282. package/dist/node_modules/mdast-util-definitions/node_modules/unist-util-visit-parents/lib/index.js.map +1 -0
  283. package/dist/node_modules/mdast-util-to-hast/lib/footer.js +2 -0
  284. package/dist/node_modules/mdast-util-to-hast/lib/footer.js.map +1 -0
  285. package/dist/node_modules/mdast-util-to-hast/lib/handlers/blockquote.js +2 -0
  286. package/dist/node_modules/mdast-util-to-hast/lib/handlers/blockquote.js.map +1 -0
  287. package/dist/node_modules/mdast-util-to-hast/lib/handlers/break.js +2 -0
  288. package/dist/node_modules/mdast-util-to-hast/lib/handlers/break.js.map +1 -0
  289. package/dist/node_modules/mdast-util-to-hast/lib/handlers/code.js +2 -0
  290. package/dist/node_modules/mdast-util-to-hast/lib/handlers/code.js.map +1 -0
  291. package/dist/node_modules/mdast-util-to-hast/lib/handlers/delete.js +2 -0
  292. package/dist/node_modules/mdast-util-to-hast/lib/handlers/delete.js.map +1 -0
  293. package/dist/node_modules/mdast-util-to-hast/lib/handlers/emphasis.js +2 -0
  294. package/dist/node_modules/mdast-util-to-hast/lib/handlers/emphasis.js.map +1 -0
  295. package/dist/node_modules/mdast-util-to-hast/lib/handlers/footnote-reference.js +2 -0
  296. package/dist/node_modules/mdast-util-to-hast/lib/handlers/footnote-reference.js.map +1 -0
  297. package/dist/node_modules/mdast-util-to-hast/lib/handlers/footnote.js +2 -0
  298. package/dist/node_modules/mdast-util-to-hast/lib/handlers/footnote.js.map +1 -0
  299. package/dist/node_modules/mdast-util-to-hast/lib/handlers/heading.js +2 -0
  300. package/dist/node_modules/mdast-util-to-hast/lib/handlers/heading.js.map +1 -0
  301. package/dist/node_modules/mdast-util-to-hast/lib/handlers/html.js +2 -0
  302. package/dist/node_modules/mdast-util-to-hast/lib/handlers/html.js.map +1 -0
  303. package/dist/node_modules/mdast-util-to-hast/lib/handlers/image-reference.js +2 -0
  304. package/dist/node_modules/mdast-util-to-hast/lib/handlers/image-reference.js.map +1 -0
  305. package/dist/node_modules/mdast-util-to-hast/lib/handlers/image.js +2 -0
  306. package/dist/node_modules/mdast-util-to-hast/lib/handlers/image.js.map +1 -0
  307. package/dist/node_modules/mdast-util-to-hast/lib/handlers/index.js +2 -0
  308. package/dist/node_modules/mdast-util-to-hast/lib/handlers/index.js.map +1 -0
  309. package/dist/node_modules/mdast-util-to-hast/lib/handlers/inline-code.js +2 -0
  310. package/dist/node_modules/mdast-util-to-hast/lib/handlers/inline-code.js.map +1 -0
  311. package/dist/node_modules/mdast-util-to-hast/lib/handlers/link-reference.js +2 -0
  312. package/dist/node_modules/mdast-util-to-hast/lib/handlers/link-reference.js.map +1 -0
  313. package/dist/node_modules/mdast-util-to-hast/lib/handlers/link.js +2 -0
  314. package/dist/node_modules/mdast-util-to-hast/lib/handlers/link.js.map +1 -0
  315. package/dist/node_modules/mdast-util-to-hast/lib/handlers/list-item.js +2 -0
  316. package/dist/node_modules/mdast-util-to-hast/lib/handlers/list-item.js.map +1 -0
  317. package/dist/node_modules/mdast-util-to-hast/lib/handlers/list.js +2 -0
  318. package/dist/node_modules/mdast-util-to-hast/lib/handlers/list.js.map +1 -0
  319. package/dist/node_modules/mdast-util-to-hast/lib/handlers/paragraph.js +2 -0
  320. package/dist/node_modules/mdast-util-to-hast/lib/handlers/paragraph.js.map +1 -0
  321. package/dist/node_modules/mdast-util-to-hast/lib/handlers/root.js +2 -0
  322. package/dist/node_modules/mdast-util-to-hast/lib/handlers/root.js.map +1 -0
  323. package/dist/node_modules/mdast-util-to-hast/lib/handlers/strong.js +2 -0
  324. package/dist/node_modules/mdast-util-to-hast/lib/handlers/strong.js.map +1 -0
  325. package/dist/node_modules/mdast-util-to-hast/lib/handlers/table-cell.js +2 -0
  326. package/dist/node_modules/mdast-util-to-hast/lib/handlers/table-cell.js.map +1 -0
  327. package/dist/node_modules/mdast-util-to-hast/lib/handlers/table-row.js +2 -0
  328. package/dist/node_modules/mdast-util-to-hast/lib/handlers/table-row.js.map +1 -0
  329. package/dist/node_modules/mdast-util-to-hast/lib/handlers/table.js +2 -0
  330. package/dist/node_modules/mdast-util-to-hast/lib/handlers/table.js.map +1 -0
  331. package/dist/node_modules/mdast-util-to-hast/lib/handlers/text.js +2 -0
  332. package/dist/node_modules/mdast-util-to-hast/lib/handlers/text.js.map +1 -0
  333. package/dist/node_modules/mdast-util-to-hast/lib/handlers/thematic-break.js +2 -0
  334. package/dist/node_modules/mdast-util-to-hast/lib/handlers/thematic-break.js.map +1 -0
  335. package/dist/node_modules/mdast-util-to-hast/lib/index.js +2 -0
  336. package/dist/node_modules/mdast-util-to-hast/lib/index.js.map +1 -0
  337. package/dist/node_modules/mdast-util-to-hast/lib/revert.js +2 -0
  338. package/dist/node_modules/mdast-util-to-hast/lib/revert.js.map +1 -0
  339. package/dist/node_modules/mdast-util-to-hast/lib/state.js +2 -0
  340. package/dist/node_modules/mdast-util-to-hast/lib/state.js.map +1 -0
  341. package/dist/node_modules/mdast-util-to-hast/node_modules/micromark-util-character/index.js +2 -0
  342. package/dist/node_modules/mdast-util-to-hast/node_modules/micromark-util-character/index.js.map +1 -0
  343. package/dist/node_modules/mdast-util-to-hast/node_modules/micromark-util-sanitize-uri/index.js +2 -0
  344. package/dist/node_modules/mdast-util-to-hast/node_modules/micromark-util-sanitize-uri/index.js.map +1 -0
  345. package/dist/node_modules/mdast-util-to-hast/node_modules/unist-util-is/lib/index.js +2 -0
  346. package/dist/node_modules/mdast-util-to-hast/node_modules/unist-util-is/lib/index.js.map +1 -0
  347. package/dist/node_modules/mdast-util-to-hast/node_modules/unist-util-visit/lib/index.js +2 -0
  348. package/dist/node_modules/mdast-util-to-hast/node_modules/unist-util-visit/lib/index.js.map +1 -0
  349. package/dist/node_modules/mdast-util-to-hast/node_modules/unist-util-visit-parents/lib/color.browser.js +2 -0
  350. package/dist/node_modules/mdast-util-to-hast/node_modules/unist-util-visit-parents/lib/color.browser.js.map +1 -0
  351. package/dist/node_modules/mdast-util-to-hast/node_modules/unist-util-visit-parents/lib/index.js +2 -0
  352. package/dist/node_modules/mdast-util-to-hast/node_modules/unist-util-visit-parents/lib/index.js.map +1 -0
  353. package/dist/node_modules/react-markdown/node_modules/comma-separated-tokens/index.js +2 -0
  354. package/dist/node_modules/react-markdown/node_modules/comma-separated-tokens/index.js.map +1 -0
  355. package/dist/node_modules/react-markdown/node_modules/is-plain-obj/index.js +2 -0
  356. package/dist/node_modules/react-markdown/node_modules/is-plain-obj/index.js.map +1 -0
  357. package/dist/node_modules/react-markdown/node_modules/mdast-util-from-markdown/lib/index.js +2 -0
  358. package/dist/node_modules/react-markdown/node_modules/mdast-util-from-markdown/lib/index.js.map +1 -0
  359. package/dist/node_modules/react-markdown/node_modules/mdast-util-to-string/lib/index.js +2 -0
  360. package/dist/node_modules/react-markdown/node_modules/mdast-util-to-string/lib/index.js.map +1 -0
  361. package/dist/node_modules/react-markdown/node_modules/micromark/lib/constructs.js +2 -0
  362. package/dist/node_modules/react-markdown/node_modules/micromark/lib/constructs.js.map +1 -0
  363. package/dist/node_modules/react-markdown/node_modules/micromark/lib/create-tokenizer.js +2 -0
  364. package/dist/node_modules/react-markdown/node_modules/micromark/lib/create-tokenizer.js.map +1 -0
  365. package/dist/node_modules/react-markdown/node_modules/micromark/lib/initialize/content.js +2 -0
  366. package/dist/node_modules/react-markdown/node_modules/micromark/lib/initialize/content.js.map +1 -0
  367. package/dist/node_modules/react-markdown/node_modules/micromark/lib/initialize/document.js +2 -0
  368. package/dist/node_modules/react-markdown/node_modules/micromark/lib/initialize/document.js.map +1 -0
  369. package/dist/node_modules/react-markdown/node_modules/micromark/lib/initialize/flow.js +2 -0
  370. package/dist/node_modules/react-markdown/node_modules/micromark/lib/initialize/flow.js.map +1 -0
  371. package/dist/node_modules/react-markdown/node_modules/micromark/lib/initialize/text.js +2 -0
  372. package/dist/node_modules/react-markdown/node_modules/micromark/lib/initialize/text.js.map +1 -0
  373. package/dist/node_modules/react-markdown/node_modules/micromark/lib/parse.js +2 -0
  374. package/dist/node_modules/react-markdown/node_modules/micromark/lib/parse.js.map +1 -0
  375. package/dist/node_modules/react-markdown/node_modules/micromark/lib/postprocess.js +2 -0
  376. package/dist/node_modules/react-markdown/node_modules/micromark/lib/postprocess.js.map +1 -0
  377. package/dist/node_modules/react-markdown/node_modules/micromark/lib/preprocess.js +2 -0
  378. package/dist/node_modules/react-markdown/node_modules/micromark/lib/preprocess.js.map +1 -0
  379. package/dist/node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/attention.js +2 -0
  380. package/dist/node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/attention.js.map +1 -0
  381. package/dist/node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/autolink.js +2 -0
  382. package/dist/node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/autolink.js.map +1 -0
  383. package/dist/node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/blank-line.js +2 -0
  384. package/dist/node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/blank-line.js.map +1 -0
  385. package/dist/node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/block-quote.js +2 -0
  386. package/dist/node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/block-quote.js.map +1 -0
  387. package/dist/node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/character-escape.js +2 -0
  388. package/dist/node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/character-escape.js.map +1 -0
  389. package/dist/node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/character-reference.js +2 -0
  390. package/dist/node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/character-reference.js.map +1 -0
  391. package/dist/node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/code-fenced.js +2 -0
  392. package/dist/node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/code-fenced.js.map +1 -0
  393. package/dist/node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/code-indented.js +2 -0
  394. package/dist/node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/code-indented.js.map +1 -0
  395. package/dist/node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/code-text.js +2 -0
  396. package/dist/node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/code-text.js.map +1 -0
  397. package/dist/node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/content.js +2 -0
  398. package/dist/node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/content.js.map +1 -0
  399. package/dist/node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/definition.js +2 -0
  400. package/dist/node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/definition.js.map +1 -0
  401. package/dist/node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/hard-break-escape.js +2 -0
  402. package/dist/node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/hard-break-escape.js.map +1 -0
  403. package/dist/node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/heading-atx.js +2 -0
  404. package/dist/node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/heading-atx.js.map +1 -0
  405. package/dist/node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/html-flow.js +2 -0
  406. package/dist/node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/html-flow.js.map +1 -0
  407. package/dist/node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/html-text.js +2 -0
  408. package/dist/node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/html-text.js.map +1 -0
  409. package/dist/node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/label-end.js +2 -0
  410. package/dist/node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/label-end.js.map +1 -0
  411. package/dist/node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/label-start-image.js +2 -0
  412. package/dist/node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/label-start-image.js.map +1 -0
  413. package/dist/node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/label-start-link.js +2 -0
  414. package/dist/node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/label-start-link.js.map +1 -0
  415. package/dist/node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/line-ending.js +2 -0
  416. package/dist/node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/line-ending.js.map +1 -0
  417. package/dist/node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/list.js +2 -0
  418. package/dist/node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/list.js.map +1 -0
  419. package/dist/node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/setext-underline.js +2 -0
  420. package/dist/node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/setext-underline.js.map +1 -0
  421. package/dist/node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/thematic-break.js +2 -0
  422. package/dist/node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/thematic-break.js.map +1 -0
  423. package/dist/node_modules/react-markdown/node_modules/micromark-factory-destination/index.js +2 -0
  424. package/dist/node_modules/react-markdown/node_modules/micromark-factory-destination/index.js.map +1 -0
  425. package/dist/node_modules/react-markdown/node_modules/micromark-factory-label/index.js +2 -0
  426. package/dist/node_modules/react-markdown/node_modules/micromark-factory-label/index.js.map +1 -0
  427. package/dist/node_modules/react-markdown/node_modules/micromark-factory-space/index.js +2 -0
  428. package/dist/node_modules/react-markdown/node_modules/micromark-factory-space/index.js.map +1 -0
  429. package/dist/node_modules/react-markdown/node_modules/micromark-factory-title/index.js +2 -0
  430. package/dist/node_modules/react-markdown/node_modules/micromark-factory-title/index.js.map +1 -0
  431. package/dist/node_modules/react-markdown/node_modules/micromark-factory-whitespace/index.js +2 -0
  432. package/dist/node_modules/react-markdown/node_modules/micromark-factory-whitespace/index.js.map +1 -0
  433. package/dist/node_modules/react-markdown/node_modules/micromark-util-character/index.js +2 -0
  434. package/dist/node_modules/react-markdown/node_modules/micromark-util-character/index.js.map +1 -0
  435. package/dist/node_modules/react-markdown/node_modules/micromark-util-character/lib/unicode-punctuation-regex.js +2 -0
  436. package/dist/node_modules/react-markdown/node_modules/micromark-util-character/lib/unicode-punctuation-regex.js.map +1 -0
  437. package/dist/node_modules/react-markdown/node_modules/micromark-util-chunked/index.js +2 -0
  438. package/dist/node_modules/react-markdown/node_modules/micromark-util-chunked/index.js.map +1 -0
  439. package/dist/node_modules/react-markdown/node_modules/micromark-util-classify-character/index.js +2 -0
  440. package/dist/node_modules/react-markdown/node_modules/micromark-util-classify-character/index.js.map +1 -0
  441. package/dist/node_modules/react-markdown/node_modules/micromark-util-combine-extensions/index.js +2 -0
  442. package/dist/node_modules/react-markdown/node_modules/micromark-util-combine-extensions/index.js.map +1 -0
  443. package/dist/node_modules/react-markdown/node_modules/micromark-util-decode-numeric-character-reference/index.js +2 -0
  444. package/dist/node_modules/react-markdown/node_modules/micromark-util-decode-numeric-character-reference/index.js.map +1 -0
  445. package/dist/node_modules/react-markdown/node_modules/micromark-util-decode-string/index.js +2 -0
  446. package/dist/node_modules/react-markdown/node_modules/micromark-util-decode-string/index.js.map +1 -0
  447. package/dist/node_modules/react-markdown/node_modules/micromark-util-html-tag-name/index.js +2 -0
  448. package/dist/node_modules/react-markdown/node_modules/micromark-util-html-tag-name/index.js.map +1 -0
  449. package/dist/node_modules/react-markdown/node_modules/micromark-util-normalize-identifier/index.js +2 -0
  450. package/dist/node_modules/react-markdown/node_modules/micromark-util-normalize-identifier/index.js.map +1 -0
  451. package/dist/node_modules/react-markdown/node_modules/micromark-util-resolve-all/index.js +2 -0
  452. package/dist/node_modules/react-markdown/node_modules/micromark-util-resolve-all/index.js.map +1 -0
  453. package/dist/node_modules/react-markdown/node_modules/micromark-util-subtokenize/index.js +2 -0
  454. package/dist/node_modules/react-markdown/node_modules/micromark-util-subtokenize/index.js.map +1 -0
  455. package/dist/node_modules/react-markdown/node_modules/property-information/index.js +2 -0
  456. package/dist/node_modules/react-markdown/node_modules/property-information/index.js.map +1 -0
  457. package/dist/node_modules/react-markdown/node_modules/property-information/lib/aria.js +2 -0
  458. package/dist/node_modules/react-markdown/node_modules/property-information/lib/aria.js.map +1 -0
  459. package/dist/node_modules/react-markdown/node_modules/property-information/lib/find.js +2 -0
  460. package/dist/node_modules/react-markdown/node_modules/property-information/lib/find.js.map +1 -0
  461. package/dist/node_modules/react-markdown/node_modules/property-information/lib/hast-to-react.js +2 -0
  462. package/dist/node_modules/react-markdown/node_modules/property-information/lib/hast-to-react.js.map +1 -0
  463. package/dist/node_modules/react-markdown/node_modules/property-information/lib/html.js +2 -0
  464. package/dist/node_modules/react-markdown/node_modules/property-information/lib/html.js.map +1 -0
  465. package/dist/node_modules/react-markdown/node_modules/property-information/lib/normalize.js +2 -0
  466. package/dist/node_modules/react-markdown/node_modules/property-information/lib/normalize.js.map +1 -0
  467. package/dist/node_modules/react-markdown/node_modules/property-information/lib/svg.js +2 -0
  468. package/dist/node_modules/react-markdown/node_modules/property-information/lib/svg.js.map +1 -0
  469. package/dist/node_modules/react-markdown/node_modules/property-information/lib/util/case-insensitive-transform.js +2 -0
  470. package/dist/node_modules/react-markdown/node_modules/property-information/lib/util/case-insensitive-transform.js.map +1 -0
  471. package/dist/node_modules/react-markdown/node_modules/property-information/lib/util/case-sensitive-transform.js +2 -0
  472. package/dist/node_modules/react-markdown/node_modules/property-information/lib/util/case-sensitive-transform.js.map +1 -0
  473. package/dist/node_modules/react-markdown/node_modules/property-information/lib/util/create.js +2 -0
  474. package/dist/node_modules/react-markdown/node_modules/property-information/lib/util/create.js.map +1 -0
  475. package/dist/node_modules/react-markdown/node_modules/property-information/lib/util/defined-info.js +2 -0
  476. package/dist/node_modules/react-markdown/node_modules/property-information/lib/util/defined-info.js.map +1 -0
  477. package/dist/node_modules/react-markdown/node_modules/property-information/lib/util/info.js +2 -0
  478. package/dist/node_modules/react-markdown/node_modules/property-information/lib/util/info.js.map +1 -0
  479. package/dist/node_modules/react-markdown/node_modules/property-information/lib/util/merge.js +2 -0
  480. package/dist/node_modules/react-markdown/node_modules/property-information/lib/util/merge.js.map +1 -0
  481. package/dist/node_modules/react-markdown/node_modules/property-information/lib/util/schema.js +2 -0
  482. package/dist/node_modules/react-markdown/node_modules/property-information/lib/util/schema.js.map +1 -0
  483. package/dist/node_modules/react-markdown/node_modules/property-information/lib/util/types.js +2 -0
  484. package/dist/node_modules/react-markdown/node_modules/property-information/lib/util/types.js.map +1 -0
  485. package/dist/node_modules/react-markdown/node_modules/property-information/lib/xlink.js +2 -0
  486. package/dist/node_modules/react-markdown/node_modules/property-information/lib/xlink.js.map +1 -0
  487. package/dist/node_modules/react-markdown/node_modules/property-information/lib/xml.js +2 -0
  488. package/dist/node_modules/react-markdown/node_modules/property-information/lib/xml.js.map +1 -0
  489. package/dist/node_modules/react-markdown/node_modules/property-information/lib/xmlns.js +2 -0
  490. package/dist/node_modules/react-markdown/node_modules/property-information/lib/xmlns.js.map +1 -0
  491. package/dist/node_modules/react-markdown/node_modules/remark-parse/lib/index.js +2 -0
  492. package/dist/node_modules/react-markdown/node_modules/remark-parse/lib/index.js.map +1 -0
  493. package/dist/node_modules/react-markdown/node_modules/space-separated-tokens/index.js +2 -0
  494. package/dist/node_modules/react-markdown/node_modules/space-separated-tokens/index.js.map +1 -0
  495. package/dist/node_modules/react-markdown/node_modules/unified/lib/index.js +2 -0
  496. package/dist/node_modules/react-markdown/node_modules/unified/lib/index.js.map +1 -0
  497. package/dist/node_modules/react-markdown/node_modules/unist-util-is/lib/index.js +2 -0
  498. package/dist/node_modules/react-markdown/node_modules/unist-util-is/lib/index.js.map +1 -0
  499. package/dist/node_modules/react-markdown/node_modules/unist-util-stringify-position/lib/index.js +2 -0
  500. package/dist/node_modules/react-markdown/node_modules/unist-util-stringify-position/lib/index.js.map +1 -0
  501. package/dist/node_modules/react-markdown/node_modules/unist-util-visit/lib/index.js +2 -0
  502. package/dist/node_modules/react-markdown/node_modules/unist-util-visit/lib/index.js.map +1 -0
  503. package/dist/node_modules/react-markdown/node_modules/unist-util-visit-parents/lib/color.browser.js +2 -0
  504. package/dist/node_modules/react-markdown/node_modules/unist-util-visit-parents/lib/color.browser.js.map +1 -0
  505. package/dist/node_modules/react-markdown/node_modules/unist-util-visit-parents/lib/index.js +2 -0
  506. package/dist/node_modules/react-markdown/node_modules/unist-util-visit-parents/lib/index.js.map +1 -0
  507. package/dist/node_modules/react-markdown/node_modules/vfile/lib/index.js +2 -0
  508. package/dist/node_modules/react-markdown/node_modules/vfile/lib/index.js.map +1 -0
  509. package/dist/node_modules/react-markdown/node_modules/vfile/lib/minpath.browser.js +2 -0
  510. package/dist/node_modules/react-markdown/node_modules/vfile/lib/minpath.browser.js.map +1 -0
  511. package/dist/node_modules/react-markdown/node_modules/vfile/lib/minproc.browser.js +2 -0
  512. package/dist/node_modules/react-markdown/node_modules/vfile/lib/minproc.browser.js.map +1 -0
  513. package/dist/node_modules/react-markdown/node_modules/vfile/lib/minurl.browser.js +2 -0
  514. package/dist/node_modules/react-markdown/node_modules/vfile/lib/minurl.browser.js.map +1 -0
  515. package/dist/node_modules/react-markdown/node_modules/vfile/lib/minurl.shared.js +2 -0
  516. package/dist/node_modules/react-markdown/node_modules/vfile/lib/minurl.shared.js.map +1 -0
  517. package/dist/node_modules/react-markdown/node_modules/vfile-message/lib/index.js +2 -0
  518. package/dist/node_modules/react-markdown/node_modules/vfile-message/lib/index.js.map +1 -0
  519. package/dist/node_modules/unist-util-visit-parents/lib/color.js +2 -0
  520. package/dist/node_modules/unist-util-visit-parents/lib/color.js.map +1 -0
  521. package/package.json +1 -1
  522. package/dist/cjs/features/DocumentLearning/_components/MinimizedThinkingSteps.js +0 -2
  523. package/dist/cjs/features/DocumentLearning/_components/MinimizedThinkingSteps.js.map +0 -1
  524. package/dist/cjs/features/DocumentLearning/_components/_svg/ArrowDown.js +0 -2
  525. package/dist/cjs/features/DocumentLearning/_components/_svg/ArrowDown.js.map +0 -1
  526. package/dist/cjs/features/DocumentLearning/_components/_svg/ArrowIconBlue.js +0 -2
  527. package/dist/cjs/features/DocumentLearning/_components/_svg/ArrowIconBlue.js.map +0 -1
  528. package/dist/cjs/features/DocumentLearning/_components/_svg/CompleteIconGreen.js +0 -2
  529. package/dist/cjs/features/DocumentLearning/_components/_svg/CompleteIconGreen.js.map +0 -1
  530. package/dist/cjs/features/IL-OTJ/_components/CognitiveDecisioningCard.js +0 -2
  531. package/dist/cjs/features/IL-OTJ/_components/CognitiveDecisioningCard.js.map +0 -1
  532. package/dist/cjs/features/IL-OTJ/_components/MinimizedThinkingSteps.js +0 -2
  533. package/dist/cjs/features/IL-OTJ/_components/MinimizedThinkingSteps.js.map +0 -1
  534. package/dist/cjs/features/IL-OTJ/_components/ViewInterpretedFieldsCard.js +0 -2
  535. package/dist/cjs/features/IL-OTJ/_components/ViewInterpretedFieldsCard.js.map +0 -1
  536. package/dist/cjs/features/IL-OTJ/_components/_svg/ArrowDown.js +0 -2
  537. package/dist/cjs/features/IL-OTJ/_components/_svg/ArrowDown.js.map +0 -1
  538. package/dist/cjs/features/IL-OTJ/_components/_svg/ArrowIconBlue.js +0 -2
  539. package/dist/cjs/features/IL-OTJ/_components/_svg/ArrowIconBlue.js.map +0 -1
  540. package/dist/cjs/features/IL-OTJ/_components/_svg/ChevronIcon.js +0 -2
  541. package/dist/cjs/features/IL-OTJ/_components/_svg/ChevronIcon.js.map +0 -1
  542. package/dist/cjs/features/IL-OTJ/_components/_svg/CompleteIconGreen.js +0 -2
  543. package/dist/cjs/features/IL-OTJ/_components/_svg/CompleteIconGreen.js.map +0 -1
  544. package/dist/cjs/features/IL-OTJ/_components/_svg/HumanWithBrainIcon.js +0 -2
  545. package/dist/cjs/features/IL-OTJ/_components/_svg/HumanWithBrainIcon.js.map +0 -1
  546. package/dist/cjs/features/IL-OTJ/_components/_svg/LeftAndRightArrow.js +0 -2
  547. package/dist/cjs/features/IL-OTJ/_components/_svg/LeftAndRightArrow.js.map +0 -1
  548. package/dist/cjs/features/IL-OTJ/_components/_svg/WatchMeWorkIcon.js +0 -2
  549. package/dist/cjs/features/IL-OTJ/_components/_svg/WatchMeWorkIcon.js.map +0 -1
  550. package/dist/features/DocumentLearning/_components/MinimizedThinkingSteps.js +0 -2
  551. package/dist/features/DocumentLearning/_components/MinimizedThinkingSteps.js.map +0 -1
  552. package/dist/features/DocumentLearning/_components/_svg/ArrowDown.js +0 -2
  553. package/dist/features/DocumentLearning/_components/_svg/ArrowDown.js.map +0 -1
  554. package/dist/features/DocumentLearning/_components/_svg/ArrowIconBlue.js +0 -2
  555. package/dist/features/DocumentLearning/_components/_svg/ArrowIconBlue.js.map +0 -1
  556. package/dist/features/DocumentLearning/_components/_svg/CompleteIconGreen.js +0 -2
  557. package/dist/features/DocumentLearning/_components/_svg/CompleteIconGreen.js.map +0 -1
  558. package/dist/features/IL-OTJ/_components/CognitiveDecisioningCard.js +0 -2
  559. package/dist/features/IL-OTJ/_components/CognitiveDecisioningCard.js.map +0 -1
  560. package/dist/features/IL-OTJ/_components/MinimizedThinkingSteps.js +0 -2
  561. package/dist/features/IL-OTJ/_components/MinimizedThinkingSteps.js.map +0 -1
  562. package/dist/features/IL-OTJ/_components/ViewInterpretedFieldsCard.js +0 -2
  563. package/dist/features/IL-OTJ/_components/ViewInterpretedFieldsCard.js.map +0 -1
  564. package/dist/features/IL-OTJ/_components/_svg/ArrowDown.js +0 -2
  565. package/dist/features/IL-OTJ/_components/_svg/ArrowDown.js.map +0 -1
  566. package/dist/features/IL-OTJ/_components/_svg/ArrowIconBlue.js +0 -2
  567. package/dist/features/IL-OTJ/_components/_svg/ArrowIconBlue.js.map +0 -1
  568. package/dist/features/IL-OTJ/_components/_svg/ChevronIcon.js +0 -2
  569. package/dist/features/IL-OTJ/_components/_svg/ChevronIcon.js.map +0 -1
  570. package/dist/features/IL-OTJ/_components/_svg/CompleteIconGreen.js +0 -2
  571. package/dist/features/IL-OTJ/_components/_svg/CompleteIconGreen.js.map +0 -1
  572. package/dist/features/IL-OTJ/_components/_svg/HumanWithBrainIcon.js +0 -2
  573. package/dist/features/IL-OTJ/_components/_svg/HumanWithBrainIcon.js.map +0 -1
  574. package/dist/features/IL-OTJ/_components/_svg/LeftAndRightArrow.js +0 -2
  575. package/dist/features/IL-OTJ/_components/_svg/LeftAndRightArrow.js.map +0 -1
  576. package/dist/features/IL-OTJ/_components/_svg/WatchMeWorkIcon.js +0 -2
  577. package/dist/features/IL-OTJ/_components/_svg/WatchMeWorkIcon.js.map +0 -1
  578. package/dist/types/src/features/IL-OTJ/_components/ViewInterpretedFieldsCard.d.ts +0 -2
  579. package/dist/types/src/features/IL-OTJ/_components/ViewInterpretedFieldsCard.d.ts.map +0 -1
@@ -0,0 +1 @@
1
+ {"version":3,"file":"document.js","sources":["../../../../../../../../node_modules/react-markdown/node_modules/micromark/lib/initialize/document.js"],"sourcesContent":["/**\n * @typedef {import('micromark-util-types').Construct} Construct\n * @typedef {import('micromark-util-types').ContainerState} ContainerState\n * @typedef {import('micromark-util-types').InitialConstruct} InitialConstruct\n * @typedef {import('micromark-util-types').Initializer} Initializer\n * @typedef {import('micromark-util-types').Point} Point\n * @typedef {import('micromark-util-types').State} State\n * @typedef {import('micromark-util-types').Token} Token\n * @typedef {import('micromark-util-types').TokenizeContext} TokenizeContext\n * @typedef {import('micromark-util-types').Tokenizer} Tokenizer\n */\n\n/**\n * @typedef {[Construct, ContainerState]} StackItem\n */\n\nimport {factorySpace} from 'micromark-factory-space'\nimport {markdownLineEnding} from 'micromark-util-character'\nimport {splice} from 'micromark-util-chunked'\n/** @type {InitialConstruct} */\nexport const document = {\n tokenize: initializeDocument\n}\n\n/** @type {Construct} */\nconst containerConstruct = {\n tokenize: tokenizeContainer\n}\n\n/**\n * @this {TokenizeContext}\n * @type {Initializer}\n */\nfunction initializeDocument(effects) {\n const self = this\n /** @type {Array<StackItem>} */\n const stack = []\n let continued = 0\n /** @type {TokenizeContext | undefined} */\n let childFlow\n /** @type {Token | undefined} */\n let childToken\n /** @type {number} */\n let lineStartOffset\n return start\n\n /** @type {State} */\n function start(code) {\n // First we iterate through the open blocks, starting with the root\n // document, and descending through last children down to the last open\n // block.\n // Each block imposes a condition that the line must satisfy if the block is\n // to remain open.\n // For example, a block quote requires a `>` character.\n // A paragraph requires a non-blank line.\n // In this 
phase we may match all or just some of the open blocks.\n // But we cannot close unmatched blocks yet, because we may have a lazy\n // continuation line.\n if (continued < stack.length) {\n const item = stack[continued]\n self.containerState = item[1]\n return effects.attempt(\n item[0].continuation,\n documentContinue,\n checkNewContainers\n )(code)\n }\n\n // Done.\n return checkNewContainers(code)\n }\n\n /** @type {State} */\n function documentContinue(code) {\n continued++\n\n // Note: this field is called `_closeFlow` but it also closes containers.\n // Perhaps a good idea to rename it but it’s already used in the wild by\n // extensions.\n if (self.containerState._closeFlow) {\n self.containerState._closeFlow = undefined\n if (childFlow) {\n closeFlow()\n }\n\n // Note: this algorithm for moving events around is similar to the\n // algorithm when dealing with lazy lines in `writeToChild`.\n const indexBeforeExits = self.events.length\n let indexBeforeFlow = indexBeforeExits\n /** @type {Point | undefined} */\n let point\n\n // Find the flow chunk.\n while (indexBeforeFlow--) {\n if (\n self.events[indexBeforeFlow][0] === 'exit' &&\n self.events[indexBeforeFlow][1].type === 'chunkFlow'\n ) {\n point = self.events[indexBeforeFlow][1].end\n break\n }\n }\n exitContainers(continued)\n\n // Fix positions.\n let index = indexBeforeExits\n while (index < self.events.length) {\n self.events[index][1].end = Object.assign({}, point)\n index++\n }\n\n // Inject the exits earlier (they’re still also at the end).\n splice(\n self.events,\n indexBeforeFlow + 1,\n 0,\n self.events.slice(indexBeforeExits)\n )\n\n // Discard the duplicate exits.\n self.events.length = index\n return checkNewContainers(code)\n }\n return start(code)\n }\n\n /** @type {State} */\n function checkNewContainers(code) {\n // Next, after consuming the continuation markers for existing blocks, we\n // look for new block starts (e.g. 
`>` for a block quote).\n // If we encounter a new block start, we close any blocks unmatched in\n // step 1 before creating the new block as a child of the last matched\n // block.\n if (continued === stack.length) {\n // No need to `check` whether there’s a container, of `exitContainers`\n // would be moot.\n // We can instead immediately `attempt` to parse one.\n if (!childFlow) {\n return documentContinued(code)\n }\n\n // If we have concrete content, such as block HTML or fenced code,\n // we can’t have containers “pierce” into them, so we can immediately\n // start.\n if (childFlow.currentConstruct && childFlow.currentConstruct.concrete) {\n return flowStart(code)\n }\n\n // If we do have flow, it could still be a blank line,\n // but we’d be interrupting it w/ a new container if there’s a current\n // construct.\n // To do: next major: remove `_gfmTableDynamicInterruptHack` (no longer\n // needed in micromark-extension-gfm-table@1.0.6).\n self.interrupt = Boolean(\n childFlow.currentConstruct && !childFlow._gfmTableDynamicInterruptHack\n )\n }\n\n // Check if there is a new container.\n self.containerState = {}\n return effects.check(\n containerConstruct,\n thereIsANewContainer,\n thereIsNoNewContainer\n )(code)\n }\n\n /** @type {State} */\n function thereIsANewContainer(code) {\n if (childFlow) closeFlow()\n exitContainers(continued)\n return documentContinued(code)\n }\n\n /** @type {State} */\n function thereIsNoNewContainer(code) {\n self.parser.lazy[self.now().line] = continued !== stack.length\n lineStartOffset = self.now().offset\n return flowStart(code)\n }\n\n /** @type {State} */\n function documentContinued(code) {\n // Try new containers.\n self.containerState = {}\n return effects.attempt(\n containerConstruct,\n containerContinue,\n flowStart\n )(code)\n }\n\n /** @type {State} */\n function containerContinue(code) {\n continued++\n stack.push([self.currentConstruct, self.containerState])\n // Try another.\n return documentContinued(code)\n 
}\n\n /** @type {State} */\n function flowStart(code) {\n if (code === null) {\n if (childFlow) closeFlow()\n exitContainers(0)\n effects.consume(code)\n return\n }\n childFlow = childFlow || self.parser.flow(self.now())\n effects.enter('chunkFlow', {\n contentType: 'flow',\n previous: childToken,\n _tokenizer: childFlow\n })\n return flowContinue(code)\n }\n\n /** @type {State} */\n function flowContinue(code) {\n if (code === null) {\n writeToChild(effects.exit('chunkFlow'), true)\n exitContainers(0)\n effects.consume(code)\n return\n }\n if (markdownLineEnding(code)) {\n effects.consume(code)\n writeToChild(effects.exit('chunkFlow'))\n // Get ready for the next line.\n continued = 0\n self.interrupt = undefined\n return start\n }\n effects.consume(code)\n return flowContinue\n }\n\n /**\n * @param {Token} token\n * @param {boolean | undefined} [eof]\n * @returns {void}\n */\n function writeToChild(token, eof) {\n const stream = self.sliceStream(token)\n if (eof) stream.push(null)\n token.previous = childToken\n if (childToken) childToken.next = token\n childToken = token\n childFlow.defineSkip(token.start)\n childFlow.write(stream)\n\n // Alright, so we just added a lazy line:\n //\n // ```markdown\n // > a\n // b.\n //\n // Or:\n //\n // > ~~~c\n // d\n //\n // Or:\n //\n // > | e |\n // f\n // ```\n //\n // The construct in the second example (fenced code) does not accept lazy\n // lines, so it marked itself as done at the end of its first line, and\n // then the content construct parses `d`.\n // Most constructs in markdown match on the first line: if the first line\n // forms a construct, a non-lazy line can’t “unmake” it.\n //\n // The construct in the third example is potentially a GFM table, and\n // those are *weird*.\n // It *could* be a table, from the first line, if the following line\n // matches a condition.\n // In this case, that second line is lazy, which “unmakes” the first line\n // and turns the whole into one content block.\n //\n // We’ve 
now parsed the non-lazy and the lazy line, and can figure out\n // whether the lazy line started a new flow block.\n // If it did, we exit the current containers between the two flow blocks.\n if (self.parser.lazy[token.start.line]) {\n let index = childFlow.events.length\n while (index--) {\n if (\n // The token starts before the line ending…\n childFlow.events[index][1].start.offset < lineStartOffset &&\n // …and either is not ended yet…\n (!childFlow.events[index][1].end ||\n // …or ends after it.\n childFlow.events[index][1].end.offset > lineStartOffset)\n ) {\n // Exit: there’s still something open, which means it’s a lazy line\n // part of something.\n return\n }\n }\n\n // Note: this algorithm for moving events around is similar to the\n // algorithm when closing flow in `documentContinue`.\n const indexBeforeExits = self.events.length\n let indexBeforeFlow = indexBeforeExits\n /** @type {boolean | undefined} */\n let seen\n /** @type {Point | undefined} */\n let point\n\n // Find the previous chunk (the one before the lazy line).\n while (indexBeforeFlow--) {\n if (\n self.events[indexBeforeFlow][0] === 'exit' &&\n self.events[indexBeforeFlow][1].type === 'chunkFlow'\n ) {\n if (seen) {\n point = self.events[indexBeforeFlow][1].end\n break\n }\n seen = true\n }\n }\n exitContainers(continued)\n\n // Fix positions.\n index = indexBeforeExits\n while (index < self.events.length) {\n self.events[index][1].end = Object.assign({}, point)\n index++\n }\n\n // Inject the exits earlier (they’re still also at the end).\n splice(\n self.events,\n indexBeforeFlow + 1,\n 0,\n self.events.slice(indexBeforeExits)\n )\n\n // Discard the duplicate exits.\n self.events.length = index\n }\n }\n\n /**\n * @param {number} size\n * @returns {void}\n */\n function exitContainers(size) {\n let index = stack.length\n\n // Exit open containers.\n while (index-- > size) {\n const entry = stack[index]\n self.containerState = entry[1]\n entry[0].exit.call(self, effects)\n }\n 
stack.length = size\n }\n function closeFlow() {\n childFlow.write([null])\n childToken = undefined\n childFlow = undefined\n self.containerState._closeFlow = undefined\n }\n}\n\n/**\n * @this {TokenizeContext}\n * @type {Tokenizer}\n */\nfunction tokenizeContainer(effects, ok, nok) {\n // Always populated by defaults.\n\n return factorySpace(\n effects,\n effects.attempt(this.parser.constructs.document, ok, nok),\n 'linePrefix',\n this.parser.constructs.disable.null.includes('codeIndented') ? undefined : 4\n )\n}\n"],"names":["document","tokenize","effects","self","this","stack","childFlow","childToken","lineStartOffset","continued","start","code","length","item","containerState","attempt","continuation","documentContinue","checkNewContainers","_closeFlow","undefined","closeFlow","indexBeforeExits","events","point","indexBeforeFlow","type","end","exitContainers","index","Object","assign","splice","slice","documentContinued","currentConstruct","concrete","flowStart","interrupt","Boolean","_gfmTableDynamicInterruptHack","check","containerConstruct","thereIsANewContainer","thereIsNoNewContainer","parser","lazy","now","line","offset","containerContinue","push","consume","flow","enter","contentType","previous","_tokenizer","flowContinue","writeToChild","exit","markdownLineEnding","token","eof","stream","sliceStream","next","defineSkip","write","seen","size","entry","call","ok","nok","factorySpace","constructs","disable","null","includes"],"mappings":"6OAoBY,MAACA,EAAW,CACtBC,SAYF,SAA4BC,GAC1B,MAAMC,EAAOC,KAEPC,EAAQ,GACd,IAEIC,EAEAC,EAEAC,EANAC,EAAY,EAOhB,OAAOC,EAGP,SAASA,EAAMC,GAWb,GAAIF,EAAYJ,EAAMO,OAAQ,CAC5B,MAAMC,EAAOR,EAAMI,GAEnB,OADAN,EAAKW,eAAiBD,EAAK,GACpBX,EAAQa,QACbF,EAAK,GAAGG,aACRC,EACAC,EAHKhB,CAILS,EACH,CAGD,OAAOO,EAAmBP,EAC3B,CAGD,SAASM,EAAiBN,GAMxB,GALAF,IAKIN,EAAKW,eAAeK,WAAY,CAClChB,EAAKW,eAAeK,gBAAaC,EAC7Bd,GACFe,IAKF,MAAMC,EAAmBnB,EAAKoB,OAAOX,OACrC,IAEIY,EAFAC,EAAkBH,EAKtB,KAAOG,KACL,GACsC,SAApCtB,EAAKoB,OAAOE,GAAiB,IACY,cAAzCtB,EAAKoB,OAAOE,GAAiB,GA
AGC,KAChC,CACAF,EAAQrB,EAAKoB,OAAOE,GAAiB,GAAGE,IACxC,KACD,CAEHC,EAAenB,GAGf,IAAIoB,EAAQP,EACZ,KAAOO,EAAQ1B,EAAKoB,OAAOX,QACzBT,EAAKoB,OAAOM,GAAO,GAAGF,IAAMG,OAAOC,OAAO,CAAE,EAAEP,GAC9CK,IAaF,OATAG,EAAMA,OACJ7B,EAAKoB,OACLE,EAAkB,EAClB,EACAtB,EAAKoB,OAAOU,MAAMX,IAIpBnB,EAAKoB,OAAOX,OAASiB,EACdX,EAAmBP,EAC3B,CACD,OAAOD,EAAMC,EACd,CAGD,SAASO,EAAmBP,GAM1B,GAAIF,IAAcJ,EAAMO,OAAQ,CAI9B,IAAKN,EACH,OAAO4B,EAAkBvB,GAM3B,GAAIL,EAAU6B,kBAAoB7B,EAAU6B,iBAAiBC,SAC3D,OAAOC,EAAU1B,GAQnBR,EAAKmC,UAAYC,QACfjC,EAAU6B,mBAAqB7B,EAAUkC,8BAE5C,CAID,OADArC,EAAKW,eAAiB,CAAE,EACjBZ,EAAQuC,MACbC,EACAC,EACAC,EAHK1C,CAILS,EACH,CAGD,SAASgC,EAAqBhC,GAG5B,OAFIL,GAAWe,IACfO,EAAenB,GACRyB,EAAkBvB,EAC1B,CAGD,SAASiC,EAAsBjC,GAG7B,OAFAR,EAAK0C,OAAOC,KAAK3C,EAAK4C,MAAMC,MAAQvC,IAAcJ,EAAMO,OACxDJ,EAAkBL,EAAK4C,MAAME,OACtBZ,EAAU1B,EAClB,CAGD,SAASuB,EAAkBvB,GAGzB,OADAR,EAAKW,eAAiB,CAAE,EACjBZ,EAAQa,QACb2B,EACAQ,EACAb,EAHKnC,CAILS,EACH,CAGD,SAASuC,EAAkBvC,GAIzB,OAHAF,IACAJ,EAAM8C,KAAK,CAAChD,EAAKgC,iBAAkBhC,EAAKW,iBAEjCoB,EAAkBvB,EAC1B,CAGD,SAAS0B,EAAU1B,GACjB,OAAa,OAATA,GACEL,GAAWe,IACfO,EAAe,QACf1B,EAAQkD,QAAQzC,KAGlBL,EAAYA,GAAaH,EAAK0C,OAAOQ,KAAKlD,EAAK4C,OAC/C7C,EAAQoD,MAAM,YAAa,CACzBC,YAAa,OACbC,SAAUjD,EACVkD,WAAYnD,IAEPoD,EAAa/C,GACrB,CAGD,SAAS+C,EAAa/C,GACpB,OAAa,OAATA,GACFgD,EAAazD,EAAQ0D,KAAK,cAAc,GACxChC,EAAe,QACf1B,EAAQkD,QAAQzC,IAGdkD,EAAAA,mBAAmBlD,IACrBT,EAAQkD,QAAQzC,GAChBgD,EAAazD,EAAQ0D,KAAK,cAE1BnD,EAAY,EACZN,EAAKmC,eAAYlB,EACVV,IAETR,EAAQkD,QAAQzC,GACT+C,EACR,CAOD,SAASC,EAAaG,EAAOC,GAC3B,MAAMC,EAAS7D,EAAK8D,YAAYH,GAyChC,GAxCIC,GAAKC,EAAOb,KAAK,MACrBW,EAAMN,SAAWjD,EACbA,IAAYA,EAAW2D,KAAOJ,GAClCvD,EAAauD,EACbxD,EAAU6D,WAAWL,EAAMpD,OAC3BJ,EAAU8D,MAAMJ,GAmCZ7D,EAAK0C,OAAOC,KAAKgB,EAAMpD,MAAMsC,MAAO,CACtC,IAAInB,EAAQvB,EAAUiB,OAAOX,OAC7B,KAAOiB,KACL,GAEEvB,EAAUiB,OAAOM,GAAO,GAAGnB,MAAMuC,OAASzC,KAExCF,EAAUiB,OAAOM,GAAO,GAAGF,KAE3BrB,EAAUiB,OAAOM,GAAO,GAAGF,IAAIsB,OAASzC,GAI1C,OAMJ,MAAMc,EAAmBnB,EAAKoB,OAAOX,OACrC,IAEIyD,EAEA7C,EAJAC,EAAkBH,EAOtB,KAAOG,KACL,GACsC,SAApCtB,EAAKoB,OAAOE,GAAiB,I
ACY,cAAzCtB,EAAKoB,OAAOE,GAAiB,GAAGC,KAChC,CACA,GAAI2C,EAAM,CACR7C,EAAQrB,EAAKoB,OAAOE,GAAiB,GAAGE,IACxC,KACD,CACD0C,GAAO,CACR,CAMH,IAJAzC,EAAenB,GAGfoB,EAAQP,EACDO,EAAQ1B,EAAKoB,OAAOX,QACzBT,EAAKoB,OAAOM,GAAO,GAAGF,IAAMG,OAAOC,OAAO,CAAE,EAAEP,GAC9CK,IAIFG,EAAMA,OACJ7B,EAAKoB,OACLE,EAAkB,EAClB,EACAtB,EAAKoB,OAAOU,MAAMX,IAIpBnB,EAAKoB,OAAOX,OAASiB,CACtB,CACF,CAMD,SAASD,EAAe0C,GACtB,IAAIzC,EAAQxB,EAAMO,OAGlB,KAAOiB,KAAUyC,GAAM,CACrB,MAAMC,EAAQlE,EAAMwB,GACpB1B,EAAKW,eAAiByD,EAAM,GAC5BA,EAAM,GAAGX,KAAKY,KAAKrE,EAAMD,EAC1B,CACDG,EAAMO,OAAS0D,CAChB,CACD,SAASjD,IACPf,EAAU8D,MAAM,CAAC,OACjB7D,OAAaa,EACbd,OAAYc,EACZjB,EAAKW,eAAeK,gBAAaC,CAClC,CACH,GArVMsB,EAAqB,CACzBzC,SA0VF,SAA2BC,EAASuE,EAAIC,GAGtC,OAAOC,EAAYA,aACjBzE,EACAA,EAAQa,QAAQX,KAAKyC,OAAO+B,WAAW5E,SAAUyE,EAAIC,GACrD,aACAtE,KAAKyC,OAAO+B,WAAWC,QAAQC,KAAKC,SAAS,qBAAkB3D,EAAY,EAE/E"}
@@ -0,0 +1,2 @@
1
+ "use strict";Object.defineProperty(exports,"__esModule",{value:!0});var e=require("../../../micromark-core-commonmark/lib/blank-line.js"),r=require("../../../micromark-factory-space/index.js");require("../../../../../decode-named-character-reference/index.dom.js");var n=require("../../../micromark-core-commonmark/lib/content.js");require("../../../micromark-core-commonmark/lib/label-start-image.js"),require("../../../micromark-core-commonmark/lib/label-start-link.js");const t={tokenize:function(t){const i=this,o=t.attempt(e.blankLine,(function(e){if(null===e)return void t.consume(e);return t.enter("lineEndingBlank"),t.consume(e),t.exit("lineEndingBlank"),i.currentConstruct=void 0,o}),t.attempt(this.parser.constructs.flowInitial,c,r.factorySpace(t,t.attempt(this.parser.constructs.flow,c,t.attempt(n.content,c)),"linePrefix")));return o;function c(e){if(null!==e)return t.enter("lineEnding"),t.consume(e),t.exit("lineEnding"),i.currentConstruct=void 0,o;t.consume(e)}}};exports.flow=t;
2
+ //# sourceMappingURL=flow.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"flow.js","sources":["../../../../../../../../node_modules/react-markdown/node_modules/micromark/lib/initialize/flow.js"],"sourcesContent":["/**\n * @typedef {import('micromark-util-types').InitialConstruct} InitialConstruct\n * @typedef {import('micromark-util-types').Initializer} Initializer\n * @typedef {import('micromark-util-types').State} State\n * @typedef {import('micromark-util-types').TokenizeContext} TokenizeContext\n */\n\nimport {blankLine, content} from 'micromark-core-commonmark'\nimport {factorySpace} from 'micromark-factory-space'\nimport {markdownLineEnding} from 'micromark-util-character'\n/** @type {InitialConstruct} */\nexport const flow = {\n tokenize: initializeFlow\n}\n\n/**\n * @this {TokenizeContext}\n * @type {Initializer}\n */\nfunction initializeFlow(effects) {\n const self = this\n const initial = effects.attempt(\n // Try to parse a blank line.\n blankLine,\n atBlankEnding,\n // Try to parse initial flow (essentially, only code).\n effects.attempt(\n this.parser.constructs.flowInitial,\n afterConstruct,\n factorySpace(\n effects,\n effects.attempt(\n this.parser.constructs.flow,\n afterConstruct,\n effects.attempt(content, afterConstruct)\n ),\n 'linePrefix'\n )\n )\n )\n return initial\n\n /** @type {State} */\n function atBlankEnding(code) {\n if (code === null) {\n effects.consume(code)\n return\n }\n effects.enter('lineEndingBlank')\n effects.consume(code)\n effects.exit('lineEndingBlank')\n self.currentConstruct = undefined\n return initial\n }\n\n /** @type {State} */\n function afterConstruct(code) {\n if (code === null) {\n effects.consume(code)\n return\n }\n effects.enter('lineEnding')\n effects.consume(code)\n effects.exit('lineEnding')\n self.currentConstruct = undefined\n return initial\n 
}\n}\n"],"names":["flow","tokenize","effects","self","this","initial","attempt","blankLine","code","consume","enter","exit","currentConstruct","undefined","parser","constructs","flowInitial","afterConstruct","factorySpace","content"],"mappings":"ydAWY,MAACA,EAAO,CAClBC,SAOF,SAAwBC,GACtB,MAAMC,EAAOC,KACPC,EAAUH,EAAQI,QAEtBC,EAASA,WAoBX,SAAuBC,GACrB,GAAa,OAATA,EAEF,YADAN,EAAQO,QAAQD,GAOlB,OAJAN,EAAQQ,MAAM,mBACdR,EAAQO,QAAQD,GAChBN,EAAQS,KAAK,mBACbR,EAAKS,sBAAmBC,EACjBR,CACR,GA3BCH,EAAQI,QACNF,KAAKU,OAAOC,WAAWC,YACvBC,EACAC,EAAYA,aACVhB,EACAA,EAAQI,QACNF,KAAKU,OAAOC,WAAWf,KACvBiB,EACAf,EAAQI,QAAQa,EAAOA,QAAEF,IAE3B,gBAIN,OAAOZ,EAgBP,SAASY,EAAeT,GACtB,GAAa,OAATA,EAQJ,OAJAN,EAAQQ,MAAM,cACdR,EAAQO,QAAQD,GAChBN,EAAQS,KAAK,cACbR,EAAKS,sBAAmBC,EACjBR,EAPLH,EAAQO,QAAQD,EAQnB,CACH"}
@@ -0,0 +1,2 @@
1
+ "use strict";Object.defineProperty(exports,"__esModule",{value:!0});const e={resolveAll:i()},t=r("string"),n=r("text");function r(e){return{tokenize:function(t){const n=this,r=this.parser.constructs[e],i=t.attempt(r,s,o);return s;function s(e){return l(e)?i(e):o(e)}function o(e){if(null!==e)return t.enter("data"),t.consume(e),f;t.consume(e)}function f(e){return l(e)?(t.exit("data"),i(e)):(t.consume(e),f)}function l(e){if(null===e)return!0;const t=r[e];let i=-1;if(t)for(;++i<t.length;){const e=t[i];if(!e.previous||e.previous.call(n,n.previous))return!0}return!1}},resolveAll:i("text"===e?s:void 0)}}function i(e){return function(t,n){let r,i=-1;for(;++i<=t.length;)void 0===r?t[i]&&"data"===t[i][1].type&&(r=i,i++):t[i]&&"data"===t[i][1].type||(i!==r+2&&(t[r][1].end=t[i-1][1].end,t.splice(r+2,i-r-2),i=r+2),r=void 0);return e?e(t,n):t}}function s(e,t){let n=0;for(;++n<=e.length;)if((n===e.length||"lineEnding"===e[n][1].type)&&"data"===e[n-1][1].type){const r=e[n-1][1],i=t.sliceStream(r);let s,o=i.length,f=-1,l=0;for(;o--;){const e=i[o];if("string"==typeof e){for(f=e.length;32===e.charCodeAt(f-1);)l++,f--;if(f)break;f=-1}else if(-2===e)s=!0,l++;else if(-1!==e){o++;break}}if(l){const i={type:n===e.length||s||l<2?"lineSuffix":"hardBreakTrailing",start:{line:r.end.line,column:r.end.column-l,offset:r.end.offset-l,_index:r.start._index+o,_bufferIndex:o?f:r.start._bufferIndex+f},end:Object.assign({},r.end)};r.end=Object.assign({},i.start),r.start.offset===r.end.offset?Object.assign(r,i):(e.splice(n,0,["enter",i,t],["exit",i,t]),n+=2)}n++}return e}exports.resolver=e,exports.string=t,exports.text=n;
2
+ //# sourceMappingURL=text.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"text.js","sources":["../../../../../../../../node_modules/react-markdown/node_modules/micromark/lib/initialize/text.js"],"sourcesContent":["/**\n * @typedef {import('micromark-util-types').Code} Code\n * @typedef {import('micromark-util-types').InitialConstruct} InitialConstruct\n * @typedef {import('micromark-util-types').Initializer} Initializer\n * @typedef {import('micromark-util-types').Resolver} Resolver\n * @typedef {import('micromark-util-types').State} State\n * @typedef {import('micromark-util-types').TokenizeContext} TokenizeContext\n */\n\nexport const resolver = {\n resolveAll: createResolver()\n}\nexport const string = initializeFactory('string')\nexport const text = initializeFactory('text')\n\n/**\n * @param {'string' | 'text'} field\n * @returns {InitialConstruct}\n */\nfunction initializeFactory(field) {\n return {\n tokenize: initializeText,\n resolveAll: createResolver(\n field === 'text' ? resolveAllLineSuffixes : undefined\n )\n }\n\n /**\n * @this {TokenizeContext}\n * @type {Initializer}\n */\n function initializeText(effects) {\n const self = this\n const constructs = this.parser.constructs[field]\n const text = effects.attempt(constructs, start, notText)\n return start\n\n /** @type {State} */\n function start(code) {\n return atBreak(code) ? 
text(code) : notText(code)\n }\n\n /** @type {State} */\n function notText(code) {\n if (code === null) {\n effects.consume(code)\n return\n }\n effects.enter('data')\n effects.consume(code)\n return data\n }\n\n /** @type {State} */\n function data(code) {\n if (atBreak(code)) {\n effects.exit('data')\n return text(code)\n }\n\n // Data.\n effects.consume(code)\n return data\n }\n\n /**\n * @param {Code} code\n * @returns {boolean}\n */\n function atBreak(code) {\n if (code === null) {\n return true\n }\n const list = constructs[code]\n let index = -1\n if (list) {\n // Always populated by defaults.\n\n while (++index < list.length) {\n const item = list[index]\n if (!item.previous || item.previous.call(self, self.previous)) {\n return true\n }\n }\n }\n return false\n }\n }\n}\n\n/**\n * @param {Resolver | undefined} [extraResolver]\n * @returns {Resolver}\n */\nfunction createResolver(extraResolver) {\n return resolveAllText\n\n /** @type {Resolver} */\n function resolveAllText(events, context) {\n let index = -1\n /** @type {number | undefined} */\n let enter\n\n // A rather boring computation (to merge adjacent `data` events) which\n // improves mm performance by 29%.\n while (++index <= events.length) {\n if (enter === undefined) {\n if (events[index] && events[index][1].type === 'data') {\n enter = index\n index++\n }\n } else if (!events[index] || events[index][1].type !== 'data') {\n // Don’t do anything if there is one data token.\n if (index !== enter + 2) {\n events[enter][1].end = events[index - 1][1].end\n events.splice(enter + 2, index - enter - 2)\n index = enter + 2\n }\n enter = undefined\n }\n }\n return extraResolver ? 
extraResolver(events, context) : events\n }\n}\n\n/**\n * A rather ugly set of instructions which again looks at chunks in the input\n * stream.\n * The reason to do this here is that it is *much* faster to parse in reverse.\n * And that we can’t hook into `null` to split the line suffix before an EOF.\n * To do: figure out if we can make this into a clean utility, or even in core.\n * As it will be useful for GFMs literal autolink extension (and maybe even\n * tables?)\n *\n * @type {Resolver}\n */\nfunction resolveAllLineSuffixes(events, context) {\n let eventIndex = 0 // Skip first.\n\n while (++eventIndex <= events.length) {\n if (\n (eventIndex === events.length ||\n events[eventIndex][1].type === 'lineEnding') &&\n events[eventIndex - 1][1].type === 'data'\n ) {\n const data = events[eventIndex - 1][1]\n const chunks = context.sliceStream(data)\n let index = chunks.length\n let bufferIndex = -1\n let size = 0\n /** @type {boolean | undefined} */\n let tabs\n while (index--) {\n const chunk = chunks[index]\n if (typeof chunk === 'string') {\n bufferIndex = chunk.length\n while (chunk.charCodeAt(bufferIndex - 1) === 32) {\n size++\n bufferIndex--\n }\n if (bufferIndex) break\n bufferIndex = -1\n }\n // Number\n else if (chunk === -2) {\n tabs = true\n size++\n } else if (chunk === -1) {\n // Empty\n } else {\n // Replacement character, exit.\n index++\n break\n }\n }\n if (size) {\n const token = {\n type:\n eventIndex === events.length || tabs || size < 2\n ? 'lineSuffix'\n : 'hardBreakTrailing',\n start: {\n line: data.end.line,\n column: data.end.column - size,\n offset: data.end.offset - size,\n _index: data.start._index + index,\n _bufferIndex: index\n ? 
bufferIndex\n : data.start._bufferIndex + bufferIndex\n },\n end: Object.assign({}, data.end)\n }\n data.end = Object.assign({}, token.start)\n if (data.start.offset === data.end.offset) {\n Object.assign(data, token)\n } else {\n events.splice(\n eventIndex,\n 0,\n ['enter', token, context],\n ['exit', token, context]\n )\n eventIndex += 2\n }\n }\n eventIndex++\n }\n }\n return events\n}\n"],"names":["resolver","resolveAll","createResolver","string","initializeFactory","text","field","tokenize","effects","self","this","constructs","parser","attempt","start","notText","code","atBreak","enter","consume","data","exit","list","index","length","item","previous","call","resolveAllLineSuffixes","undefined","extraResolver","events","context","type","end","splice","eventIndex","chunks","sliceStream","tabs","bufferIndex","size","chunk","charCodeAt","token","line","column","offset","_index","_bufferIndex","Object","assign"],"mappings":"oEASY,MAACA,EAAW,CACtBC,WAAYC,KAEDC,EAASC,EAAkB,UAC3BC,EAAOD,EAAkB,QAMtC,SAASA,EAAkBE,GACzB,MAAO,CACLC,SAUF,SAAwBC,GACtB,MAAMC,EAAOC,KACPC,EAAaD,KAAKE,OAAOD,WAAWL,GACpCD,EAAOG,EAAQK,QAAQF,EAAYG,EAAOC,GAChD,OAAOD,EAGP,SAASA,EAAME,GACb,OAAOC,EAAQD,GAAQX,EAAKW,GAAQD,EAAQC,EAC7C,CAGD,SAASD,EAAQC,GACf,GAAa,OAATA,EAMJ,OAFAR,EAAQU,MAAM,QACdV,EAAQW,QAAQH,GACTI,EALLZ,EAAQW,QAAQH,EAMnB,CAGD,SAASI,EAAKJ,GACZ,OAAIC,EAAQD,IACVR,EAAQa,KAAK,QACNhB,EAAKW,KAIdR,EAAQW,QAAQH,GACTI,EACR,CAMD,SAASH,EAAQD,GACf,GAAa,OAATA,EACF,OAAO,EAET,MAAMM,EAAOX,EAAWK,GACxB,IAAIO,GAAS,EACb,GAAID,EAGF,OAASC,EAAQD,EAAKE,QAAQ,CAC5B,MAAMC,EAAOH,EAAKC,GAClB,IAAKE,EAAKC,UAAYD,EAAKC,SAASC,KAAKlB,EAAMA,EAAKiB,UAClD,OAAO,CAEV,CAEH,OAAO,CACR,CACF,EAjECzB,WAAYC,EACA,SAAVI,EAAmBsB,OAAyBC,GAiElD,CAMA,SAAS3B,EAAe4B,GACtB,OAGA,SAAwBC,EAAQC,GAC9B,IAEId,EAFAK,GAAS,EAMb,OAASA,GAASQ,EAAOP,aACTK,IAAVX,EACEa,EAAOR,IAAoC,SAA1BQ,EAAOR,GAAO,GAAGU,OACpCf,EAAQK,EACRA,KAEQQ,EAAOR,IAAoC,SAA1BQ,EAAOR,GAAO,GAAGU,OAExCV,IAAUL,EAAQ,IACpBa,EAAOb,GAAO,GAAGgB,IAAMH,EAAOR,EAAQ,GAAG,GAAGW,IAC5CH,EAAOI,OAAOjB,EAAQ,EA
AGK,EAAQL,EAAQ,GACzCK,EAAQL,EAAQ,GAElBA,OAAQW,GAGZ,OAAOC,EAAgBA,EAAcC,EAAQC,GAAWD,CACzD,CACH,CAaA,SAASH,EAAuBG,EAAQC,GACtC,IAAII,EAAa,EAEjB,OAASA,GAAcL,EAAOP,QAC5B,IACGY,IAAeL,EAAOP,QACU,eAA/BO,EAAOK,GAAY,GAAGH,OACW,SAAnCF,EAAOK,EAAa,GAAG,GAAGH,KAC1B,CACA,MAAMb,EAAOW,EAAOK,EAAa,GAAG,GAC9BC,EAASL,EAAQM,YAAYlB,GACnC,IAIImB,EAJAhB,EAAQc,EAAOb,OACfgB,GAAe,EACfC,EAAO,EAGX,KAAOlB,KAAS,CACd,MAAMmB,EAAQL,EAAOd,GACrB,GAAqB,iBAAVmB,EAAoB,CAE7B,IADAF,EAAcE,EAAMlB,OACyB,KAAtCkB,EAAMC,WAAWH,EAAc,IACpCC,IACAD,IAEF,GAAIA,EAAa,MACjBA,GAAe,CAChB,MAEI,IAAe,IAAXE,EACPH,GAAO,EACPE,SACK,IAAe,IAAXC,EAEJ,CAELnB,IACA,KACD,CACF,CACD,GAAIkB,EAAM,CACR,MAAMG,EAAQ,CACZX,KACEG,IAAeL,EAAOP,QAAUe,GAAQE,EAAO,EAC3C,aACA,oBACN3B,MAAO,CACL+B,KAAMzB,EAAKc,IAAIW,KACfC,OAAQ1B,EAAKc,IAAIY,OAASL,EAC1BM,OAAQ3B,EAAKc,IAAIa,OAASN,EAC1BO,OAAQ5B,EAAKN,MAAMkC,OAASzB,EAC5B0B,aAAc1B,EACViB,EACApB,EAAKN,MAAMmC,aAAeT,GAEhCN,IAAKgB,OAAOC,OAAO,CAAA,EAAI/B,EAAKc,MAE9Bd,EAAKc,IAAMgB,OAAOC,OAAO,CAAE,EAAEP,EAAM9B,OAC/BM,EAAKN,MAAMiC,SAAW3B,EAAKc,IAAIa,OACjCG,OAAOC,OAAO/B,EAAMwB,IAEpBb,EAAOI,OACLC,EACA,EACA,CAAC,QAASQ,EAAOZ,GACjB,CAAC,OAAQY,EAAOZ,IAElBI,GAAc,EAEjB,CACDA,GACD,CAEH,OAAOL,CACT"}
@@ -0,0 +1,2 @@
1
+ "use strict";Object.defineProperty(exports,"__esModule",{value:!0});var e=require("../../micromark-util-combine-extensions/index.js"),t=require("./initialize/content.js"),i=require("./initialize/document.js"),n=require("./initialize/flow.js"),r=require("./initialize/text.js"),o=require("./create-tokenizer.js"),s=require("./constructs.js");exports.parse=function(u){const c=u||{},a={defined:[],lazy:{},constructs:e.combineExtensions([s,...c.extensions||[]]),content:l(t.content),document:l(i.document),flow:l(n.flow),string:l(r.string),text:l(r.text)};return a;function l(e){return function(t){return o.createTokenizer(a,e,t)}}};
2
+ //# sourceMappingURL=parse.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"parse.js","sources":["../../../../../../../node_modules/react-markdown/node_modules/micromark/lib/parse.js"],"sourcesContent":["/**\n * @typedef {import('micromark-util-types').Create} Create\n * @typedef {import('micromark-util-types').FullNormalizedExtension} FullNormalizedExtension\n * @typedef {import('micromark-util-types').InitialConstruct} InitialConstruct\n * @typedef {import('micromark-util-types').ParseContext} ParseContext\n * @typedef {import('micromark-util-types').ParseOptions} ParseOptions\n */\n\nimport {combineExtensions} from 'micromark-util-combine-extensions'\nimport {content} from './initialize/content.js'\nimport {document} from './initialize/document.js'\nimport {flow} from './initialize/flow.js'\nimport {text, string} from './initialize/text.js'\nimport {createTokenizer} from './create-tokenizer.js'\nimport * as defaultConstructs from './constructs.js'\n\n/**\n * @param {ParseOptions | null | undefined} [options]\n * @returns {ParseContext}\n */\nexport function parse(options) {\n const settings = options || {}\n const constructs =\n /** @type {FullNormalizedExtension} */\n combineExtensions([defaultConstructs, ...(settings.extensions || [])])\n\n /** @type {ParseContext} */\n const parser = {\n defined: [],\n lazy: {},\n constructs,\n content: create(content),\n document: create(document),\n flow: create(flow),\n string: create(string),\n text: create(text)\n }\n return parser\n\n /**\n * @param {InitialConstruct} initial\n */\n function create(initial) {\n return creator\n /** @type {Create} */\n function creator(from) {\n return createTokenizer(parser, initial, from)\n }\n 
}\n}\n"],"names":["options","settings","parser","defined","lazy","constructs","combineExtensions","defaultConstructs","extensions","content","create","document","flow","string","text","initial","from","createTokenizer"],"mappings":"mWAoBO,SAAeA,GACpB,MAAMC,EAAWD,GAAW,CAAE,EAMxBE,EAAS,CACbC,QAAS,GACTC,KAAM,CAAE,EACZC,WANIC,EAAiBA,kBAAC,CAACC,KAAuBN,EAASO,YAAc,KAOjEC,QAASC,EAAOD,WAChBE,SAAUD,EAAOC,YACjBC,KAAMF,EAAOE,QACbC,OAAQH,EAAOG,UACfC,KAAMJ,EAAOI,SAEf,OAAOZ,EAKP,SAASQ,EAAOK,GACd,OAEA,SAAiBC,GACf,OAAOC,kBAAgBf,EAAQa,EAASC,EACzC,CACF,CACH"}
@@ -0,0 +1,2 @@
1
+ "use strict";Object.defineProperty(exports,"__esModule",{value:!0});var e=require("../../micromark-util-subtokenize/index.js");exports.postprocess=function(r){for(;!e.subtokenize(r););return r};
2
+ //# sourceMappingURL=postprocess.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"postprocess.js","sources":["../../../../../../../node_modules/react-markdown/node_modules/micromark/lib/postprocess.js"],"sourcesContent":["/**\n * @typedef {import('micromark-util-types').Event} Event\n */\n\nimport {subtokenize} from 'micromark-util-subtokenize'\n\n/**\n * @param {Array<Event>} events\n * @returns {Array<Event>}\n */\nexport function postprocess(events) {\n while (!subtokenize(events)) {\n // Empty\n }\n return events\n}\n"],"names":["events","subtokenize"],"mappings":"mJAUO,SAAqBA,GAC1B,MAAQC,EAAAA,YAAYD,KAGpB,OAAOA,CACT"}
@@ -0,0 +1,2 @@
1
+ "use strict";Object.defineProperty(exports,"__esModule",{value:!0});const e=/[\0\t\n\r]/g;exports.preprocess=function(){let s,t=1,r="",u=!0;return function(o,c,i){const n=[];let h,p,l,a,d;o=r+o.toString(c),l=0,r="",u&&(65279===o.charCodeAt(0)&&l++,u=void 0);for(;l<o.length;){if(e.lastIndex=l,h=e.exec(o),a=h&&void 0!==h.index?h.index:o.length,d=o.charCodeAt(a),!h){r=o.slice(l);break}if(10===d&&l===a&&s)n.push(-3),s=void 0;else switch(s&&(n.push(-5),s=void 0),l<a&&(n.push(o.slice(l,a)),t+=a-l),d){case 0:n.push(65533),t++;break;case 9:for(p=4*Math.ceil(t/4),n.push(-2);t++<p;)n.push(-1);break;case 10:n.push(-4),t=1;break;default:s=!0,t=1}l=a+1}i&&(s&&n.push(-5),r&&n.push(r),n.push(null));return n}};
2
+ //# sourceMappingURL=preprocess.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"preprocess.js","sources":["../../../../../../../node_modules/react-markdown/node_modules/micromark/lib/preprocess.js"],"sourcesContent":["/**\n * @typedef {import('micromark-util-types').Chunk} Chunk\n * @typedef {import('micromark-util-types').Code} Code\n * @typedef {import('micromark-util-types').Encoding} Encoding\n * @typedef {import('micromark-util-types').Value} Value\n */\n\n/**\n * @callback Preprocessor\n * @param {Value} value\n * @param {Encoding | null | undefined} [encoding]\n * @param {boolean | null | undefined} [end=false]\n * @returns {Array<Chunk>}\n */\n\nconst search = /[\\0\\t\\n\\r]/g\n\n/**\n * @returns {Preprocessor}\n */\nexport function preprocess() {\n let column = 1\n let buffer = ''\n /** @type {boolean | undefined} */\n let start = true\n /** @type {boolean | undefined} */\n let atCarriageReturn\n return preprocessor\n\n /** @type {Preprocessor} */\n function preprocessor(value, encoding, end) {\n /** @type {Array<Chunk>} */\n const chunks = []\n /** @type {RegExpMatchArray | null} */\n let match\n /** @type {number} */\n let next\n /** @type {number} */\n let startPosition\n /** @type {number} */\n let endPosition\n /** @type {Code} */\n let code\n\n // @ts-expect-error `Buffer` does allow an encoding.\n value = buffer + value.toString(encoding)\n startPosition = 0\n buffer = ''\n if (start) {\n // To do: `markdown-rs` actually parses BOMs (byte order mark).\n if (value.charCodeAt(0) === 65279) {\n startPosition++\n }\n start = undefined\n }\n while (startPosition < value.length) {\n search.lastIndex = startPosition\n match = search.exec(value)\n endPosition =\n match && match.index !== undefined ? 
match.index : value.length\n code = value.charCodeAt(endPosition)\n if (!match) {\n buffer = value.slice(startPosition)\n break\n }\n if (code === 10 && startPosition === endPosition && atCarriageReturn) {\n chunks.push(-3)\n atCarriageReturn = undefined\n } else {\n if (atCarriageReturn) {\n chunks.push(-5)\n atCarriageReturn = undefined\n }\n if (startPosition < endPosition) {\n chunks.push(value.slice(startPosition, endPosition))\n column += endPosition - startPosition\n }\n switch (code) {\n case 0: {\n chunks.push(65533)\n column++\n break\n }\n case 9: {\n next = Math.ceil(column / 4) * 4\n chunks.push(-2)\n while (column++ < next) chunks.push(-1)\n break\n }\n case 10: {\n chunks.push(-4)\n column = 1\n break\n }\n default: {\n atCarriageReturn = true\n column = 1\n }\n }\n }\n startPosition = endPosition + 1\n }\n if (end) {\n if (atCarriageReturn) chunks.push(-5)\n if (buffer) chunks.push(buffer)\n chunks.push(null)\n }\n return chunks\n }\n}\n"],"names":["search","atCarriageReturn","column","buffer","start","value","encoding","end","chunks","match","next","startPosition","endPosition","code","toString","charCodeAt","undefined","length","lastIndex","exec","index","slice","push","Math","ceil"],"mappings":"oEAeA,MAAMA,EAAS,iCAKR,WACL,IAKIC,EALAC,EAAS,EACTC,EAAS,GAETC,GAAQ,EAGZ,OAGA,SAAsBC,EAAOC,EAAUC,GAErC,MAAMC,EAAS,GAEf,IAAIC,EAEAC,EAEAC,EAEAC,EAEAC,EAGJR,EAAQF,EAASE,EAAMS,SAASR,GAChCK,EAAgB,EAChBR,EAAS,GACLC,IAE0B,QAAxBC,EAAMU,WAAW,IACnBJ,IAEFP,OAAQY,GAEV,KAAOL,EAAgBN,EAAMY,QAAQ,CAMnC,GALAjB,EAAOkB,UAAYP,EACnBF,EAAQT,EAAOmB,KAAKd,GACpBO,EACEH,QAAyBO,IAAhBP,EAAMW,MAAsBX,EAAMW,MAAQf,EAAMY,OAC3DJ,EAAOR,EAAMU,WAAWH,IACnBH,EAAO,CACVN,EAASE,EAAMgB,MAAMV,GACrB,KACD,CACD,GAAa,KAATE,GAAeF,IAAkBC,GAAeX,EAClDO,EAAOc,MAAM,GACbrB,OAAmBe,OAUnB,OARIf,IACFO,EAAOc,MAAM,GACbrB,OAAmBe,GAEjBL,EAAgBC,IAClBJ,EAAOc,KAAKjB,EAAMgB,MAAMV,EAAeC,IACvCV,GAAUU,EAAcD,GAElBE,GACN,KAAK,EACHL,EAAOc,KAAK,OACZpB,IACA,MAEF,KAAK,EAGH,IAFAQ,EAA+B,EAAxBa,KAAKC,KAAKtB,EAAS,GAC1BM,EAAOc,MAAM,GACNp
B,IAAWQ,GAAMF,EAAOc,MAAM,GACrC,MAEF,KAAK,GACHd,EAAOc,MAAM,GACbpB,EAAS,EACT,MAEF,QACED,GAAmB,EACnBC,EAAS,EAIfS,EAAgBC,EAAc,CAC/B,CACGL,IACEN,GAAkBO,EAAOc,MAAM,GAC/BnB,GAAQK,EAAOc,KAAKnB,GACxBK,EAAOc,KAAK,OAEd,OAAOd,CACR,CACH"}
@@ -0,0 +1,2 @@
1
+ "use strict";Object.defineProperty(exports,"__esModule",{value:!0});var e=require("../../micromark-util-chunked/index.js"),t=require("../../micromark-util-classify-character/index.js"),n=require("../../micromark-util-resolve-all/index.js");const s={name:"attention",tokenize:function(e,n){const s=this.parser.constructs.attentionMarkers.null,r=this.previous,i=t.classifyCharacter(r);let a;return function(t){return a=t,e.enter("attentionSequence"),o(t)};function o(c){if(c===a)return e.consume(c),o;const f=e.exit("attentionSequence"),u=t.classifyCharacter(c),l=!u||2===u&&i||s.includes(c),d=!i||2===i&&u||s.includes(r);return f._open=Boolean(42===a?l:l&&(i||!d)),f._close=Boolean(42===a?d:d&&(u||!l)),n(c)}},resolveAll:function(t,s){let i,a,o,c,f,u,l,d,p=-1;for(;++p<t.length;)if("enter"===t[p][0]&&"attentionSequence"===t[p][1].type&&t[p][1]._close)for(i=p;i--;)if("exit"===t[i][0]&&"attentionSequence"===t[i][1].type&&t[i][1]._open&&s.sliceSerialize(t[i][1]).charCodeAt(0)===s.sliceSerialize(t[p][1]).charCodeAt(0)){if((t[i][1]._close||t[p][1]._open)&&(t[p][1].end.offset-t[p][1].start.offset)%3&&!((t[i][1].end.offset-t[i][1].start.offset+t[p][1].end.offset-t[p][1].start.offset)%3))continue;u=t[i][1].end.offset-t[i][1].start.offset>1&&t[p][1].end.offset-t[p][1].start.offset>1?2:1;const 
h=Object.assign({},t[i][1].end),g=Object.assign({},t[p][1].start);r(h,-u),r(g,u),c={type:u>1?"strongSequence":"emphasisSequence",start:h,end:Object.assign({},t[i][1].end)},f={type:u>1?"strongSequence":"emphasisSequence",start:Object.assign({},t[p][1].start),end:g},o={type:u>1?"strongText":"emphasisText",start:Object.assign({},t[i][1].end),end:Object.assign({},t[p][1].start)},a={type:u>1?"strong":"emphasis",start:Object.assign({},c.start),end:Object.assign({},f.end)},t[i][1].end=Object.assign({},c.start),t[p][1].start=Object.assign({},f.end),l=[],t[i][1].end.offset-t[i][1].start.offset&&(l=e.push(l,[["enter",t[i][1],s],["exit",t[i][1],s]])),l=e.push(l,[["enter",a,s],["enter",c,s],["exit",c,s],["enter",o,s]]),l=e.push(l,n.resolveAll(s.parser.constructs.insideSpan.null,t.slice(i+1,p),s)),l=e.push(l,[["exit",o,s],["enter",f,s],["exit",f,s],["exit",a,s]]),t[p][1].end.offset-t[p][1].start.offset?(d=2,l=e.push(l,[["enter",t[p][1],s],["exit",t[p][1],s]])):d=0,e.splice(t,i-1,p-i+3,l),p=i+l.length-d-2;break}p=-1;for(;++p<t.length;)"attentionSequence"===t[p][1].type&&(t[p][1].type="data");return t}};function r(e,t){e.column+=t,e.offset+=t,e._bufferIndex+=t}exports.attention=s;
2
+ //# sourceMappingURL=attention.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"attention.js","sources":["../../../../../../../node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/attention.js"],"sourcesContent":["/**\n * @typedef {import('micromark-util-types').Code} Code\n * @typedef {import('micromark-util-types').Construct} Construct\n * @typedef {import('micromark-util-types').Event} Event\n * @typedef {import('micromark-util-types').Point} Point\n * @typedef {import('micromark-util-types').Resolver} Resolver\n * @typedef {import('micromark-util-types').State} State\n * @typedef {import('micromark-util-types').Token} Token\n * @typedef {import('micromark-util-types').TokenizeContext} TokenizeContext\n * @typedef {import('micromark-util-types').Tokenizer} Tokenizer\n */\n\nimport {push, splice} from 'micromark-util-chunked'\nimport {classifyCharacter} from 'micromark-util-classify-character'\nimport {resolveAll} from 'micromark-util-resolve-all'\n/** @type {Construct} */\nexport const attention = {\n name: 'attention',\n tokenize: tokenizeAttention,\n resolveAll: resolveAllAttention\n}\n\n/**\n * Take all events and resolve attention to emphasis or strong.\n *\n * @type {Resolver}\n */\nfunction resolveAllAttention(events, context) {\n let index = -1\n /** @type {number} */\n let open\n /** @type {Token} */\n let group\n /** @type {Token} */\n let text\n /** @type {Token} */\n let openingSequence\n /** @type {Token} */\n let closingSequence\n /** @type {number} */\n let use\n /** @type {Array<Event>} */\n let nextEvents\n /** @type {number} */\n let offset\n\n // Walk through all events.\n //\n // Note: performance of this is fine on an mb of normal markdown, but it’s\n // a bottleneck for malicious stuff.\n while (++index < events.length) {\n // Find a token that can close.\n if (\n events[index][0] === 'enter' &&\n events[index][1].type === 'attentionSequence' &&\n events[index][1]._close\n ) {\n open = index\n\n // Now walk back to find an opener.\n while (open--) {\n // Find a token that can 
open the closer.\n if (\n events[open][0] === 'exit' &&\n events[open][1].type === 'attentionSequence' &&\n events[open][1]._open &&\n // If the markers are the same:\n context.sliceSerialize(events[open][1]).charCodeAt(0) ===\n context.sliceSerialize(events[index][1]).charCodeAt(0)\n ) {\n // If the opening can close or the closing can open,\n // and the close size *is not* a multiple of three,\n // but the sum of the opening and closing size *is* multiple of three,\n // then don’t match.\n if (\n (events[open][1]._close || events[index][1]._open) &&\n (events[index][1].end.offset - events[index][1].start.offset) % 3 &&\n !(\n (events[open][1].end.offset -\n events[open][1].start.offset +\n events[index][1].end.offset -\n events[index][1].start.offset) %\n 3\n )\n ) {\n continue\n }\n\n // Number of markers to use from the sequence.\n use =\n events[open][1].end.offset - events[open][1].start.offset > 1 &&\n events[index][1].end.offset - events[index][1].start.offset > 1\n ? 2\n : 1\n const start = Object.assign({}, events[open][1].end)\n const end = Object.assign({}, events[index][1].start)\n movePoint(start, -use)\n movePoint(end, use)\n openingSequence = {\n type: use > 1 ? 'strongSequence' : 'emphasisSequence',\n start,\n end: Object.assign({}, events[open][1].end)\n }\n closingSequence = {\n type: use > 1 ? 'strongSequence' : 'emphasisSequence',\n start: Object.assign({}, events[index][1].start),\n end\n }\n text = {\n type: use > 1 ? 'strongText' : 'emphasisText',\n start: Object.assign({}, events[open][1].end),\n end: Object.assign({}, events[index][1].start)\n }\n group = {\n type: use > 1 ? 
'strong' : 'emphasis',\n start: Object.assign({}, openingSequence.start),\n end: Object.assign({}, closingSequence.end)\n }\n events[open][1].end = Object.assign({}, openingSequence.start)\n events[index][1].start = Object.assign({}, closingSequence.end)\n nextEvents = []\n\n // If there are more markers in the opening, add them before.\n if (events[open][1].end.offset - events[open][1].start.offset) {\n nextEvents = push(nextEvents, [\n ['enter', events[open][1], context],\n ['exit', events[open][1], context]\n ])\n }\n\n // Opening.\n nextEvents = push(nextEvents, [\n ['enter', group, context],\n ['enter', openingSequence, context],\n ['exit', openingSequence, context],\n ['enter', text, context]\n ])\n\n // Always populated by defaults.\n\n // Between.\n nextEvents = push(\n nextEvents,\n resolveAll(\n context.parser.constructs.insideSpan.null,\n events.slice(open + 1, index),\n context\n )\n )\n\n // Closing.\n nextEvents = push(nextEvents, [\n ['exit', text, context],\n ['enter', closingSequence, context],\n ['exit', closingSequence, context],\n ['exit', group, context]\n ])\n\n // If there are more markers in the closing, add them after.\n if (events[index][1].end.offset - events[index][1].start.offset) {\n offset = 2\n nextEvents = push(nextEvents, [\n ['enter', events[index][1], context],\n ['exit', events[index][1], context]\n ])\n } else {\n offset = 0\n }\n splice(events, open - 1, index - open + 3, nextEvents)\n index = open + nextEvents.length - offset - 2\n break\n }\n }\n }\n }\n\n // Remove remaining sequences.\n index = -1\n while (++index < events.length) {\n if (events[index][1].type === 'attentionSequence') {\n events[index][1].type = 'data'\n }\n }\n return events\n}\n\n/**\n * @this {TokenizeContext}\n * @type {Tokenizer}\n */\nfunction tokenizeAttention(effects, ok) {\n const attentionMarkers = this.parser.constructs.attentionMarkers.null\n const previous = this.previous\n const before = classifyCharacter(previous)\n\n /** @type 
{NonNullable<Code>} */\n let marker\n return start\n\n /**\n * Before a sequence.\n *\n * ```markdown\n * > | **\n * ^\n * ```\n *\n * @type {State}\n */\n function start(code) {\n marker = code\n effects.enter('attentionSequence')\n return inside(code)\n }\n\n /**\n * In a sequence.\n *\n * ```markdown\n * > | **\n * ^^\n * ```\n *\n * @type {State}\n */\n function inside(code) {\n if (code === marker) {\n effects.consume(code)\n return inside\n }\n const token = effects.exit('attentionSequence')\n\n // To do: next major: move this to resolver, just like `markdown-rs`.\n const after = classifyCharacter(code)\n\n // Always populated by defaults.\n\n const open =\n !after || (after === 2 && before) || attentionMarkers.includes(code)\n const close =\n !before || (before === 2 && after) || attentionMarkers.includes(previous)\n token._open = Boolean(marker === 42 ? open : open && (before || !close))\n token._close = Boolean(marker === 42 ? close : close && (after || !open))\n return ok(code)\n }\n}\n\n/**\n * Move a point a bit.\n *\n * Note: `move` only works inside lines! 
It’s not possible to move past other\n * chunks (replacement characters, tabs, or line endings).\n *\n * @param {Point} point\n * @param {number} offset\n * @returns {void}\n */\nfunction movePoint(point, offset) {\n point.column += offset\n point.offset += offset\n point._bufferIndex += offset\n}\n"],"names":["attention","name","tokenize","effects","ok","attentionMarkers","this","parser","constructs","null","previous","before","classifyCharacter","marker","code","enter","inside","consume","token","exit","after","open","includes","close","_open","Boolean","_close","resolveAll","events","context","group","text","openingSequence","closingSequence","use","nextEvents","offset","index","length","type","sliceSerialize","charCodeAt","end","start","Object","assign","movePoint","push","insideSpan","slice","splice","point","column","_bufferIndex"],"mappings":"gPAgBY,MAACA,EAAY,CACvBC,KAAM,YACNC,SA4KF,SAA2BC,EAASC,GAClC,MAAMC,EAAmBC,KAAKC,OAAOC,WAAWH,iBAAiBI,KAC3DC,EAAWJ,KAAKI,SAChBC,EAASC,EAAiBA,kBAACF,GAGjC,IAAIG,EACJ,OAYA,SAAeC,GAGb,OAFAD,EAASC,EACTX,EAAQY,MAAM,qBACPC,EAAOF,EACf,EAYD,SAASE,EAAOF,GACd,GAAIA,IAASD,EAEX,OADAV,EAAQc,QAAQH,GACTE,EAET,MAAME,EAAQf,EAAQgB,KAAK,qBAGrBC,EAAQR,EAAiBA,kBAACE,GAI1BO,GACHD,GAAoB,IAAVA,GAAeT,GAAWN,EAAiBiB,SAASR,GAC3DS,GACHZ,GAAsB,IAAXA,GAAgBS,GAAUf,EAAiBiB,SAASZ,GAGlE,OAFAQ,EAAMM,MAAQC,QAAmB,KAAXZ,EAAgBQ,EAAOA,IAASV,IAAWY,IACjEL,EAAMQ,OAASD,QAAmB,KAAXZ,EAAgBU,EAAQA,IAAUH,IAAUC,IAC5DjB,EAAGU,EACX,CACH,EAlOEa,WAQF,SAA6BC,EAAQC,GACnC,IAEIR,EAEAS,EAEAC,EAEAC,EAEAC,EAEAC,EAEAC,EAEAC,EAhBAC,GAAS,EAsBb,OAASA,EAAQT,EAAOU,QAEtB,GACuB,UAArBV,EAAOS,GAAO,IACY,sBAA1BT,EAAOS,GAAO,GAAGE,MACjBX,EAAOS,GAAO,GAAGX,OAKjB,IAHAL,EAAOgB,EAGAhB,KAEL,GACsB,SAApBO,EAAOP,GAAM,IACY,sBAAzBO,EAAOP,GAAM,GAAGkB,MAChBX,EAAOP,GAAM,GAAGG,OAEhBK,EAAQW,eAAeZ,EAAOP,GAAM,IAAIoB,WAAW,KACjDZ,EAAQW,eAAeZ,EAAOS,GAAO,IAAII,WAAW,GACtD,CAKA,IACGb,EAAOP,GAAM,GAAGK,QAAUE,EAAOS,GAAO,GAAGb,SAC3CI,EAAOS,GAAO,GAAGK,IAAIN,OAASR,EAAOS,GAAO,GAAGM,MAAMP,QAAU,MAE7DR,EAAOP,GAAM,GAAGqB,IAAIN,OACn
BR,EAAOP,GAAM,GAAGsB,MAAMP,OACtBR,EAAOS,GAAO,GAAGK,IAAIN,OACrBR,EAAOS,GAAO,GAAGM,MAAMP,QACzB,GAGF,SAIFF,EACEN,EAAOP,GAAM,GAAGqB,IAAIN,OAASR,EAAOP,GAAM,GAAGsB,MAAMP,OAAS,GAC5DR,EAAOS,GAAO,GAAGK,IAAIN,OAASR,EAAOS,GAAO,GAAGM,MAAMP,OAAS,EAC1D,EACA,EACN,MAAMO,EAAQC,OAAOC,OAAO,CAAE,EAAEjB,EAAOP,GAAM,GAAGqB,KAC1CA,EAAME,OAAOC,OAAO,CAAE,EAAEjB,EAAOS,GAAO,GAAGM,OAC/CG,EAAUH,GAAQT,GAClBY,EAAUJ,EAAKR,GACfF,EAAkB,CAChBO,KAAML,EAAM,EAAI,iBAAmB,mBACnCS,QACAD,IAAKE,OAAOC,OAAO,CAAE,EAAEjB,EAAOP,GAAM,GAAGqB,MAEzCT,EAAkB,CAChBM,KAAML,EAAM,EAAI,iBAAmB,mBACnCS,MAAOC,OAAOC,OAAO,CAAE,EAAEjB,EAAOS,GAAO,GAAGM,OAC1CD,OAEFX,EAAO,CACLQ,KAAML,EAAM,EAAI,aAAe,eAC/BS,MAAOC,OAAOC,OAAO,CAAE,EAAEjB,EAAOP,GAAM,GAAGqB,KACzCA,IAAKE,OAAOC,OAAO,CAAE,EAAEjB,EAAOS,GAAO,GAAGM,QAE1Cb,EAAQ,CACNS,KAAML,EAAM,EAAI,SAAW,WAC3BS,MAAOC,OAAOC,OAAO,CAAA,EAAIb,EAAgBW,OACzCD,IAAKE,OAAOC,OAAO,CAAA,EAAIZ,EAAgBS,MAEzCd,EAAOP,GAAM,GAAGqB,IAAME,OAAOC,OAAO,CAAA,EAAIb,EAAgBW,OACxDf,EAAOS,GAAO,GAAGM,MAAQC,OAAOC,OAAO,CAAA,EAAIZ,EAAgBS,KAC3DP,EAAa,GAGTP,EAAOP,GAAM,GAAGqB,IAAIN,OAASR,EAAOP,GAAM,GAAGsB,MAAMP,SACrDD,EAAaY,EAAIA,KAACZ,EAAY,CAC5B,CAAC,QAASP,EAAOP,GAAM,GAAIQ,GAC3B,CAAC,OAAQD,EAAOP,GAAM,GAAIQ,MAK9BM,EAAaY,EAAIA,KAACZ,EAAY,CAC5B,CAAC,QAASL,EAAOD,GACjB,CAAC,QAASG,EAAiBH,GAC3B,CAAC,OAAQG,EAAiBH,GAC1B,CAAC,QAASE,EAAMF,KAMlBM,EAAaY,EAAIA,KACfZ,EACAR,EAAUA,WACRE,EAAQtB,OAAOC,WAAWwC,WAAWvC,KACrCmB,EAAOqB,MAAM5B,EAAO,EAAGgB,GACvBR,IAKJM,EAAaY,EAAIA,KAACZ,EAAY,CAC5B,CAAC,OAAQJ,EAAMF,GACf,CAAC,QAASI,EAAiBJ,GAC3B,CAAC,OAAQI,EAAiBJ,GAC1B,CAAC,OAAQC,EAAOD,KAIdD,EAAOS,GAAO,GAAGK,IAAIN,OAASR,EAAOS,GAAO,GAAGM,MAAMP,QACvDA,EAAS,EACTD,EAAaY,EAAIA,KAACZ,EAAY,CAC5B,CAAC,QAASP,EAAOS,GAAO,GAAIR,GAC5B,CAAC,OAAQD,EAAOS,GAAO,GAAIR,MAG7BO,EAAS,EAEXc,EAAMA,OAACtB,EAAQP,EAAO,EAAGgB,EAAQhB,EAAO,EAAGc,GAC3CE,EAAQhB,EAAOc,EAAWG,OAASF,EAAS,EAC5C,KACD,CAMPC,GAAS,EACT,OAASA,EAAQT,EAAOU,QACQ,sBAA1BV,EAAOS,GAAO,GAAGE,OACnBX,EAAOS,GAAO,GAAGE,KAAO,QAG5B,OAAOX,CACT,GAyEA,SAASkB,EAAUK,EAAOf,GACxBe,EAAMC,QAAUhB,EAChBe,EAAMf,QAAUA,EAChBe,EAAME,cAAgBjB,CACxB"}
@@ -0,0 +1,2 @@
1
+ "use strict";Object.defineProperty(exports,"__esModule",{value:!0});var n=require("../../micromark-util-character/index.js");const e={name:"autolink",tokenize:function(e,t,i){let r=0;return function(n){return e.enter("autolink"),e.enter("autolinkMarker"),e.consume(n),e.exit("autolinkMarker"),e.enter("autolinkProtocol"),u};function u(t){return n.asciiAlpha(t)?(e.consume(t),o):l(t)}function o(e){return 43===e||45===e||46===e||n.asciiAlphanumeric(e)?(r=1,c(e)):l(e)}function c(t){return 58===t?(e.consume(t),r=0,a):(43===t||45===t||46===t||n.asciiAlphanumeric(t))&&r++<32?(e.consume(t),c):(r=0,l(t))}function a(r){return 62===r?(e.exit("autolinkProtocol"),e.enter("autolinkMarker"),e.consume(r),e.exit("autolinkMarker"),e.exit("autolink"),t):null===r||32===r||60===r||n.asciiControl(r)?i(r):(e.consume(r),a)}function l(t){return 64===t?(e.consume(t),s):n.asciiAtext(t)?(e.consume(t),l):i(t)}function s(e){return n.asciiAlphanumeric(e)?k(e):i(e)}function k(n){return 46===n?(e.consume(n),r=0,s):62===n?(e.exit("autolinkProtocol").type="autolinkEmail",e.enter("autolinkMarker"),e.consume(n),e.exit("autolinkMarker"),e.exit("autolink"),t):m(n)}function m(t){if((45===t||n.asciiAlphanumeric(t))&&r++<63){const n=45===t?m:k;return e.consume(t),n}return i(t)}}};exports.autolink=e;
2
+ //# sourceMappingURL=autolink.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"autolink.js","sources":["../../../../../../../node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/autolink.js"],"sourcesContent":["/**\n * @typedef {import('micromark-util-types').Construct} Construct\n * @typedef {import('micromark-util-types').State} State\n * @typedef {import('micromark-util-types').TokenizeContext} TokenizeContext\n * @typedef {import('micromark-util-types').Tokenizer} Tokenizer\n */\n\nimport {\n asciiAlpha,\n asciiAlphanumeric,\n asciiAtext,\n asciiControl\n} from 'micromark-util-character'\n/** @type {Construct} */\nexport const autolink = {\n name: 'autolink',\n tokenize: tokenizeAutolink\n}\n\n/**\n * @this {TokenizeContext}\n * @type {Tokenizer}\n */\nfunction tokenizeAutolink(effects, ok, nok) {\n let size = 0\n return start\n\n /**\n * Start of an autolink.\n *\n * ```markdown\n * > | a<https://example.com>b\n * ^\n * > | a<user@example.com>b\n * ^\n * ```\n *\n * @type {State}\n */\n function start(code) {\n effects.enter('autolink')\n effects.enter('autolinkMarker')\n effects.consume(code)\n effects.exit('autolinkMarker')\n effects.enter('autolinkProtocol')\n return open\n }\n\n /**\n * After `<`, at protocol or atext.\n *\n * ```markdown\n * > | a<https://example.com>b\n * ^\n * > | a<user@example.com>b\n * ^\n * ```\n *\n * @type {State}\n */\n function open(code) {\n if (asciiAlpha(code)) {\n effects.consume(code)\n return schemeOrEmailAtext\n }\n return emailAtext(code)\n }\n\n /**\n * At second byte of protocol or atext.\n *\n * ```markdown\n * > | a<https://example.com>b\n * ^\n * > | a<user@example.com>b\n * ^\n * ```\n *\n * @type {State}\n */\n function schemeOrEmailAtext(code) {\n // ASCII alphanumeric and `+`, `-`, and `.`.\n if (code === 43 || code === 45 || code === 46 || asciiAlphanumeric(code)) {\n // Count the previous alphabetical from `open` too.\n size = 1\n return schemeInsideOrEmailAtext(code)\n }\n return emailAtext(code)\n }\n\n /**\n * In ambiguous protocol or atext.\n 
*\n * ```markdown\n * > | a<https://example.com>b\n * ^\n * > | a<user@example.com>b\n * ^\n * ```\n *\n * @type {State}\n */\n function schemeInsideOrEmailAtext(code) {\n if (code === 58) {\n effects.consume(code)\n size = 0\n return urlInside\n }\n\n // ASCII alphanumeric and `+`, `-`, and `.`.\n if (\n (code === 43 || code === 45 || code === 46 || asciiAlphanumeric(code)) &&\n size++ < 32\n ) {\n effects.consume(code)\n return schemeInsideOrEmailAtext\n }\n size = 0\n return emailAtext(code)\n }\n\n /**\n * After protocol, in URL.\n *\n * ```markdown\n * > | a<https://example.com>b\n * ^\n * ```\n *\n * @type {State}\n */\n function urlInside(code) {\n if (code === 62) {\n effects.exit('autolinkProtocol')\n effects.enter('autolinkMarker')\n effects.consume(code)\n effects.exit('autolinkMarker')\n effects.exit('autolink')\n return ok\n }\n\n // ASCII control, space, or `<`.\n if (code === null || code === 32 || code === 60 || asciiControl(code)) {\n return nok(code)\n }\n effects.consume(code)\n return urlInside\n }\n\n /**\n * In email atext.\n *\n * ```markdown\n * > | a<user.name@example.com>b\n * ^\n * ```\n *\n * @type {State}\n */\n function emailAtext(code) {\n if (code === 64) {\n effects.consume(code)\n return emailAtSignOrDot\n }\n if (asciiAtext(code)) {\n effects.consume(code)\n return emailAtext\n }\n return nok(code)\n }\n\n /**\n * In label, after at-sign or dot.\n *\n * ```markdown\n * > | a<user.name@example.com>b\n * ^ ^\n * ```\n *\n * @type {State}\n */\n function emailAtSignOrDot(code) {\n return asciiAlphanumeric(code) ? 
emailLabel(code) : nok(code)\n }\n\n /**\n * In label, where `.` and `>` are allowed.\n *\n * ```markdown\n * > | a<user.name@example.com>b\n * ^\n * ```\n *\n * @type {State}\n */\n function emailLabel(code) {\n if (code === 46) {\n effects.consume(code)\n size = 0\n return emailAtSignOrDot\n }\n if (code === 62) {\n // Exit, then change the token type.\n effects.exit('autolinkProtocol').type = 'autolinkEmail'\n effects.enter('autolinkMarker')\n effects.consume(code)\n effects.exit('autolinkMarker')\n effects.exit('autolink')\n return ok\n }\n return emailValue(code)\n }\n\n /**\n * In label, where `.` and `>` are *not* allowed.\n *\n * Though, this is also used in `emailLabel` to parse other values.\n *\n * ```markdown\n * > | a<user.name@ex-ample.com>b\n * ^\n * ```\n *\n * @type {State}\n */\n function emailValue(code) {\n // ASCII alphanumeric or `-`.\n if ((code === 45 || asciiAlphanumeric(code)) && size++ < 63) {\n const next = code === 45 ? emailValue : emailLabel\n effects.consume(code)\n return next\n }\n return nok(code)\n 
}\n}\n"],"names":["autolink","name","tokenize","effects","ok","nok","size","code","enter","consume","exit","open","asciiAlpha","schemeOrEmailAtext","emailAtext","asciiAlphanumeric","schemeInsideOrEmailAtext","urlInside","asciiControl","emailAtSignOrDot","asciiAtext","emailLabel","type","emailValue","next"],"mappings":"6HAcY,MAACA,EAAW,CACtBC,KAAM,WACNC,SAOF,SAA0BC,EAASC,EAAIC,GACrC,IAAIC,EAAO,EACX,OAcA,SAAeC,GAMb,OALAJ,EAAQK,MAAM,YACdL,EAAQK,MAAM,kBACdL,EAAQM,QAAQF,GAChBJ,EAAQO,KAAK,kBACbP,EAAQK,MAAM,oBACPG,CACR,EAcD,SAASA,EAAKJ,GACZ,OAAIK,EAAAA,WAAWL,IACbJ,EAAQM,QAAQF,GACTM,GAEFC,EAAWP,EACnB,CAcD,SAASM,EAAmBN,GAE1B,OAAa,KAATA,GAAwB,KAATA,GAAwB,KAATA,GAAeQ,oBAAkBR,IAEjED,EAAO,EACAU,EAAyBT,IAE3BO,EAAWP,EACnB,CAcD,SAASS,EAAyBT,GAChC,OAAa,KAATA,GACFJ,EAAQM,QAAQF,GAChBD,EAAO,EACAW,IAKG,KAATV,GAAwB,KAATA,GAAwB,KAATA,GAAeQ,EAAiBA,kBAACR,KAChED,IAAS,IAETH,EAAQM,QAAQF,GACTS,IAETV,EAAO,EACAQ,EAAWP,GACnB,CAYD,SAASU,EAAUV,GACjB,OAAa,KAATA,GACFJ,EAAQO,KAAK,oBACbP,EAAQK,MAAM,kBACdL,EAAQM,QAAQF,GAChBJ,EAAQO,KAAK,kBACbP,EAAQO,KAAK,YACNN,GAII,OAATG,GAA0B,KAATA,GAAwB,KAATA,GAAeW,eAAaX,GACvDF,EAAIE,IAEbJ,EAAQM,QAAQF,GACTU,EACR,CAYD,SAASH,EAAWP,GAClB,OAAa,KAATA,GACFJ,EAAQM,QAAQF,GACTY,GAELC,EAAAA,WAAWb,IACbJ,EAAQM,QAAQF,GACTO,GAEFT,EAAIE,EACZ,CAYD,SAASY,EAAiBZ,GACxB,OAAOQ,EAAAA,kBAAkBR,GAAQc,EAAWd,GAAQF,EAAIE,EACzD,CAYD,SAASc,EAAWd,GAClB,OAAa,KAATA,GACFJ,EAAQM,QAAQF,GAChBD,EAAO,EACAa,GAEI,KAATZ,GAEFJ,EAAQO,KAAK,oBAAoBY,KAAO,gBACxCnB,EAAQK,MAAM,kBACdL,EAAQM,QAAQF,GAChBJ,EAAQO,KAAK,kBACbP,EAAQO,KAAK,YACNN,GAEFmB,EAAWhB,EACnB,CAcD,SAASgB,EAAWhB,GAElB,IAAc,KAATA,GAAeQ,EAAAA,kBAAkBR,KAAUD,IAAS,GAAI,CAC3D,MAAMkB,EAAgB,KAATjB,EAAcgB,EAAaF,EAExC,OADAlB,EAAQM,QAAQF,GACTiB,CACR,CACD,OAAOnB,EAAIE,EACZ,CACH"}
@@ -0,0 +1,2 @@
1
+ "use strict";Object.defineProperty(exports,"__esModule",{value:!0});var e=require("../../micromark-factory-space/index.js"),r=require("../../micromark-util-character/index.js");const n={tokenize:function(n,i,t){return function(i){return r.markdownSpace(i)?e.factorySpace(n,a,"linePrefix")(i):a(i)};function a(e){return null===e||r.markdownLineEnding(e)?i(e):t(e)}},partial:!0};exports.blankLine=n;
2
+ //# sourceMappingURL=blank-line.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"blank-line.js","sources":["../../../../../../../node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/blank-line.js"],"sourcesContent":["/**\n * @typedef {import('micromark-util-types').Construct} Construct\n * @typedef {import('micromark-util-types').State} State\n * @typedef {import('micromark-util-types').TokenizeContext} TokenizeContext\n * @typedef {import('micromark-util-types').Tokenizer} Tokenizer\n */\n\nimport {factorySpace} from 'micromark-factory-space'\nimport {markdownLineEnding, markdownSpace} from 'micromark-util-character'\n/** @type {Construct} */\nexport const blankLine = {\n tokenize: tokenizeBlankLine,\n partial: true\n}\n\n/**\n * @this {TokenizeContext}\n * @type {Tokenizer}\n */\nfunction tokenizeBlankLine(effects, ok, nok) {\n return start\n\n /**\n * Start of blank line.\n *\n * > 👉 **Note**: `␠` represents a space character.\n *\n * ```markdown\n * > | ␠␠␊\n * ^\n * > | ␊\n * ^\n * ```\n *\n * @type {State}\n */\n function start(code) {\n return markdownSpace(code)\n ? factorySpace(effects, after, 'linePrefix')(code)\n : after(code)\n }\n\n /**\n * At eof/eol, after optional whitespace.\n *\n * > 👉 **Note**: `␠` represents a space character.\n *\n * ```markdown\n * > | ␠␠␊\n * ^\n * > | ␊\n * ^\n * ```\n *\n * @type {State}\n */\n function after(code) {\n return code === null || markdownLineEnding(code) ? ok(code) : nok(code)\n }\n}\n"],"names":["blankLine","tokenize","effects","ok","nok","code","markdownSpace","factorySpace","after","markdownLineEnding","partial"],"mappings":"iLAUY,MAACA,EAAY,CACvBC,SAQF,SAA2BC,EAASC,EAAIC,GACtC,OAgBA,SAAeC,GACb,OAAOC,EAAAA,cAAcD,GACjBE,EAAAA,aAAaL,EAASM,EAAO,aAA7BD,CAA2CF,GAC3CG,EAAMH,EACX,EAgBD,SAASG,EAAMH,GACb,OAAgB,OAATA,GAAiBI,EAAAA,mBAAmBJ,GAAQF,EAAGE,GAAQD,EAAIC,EACnE,CACH,EA/CEK,SAAS"}
@@ -0,0 +1,2 @@
1
+ "use strict";Object.defineProperty(exports,"__esModule",{value:!0});var e=require("../../micromark-factory-space/index.js"),t=require("../../micromark-util-character/index.js");const o={name:"blockQuote",tokenize:function(e,o,n){const r=this;return function(t){if(62===t){const o=r.containerState;return o.open||(e.enter("blockQuote",{_container:!0}),o.open=!0),e.enter("blockQuotePrefix"),e.enter("blockQuoteMarker"),e.consume(t),e.exit("blockQuoteMarker"),c}return n(t)};function c(n){return t.markdownSpace(n)?(e.enter("blockQuotePrefixWhitespace"),e.consume(n),e.exit("blockQuotePrefixWhitespace"),e.exit("blockQuotePrefix"),o):(e.exit("blockQuotePrefix"),o(n))}},continuation:{tokenize:function(n,r,c){const i=this;return function(o){if(t.markdownSpace(o))return e.factorySpace(n,u,"linePrefix",i.parser.constructs.disable.null.includes("codeIndented")?void 0:4)(o);return u(o)};function u(e){return n.attempt(o,r,c)(e)}}},exit:function(e){e.exit("blockQuote")}};exports.blockQuote=o;
2
+ //# sourceMappingURL=block-quote.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"block-quote.js","sources":["../../../../../../../node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/block-quote.js"],"sourcesContent":["/**\n * @typedef {import('micromark-util-types').Construct} Construct\n * @typedef {import('micromark-util-types').Exiter} Exiter\n * @typedef {import('micromark-util-types').State} State\n * @typedef {import('micromark-util-types').TokenizeContext} TokenizeContext\n * @typedef {import('micromark-util-types').Tokenizer} Tokenizer\n */\n\nimport {factorySpace} from 'micromark-factory-space'\nimport {markdownSpace} from 'micromark-util-character'\n/** @type {Construct} */\nexport const blockQuote = {\n name: 'blockQuote',\n tokenize: tokenizeBlockQuoteStart,\n continuation: {\n tokenize: tokenizeBlockQuoteContinuation\n },\n exit\n}\n\n/**\n * @this {TokenizeContext}\n * @type {Tokenizer}\n */\nfunction tokenizeBlockQuoteStart(effects, ok, nok) {\n const self = this\n return start\n\n /**\n * Start of block quote.\n *\n * ```markdown\n * > | > a\n * ^\n * ```\n *\n * @type {State}\n */\n function start(code) {\n if (code === 62) {\n const state = self.containerState\n if (!state.open) {\n effects.enter('blockQuote', {\n _container: true\n })\n state.open = true\n }\n effects.enter('blockQuotePrefix')\n effects.enter('blockQuoteMarker')\n effects.consume(code)\n effects.exit('blockQuoteMarker')\n return after\n }\n return nok(code)\n }\n\n /**\n * After `>`, before optional whitespace.\n *\n * ```markdown\n * > | > a\n * ^\n * ```\n *\n * @type {State}\n */\n function after(code) {\n if (markdownSpace(code)) {\n effects.enter('blockQuotePrefixWhitespace')\n effects.consume(code)\n effects.exit('blockQuotePrefixWhitespace')\n effects.exit('blockQuotePrefix')\n return ok\n }\n effects.exit('blockQuotePrefix')\n return ok(code)\n }\n}\n\n/**\n * Start of block quote continuation.\n *\n * ```markdown\n * | > a\n * > | > b\n * ^\n * ```\n *\n * @this {TokenizeContext}\n * @type {Tokenizer}\n 
*/\nfunction tokenizeBlockQuoteContinuation(effects, ok, nok) {\n const self = this\n return contStart\n\n /**\n * Start of block quote continuation.\n *\n * Also used to parse the first block quote opening.\n *\n * ```markdown\n * | > a\n * > | > b\n * ^\n * ```\n *\n * @type {State}\n */\n function contStart(code) {\n if (markdownSpace(code)) {\n // Always populated by defaults.\n\n return factorySpace(\n effects,\n contBefore,\n 'linePrefix',\n self.parser.constructs.disable.null.includes('codeIndented')\n ? undefined\n : 4\n )(code)\n }\n return contBefore(code)\n }\n\n /**\n * At `>`, after optional whitespace.\n *\n * Also used to parse the first block quote opening.\n *\n * ```markdown\n * | > a\n * > | > b\n * ^\n * ```\n *\n * @type {State}\n */\n function contBefore(code) {\n return effects.attempt(blockQuote, ok, nok)(code)\n }\n}\n\n/** @type {Exiter} */\nfunction exit(effects) {\n effects.exit('blockQuote')\n}\n"],"names":["blockQuote","name","tokenize","effects","ok","nok","self","this","code","state","containerState","open","enter","_container","consume","exit","after","markdownSpace","continuation","factorySpace","contBefore","parser","constructs","disable","null","includes","undefined","attempt"],"mappings":"iLAWY,MAACA,EAAa,CACxBC,KAAM,aACNC,SAWF,SAAiCC,EAASC,EAAIC,GAC5C,MAAMC,EAAOC,KACb,OAYA,SAAeC,GACb,GAAa,KAATA,EAAa,CACf,MAAMC,EAAQH,EAAKI,eAWnB,OAVKD,EAAME,OACTR,EAAQS,MAAM,aAAc,CAC1BC,YAAY,IAEdJ,EAAME,MAAO,GAEfR,EAAQS,MAAM,oBACdT,EAAQS,MAAM,oBACdT,EAAQW,QAAQN,GAChBL,EAAQY,KAAK,oBACNC,CACR,CACD,OAAOX,EAAIG,EACZ,EAYD,SAASQ,EAAMR,GACb,OAAIS,EAAAA,cAAcT,IAChBL,EAAQS,MAAM,8BACdT,EAAQW,QAAQN,GAChBL,EAAQY,KAAK,8BACbZ,EAAQY,KAAK,oBACNX,IAETD,EAAQY,KAAK,oBACNX,EAAGI,GACX,CACH,EA/DEU,aAAc,CACZhB,SA4EJ,SAAwCC,EAASC,EAAIC,GACnD,MAAMC,EAAOC,KACb,OAeA,SAAmBC,GACjB,GAAIS,EAAAA,cAAcT,GAGhB,OAAOW,EAAYA,aACjBhB,EACAiB,EACA,aACAd,EAAKe,OAAOC,WAAWC,QAAQC,KAAKC,SAAS,qBACzCC,EACA,EANCP,CAOLX,GAEJ,OAAOY,EAAWZ,EACnB,EAeD,SAASY,EAAWZ,GAClB,OAAOL,EAAQwB,QAAQ3B,EAAYI,EAAI
C,EAAhCF,CAAqCK,EAC7C,CACH,GA3HEO,KA8HF,SAAcZ,GACZA,EAAQY,KAAK,aACf"}
@@ -0,0 +1,2 @@
1
+ "use strict";Object.defineProperty(exports,"__esModule",{value:!0});var e=require("../../micromark-util-character/index.js");const r={name:"characterEscape",tokenize:function(r,c,a){return function(e){return r.enter("characterEscape"),r.enter("escapeMarker"),r.consume(e),r.exit("escapeMarker"),t};function t(t){return e.asciiPunctuation(t)?(r.enter("characterEscapeValue"),r.consume(t),r.exit("characterEscapeValue"),r.exit("characterEscape"),c):a(t)}}};exports.characterEscape=r;
2
+ //# sourceMappingURL=character-escape.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"character-escape.js","sources":["../../../../../../../node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/character-escape.js"],"sourcesContent":["/**\n * @typedef {import('micromark-util-types').Construct} Construct\n * @typedef {import('micromark-util-types').State} State\n * @typedef {import('micromark-util-types').TokenizeContext} TokenizeContext\n * @typedef {import('micromark-util-types').Tokenizer} Tokenizer\n */\n\nimport {asciiPunctuation} from 'micromark-util-character'\n/** @type {Construct} */\nexport const characterEscape = {\n name: 'characterEscape',\n tokenize: tokenizeCharacterEscape\n}\n\n/**\n * @this {TokenizeContext}\n * @type {Tokenizer}\n */\nfunction tokenizeCharacterEscape(effects, ok, nok) {\n return start\n\n /**\n * Start of character escape.\n *\n * ```markdown\n * > | a\\*b\n * ^\n * ```\n *\n * @type {State}\n */\n function start(code) {\n effects.enter('characterEscape')\n effects.enter('escapeMarker')\n effects.consume(code)\n effects.exit('escapeMarker')\n return inside\n }\n\n /**\n * After `\\`, at punctuation.\n *\n * ```markdown\n * > | a\\*b\n * ^\n * ```\n *\n * @type {State}\n */\n function inside(code) {\n // ASCII punctuation.\n if (asciiPunctuation(code)) {\n effects.enter('characterEscapeValue')\n effects.consume(code)\n effects.exit('characterEscapeValue')\n effects.exit('characterEscape')\n return ok\n }\n return nok(code)\n }\n}\n"],"names":["characterEscape","name","tokenize","effects","ok","nok","code","enter","consume","exit","inside","asciiPunctuation"],"mappings":"6HASY,MAACA,EAAkB,CAC7BC,KAAM,kBACNC,SAOF,SAAiCC,EAASC,EAAIC,GAC5C,OAYA,SAAeC,GAKb,OAJAH,EAAQI,MAAM,mBACdJ,EAAQI,MAAM,gBACdJ,EAAQK,QAAQF,GAChBH,EAAQM,KAAK,gBACNC,CACR,EAYD,SAASA,EAAOJ,GAEd,OAAIK,EAAAA,iBAAiBL,IACnBH,EAAQI,MAAM,wBACdJ,EAAQK,QAAQF,GAChBH,EAAQM,KAAK,wBACbN,EAAQM,KAAK,mBACNL,GAEFC,EAAIC,EACZ,CACH"}
@@ -0,0 +1,2 @@
1
+ "use strict";Object.defineProperty(exports,"__esModule",{value:!0});var e=require("../../../../decode-named-character-reference/index.dom.js"),r=require("../../micromark-util-character/index.js");const c={name:"characterReference",tokenize:function(c,a,t){const n=this;let i,u,f=0;return function(e){return c.enter("characterReference"),c.enter("characterReferenceMarker"),c.consume(e),c.exit("characterReferenceMarker"),h};function h(e){return 35===e?(c.enter("characterReferenceMarkerNumeric"),c.consume(e),c.exit("characterReferenceMarkerNumeric"),o):(c.enter("characterReferenceValue"),i=31,u=r.asciiAlphanumeric,s(e))}function o(e){return 88===e||120===e?(c.enter("characterReferenceMarkerHexadecimal"),c.consume(e),c.exit("characterReferenceMarkerHexadecimal"),c.enter("characterReferenceValue"),i=6,u=r.asciiHexDigit,s):(c.enter("characterReferenceValue"),i=7,u=r.asciiDigit,s(e))}function s(h){if(59===h&&f){const i=c.exit("characterReferenceValue");return u!==r.asciiAlphanumeric||e.decodeNamedCharacterReference(n.sliceSerialize(i))?(c.enter("characterReferenceMarker"),c.consume(h),c.exit("characterReferenceMarker"),c.exit("characterReference"),a):t(h)}return u(h)&&f++<i?(c.consume(h),s):t(h)}}};exports.characterReference=c;
2
+ //# sourceMappingURL=character-reference.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"character-reference.js","sources":["../../../../../../../node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/character-reference.js"],"sourcesContent":["/**\n * @typedef {import('micromark-util-types').Code} Code\n * @typedef {import('micromark-util-types').Construct} Construct\n * @typedef {import('micromark-util-types').State} State\n * @typedef {import('micromark-util-types').TokenizeContext} TokenizeContext\n * @typedef {import('micromark-util-types').Tokenizer} Tokenizer\n */\n\nimport {decodeNamedCharacterReference} from 'decode-named-character-reference'\nimport {\n asciiAlphanumeric,\n asciiDigit,\n asciiHexDigit\n} from 'micromark-util-character'\n/** @type {Construct} */\nexport const characterReference = {\n name: 'characterReference',\n tokenize: tokenizeCharacterReference\n}\n\n/**\n * @this {TokenizeContext}\n * @type {Tokenizer}\n */\nfunction tokenizeCharacterReference(effects, ok, nok) {\n const self = this\n let size = 0\n /** @type {number} */\n let max\n /** @type {(code: Code) => boolean} */\n let test\n return start\n\n /**\n * Start of character reference.\n *\n * ```markdown\n * > | a&amp;b\n * ^\n * > | a&#123;b\n * ^\n * > | a&#x9;b\n * ^\n * ```\n *\n * @type {State}\n */\n function start(code) {\n effects.enter('characterReference')\n effects.enter('characterReferenceMarker')\n effects.consume(code)\n effects.exit('characterReferenceMarker')\n return open\n }\n\n /**\n * After `&`, at `#` for numeric references or alphanumeric for named\n * references.\n *\n * ```markdown\n * > | a&amp;b\n * ^\n * > | a&#123;b\n * ^\n * > | a&#x9;b\n * ^\n * ```\n *\n * @type {State}\n */\n function open(code) {\n if (code === 35) {\n effects.enter('characterReferenceMarkerNumeric')\n effects.consume(code)\n effects.exit('characterReferenceMarkerNumeric')\n return numeric\n }\n effects.enter('characterReferenceValue')\n max = 31\n test = asciiAlphanumeric\n return value(code)\n }\n\n /**\n * After `#`, at `x` for 
hexadecimals or digit for decimals.\n *\n * ```markdown\n * > | a&#123;b\n * ^\n * > | a&#x9;b\n * ^\n * ```\n *\n * @type {State}\n */\n function numeric(code) {\n if (code === 88 || code === 120) {\n effects.enter('characterReferenceMarkerHexadecimal')\n effects.consume(code)\n effects.exit('characterReferenceMarkerHexadecimal')\n effects.enter('characterReferenceValue')\n max = 6\n test = asciiHexDigit\n return value\n }\n effects.enter('characterReferenceValue')\n max = 7\n test = asciiDigit\n return value(code)\n }\n\n /**\n * After markers (`&#x`, `&#`, or `&`), in value, before `;`.\n *\n * The character reference kind defines what and how many characters are\n * allowed.\n *\n * ```markdown\n * > | a&amp;b\n * ^^^\n * > | a&#123;b\n * ^^^\n * > | a&#x9;b\n * ^\n * ```\n *\n * @type {State}\n */\n function value(code) {\n if (code === 59 && size) {\n const token = effects.exit('characterReferenceValue')\n if (\n test === asciiAlphanumeric &&\n !decodeNamedCharacterReference(self.sliceSerialize(token))\n ) {\n return nok(code)\n }\n\n // To do: `markdown-rs` uses a different name:\n // `CharacterReferenceMarkerSemi`.\n effects.enter('characterReferenceMarker')\n effects.consume(code)\n effects.exit('characterReferenceMarker')\n effects.exit('characterReference')\n return ok\n }\n if (test(code) && size++ < max) {\n effects.consume(code)\n return value\n }\n return nok(code)\n 
}\n}\n"],"names":["characterReference","name","tokenize","effects","ok","nok","self","this","max","test","size","code","enter","consume","exit","open","numeric","asciiAlphanumeric","value","asciiHexDigit","asciiDigit","token","decodeNamedCharacterReference","sliceSerialize"],"mappings":"oMAeY,MAACA,EAAqB,CAChCC,KAAM,qBACNC,SAOF,SAAoCC,EAASC,EAAIC,GAC/C,MAAMC,EAAOC,KACb,IAEIC,EAEAC,EAJAC,EAAO,EAKX,OAgBA,SAAeC,GAKb,OAJAR,EAAQS,MAAM,sBACdT,EAAQS,MAAM,4BACdT,EAAQU,QAAQF,GAChBR,EAAQW,KAAK,4BACNC,CACR,EAiBD,SAASA,EAAKJ,GACZ,OAAa,KAATA,GACFR,EAAQS,MAAM,mCACdT,EAAQU,QAAQF,GAChBR,EAAQW,KAAK,mCACNE,IAETb,EAAQS,MAAM,2BACdJ,EAAM,GACNC,EAAOQ,EAAiBA,kBACjBC,EAAMP,GACd,CAcD,SAASK,EAAQL,GACf,OAAa,KAATA,GAAwB,MAATA,GACjBR,EAAQS,MAAM,uCACdT,EAAQU,QAAQF,GAChBR,EAAQW,KAAK,uCACbX,EAAQS,MAAM,2BACdJ,EAAM,EACNC,EAAOU,EAAaA,cACbD,IAETf,EAAQS,MAAM,2BACdJ,EAAM,EACNC,EAAOW,EAAUA,WACVF,EAAMP,GACd,CAmBD,SAASO,EAAMP,GACb,GAAa,KAATA,GAAeD,EAAM,CACvB,MAAMW,EAAQlB,EAAQW,KAAK,2BAC3B,OACEL,IAASQ,EAAiBA,mBACzBK,gCAA8BhB,EAAKiB,eAAeF,KAOrDlB,EAAQS,MAAM,4BACdT,EAAQU,QAAQF,GAChBR,EAAQW,KAAK,4BACbX,EAAQW,KAAK,sBACNV,GATEC,EAAIM,EAUd,CACD,OAAIF,EAAKE,IAASD,IAASF,GACzBL,EAAQU,QAAQF,GACTO,GAEFb,EAAIM,EACZ,CACH"}
@@ -0,0 +1,2 @@
1
+ "use strict";Object.defineProperty(exports,"__esModule",{value:!0});var e=require("../../micromark-factory-space/index.js"),n=require("../../micromark-util-character/index.js");const c={tokenize:function(e,n,c){const t=this;return function(n){if(null===n)return c(n);return e.enter("lineEnding"),e.consume(n),e.exit("lineEnding"),r};function r(e){return t.parser.lazy[t.now().line]?c(e):n(e)}},partial:!0},t={name:"codeFenced",tokenize:function(t,r,i){const o=this,u={tokenize:function(c,t,r){let i=0;return u;function u(e){return c.enter("lineEnding"),c.consume(e),c.exit("lineEnding"),a}function a(t){return c.enter("codeFencedFence"),n.markdownSpace(t)?e.factorySpace(c,f,"linePrefix",o.parser.constructs.disable.null.includes("codeIndented")?void 0:4)(t):f(t)}function f(e){return e===d?(c.enter("codeFencedFenceSequence"),s(e)):r(e)}function s(t){return t===d?(i++,c.consume(t),s):i>=l?(c.exit("codeFencedFenceSequence"),n.markdownSpace(t)?e.factorySpace(c,F,"whitespace")(t):F(t)):r(t)}function F(e){return null===e||n.markdownLineEnding(e)?(c.exit("codeFencedFence"),t(e)):r(e)}},partial:!0};let d,a=0,l=0;return function(e){return function(e){const n=o.events[o.events.length-1];return a=n&&"linePrefix"===n[1].type?n[2].sliceSerialize(n[1],!0).length:0,d=e,t.enter("codeFenced"),t.enter("codeFencedFence"),t.enter("codeFencedFenceSequence"),f(e)}(e)};function f(c){return c===d?(l++,t.consume(c),f):l<3?i(c):(t.exit("codeFencedFenceSequence"),n.markdownSpace(c)?e.factorySpace(t,s,"whitespace")(c):s(c))}function s(e){return null===e||n.markdownLineEnding(e)?(t.exit("codeFencedFence"),o.interrupt?r(e):t.check(c,k,h)(e)):(t.enter("codeFencedFenceInfo"),t.enter("chunkString",{contentType:"string"}),F(e))}function F(c){return null===c||n.markdownLineEnding(c)?(t.exit("chunkString"),t.exit("codeFencedFenceInfo"),s(c)):n.markdownSpace(c)?(t.exit("chunkString"),t.exit("codeFencedFenceInfo"),e.factorySpace(t,m,"whitespace")(c)):96===c&&c===d?i(c):(t.consume(c),F)}function m(e){return 
null===e||n.markdownLineEnding(e)?s(e):(t.enter("codeFencedFenceMeta"),t.enter("chunkString",{contentType:"string"}),p(e))}function p(e){return null===e||n.markdownLineEnding(e)?(t.exit("chunkString"),t.exit("codeFencedFenceMeta"),s(e)):96===e&&e===d?i(e):(t.consume(e),p)}function k(e){return t.attempt(u,h,g)(e)}function g(e){return t.enter("lineEnding"),t.consume(e),t.exit("lineEnding"),x}function x(c){return a>0&&n.markdownSpace(c)?e.factorySpace(t,S,"linePrefix",a+1)(c):S(c)}function S(e){return null===e||n.markdownLineEnding(e)?t.check(c,k,h)(e):(t.enter("codeFlowValue"),w(e))}function w(e){return null===e||n.markdownLineEnding(e)?(t.exit("codeFlowValue"),S(e)):(t.consume(e),w)}function h(e){return t.exit("codeFenced"),r(e)}},concrete:!0};exports.codeFenced=t;
2
+ //# sourceMappingURL=code-fenced.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"code-fenced.js","sources":["../../../../../../../node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/code-fenced.js"],"sourcesContent":["/**\n * @typedef {import('micromark-util-types').Code} Code\n * @typedef {import('micromark-util-types').Construct} Construct\n * @typedef {import('micromark-util-types').State} State\n * @typedef {import('micromark-util-types').TokenizeContext} TokenizeContext\n * @typedef {import('micromark-util-types').Tokenizer} Tokenizer\n */\n\nimport {factorySpace} from 'micromark-factory-space'\nimport {markdownLineEnding, markdownSpace} from 'micromark-util-character'\n/** @type {Construct} */\nconst nonLazyContinuation = {\n tokenize: tokenizeNonLazyContinuation,\n partial: true\n}\n\n/** @type {Construct} */\nexport const codeFenced = {\n name: 'codeFenced',\n tokenize: tokenizeCodeFenced,\n concrete: true\n}\n\n/**\n * @this {TokenizeContext}\n * @type {Tokenizer}\n */\nfunction tokenizeCodeFenced(effects, ok, nok) {\n const self = this\n /** @type {Construct} */\n const closeStart = {\n tokenize: tokenizeCloseStart,\n partial: true\n }\n let initialPrefix = 0\n let sizeOpen = 0\n /** @type {NonNullable<Code>} */\n let marker\n return start\n\n /**\n * Start of code.\n *\n * ```markdown\n * > | ~~~js\n * ^\n * | alert(1)\n * | ~~~\n * ```\n *\n * @type {State}\n */\n function start(code) {\n // To do: parse whitespace like `markdown-rs`.\n return beforeSequenceOpen(code)\n }\n\n /**\n * In opening fence, after prefix, at sequence.\n *\n * ```markdown\n * > | ~~~js\n * ^\n * | alert(1)\n * | ~~~\n * ```\n *\n * @type {State}\n */\n function beforeSequenceOpen(code) {\n const tail = self.events[self.events.length - 1]\n initialPrefix =\n tail && tail[1].type === 'linePrefix'\n ? 
tail[2].sliceSerialize(tail[1], true).length\n : 0\n marker = code\n effects.enter('codeFenced')\n effects.enter('codeFencedFence')\n effects.enter('codeFencedFenceSequence')\n return sequenceOpen(code)\n }\n\n /**\n * In opening fence sequence.\n *\n * ```markdown\n * > | ~~~js\n * ^\n * | alert(1)\n * | ~~~\n * ```\n *\n * @type {State}\n */\n function sequenceOpen(code) {\n if (code === marker) {\n sizeOpen++\n effects.consume(code)\n return sequenceOpen\n }\n if (sizeOpen < 3) {\n return nok(code)\n }\n effects.exit('codeFencedFenceSequence')\n return markdownSpace(code)\n ? factorySpace(effects, infoBefore, 'whitespace')(code)\n : infoBefore(code)\n }\n\n /**\n * In opening fence, after the sequence (and optional whitespace), before info.\n *\n * ```markdown\n * > | ~~~js\n * ^\n * | alert(1)\n * | ~~~\n * ```\n *\n * @type {State}\n */\n function infoBefore(code) {\n if (code === null || markdownLineEnding(code)) {\n effects.exit('codeFencedFence')\n return self.interrupt\n ? ok(code)\n : effects.check(nonLazyContinuation, atNonLazyBreak, after)(code)\n }\n effects.enter('codeFencedFenceInfo')\n effects.enter('chunkString', {\n contentType: 'string'\n })\n return info(code)\n }\n\n /**\n * In info.\n *\n * ```markdown\n * > | ~~~js\n * ^\n * | alert(1)\n * | ~~~\n * ```\n *\n * @type {State}\n */\n function info(code) {\n if (code === null || markdownLineEnding(code)) {\n effects.exit('chunkString')\n effects.exit('codeFencedFenceInfo')\n return infoBefore(code)\n }\n if (markdownSpace(code)) {\n effects.exit('chunkString')\n effects.exit('codeFencedFenceInfo')\n return factorySpace(effects, metaBefore, 'whitespace')(code)\n }\n if (code === 96 && code === marker) {\n return nok(code)\n }\n effects.consume(code)\n return info\n }\n\n /**\n * In opening fence, after info and whitespace, before meta.\n *\n * ```markdown\n * > | ~~~js eval\n * ^\n * | alert(1)\n * | ~~~\n * ```\n *\n * @type {State}\n */\n function metaBefore(code) {\n if (code === null || 
markdownLineEnding(code)) {\n return infoBefore(code)\n }\n effects.enter('codeFencedFenceMeta')\n effects.enter('chunkString', {\n contentType: 'string'\n })\n return meta(code)\n }\n\n /**\n * In meta.\n *\n * ```markdown\n * > | ~~~js eval\n * ^\n * | alert(1)\n * | ~~~\n * ```\n *\n * @type {State}\n */\n function meta(code) {\n if (code === null || markdownLineEnding(code)) {\n effects.exit('chunkString')\n effects.exit('codeFencedFenceMeta')\n return infoBefore(code)\n }\n if (code === 96 && code === marker) {\n return nok(code)\n }\n effects.consume(code)\n return meta\n }\n\n /**\n * At eol/eof in code, before a non-lazy closing fence or content.\n *\n * ```markdown\n * > | ~~~js\n * ^\n * > | alert(1)\n * ^\n * | ~~~\n * ```\n *\n * @type {State}\n */\n function atNonLazyBreak(code) {\n return effects.attempt(closeStart, after, contentBefore)(code)\n }\n\n /**\n * Before code content, not a closing fence, at eol.\n *\n * ```markdown\n * | ~~~js\n * > | alert(1)\n * ^\n * | ~~~\n * ```\n *\n * @type {State}\n */\n function contentBefore(code) {\n effects.enter('lineEnding')\n effects.consume(code)\n effects.exit('lineEnding')\n return contentStart\n }\n\n /**\n * Before code content, not a closing fence.\n *\n * ```markdown\n * | ~~~js\n * > | alert(1)\n * ^\n * | ~~~\n * ```\n *\n * @type {State}\n */\n function contentStart(code) {\n return initialPrefix > 0 && markdownSpace(code)\n ? 
factorySpace(\n effects,\n beforeContentChunk,\n 'linePrefix',\n initialPrefix + 1\n )(code)\n : beforeContentChunk(code)\n }\n\n /**\n * Before code content, after optional prefix.\n *\n * ```markdown\n * | ~~~js\n * > | alert(1)\n * ^\n * | ~~~\n * ```\n *\n * @type {State}\n */\n function beforeContentChunk(code) {\n if (code === null || markdownLineEnding(code)) {\n return effects.check(nonLazyContinuation, atNonLazyBreak, after)(code)\n }\n effects.enter('codeFlowValue')\n return contentChunk(code)\n }\n\n /**\n * In code content.\n *\n * ```markdown\n * | ~~~js\n * > | alert(1)\n * ^^^^^^^^\n * | ~~~\n * ```\n *\n * @type {State}\n */\n function contentChunk(code) {\n if (code === null || markdownLineEnding(code)) {\n effects.exit('codeFlowValue')\n return beforeContentChunk(code)\n }\n effects.consume(code)\n return contentChunk\n }\n\n /**\n * After code.\n *\n * ```markdown\n * | ~~~js\n * | alert(1)\n * > | ~~~\n * ^\n * ```\n *\n * @type {State}\n */\n function after(code) {\n effects.exit('codeFenced')\n return ok(code)\n }\n\n /**\n * @this {TokenizeContext}\n * @type {Tokenizer}\n */\n function tokenizeCloseStart(effects, ok, nok) {\n let size = 0\n return startBefore\n\n /**\n *\n *\n * @type {State}\n */\n function startBefore(code) {\n effects.enter('lineEnding')\n effects.consume(code)\n effects.exit('lineEnding')\n return start\n }\n\n /**\n * Before closing fence, at optional whitespace.\n *\n * ```markdown\n * | ~~~js\n * | alert(1)\n * > | ~~~\n * ^\n * ```\n *\n * @type {State}\n */\n function start(code) {\n // Always populated by defaults.\n\n // To do: `enter` here or in next state?\n effects.enter('codeFencedFence')\n return markdownSpace(code)\n ? factorySpace(\n effects,\n beforeSequenceClose,\n 'linePrefix',\n self.parser.constructs.disable.null.includes('codeIndented')\n ? 
undefined\n : 4\n )(code)\n : beforeSequenceClose(code)\n }\n\n /**\n * In closing fence, after optional whitespace, at sequence.\n *\n * ```markdown\n * | ~~~js\n * | alert(1)\n * > | ~~~\n * ^\n * ```\n *\n * @type {State}\n */\n function beforeSequenceClose(code) {\n if (code === marker) {\n effects.enter('codeFencedFenceSequence')\n return sequenceClose(code)\n }\n return nok(code)\n }\n\n /**\n * In closing fence sequence.\n *\n * ```markdown\n * | ~~~js\n * | alert(1)\n * > | ~~~\n * ^\n * ```\n *\n * @type {State}\n */\n function sequenceClose(code) {\n if (code === marker) {\n size++\n effects.consume(code)\n return sequenceClose\n }\n if (size >= sizeOpen) {\n effects.exit('codeFencedFenceSequence')\n return markdownSpace(code)\n ? factorySpace(effects, sequenceCloseAfter, 'whitespace')(code)\n : sequenceCloseAfter(code)\n }\n return nok(code)\n }\n\n /**\n * After closing fence sequence, after optional whitespace.\n *\n * ```markdown\n * | ~~~js\n * | alert(1)\n * > | ~~~\n * ^\n * ```\n *\n * @type {State}\n */\n function sequenceCloseAfter(code) {\n if (code === null || markdownLineEnding(code)) {\n effects.exit('codeFencedFence')\n return ok(code)\n }\n return nok(code)\n }\n }\n}\n\n/**\n * @this {TokenizeContext}\n * @type {Tokenizer}\n */\nfunction tokenizeNonLazyContinuation(effects, ok, nok) {\n const self = this\n return start\n\n /**\n *\n *\n * @type {State}\n */\n function start(code) {\n if (code === null) {\n return nok(code)\n }\n effects.enter('lineEnding')\n effects.consume(code)\n effects.exit('lineEnding')\n return lineStart\n }\n\n /**\n *\n *\n * @type {State}\n */\n function lineStart(code) {\n return self.parser.lazy[self.now().line] ? 
nok(code) : ok(code)\n }\n}\n"],"names":["nonLazyContinuation","tokenize","effects","ok","nok","self","this","code","enter","consume","exit","lineStart","parser","lazy","now","line","partial","codeFenced","name","closeStart","size","startBefore","start","markdownSpace","factorySpace","beforeSequenceClose","constructs","disable","null","includes","undefined","marker","sequenceClose","sizeOpen","sequenceCloseAfter","markdownLineEnding","initialPrefix","tail","events","length","type","sliceSerialize","sequenceOpen","beforeSequenceOpen","infoBefore","interrupt","check","atNonLazyBreak","after","contentType","info","metaBefore","meta","attempt","contentBefore","contentStart","beforeContentChunk","contentChunk","concrete"],"mappings":"iLAWA,MAAMA,EAAsB,CAC1BC,SAwbF,SAAqCC,EAASC,EAAIC,GAChD,MAAMC,EAAOC,KACb,OAOA,SAAeC,GACb,GAAa,OAATA,EACF,OAAOH,EAAIG,GAKb,OAHAL,EAAQM,MAAM,cACdN,EAAQO,QAAQF,GAChBL,EAAQQ,KAAK,cACNC,CACR,EAOD,SAASA,EAAUJ,GACjB,OAAOF,EAAKO,OAAOC,KAAKR,EAAKS,MAAMC,MAAQX,EAAIG,GAAQJ,EAAGI,EAC3D,CACH,EAldES,SAAS,GAIEC,EAAa,CACxBC,KAAM,aACNjB,SAQF,SAA4BC,EAASC,EAAIC,GACvC,MAAMC,EAAOC,KAEPa,EAAa,CACjBlB,SA+SF,SAA4BC,EAASC,EAAIC,GACvC,IAAIgB,EAAO,EACX,OAAOC,EAOP,SAASA,EAAYd,GAInB,OAHAL,EAAQM,MAAM,cACdN,EAAQO,QAAQF,GAChBL,EAAQQ,KAAK,cACNY,CACR,CAcD,SAASA,EAAMf,GAKb,OADAL,EAAQM,MAAM,mBACPe,EAAAA,cAAchB,GACjBiB,EAAYA,aACVtB,EACAuB,EACA,aACApB,EAAKO,OAAOc,WAAWC,QAAQC,KAAKC,SAAS,qBACzCC,EACA,EANNN,CAOEjB,GACFkB,EAAoBlB,EACzB,CAcD,SAASkB,EAAoBlB,GAC3B,OAAIA,IAASwB,GACX7B,EAAQM,MAAM,2BACPwB,EAAczB,IAEhBH,EAAIG,EACZ,CAcD,SAASyB,EAAczB,GACrB,OAAIA,IAASwB,GACXX,IACAlB,EAAQO,QAAQF,GACTyB,GAELZ,GAAQa,GACV/B,EAAQQ,KAAK,2BACNa,EAAAA,cAAchB,GACjBiB,EAAAA,aAAatB,EAASgC,EAAoB,aAA1CV,CAAwDjB,GACxD2B,EAAmB3B,IAElBH,EAAIG,EACZ,CAcD,SAAS2B,EAAmB3B,GAC1B,OAAa,OAATA,GAAiB4B,EAAkBA,mBAAC5B,IACtCL,EAAQQ,KAAK,mBACNP,EAAGI,IAELH,EAAIG,EACZ,CACF,EA7ZCS,SAAS,GAEX,IAGIe,EAHAK,EAAgB,EAChBH,EAAW,EAGf,OAcA,SAAe1B,GAEb,OAeF,SAA4BA,GAC1B,MAAM8B,EAAOhC,EAAKiC,OAAOjC,EAAKiC,OAAOC,OAAS,GAS9C,OARAH,EACEC
,GAAyB,eAAjBA,EAAK,GAAGG,KACZH,EAAK,GAAGI,eAAeJ,EAAK,IAAI,GAAME,OACtC,EACNR,EAASxB,EACTL,EAAQM,MAAM,cACdN,EAAQM,MAAM,mBACdN,EAAQM,MAAM,2BACPkC,EAAanC,EACrB,CA1BQoC,CAAmBpC,EAC3B,EAuCD,SAASmC,EAAanC,GACpB,OAAIA,IAASwB,GACXE,IACA/B,EAAQO,QAAQF,GACTmC,GAELT,EAAW,EACN7B,EAAIG,IAEbL,EAAQQ,KAAK,2BACNa,EAAAA,cAAchB,GACjBiB,EAAAA,aAAatB,EAAS0C,EAAY,aAAlCpB,CAAgDjB,GAChDqC,EAAWrC,GAChB,CAcD,SAASqC,EAAWrC,GAClB,OAAa,OAATA,GAAiB4B,EAAkBA,mBAAC5B,IACtCL,EAAQQ,KAAK,mBACNL,EAAKwC,UACR1C,EAAGI,GACHL,EAAQ4C,MAAM9C,EAAqB+C,EAAgBC,EAAnD9C,CAA0DK,KAEhEL,EAAQM,MAAM,uBACdN,EAAQM,MAAM,cAAe,CAC3ByC,YAAa,WAERC,EAAK3C,GACb,CAcD,SAAS2C,EAAK3C,GACZ,OAAa,OAATA,GAAiB4B,EAAkBA,mBAAC5B,IACtCL,EAAQQ,KAAK,eACbR,EAAQQ,KAAK,uBACNkC,EAAWrC,IAEhBgB,EAAAA,cAAchB,IAChBL,EAAQQ,KAAK,eACbR,EAAQQ,KAAK,uBACNc,EAAYA,aAACtB,EAASiD,EAAY,aAAlC3B,CAAgDjB,IAE5C,KAATA,GAAeA,IAASwB,EACnB3B,EAAIG,IAEbL,EAAQO,QAAQF,GACT2C,EACR,CAcD,SAASC,EAAW5C,GAClB,OAAa,OAATA,GAAiB4B,EAAkBA,mBAAC5B,GAC/BqC,EAAWrC,IAEpBL,EAAQM,MAAM,uBACdN,EAAQM,MAAM,cAAe,CAC3ByC,YAAa,WAERG,EAAK7C,GACb,CAcD,SAAS6C,EAAK7C,GACZ,OAAa,OAATA,GAAiB4B,EAAkBA,mBAAC5B,IACtCL,EAAQQ,KAAK,eACbR,EAAQQ,KAAK,uBACNkC,EAAWrC,IAEP,KAATA,GAAeA,IAASwB,EACnB3B,EAAIG,IAEbL,EAAQO,QAAQF,GACT6C,EACR,CAeD,SAASL,EAAexC,GACtB,OAAOL,EAAQmD,QAAQlC,EAAY6B,EAAOM,EAAnCpD,CAAkDK,EAC1D,CAcD,SAAS+C,EAAc/C,GAIrB,OAHAL,EAAQM,MAAM,cACdN,EAAQO,QAAQF,GAChBL,EAAQQ,KAAK,cACN6C,CACR,CAcD,SAASA,EAAahD,GACpB,OAAO6B,EAAgB,GAAKb,EAAaA,cAAChB,GACtCiB,EAAYA,aACVtB,EACAsD,EACA,aACApB,EAAgB,EAJlBZ,CAKEjB,GACFiD,EAAmBjD,EACxB,CAcD,SAASiD,EAAmBjD,GAC1B,OAAa,OAATA,GAAiB4B,EAAkBA,mBAAC5B,GAC/BL,EAAQ4C,MAAM9C,EAAqB+C,EAAgBC,EAAnD9C,CAA0DK,IAEnEL,EAAQM,MAAM,iBACPiD,EAAalD,GACrB,CAcD,SAASkD,EAAalD,GACpB,OAAa,OAATA,GAAiB4B,EAAkBA,mBAAC5B,IACtCL,EAAQQ,KAAK,iBACN8C,EAAmBjD,KAE5BL,EAAQO,QAAQF,GACTkD,EACR,CAcD,SAAST,EAAMzC,GAEb,OADAL,EAAQQ,KAAK,cACNP,EAAGI,EACX,CAsHH,EA1aEmD,UAAU"}
@@ -0,0 +1,2 @@
1
+ "use strict";Object.defineProperty(exports,"__esModule",{value:!0});var e=require("../../micromark-factory-space/index.js"),n=require("../../micromark-util-character/index.js");const t={name:"codeIndented",tokenize:function(t,r,o){const c=this;return function(n){return t.enter("codeIndented"),e.factorySpace(t,u,"linePrefix",5)(n)};function u(e){const n=c.events[c.events.length-1];return n&&"linePrefix"===n[1].type&&n[2].sliceSerialize(n[1],!0).length>=4?d(e):o(e)}function d(e){return null===e?a(e):n.markdownLineEnding(e)?t.attempt(i,d,a)(e):(t.enter("codeFlowValue"),l(e))}function l(e){return null===e||n.markdownLineEnding(e)?(t.exit("codeFlowValue"),d(e)):(t.consume(e),l)}function a(e){return t.exit("codeIndented"),r(e)}}},i={tokenize:function(t,i,r){const o=this;return c;function c(i){return o.parser.lazy[o.now().line]?r(i):n.markdownLineEnding(i)?(t.enter("lineEnding"),t.consume(i),t.exit("lineEnding"),c):e.factorySpace(t,u,"linePrefix",5)(i)}function u(e){const t=o.events[o.events.length-1];return t&&"linePrefix"===t[1].type&&t[2].sliceSerialize(t[1],!0).length>=4?i(e):n.markdownLineEnding(e)?c(e):r(e)}},partial:!0};exports.codeIndented=t;
2
+ //# sourceMappingURL=code-indented.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"code-indented.js","sources":["../../../../../../../node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/code-indented.js"],"sourcesContent":["/**\n * @typedef {import('micromark-util-types').Construct} Construct\n * @typedef {import('micromark-util-types').State} State\n * @typedef {import('micromark-util-types').TokenizeContext} TokenizeContext\n * @typedef {import('micromark-util-types').Tokenizer} Tokenizer\n */\n\nimport {factorySpace} from 'micromark-factory-space'\nimport {markdownLineEnding, markdownSpace} from 'micromark-util-character'\n/** @type {Construct} */\nexport const codeIndented = {\n name: 'codeIndented',\n tokenize: tokenizeCodeIndented\n}\n\n/** @type {Construct} */\nconst furtherStart = {\n tokenize: tokenizeFurtherStart,\n partial: true\n}\n\n/**\n * @this {TokenizeContext}\n * @type {Tokenizer}\n */\nfunction tokenizeCodeIndented(effects, ok, nok) {\n const self = this\n return start\n\n /**\n * Start of code (indented).\n *\n * > **Parsing note**: it is not needed to check if this first line is a\n * > filled line (that it has a non-whitespace character), because blank lines\n * > are parsed already, so we never run into that.\n *\n * ```markdown\n * > | aaa\n * ^\n * ```\n *\n * @type {State}\n */\n function start(code) {\n // To do: manually check if interrupting like `markdown-rs`.\n\n effects.enter('codeIndented')\n // To do: use an improved `space_or_tab` function like `markdown-rs`,\n // so that we can drop the next state.\n return factorySpace(effects, afterPrefix, 'linePrefix', 4 + 1)(code)\n }\n\n /**\n * At start, after 1 or 4 spaces.\n *\n * ```markdown\n * > | aaa\n * ^\n * ```\n *\n * @type {State}\n */\n function afterPrefix(code) {\n const tail = self.events[self.events.length - 1]\n return tail &&\n tail[1].type === 'linePrefix' &&\n tail[2].sliceSerialize(tail[1], true).length >= 4\n ? 
atBreak(code)\n : nok(code)\n }\n\n /**\n * At a break.\n *\n * ```markdown\n * > | aaa\n * ^ ^\n * ```\n *\n * @type {State}\n */\n function atBreak(code) {\n if (code === null) {\n return after(code)\n }\n if (markdownLineEnding(code)) {\n return effects.attempt(furtherStart, atBreak, after)(code)\n }\n effects.enter('codeFlowValue')\n return inside(code)\n }\n\n /**\n * In code content.\n *\n * ```markdown\n * > | aaa\n * ^^^^\n * ```\n *\n * @type {State}\n */\n function inside(code) {\n if (code === null || markdownLineEnding(code)) {\n effects.exit('codeFlowValue')\n return atBreak(code)\n }\n effects.consume(code)\n return inside\n }\n\n /** @type {State} */\n function after(code) {\n effects.exit('codeIndented')\n // To do: allow interrupting like `markdown-rs`.\n // Feel free to interrupt.\n // tokenizer.interrupt = false\n return ok(code)\n }\n}\n\n/**\n * @this {TokenizeContext}\n * @type {Tokenizer}\n */\nfunction tokenizeFurtherStart(effects, ok, nok) {\n const self = this\n return furtherStart\n\n /**\n * At eol, trying to parse another indent.\n *\n * ```markdown\n * > | aaa\n * ^\n * | bbb\n * ```\n *\n * @type {State}\n */\n function furtherStart(code) {\n // To do: improve `lazy` / `pierce` handling.\n // If this is a lazy line, it can’t be code.\n if (self.parser.lazy[self.now().line]) {\n return nok(code)\n }\n if (markdownLineEnding(code)) {\n effects.enter('lineEnding')\n effects.consume(code)\n effects.exit('lineEnding')\n return furtherStart\n }\n\n // To do: the code here in `micromark-js` is a bit different from\n // `markdown-rs` because there it can attempt spaces.\n // We can’t yet.\n //\n // To do: use an improved `space_or_tab` function like `markdown-rs`,\n // so that we can drop the next state.\n return factorySpace(effects, afterPrefix, 'linePrefix', 4 + 1)(code)\n }\n\n /**\n * At start, after 1 or 4 spaces.\n *\n * ```markdown\n * > | aaa\n * ^\n * ```\n *\n * @type {State}\n */\n function afterPrefix(code) {\n const tail = 
self.events[self.events.length - 1]\n return tail &&\n tail[1].type === 'linePrefix' &&\n tail[2].sliceSerialize(tail[1], true).length >= 4\n ? ok(code)\n : markdownLineEnding(code)\n ? furtherStart(code)\n : nok(code)\n }\n}\n"],"names":["codeIndented","name","tokenize","effects","ok","nok","self","this","code","enter","factorySpace","afterPrefix","tail","events","length","type","sliceSerialize","atBreak","after","markdownLineEnding","attempt","furtherStart","inside","exit","consume","parser","lazy","now","line","partial"],"mappings":"iLAUY,MAACA,EAAe,CAC1BC,KAAM,eACNC,SAaF,SAA8BC,EAASC,EAAIC,GACzC,MAAMC,EAAOC,KACb,OAgBA,SAAeC,GAMb,OAHAL,EAAQM,MAAM,gBAGPC,EAAAA,aAAaP,EAASQ,EAAa,aAAc,EAAjDD,CAAwDF,EAChE,EAYD,SAASG,EAAYH,GACnB,MAAMI,EAAON,EAAKO,OAAOP,EAAKO,OAAOC,OAAS,GAC9C,OAAOF,GACY,eAAjBA,EAAK,GAAGG,MACRH,EAAK,GAAGI,eAAeJ,EAAK,IAAI,GAAME,QAAU,EAC9CG,EAAQT,GACRH,EAAIG,EACT,CAYD,SAASS,EAAQT,GACf,OAAa,OAATA,EACKU,EAAMV,GAEXW,EAAAA,mBAAmBX,GACdL,EAAQiB,QAAQC,EAAcJ,EAASC,EAAvCf,CAA8CK,IAEvDL,EAAQM,MAAM,iBACPa,EAAOd,GACf,CAYD,SAASc,EAAOd,GACd,OAAa,OAATA,GAAiBW,EAAkBA,mBAACX,IACtCL,EAAQoB,KAAK,iBACNN,EAAQT,KAEjBL,EAAQqB,QAAQhB,GACTc,EACR,CAGD,SAASJ,EAAMV,GAKb,OAJAL,EAAQoB,KAAK,gBAINnB,EAAGI,EACX,CACH,GAvGMa,EAAe,CACnBnB,SA4GF,SAA8BC,EAASC,EAAIC,GACzC,MAAMC,EAAOC,KACb,OAAOc,EAaP,SAASA,EAAab,GAGpB,OAAIF,EAAKmB,OAAOC,KAAKpB,EAAKqB,MAAMC,MACvBvB,EAAIG,GAETW,EAAAA,mBAAmBX,IACrBL,EAAQM,MAAM,cACdN,EAAQqB,QAAQhB,GAChBL,EAAQoB,KAAK,cACNF,GASFX,EAAAA,aAAaP,EAASQ,EAAa,aAAc,EAAjDD,CAAwDF,EAChE,CAYD,SAASG,EAAYH,GACnB,MAAMI,EAAON,EAAKO,OAAOP,EAAKO,OAAOC,OAAS,GAC9C,OAAOF,GACY,eAAjBA,EAAK,GAAGG,MACRH,EAAK,GAAGI,eAAeJ,EAAK,IAAI,GAAME,QAAU,EAC9CV,EAAGI,GACHW,EAAAA,mBAAmBX,GACnBa,EAAab,GACbH,EAAIG,EACT,CACH,EApKEqB,SAAS"}
@@ -0,0 +1,2 @@
1
+ "use strict";Object.defineProperty(exports,"__esModule",{value:!0});var e=require("../../micromark-util-character/index.js");const n={name:"codeText",tokenize:function(n,t,i){let c,o,r=0;return function(e){return n.enter("codeText"),n.enter("codeTextSequence"),d(e)};function d(e){return 96===e?(n.consume(e),r++,d):(n.exit("codeTextSequence"),u(e))}function u(t){return null===t?i(t):32===t?(n.enter("space"),n.consume(t),n.exit("space"),u):96===t?(o=n.enter("codeTextSequence"),c=0,s(t)):e.markdownLineEnding(t)?(n.enter("lineEnding"),n.consume(t),n.exit("lineEnding"),u):(n.enter("codeTextData"),a(t))}function a(t){return null===t||32===t||96===t||e.markdownLineEnding(t)?(n.exit("codeTextData"),u(t)):(n.consume(t),a)}function s(e){return 96===e?(n.consume(e),c++,s):c===r?(n.exit("codeTextSequence"),n.exit("codeText"),t(e)):(o.type="codeTextData",a(e))}},resolve:function(e){let n,t,i=e.length-4,c=3;if(!("lineEnding"!==e[c][1].type&&"space"!==e[c][1].type||"lineEnding"!==e[i][1].type&&"space"!==e[i][1].type))for(n=c;++n<i;)if("codeTextData"===e[n][1].type){e[c][1].type="codeTextPadding",e[i][1].type="codeTextPadding",c+=2,i-=2;break}n=c-1,i++;for(;++n<=i;)void 0===t?n!==i&&"lineEnding"!==e[n][1].type&&(t=n):n!==i&&"lineEnding"!==e[n][1].type||(e[t][1].type="codeTextData",n!==t+2&&(e[t][1].end=e[n-1][1].end,e.splice(t+2,n-t-2),i-=n-t-2,n=t+2),t=void 0);return e},previous:function(e){return 96!==e||"characterEscape"===this.events[this.events.length-1][1].type}};exports.codeText=n;
2
+ //# sourceMappingURL=code-text.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"code-text.js","sources":["../../../../../../../node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/code-text.js"],"sourcesContent":["/**\n * @typedef {import('micromark-util-types').Construct} Construct\n * @typedef {import('micromark-util-types').Previous} Previous\n * @typedef {import('micromark-util-types').Resolver} Resolver\n * @typedef {import('micromark-util-types').State} State\n * @typedef {import('micromark-util-types').Token} Token\n * @typedef {import('micromark-util-types').TokenizeContext} TokenizeContext\n * @typedef {import('micromark-util-types').Tokenizer} Tokenizer\n */\n\nimport {markdownLineEnding} from 'micromark-util-character'\n/** @type {Construct} */\nexport const codeText = {\n name: 'codeText',\n tokenize: tokenizeCodeText,\n resolve: resolveCodeText,\n previous\n}\n\n// To do: next major: don’t resolve, like `markdown-rs`.\n/** @type {Resolver} */\nfunction resolveCodeText(events) {\n let tailExitIndex = events.length - 4\n let headEnterIndex = 3\n /** @type {number} */\n let index\n /** @type {number | undefined} */\n let enter\n\n // If we start and end with an EOL or a space.\n if (\n (events[headEnterIndex][1].type === 'lineEnding' ||\n events[headEnterIndex][1].type === 'space') &&\n (events[tailExitIndex][1].type === 'lineEnding' ||\n events[tailExitIndex][1].type === 'space')\n ) {\n index = headEnterIndex\n\n // And we have data.\n while (++index < tailExitIndex) {\n if (events[index][1].type === 'codeTextData') {\n // Then we have padding.\n events[headEnterIndex][1].type = 'codeTextPadding'\n events[tailExitIndex][1].type = 'codeTextPadding'\n headEnterIndex += 2\n tailExitIndex -= 2\n break\n }\n }\n }\n\n // Merge adjacent spaces and data.\n index = headEnterIndex - 1\n tailExitIndex++\n while (++index <= tailExitIndex) {\n if (enter === undefined) {\n if (index !== tailExitIndex && events[index][1].type !== 'lineEnding') {\n enter = index\n }\n } else if (\n index === tailExitIndex 
||\n events[index][1].type === 'lineEnding'\n ) {\n events[enter][1].type = 'codeTextData'\n if (index !== enter + 2) {\n events[enter][1].end = events[index - 1][1].end\n events.splice(enter + 2, index - enter - 2)\n tailExitIndex -= index - enter - 2\n index = enter + 2\n }\n enter = undefined\n }\n }\n return events\n}\n\n/**\n * @this {TokenizeContext}\n * @type {Previous}\n */\nfunction previous(code) {\n // If there is a previous code, there will always be a tail.\n return (\n code !== 96 ||\n this.events[this.events.length - 1][1].type === 'characterEscape'\n )\n}\n\n/**\n * @this {TokenizeContext}\n * @type {Tokenizer}\n */\nfunction tokenizeCodeText(effects, ok, nok) {\n const self = this\n let sizeOpen = 0\n /** @type {number} */\n let size\n /** @type {Token} */\n let token\n return start\n\n /**\n * Start of code (text).\n *\n * ```markdown\n * > | `a`\n * ^\n * > | \\`a`\n * ^\n * ```\n *\n * @type {State}\n */\n function start(code) {\n effects.enter('codeText')\n effects.enter('codeTextSequence')\n return sequenceOpen(code)\n }\n\n /**\n * In opening sequence.\n *\n * ```markdown\n * > | `a`\n * ^\n * ```\n *\n * @type {State}\n */\n function sequenceOpen(code) {\n if (code === 96) {\n effects.consume(code)\n sizeOpen++\n return sequenceOpen\n }\n effects.exit('codeTextSequence')\n return between(code)\n }\n\n /**\n * Between something and something else.\n *\n * ```markdown\n * > | `a`\n * ^^\n * ```\n *\n * @type {State}\n */\n function between(code) {\n // EOF.\n if (code === null) {\n return nok(code)\n }\n\n // To do: next major: don’t do spaces in resolve, but when compiling,\n // like `markdown-rs`.\n // Tabs don’t work, and virtual spaces don’t make sense.\n if (code === 32) {\n effects.enter('space')\n effects.consume(code)\n effects.exit('space')\n return between\n }\n\n // Closing fence? 
Could also be data.\n if (code === 96) {\n token = effects.enter('codeTextSequence')\n size = 0\n return sequenceClose(code)\n }\n if (markdownLineEnding(code)) {\n effects.enter('lineEnding')\n effects.consume(code)\n effects.exit('lineEnding')\n return between\n }\n\n // Data.\n effects.enter('codeTextData')\n return data(code)\n }\n\n /**\n * In data.\n *\n * ```markdown\n * > | `a`\n * ^\n * ```\n *\n * @type {State}\n */\n function data(code) {\n if (\n code === null ||\n code === 32 ||\n code === 96 ||\n markdownLineEnding(code)\n ) {\n effects.exit('codeTextData')\n return between(code)\n }\n effects.consume(code)\n return data\n }\n\n /**\n * In closing sequence.\n *\n * ```markdown\n * > | `a`\n * ^\n * ```\n *\n * @type {State}\n */\n function sequenceClose(code) {\n // More.\n if (code === 96) {\n effects.consume(code)\n size++\n return sequenceClose\n }\n\n // Done!\n if (size === sizeOpen) {\n effects.exit('codeTextSequence')\n effects.exit('codeText')\n return ok(code)\n }\n\n // More or less accents: mark as data.\n token.type = 'codeTextData'\n return data(code)\n 
}\n}\n"],"names":["codeText","name","tokenize","effects","ok","nok","size","token","sizeOpen","code","enter","sequenceOpen","consume","exit","between","sequenceClose","markdownLineEnding","data","type","resolve","events","index","tailExitIndex","length","headEnterIndex","undefined","end","splice","previous","this"],"mappings":"6HAYY,MAACA,EAAW,CACtBC,KAAM,WACNC,SA8EF,SAA0BC,EAASC,EAAIC,GAErC,IAEIC,EAEAC,EAJAC,EAAW,EAKf,OAcA,SAAeC,GAGb,OAFAN,EAAQO,MAAM,YACdP,EAAQO,MAAM,oBACPC,EAAaF,EACrB,EAYD,SAASE,EAAaF,GACpB,OAAa,KAATA,GACFN,EAAQS,QAAQH,GAChBD,IACOG,IAETR,EAAQU,KAAK,oBACNC,EAAQL,GAChB,CAYD,SAASK,EAAQL,GAEf,OAAa,OAATA,EACKJ,EAAII,GAMA,KAATA,GACFN,EAAQO,MAAM,SACdP,EAAQS,QAAQH,GAChBN,EAAQU,KAAK,SACNC,GAII,KAATL,GACFF,EAAQJ,EAAQO,MAAM,oBACtBJ,EAAO,EACAS,EAAcN,IAEnBO,EAAAA,mBAAmBP,IACrBN,EAAQO,MAAM,cACdP,EAAQS,QAAQH,GAChBN,EAAQU,KAAK,cACNC,IAITX,EAAQO,MAAM,gBACPO,EAAKR,GACb,CAYD,SAASQ,EAAKR,GACZ,OACW,OAATA,GACS,KAATA,GACS,KAATA,GACAO,EAAAA,mBAAmBP,IAEnBN,EAAQU,KAAK,gBACNC,EAAQL,KAEjBN,EAAQS,QAAQH,GACTQ,EACR,CAYD,SAASF,EAAcN,GAErB,OAAa,KAATA,GACFN,EAAQS,QAAQH,GAChBH,IACOS,GAILT,IAASE,GACXL,EAAQU,KAAK,oBACbV,EAAQU,KAAK,YACNT,EAAGK,KAIZF,EAAMW,KAAO,eACND,EAAKR,GACb,CACH,EA7NEU,QAMF,SAAyBC,GACvB,IAGIC,EAEAX,EALAY,EAAgBF,EAAOG,OAAS,EAChCC,EAAiB,EAOrB,KACsC,eAAnCJ,EAAOI,GAAgB,GAAGN,MACU,UAAnCE,EAAOI,GAAgB,GAAGN,MACO,eAAlCE,EAAOE,GAAe,GAAGJ,MACU,UAAlCE,EAAOE,GAAe,GAAGJ,MAK3B,IAHAG,EAAQG,IAGCH,EAAQC,GACf,GAA8B,iBAA1BF,EAAOC,GAAO,GAAGH,KAAyB,CAE5CE,EAAOI,GAAgB,GAAGN,KAAO,kBACjCE,EAAOE,GAAe,GAAGJ,KAAO,kBAChCM,GAAkB,EAClBF,GAAiB,EACjB,KACD,CAKLD,EAAQG,EAAiB,EACzBF,IACA,OAASD,GAASC,QACFG,IAAVf,EACEW,IAAUC,GAA2C,eAA1BF,EAAOC,GAAO,GAAGH,OAC9CR,EAAQW,GAGVA,IAAUC,GACgB,eAA1BF,EAAOC,GAAO,GAAGH,OAEjBE,EAAOV,GAAO,GAAGQ,KAAO,eACpBG,IAAUX,EAAQ,IACpBU,EAAOV,GAAO,GAAGgB,IAAMN,EAAOC,EAAQ,GAAG,GAAGK,IAC5CN,EAAOO,OAAOjB,EAAQ,EAAGW,EAAQX,EAAQ,GACzCY,GAAiBD,EAAQX,EAAQ,EACjCW,EAAQX,EAAQ,GAElBA,OAAQe,GAGZ,OAAOL,CACT,EA1DEQ,SAgEF,SAAkBnB,GAEhB,OACW,KAATA,GACgD,oBAAhDoB,KAAKT,OAAOS,KAAKT,OAAOG,OAAS,GA
AG,GAAGL,IAE3C"}
@@ -0,0 +1,2 @@
1
+ "use strict";Object.defineProperty(exports,"__esModule",{value:!0});var n=require("../../micromark-factory-space/index.js"),e=require("../../micromark-util-character/index.js"),t=require("../../micromark-util-subtokenize/index.js");const r={tokenize:function(n,t){let r;return function(e){return n.enter("content"),r=n.enter("chunkContent",{contentType:"content"}),o(e)};function o(t){return null===t?c(t):e.markdownLineEnding(t)?n.check(i,u,c)(t):(n.consume(t),o)}function c(e){return n.exit("chunkContent"),n.exit("content"),t(e)}function u(e){return n.consume(e),n.exit("chunkContent"),r.next=n.enter("chunkContent",{contentType:"content",previous:r}),r=r.next,o}},resolve:function(n){return t.subtokenize(n),n}},i={tokenize:function(t,r,i){const o=this;return function(e){return t.exit("chunkContent"),t.enter("lineEnding"),t.consume(e),t.exit("lineEnding"),n.factorySpace(t,c,"linePrefix")};function c(n){if(null===n||e.markdownLineEnding(n))return i(n);const c=o.events[o.events.length-1];return!o.parser.constructs.disable.null.includes("codeIndented")&&c&&"linePrefix"===c[1].type&&c[2].sliceSerialize(c[1],!0).length>=4?r(n):t.interrupt(o.parser.constructs.flow,i,r)(n)}},partial:!0};exports.content=r;
2
+ //# sourceMappingURL=content.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"content.js","sources":["../../../../../../../node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/content.js"],"sourcesContent":["/**\n * @typedef {import('micromark-util-types').Construct} Construct\n * @typedef {import('micromark-util-types').Resolver} Resolver\n * @typedef {import('micromark-util-types').State} State\n * @typedef {import('micromark-util-types').Token} Token\n * @typedef {import('micromark-util-types').TokenizeContext} TokenizeContext\n * @typedef {import('micromark-util-types').Tokenizer} Tokenizer\n */\n\nimport {factorySpace} from 'micromark-factory-space'\nimport {markdownLineEnding} from 'micromark-util-character'\nimport {subtokenize} from 'micromark-util-subtokenize'\n/**\n * No name because it must not be turned off.\n * @type {Construct}\n */\nexport const content = {\n tokenize: tokenizeContent,\n resolve: resolveContent\n}\n\n/** @type {Construct} */\nconst continuationConstruct = {\n tokenize: tokenizeContinuation,\n partial: true\n}\n\n/**\n * Content is transparent: it’s parsed right now. 
That way, definitions are also\n * parsed right now: before text in paragraphs (specifically, media) are parsed.\n *\n * @type {Resolver}\n */\nfunction resolveContent(events) {\n subtokenize(events)\n return events\n}\n\n/**\n * @this {TokenizeContext}\n * @type {Tokenizer}\n */\nfunction tokenizeContent(effects, ok) {\n /** @type {Token | undefined} */\n let previous\n return chunkStart\n\n /**\n * Before a content chunk.\n *\n * ```markdown\n * > | abc\n * ^\n * ```\n *\n * @type {State}\n */\n function chunkStart(code) {\n effects.enter('content')\n previous = effects.enter('chunkContent', {\n contentType: 'content'\n })\n return chunkInside(code)\n }\n\n /**\n * In a content chunk.\n *\n * ```markdown\n * > | abc\n * ^^^\n * ```\n *\n * @type {State}\n */\n function chunkInside(code) {\n if (code === null) {\n return contentEnd(code)\n }\n\n // To do: in `markdown-rs`, each line is parsed on its own, and everything\n // is stitched together resolving.\n if (markdownLineEnding(code)) {\n return effects.check(\n continuationConstruct,\n contentContinue,\n contentEnd\n )(code)\n }\n\n // Data.\n effects.consume(code)\n return chunkInside\n }\n\n /**\n *\n *\n * @type {State}\n */\n function contentEnd(code) {\n effects.exit('chunkContent')\n effects.exit('content')\n return ok(code)\n }\n\n /**\n *\n *\n * @type {State}\n */\n function contentContinue(code) {\n effects.consume(code)\n effects.exit('chunkContent')\n previous.next = effects.enter('chunkContent', {\n contentType: 'content',\n previous\n })\n previous = previous.next\n return chunkInside\n }\n}\n\n/**\n * @this {TokenizeContext}\n * @type {Tokenizer}\n */\nfunction tokenizeContinuation(effects, ok, nok) {\n const self = this\n return startLookahead\n\n /**\n *\n *\n * @type {State}\n */\n function startLookahead(code) {\n effects.exit('chunkContent')\n effects.enter('lineEnding')\n effects.consume(code)\n effects.exit('lineEnding')\n return factorySpace(effects, prefixed, 'linePrefix')\n }\n\n /**\n 
*\n *\n * @type {State}\n */\n function prefixed(code) {\n if (code === null || markdownLineEnding(code)) {\n return nok(code)\n }\n\n // Always populated by defaults.\n\n const tail = self.events[self.events.length - 1]\n if (\n !self.parser.constructs.disable.null.includes('codeIndented') &&\n tail &&\n tail[1].type === 'linePrefix' &&\n tail[2].sliceSerialize(tail[1], true).length >= 4\n ) {\n return ok(code)\n }\n return effects.interrupt(self.parser.constructs.flow, nok, ok)(code)\n }\n}\n"],"names":["content","tokenize","effects","ok","previous","code","enter","contentType","chunkInside","contentEnd","markdownLineEnding","check","continuationConstruct","contentContinue","consume","exit","next","resolve","events","subtokenize","nok","self","this","factorySpace","prefixed","tail","length","parser","constructs","disable","null","includes","type","sliceSerialize","interrupt","flow","partial"],"mappings":"wOAgBY,MAACA,EAAU,CACrBC,SAyBF,SAAyBC,EAASC,GAEhC,IAAIC,EACJ,OAYA,SAAoBC,GAKlB,OAJAH,EAAQI,MAAM,WACdF,EAAWF,EAAQI,MAAM,eAAgB,CACvCC,YAAa,YAERC,EAAYH,EACpB,EAYD,SAASG,EAAYH,GACnB,OAAa,OAATA,EACKI,EAAWJ,GAKhBK,EAAAA,mBAAmBL,GACdH,EAAQS,MACbC,EACAC,EACAJ,EAHKP,CAILG,IAIJH,EAAQY,QAAQT,GACTG,EACR,CAOD,SAASC,EAAWJ,GAGlB,OAFAH,EAAQa,KAAK,gBACbb,EAAQa,KAAK,WACNZ,EAAGE,EACX,CAOD,SAASQ,EAAgBR,GAQvB,OAPAH,EAAQY,QAAQT,GAChBH,EAAQa,KAAK,gBACbX,EAASY,KAAOd,EAAQI,MAAM,eAAgB,CAC5CC,YAAa,UACbH,aAEFA,EAAWA,EAASY,KACbR,CACR,CACH,EAvGES,QAeF,SAAwBC,GAEtB,OADAC,EAAAA,YAAYD,GACLA,CACT,GAdMN,EAAwB,CAC5BX,SAwGF,SAA8BC,EAASC,EAAIiB,GACzC,MAAMC,EAAOC,KACb,OAOA,SAAwBjB,GAKtB,OAJAH,EAAQa,KAAK,gBACbb,EAAQI,MAAM,cACdJ,EAAQY,QAAQT,GAChBH,EAAQa,KAAK,cACNQ,eAAarB,EAASsB,EAAU,aACxC,EAOD,SAASA,EAASnB,GAChB,GAAa,OAATA,GAAiBK,EAAkBA,mBAACL,GACtC,OAAOe,EAAIf,GAKb,MAAMoB,EAAOJ,EAAKH,OAAOG,EAAKH,OAAOQ,OAAS,GAC9C,OACGL,EAAKM,OAAOC,WAAWC,QAAQC,KAAKC,SAAS,iBAC9CN,GACiB,eAAjBA,EAAK,GAAGO,MACRP,EAAK,GAAGQ,eAAeR,EAAK,IAAI,GAAMC,QAAU,EAEzCvB,EAAGE,GAELH,EAAQgC,UAAUb,EAAKM,OAAOC,WAAWO,KAAMf,EAAKjB,EAApDD,CAAwDG
,EAChE,CACH,EA/IE+B,SAAS"}
@@ -0,0 +1,2 @@
1
+ "use strict";Object.defineProperty(exports,"__esModule",{value:!0});var i=require("../../micromark-factory-destination/index.js"),e=require("../../micromark-factory-label/index.js"),n=require("../../micromark-factory-space/index.js"),t=require("../../micromark-factory-title/index.js"),r=require("../../micromark-factory-whitespace/index.js"),o=require("../../micromark-util-character/index.js"),a=require("../../micromark-util-normalize-identifier/index.js");const c={name:"definition",tokenize:function(t,c,u){const d=this;let s;return function(i){return t.enter("definition"),function(i){return e.factoryLabel.call(d,t,l,u,"definitionLabel","definitionLabelMarker","definitionLabelString")(i)}(i)};function l(i){return s=a.normalizeIdentifier(d.sliceSerialize(d.events[d.events.length-1][1]).slice(1,-1)),58===i?(t.enter("definitionMarker"),t.consume(i),t.exit("definitionMarker"),m):u(i)}function m(i){return o.markdownLineEndingOrSpace(i)?r.factoryWhitespace(t,k)(i):k(i)}function k(e){return i.factoryDestination(t,p,u,"definitionDestination","definitionDestinationLiteral","definitionDestinationLiteralMarker","definitionDestinationRaw","definitionDestinationString")(e)}function p(i){return t.attempt(f,y,y)(i)}function y(i){return o.markdownSpace(i)?n.factorySpace(t,x,"whitespace")(i):x(i)}function x(i){return null===i||o.markdownLineEnding(i)?(t.exit("definition"),d.parser.defined.push(s),c(i)):u(i)}}},f={tokenize:function(i,e,a){return function(e){return o.markdownLineEndingOrSpace(e)?r.factoryWhitespace(i,c)(e):a(e)};function c(e){return t.factoryTitle(i,f,a,"definitionTitle","definitionTitleMarker","definitionTitleString")(e)}function f(e){return o.markdownSpace(e)?n.factorySpace(i,u,"whitespace")(e):u(e)}function u(i){return null===i||o.markdownLineEnding(i)?e(i):a(i)}},partial:!0};exports.definition=c;
2
+ //# sourceMappingURL=definition.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"definition.js","sources":["../../../../../../../node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/definition.js"],"sourcesContent":["/**\n * @typedef {import('micromark-util-types').Construct} Construct\n * @typedef {import('micromark-util-types').State} State\n * @typedef {import('micromark-util-types').TokenizeContext} TokenizeContext\n * @typedef {import('micromark-util-types').Tokenizer} Tokenizer\n */\n\nimport {factoryDestination} from 'micromark-factory-destination'\nimport {factoryLabel} from 'micromark-factory-label'\nimport {factorySpace} from 'micromark-factory-space'\nimport {factoryTitle} from 'micromark-factory-title'\nimport {factoryWhitespace} from 'micromark-factory-whitespace'\nimport {\n markdownLineEnding,\n markdownLineEndingOrSpace,\n markdownSpace\n} from 'micromark-util-character'\nimport {normalizeIdentifier} from 'micromark-util-normalize-identifier'\n/** @type {Construct} */\nexport const definition = {\n name: 'definition',\n tokenize: tokenizeDefinition\n}\n\n/** @type {Construct} */\nconst titleBefore = {\n tokenize: tokenizeTitleBefore,\n partial: true\n}\n\n/**\n * @this {TokenizeContext}\n * @type {Tokenizer}\n */\nfunction tokenizeDefinition(effects, ok, nok) {\n const self = this\n /** @type {string} */\n let identifier\n return start\n\n /**\n * At start of a definition.\n *\n * ```markdown\n * > | [a]: b \"c\"\n * ^\n * ```\n *\n * @type {State}\n */\n function start(code) {\n // Do not interrupt paragraphs (but do follow definitions).\n // To do: do `interrupt` the way `markdown-rs` does.\n // To do: parse whitespace the way `markdown-rs` does.\n effects.enter('definition')\n return before(code)\n }\n\n /**\n * After optional whitespace, at `[`.\n *\n * ```markdown\n * > | [a]: b \"c\"\n * ^\n * ```\n *\n * @type {State}\n */\n function before(code) {\n // To do: parse whitespace the way `markdown-rs` does.\n\n return factoryLabel.call(\n self,\n effects,\n labelAfter,\n // Note: we 
don’t need to reset the way `markdown-rs` does.\n nok,\n 'definitionLabel',\n 'definitionLabelMarker',\n 'definitionLabelString'\n )(code)\n }\n\n /**\n * After label.\n *\n * ```markdown\n * > | [a]: b \"c\"\n * ^\n * ```\n *\n * @type {State}\n */\n function labelAfter(code) {\n identifier = normalizeIdentifier(\n self.sliceSerialize(self.events[self.events.length - 1][1]).slice(1, -1)\n )\n if (code === 58) {\n effects.enter('definitionMarker')\n effects.consume(code)\n effects.exit('definitionMarker')\n return markerAfter\n }\n return nok(code)\n }\n\n /**\n * After marker.\n *\n * ```markdown\n * > | [a]: b \"c\"\n * ^\n * ```\n *\n * @type {State}\n */\n function markerAfter(code) {\n // Note: whitespace is optional.\n return markdownLineEndingOrSpace(code)\n ? factoryWhitespace(effects, destinationBefore)(code)\n : destinationBefore(code)\n }\n\n /**\n * Before destination.\n *\n * ```markdown\n * > | [a]: b \"c\"\n * ^\n * ```\n *\n * @type {State}\n */\n function destinationBefore(code) {\n return factoryDestination(\n effects,\n destinationAfter,\n // Note: we don’t need to reset the way `markdown-rs` does.\n nok,\n 'definitionDestination',\n 'definitionDestinationLiteral',\n 'definitionDestinationLiteralMarker',\n 'definitionDestinationRaw',\n 'definitionDestinationString'\n )(code)\n }\n\n /**\n * After destination.\n *\n * ```markdown\n * > | [a]: b \"c\"\n * ^\n * ```\n *\n * @type {State}\n */\n function destinationAfter(code) {\n return effects.attempt(titleBefore, after, after)(code)\n }\n\n /**\n * After definition.\n *\n * ```markdown\n * > | [a]: b\n * ^\n * > | [a]: b \"c\"\n * ^\n * ```\n *\n * @type {State}\n */\n function after(code) {\n return markdownSpace(code)\n ? 
factorySpace(effects, afterWhitespace, 'whitespace')(code)\n : afterWhitespace(code)\n }\n\n /**\n * After definition, after optional whitespace.\n *\n * ```markdown\n * > | [a]: b\n * ^\n * > | [a]: b \"c\"\n * ^\n * ```\n *\n * @type {State}\n */\n function afterWhitespace(code) {\n if (code === null || markdownLineEnding(code)) {\n effects.exit('definition')\n\n // Note: we don’t care about uniqueness.\n // It’s likely that that doesn’t happen very frequently.\n // It is more likely that it wastes precious time.\n self.parser.defined.push(identifier)\n\n // To do: `markdown-rs` interrupt.\n // // You’d be interrupting.\n // tokenizer.interrupt = true\n return ok(code)\n }\n return nok(code)\n }\n}\n\n/**\n * @this {TokenizeContext}\n * @type {Tokenizer}\n */\nfunction tokenizeTitleBefore(effects, ok, nok) {\n return titleBefore\n\n /**\n * After destination, at whitespace.\n *\n * ```markdown\n * > | [a]: b\n * ^\n * > | [a]: b \"c\"\n * ^\n * ```\n *\n * @type {State}\n */\n function titleBefore(code) {\n return markdownLineEndingOrSpace(code)\n ? factoryWhitespace(effects, beforeMarker)(code)\n : nok(code)\n }\n\n /**\n * At title.\n *\n * ```markdown\n * | [a]: b\n * > | \"c\"\n * ^\n * ```\n *\n * @type {State}\n */\n function beforeMarker(code) {\n return factoryTitle(\n effects,\n titleAfter,\n nok,\n 'definitionTitle',\n 'definitionTitleMarker',\n 'definitionTitleString'\n )(code)\n }\n\n /**\n * After title.\n *\n * ```markdown\n * > | [a]: b \"c\"\n * ^\n * ```\n *\n * @type {State}\n */\n function titleAfter(code) {\n return markdownSpace(code)\n ? factorySpace(effects, titleAfterOptionalWhitespace, 'whitespace')(code)\n : titleAfterOptionalWhitespace(code)\n }\n\n /**\n * After title, after optional whitespace.\n *\n * ```markdown\n * > | [a]: b \"c\"\n * ^\n * ```\n *\n * @type {State}\n */\n function titleAfterOptionalWhitespace(code) {\n return code === null || markdownLineEnding(code) ? 
ok(code) : nok(code)\n }\n}\n"],"names":["definition","name","tokenize","effects","ok","nok","self","this","identifier","code","enter","factoryLabel","call","labelAfter","before","normalizeIdentifier","sliceSerialize","events","length","slice","consume","exit","markerAfter","markdownLineEndingOrSpace","factoryWhitespace","destinationBefore","factoryDestination","destinationAfter","attempt","titleBefore","after","markdownSpace","factorySpace","afterWhitespace","markdownLineEnding","parser","defined","push","beforeMarker","factoryTitle","titleAfter","titleAfterOptionalWhitespace","partial"],"mappings":"4cAmBY,MAACA,EAAa,CACxBC,KAAM,aACNC,SAaF,SAA4BC,EAASC,EAAIC,GACvC,MAAMC,EAAOC,KAEb,IAAIC,EACJ,OAYA,SAAeC,GAKb,OADAN,EAAQO,MAAM,cAchB,SAAgBD,GAGd,OAAOE,EAAYA,aAACC,KAClBN,EACAH,EACAU,EAEAR,EACA,kBACA,wBACA,wBARKM,CASLF,EACH,CA1BQK,CAAOL,EACf,EAqCD,SAASI,EAAWJ,GAIlB,OAHAD,EAAaO,EAAmBA,oBAC9BT,EAAKU,eAAeV,EAAKW,OAAOX,EAAKW,OAAOC,OAAS,GAAG,IAAIC,MAAM,GAAI,IAE3D,KAATV,GACFN,EAAQO,MAAM,oBACdP,EAAQiB,QAAQX,GAChBN,EAAQkB,KAAK,oBACNC,GAEFjB,EAAII,EACZ,CAYD,SAASa,EAAYb,GAEnB,OAAOc,EAAAA,0BAA0Bd,GAC7Be,EAAAA,kBAAkBrB,EAASsB,EAA3BD,CAA8Cf,GAC9CgB,EAAkBhB,EACvB,CAYD,SAASgB,EAAkBhB,GACzB,OAAOiB,EAAkBA,mBACvBvB,EACAwB,EAEAtB,EACA,wBACA,+BACA,qCACA,2BACA,8BATKqB,CAULjB,EACH,CAYD,SAASkB,EAAiBlB,GACxB,OAAON,EAAQyB,QAAQC,EAAaC,EAAOA,EAApC3B,CAA2CM,EACnD,CAcD,SAASqB,EAAMrB,GACb,OAAOsB,EAAAA,cAActB,GACjBuB,EAAAA,aAAa7B,EAAS8B,EAAiB,aAAvCD,CAAqDvB,GACrDwB,EAAgBxB,EACrB,CAcD,SAASwB,EAAgBxB,GACvB,OAAa,OAATA,GAAiByB,EAAkBA,mBAACzB,IACtCN,EAAQkB,KAAK,cAKbf,EAAK6B,OAAOC,QAAQC,KAAK7B,GAKlBJ,EAAGK,IAELJ,EAAII,EACZ,CACH,GAtLMoB,EAAc,CAClB3B,SA2LF,SAA6BC,EAASC,EAAIC,GACxC,OAcA,SAAqBI,GACnB,OAAOc,EAAAA,0BAA0Bd,GAC7Be,EAAAA,kBAAkBrB,EAASmC,EAA3Bd,CAAyCf,GACzCJ,EAAII,EACT,EAaD,SAAS6B,EAAa7B,GACpB,OAAO8B,EAAYA,aACjBpC,EACAqC,EACAnC,EACA,kBACA,wBACA,wBANKkC,CAOL9B,EACH,CAYD,SAAS+B,EAAW/B,GAClB,OAAOsB,EAAAA,cAActB,GACjBuB,EAAAA,aAAa7B,EAASsC,EAA8B,aAApDT,CAAkEvB,GAClEgC,EAA6BhC,EAClC,CAYD,SAASgC,EAA6BhC,GA
CpC,OAAgB,OAATA,GAAiByB,EAAAA,mBAAmBzB,GAAQL,EAAGK,GAAQJ,EAAII,EACnE,CACH,EAlQEiC,SAAS"}
@@ -0,0 +1,2 @@
1
+ "use strict";Object.defineProperty(exports,"__esModule",{value:!0});var e=require("../../micromark-util-character/index.js");const r={name:"hardBreakEscape",tokenize:function(r,n,a){return function(e){return r.enter("hardBreakEscape"),r.consume(e),t};function t(t){return e.markdownLineEnding(t)?(r.exit("hardBreakEscape"),n(t)):a(t)}}};exports.hardBreakEscape=r;
2
+ //# sourceMappingURL=hard-break-escape.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"hard-break-escape.js","sources":["../../../../../../../node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/hard-break-escape.js"],"sourcesContent":["/**\n * @typedef {import('micromark-util-types').Construct} Construct\n * @typedef {import('micromark-util-types').State} State\n * @typedef {import('micromark-util-types').TokenizeContext} TokenizeContext\n * @typedef {import('micromark-util-types').Tokenizer} Tokenizer\n */\n\nimport {markdownLineEnding} from 'micromark-util-character'\n/** @type {Construct} */\nexport const hardBreakEscape = {\n name: 'hardBreakEscape',\n tokenize: tokenizeHardBreakEscape\n}\n\n/**\n * @this {TokenizeContext}\n * @type {Tokenizer}\n */\nfunction tokenizeHardBreakEscape(effects, ok, nok) {\n return start\n\n /**\n * Start of a hard break (escape).\n *\n * ```markdown\n * > | a\\\n * ^\n * | b\n * ```\n *\n * @type {State}\n */\n function start(code) {\n effects.enter('hardBreakEscape')\n effects.consume(code)\n return after\n }\n\n /**\n * After `\\`, at eol.\n *\n * ```markdown\n * > | a\\\n * ^\n * | b\n * ```\n *\n * @type {State}\n */\n function after(code) {\n if (markdownLineEnding(code)) {\n effects.exit('hardBreakEscape')\n return ok(code)\n }\n return nok(code)\n }\n}\n"],"names":["hardBreakEscape","name","tokenize","effects","ok","nok","code","enter","consume","after","markdownLineEnding","exit"],"mappings":"6HASY,MAACA,EAAkB,CAC7BC,KAAM,kBACNC,SAOF,SAAiCC,EAASC,EAAIC,GAC5C,OAaA,SAAeC,GAGb,OAFAH,EAAQI,MAAM,mBACdJ,EAAQK,QAAQF,GACTG,CACR,EAaD,SAASA,EAAMH,GACb,OAAII,EAAAA,mBAAmBJ,IACrBH,EAAQQ,KAAK,mBACNP,EAAGE,IAELD,EAAIC,EACZ,CACH"}
@@ -0,0 +1,2 @@
1
+ "use strict";Object.defineProperty(exports,"__esModule",{value:!0});var e=require("../../micromark-factory-space/index.js"),n=require("../../micromark-util-character/index.js"),t=require("../../micromark-util-chunked/index.js");const r={name:"headingAtx",tokenize:function(t,r,i){let a=0;return function(e){return t.enter("atxHeading"),function(e){return t.enter("atxHeadingSequence"),c(e)}(e)};function c(e){return 35===e&&a++<6?(t.consume(e),c):null===e||n.markdownLineEndingOrSpace(e)?(t.exit("atxHeadingSequence"),u(e)):i(e)}function u(i){return 35===i?(t.enter("atxHeadingSequence"),d(i)):null===i||n.markdownLineEnding(i)?(t.exit("atxHeading"),r(i)):n.markdownSpace(i)?e.factorySpace(t,u,"whitespace")(i):(t.enter("atxHeadingText"),o(i))}function d(e){return 35===e?(t.consume(e),d):(t.exit("atxHeadingSequence"),u(e))}function o(e){return null===e||35===e||n.markdownLineEndingOrSpace(e)?(t.exit("atxHeadingText"),u(e)):(t.consume(e),o)}},resolve:function(e,n){let r,i,a=e.length-2,c=3;"whitespace"===e[c][1].type&&(c+=2);a-2>c&&"whitespace"===e[a][1].type&&(a-=2);"atxHeadingSequence"===e[a][1].type&&(c===a-1||a-4>c&&"whitespace"===e[a-2][1].type)&&(a-=c+1===a?2:4);a>c&&(r={type:"atxHeadingText",start:e[c][1].start,end:e[a][1].end},i={type:"chunkText",start:e[c][1].start,end:e[a][1].end,contentType:"text"},t.splice(e,c,a-c+1,[["enter",r,n],["enter",i,n],["exit",i,n],["exit",r,n]]));return e}};exports.headingAtx=r;
2
+ //# sourceMappingURL=heading-atx.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"heading-atx.js","sources":["../../../../../../../node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/heading-atx.js"],"sourcesContent":["/**\n * @typedef {import('micromark-util-types').Construct} Construct\n * @typedef {import('micromark-util-types').Resolver} Resolver\n * @typedef {import('micromark-util-types').State} State\n * @typedef {import('micromark-util-types').Token} Token\n * @typedef {import('micromark-util-types').TokenizeContext} TokenizeContext\n * @typedef {import('micromark-util-types').Tokenizer} Tokenizer\n */\n\nimport {factorySpace} from 'micromark-factory-space'\nimport {\n markdownLineEnding,\n markdownLineEndingOrSpace,\n markdownSpace\n} from 'micromark-util-character'\nimport {splice} from 'micromark-util-chunked'\n/** @type {Construct} */\nexport const headingAtx = {\n name: 'headingAtx',\n tokenize: tokenizeHeadingAtx,\n resolve: resolveHeadingAtx\n}\n\n/** @type {Resolver} */\nfunction resolveHeadingAtx(events, context) {\n let contentEnd = events.length - 2\n let contentStart = 3\n /** @type {Token} */\n let content\n /** @type {Token} */\n let text\n\n // Prefix whitespace, part of the opening.\n if (events[contentStart][1].type === 'whitespace') {\n contentStart += 2\n }\n\n // Suffix whitespace, part of the closing.\n if (\n contentEnd - 2 > contentStart &&\n events[contentEnd][1].type === 'whitespace'\n ) {\n contentEnd -= 2\n }\n if (\n events[contentEnd][1].type === 'atxHeadingSequence' &&\n (contentStart === contentEnd - 1 ||\n (contentEnd - 4 > contentStart &&\n events[contentEnd - 2][1].type === 'whitespace'))\n ) {\n contentEnd -= contentStart + 1 === contentEnd ? 
2 : 4\n }\n if (contentEnd > contentStart) {\n content = {\n type: 'atxHeadingText',\n start: events[contentStart][1].start,\n end: events[contentEnd][1].end\n }\n text = {\n type: 'chunkText',\n start: events[contentStart][1].start,\n end: events[contentEnd][1].end,\n contentType: 'text'\n }\n splice(events, contentStart, contentEnd - contentStart + 1, [\n ['enter', content, context],\n ['enter', text, context],\n ['exit', text, context],\n ['exit', content, context]\n ])\n }\n return events\n}\n\n/**\n * @this {TokenizeContext}\n * @type {Tokenizer}\n */\nfunction tokenizeHeadingAtx(effects, ok, nok) {\n let size = 0\n return start\n\n /**\n * Start of a heading (atx).\n *\n * ```markdown\n * > | ## aa\n * ^\n * ```\n *\n * @type {State}\n */\n function start(code) {\n // To do: parse indent like `markdown-rs`.\n effects.enter('atxHeading')\n return before(code)\n }\n\n /**\n * After optional whitespace, at `#`.\n *\n * ```markdown\n * > | ## aa\n * ^\n * ```\n *\n * @type {State}\n */\n function before(code) {\n effects.enter('atxHeadingSequence')\n return sequenceOpen(code)\n }\n\n /**\n * In opening sequence.\n *\n * ```markdown\n * > | ## aa\n * ^\n * ```\n *\n * @type {State}\n */\n function sequenceOpen(code) {\n if (code === 35 && size++ < 6) {\n effects.consume(code)\n return sequenceOpen\n }\n\n // Always at least one `#`.\n if (code === null || markdownLineEndingOrSpace(code)) {\n effects.exit('atxHeadingSequence')\n return atBreak(code)\n }\n return nok(code)\n }\n\n /**\n * After something, before something else.\n *\n * ```markdown\n * > | ## aa\n * ^\n * ```\n *\n * @type {State}\n */\n function atBreak(code) {\n if (code === 35) {\n effects.enter('atxHeadingSequence')\n return sequenceFurther(code)\n }\n if (code === null || markdownLineEnding(code)) {\n effects.exit('atxHeading')\n // To do: interrupt like `markdown-rs`.\n // // Feel free to interrupt.\n // tokenizer.interrupt = false\n return ok(code)\n }\n if (markdownSpace(code)) {\n return 
factorySpace(effects, atBreak, 'whitespace')(code)\n }\n\n // To do: generate `data` tokens, add the `text` token later.\n // Needs edit map, see: `markdown.rs`.\n effects.enter('atxHeadingText')\n return data(code)\n }\n\n /**\n * In further sequence (after whitespace).\n *\n * Could be normal “visible” hashes in the heading or a final sequence.\n *\n * ```markdown\n * > | ## aa ##\n * ^\n * ```\n *\n * @type {State}\n */\n function sequenceFurther(code) {\n if (code === 35) {\n effects.consume(code)\n return sequenceFurther\n }\n effects.exit('atxHeadingSequence')\n return atBreak(code)\n }\n\n /**\n * In text.\n *\n * ```markdown\n * > | ## aa\n * ^\n * ```\n *\n * @type {State}\n */\n function data(code) {\n if (code === null || code === 35 || markdownLineEndingOrSpace(code)) {\n effects.exit('atxHeadingText')\n return atBreak(code)\n }\n effects.consume(code)\n return data\n }\n}\n"],"names":["headingAtx","name","tokenize","effects","ok","nok","size","code","enter","sequenceOpen","before","consume","markdownLineEndingOrSpace","exit","atBreak","sequenceFurther","markdownLineEnding","markdownSpace","factorySpace","data","resolve","events","context","content","text","contentEnd","length","contentStart","type","start","end","contentType","splice"],"mappings":"oOAiBY,MAACA,EAAa,CACxBC,KAAM,aACNC,SA2DF,SAA4BC,EAASC,EAAIC,GACvC,IAAIC,EAAO,EACX,OAYA,SAAeC,GAGb,OADAJ,EAAQK,MAAM,cAchB,SAAgBD,GAEd,OADAJ,EAAQK,MAAM,sBACPC,EAAaF,EACrB,CAhBQG,CAAOH,EACf,EA2BD,SAASE,EAAaF,GACpB,OAAa,KAATA,GAAeD,IAAS,GAC1BH,EAAQQ,QAAQJ,GACTE,GAII,OAATF,GAAiBK,EAAyBA,0BAACL,IAC7CJ,EAAQU,KAAK,sBACNC,EAAQP,IAEVF,EAAIE,EACZ,CAYD,SAASO,EAAQP,GACf,OAAa,KAATA,GACFJ,EAAQK,MAAM,sBACPO,EAAgBR,IAEZ,OAATA,GAAiBS,EAAkBA,mBAACT,IACtCJ,EAAQU,KAAK,cAINT,EAAGG,IAERU,EAAAA,cAAcV,GACTW,EAAYA,aAACf,EAASW,EAAS,aAA/BI,CAA6CX,IAKtDJ,EAAQK,MAAM,kBACPW,EAAKZ,GACb,CAcD,SAASQ,EAAgBR,GACvB,OAAa,KAATA,GACFJ,EAAQQ,QAAQJ,GACTQ,IAETZ,EAAQU,KAAK,sBACNC,EAAQP,GAChB,CAYD,SAASY,EAAKZ,GACZ,OAAa,OAATA,GAA0B,KAATA,GAAeK,EAAAA,0BAA0
BL,IAC5DJ,EAAQU,KAAK,kBACNC,EAAQP,KAEjBJ,EAAQQ,QAAQJ,GACTY,EACR,CACH,EA5LEC,QAIF,SAA2BC,EAAQC,GACjC,IAGIC,EAEAC,EALAC,EAAaJ,EAAOK,OAAS,EAC7BC,EAAe,EAOkB,eAAjCN,EAAOM,GAAc,GAAGC,OAC1BD,GAAgB,GAKhBF,EAAa,EAAIE,GACc,eAA/BN,EAAOI,GAAY,GAAGG,OAEtBH,GAAc,GAGiB,uBAA/BJ,EAAOI,GAAY,GAAGG,OACrBD,IAAiBF,EAAa,GAC5BA,EAAa,EAAIE,GACmB,eAAnCN,EAAOI,EAAa,GAAG,GAAGG,QAE9BH,GAAcE,EAAe,IAAMF,EAAa,EAAI,GAElDA,EAAaE,IACfJ,EAAU,CACRK,KAAM,iBACNC,MAAOR,EAAOM,GAAc,GAAGE,MAC/BC,IAAKT,EAAOI,GAAY,GAAGK,KAE7BN,EAAO,CACLI,KAAM,YACNC,MAAOR,EAAOM,GAAc,GAAGE,MAC/BC,IAAKT,EAAOI,GAAY,GAAGK,IAC3BC,YAAa,QAEfC,EAAAA,OAAOX,EAAQM,EAAcF,EAAaE,EAAe,EAAG,CAC1D,CAAC,QAASJ,EAASD,GACnB,CAAC,QAASE,EAAMF,GAChB,CAAC,OAAQE,EAAMF,GACf,CAAC,OAAQC,EAASD,MAGtB,OAAOD,CACT"}
@@ -0,0 +1,2 @@
1
+ "use strict";Object.defineProperty(exports,"__esModule",{value:!0});var n=require("../../micromark-util-character/index.js"),e=require("../../micromark-util-html-tag-name/index.js"),t=require("./blank-line.js");const r={name:"htmlFlow",tokenize:function(t,r,i){const c=this;let m,s,a,l,f;return function(n){return function(n){return t.enter("htmlFlow"),t.enter("htmlFlowData"),t.consume(n),d}(n)};function d(e){return 33===e?(t.consume(e),p):47===e?(t.consume(e),s=!0,k):63===e?(t.consume(e),m=3,c.interrupt?r:_):n.asciiAlpha(e)?(t.consume(e),a=String.fromCharCode(e),g):i(e)}function p(e){return 45===e?(t.consume(e),m=2,h):91===e?(t.consume(e),m=5,l=0,w):n.asciiAlpha(e)?(t.consume(e),m=4,c.interrupt?r:_):i(e)}function h(n){return 45===n?(t.consume(n),c.interrupt?r:_):i(n)}function w(n){const e="CDATA[";return n===e.charCodeAt(l++)?(t.consume(n),6===l?c.interrupt?r:b:w):i(n)}function k(e){return n.asciiAlpha(e)?(t.consume(e),a=String.fromCharCode(e),g):i(e)}function g(u){if(null===u||47===u||62===u||n.markdownLineEndingOrSpace(u)){const n=47===u,o=a.toLowerCase();return n||s||!e.htmlRawNames.includes(o)?e.htmlBlockNames.includes(a.toLowerCase())?(m=6,n?(t.consume(u),E):c.interrupt?r(u):b(u)):(m=7,c.interrupt&&!c.parser.lazy[c.now().line]?i(u):s?C(u):L(u)):(m=1,c.interrupt?r(u):b(u))}return 45===u||n.asciiAlphanumeric(u)?(t.consume(u),a+=String.fromCharCode(u),g):i(u)}function E(n){return 62===n?(t.consume(n),c.interrupt?r:b):i(n)}function C(e){return n.markdownSpace(e)?(t.consume(e),C):z(e)}function L(e){return 47===e?(t.consume(e),z):58===e||95===e||n.asciiAlpha(e)?(t.consume(e),x):n.markdownSpace(e)?(t.consume(e),L):z(e)}function x(e){return 45===e||46===e||58===e||95===e||n.asciiAlphanumeric(e)?(t.consume(e),x):S(e)}function S(e){return 61===e?(t.consume(e),A):n.markdownSpace(e)?(t.consume(e),S):L(e)}function A(e){return null===e||60===e||61===e||62===e||96===e?i(e):34===e||39===e?(t.consume(e),f=e,F):n.markdownSpace(e)?(t.consume(e),A):D(e)}function F(e){return 
e===f?(t.consume(e),f=null,y):null===e||n.markdownLineEnding(e)?i(e):(t.consume(e),F)}function D(e){return null===e||34===e||39===e||47===e||60===e||61===e||62===e||96===e||n.markdownLineEndingOrSpace(e)?S(e):(t.consume(e),D)}function y(e){return 47===e||62===e||n.markdownSpace(e)?L(e):i(e)}function z(n){return 62===n?(t.consume(n),j):i(n)}function j(e){return null===e||n.markdownLineEnding(e)?b(e):n.markdownSpace(e)?(t.consume(e),j):i(e)}function b(e){return 45===e&&2===m?(t.consume(e),O):60===e&&1===m?(t.consume(e),P):62===e&&4===m?(t.consume(e),B):63===e&&3===m?(t.consume(e),_):93===e&&5===m?(t.consume(e),T):!n.markdownLineEnding(e)||6!==m&&7!==m?null===e||n.markdownLineEnding(e)?(t.exit("htmlFlowData"),q(e)):(t.consume(e),b):(t.exit("htmlFlowData"),t.check(u,M,q)(e))}function q(n){return t.check(o,v,M)(n)}function v(n){return t.enter("lineEnding"),t.consume(n),t.exit("lineEnding"),N}function N(e){return null===e||n.markdownLineEnding(e)?q(e):(t.enter("htmlFlowData"),b(e))}function O(n){return 45===n?(t.consume(n),_):b(n)}function P(n){return 47===n?(t.consume(n),a="",R):b(n)}function R(r){if(62===r){const n=a.toLowerCase();return e.htmlRawNames.includes(n)?(t.consume(r),B):b(r)}return n.asciiAlpha(r)&&a.length<8?(t.consume(r),a+=String.fromCharCode(r),R):b(r)}function T(n){return 93===n?(t.consume(n),_):b(n)}function _(n){return 62===n?(t.consume(n),B):45===n&&2===m?(t.consume(n),_):b(n)}function B(e){return null===e||n.markdownLineEnding(e)?(t.exit("htmlFlowData"),M(e)):(t.consume(e),B)}function M(n){return t.exit("htmlFlow"),r(n)}},resolveTo:function(n){let e=n.length;for(;e--&&("enter"!==n[e][0]||"htmlFlow"!==n[e][1].type););e>1&&"linePrefix"===n[e-2][1].type&&(n[e][1].start=n[e-2][1].start,n[e+1][1].start=n[e-2][1].start,n.splice(e-2,2));return n},concrete:!0},u={tokenize:function(n,e,r){return function(u){return n.enter("lineEnding"),n.consume(u),n.exit("lineEnding"),n.attempt(t.blankLine,e,r)}},partial:!0},o={tokenize:function(e,t,r){const u=this;return 
function(t){if(n.markdownLineEnding(t))return e.enter("lineEnding"),e.consume(t),e.exit("lineEnding"),o;return r(t)};function o(n){return u.parser.lazy[u.now().line]?r(n):t(n)}},partial:!0};exports.htmlFlow=r;
2
+ //# sourceMappingURL=html-flow.js.map