@e-llm-studio/instant-learning 0.0.150 → 0.0.152

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (769)
  1. package/dist/cjs/features/IL-OTJ/ILOTJ.js +1 -1
  2. package/dist/cjs/features/IL-OTJ/_components/ChatComponents/ChatComponent.js +1 -1
  3. package/dist/cjs/features/IL-OTJ/_components/CitationLink.js +1 -1
  4. package/dist/cjs/features/IL-OTJ/_components/MarkdownComponents/MarkdownSelect.js +1 -1
  5. package/dist/cjs/features/IL-OTJ/_components/MessageRendering/ClickableChatComponents.js +1 -1
  6. package/dist/cjs/features/IL-OTJ/_components/MessageRendering/MarkdownElements.js +1 -1
  7. package/dist/cjs/features/IL-OTJ/_components/RuleValidationCard.js +1 -1
  8. package/dist/cjs/features/IL-OTJ/_components/_svg/ButtonCognitiveIcon.js +2 -0
  9. package/dist/cjs/features/IL-OTJ/_components/_svg/ButtonCognitiveIcon.js.map +1 -0
  10. package/dist/cjs/node_modules/lucide-react/dist/esm/icons/send-horizontal.js +1 -1
  11. package/dist/cjs/node_modules/lucide-react/dist/esm/icons/send-horizontal.js.map +1 -1
  12. package/dist/features/IL-OTJ/ILOTJ.js +1 -1
  13. package/dist/features/IL-OTJ/_components/ChatComponents/ChatComponent.js +1 -1
  14. package/dist/features/IL-OTJ/_components/CitationLink.js +1 -1
  15. package/dist/features/IL-OTJ/_components/MarkdownComponents/MarkdownSelect.js +1 -1
  16. package/dist/features/IL-OTJ/_components/MessageRendering/ClickableChatComponents.js +1 -1
  17. package/dist/features/IL-OTJ/_components/MessageRendering/MarkdownElements.js +1 -1
  18. package/dist/features/IL-OTJ/_components/RuleValidationCard.js +1 -1
  19. package/dist/features/IL-OTJ/_components/_svg/ButtonCognitiveIcon.js +2 -0
  20. package/dist/features/IL-OTJ/_components/_svg/ButtonCognitiveIcon.js.map +1 -0
  21. package/dist/node_modules/lucide-react/dist/esm/icons/send-horizontal.js +1 -1
  22. package/dist/node_modules/lucide-react/dist/esm/icons/send-horizontal.js.map +1 -1
  23. package/dist/types/src/features/IL-OTJ/_components/ChatComponents/ChatComponent.d.ts +1 -1
  24. package/dist/types/src/features/IL-OTJ/_components/ChatComponents/ChatComponent.d.ts.map +1 -1
  25. package/dist/types/src/features/IL-OTJ/_components/CitationLink.d.ts.map +1 -1
  26. package/dist/types/src/features/IL-OTJ/_components/MarkdownComponents/MarkdownSelect.d.ts.map +1 -1
  27. package/dist/types/src/features/IL-OTJ/_components/MessageRendering/ClickableChatComponents.d.ts.map +1 -1
  28. package/dist/types/src/features/IL-OTJ/_components/MessageRendering/MarkdownElements.d.ts.map +1 -1
  29. package/dist/types/src/features/IL-OTJ/_components/RuleValidationCard.d.ts +7 -16
  30. package/dist/types/src/features/IL-OTJ/_components/RuleValidationCard.d.ts.map +1 -1
  31. package/dist/types/src/features/IL-OTJ/_components/_svg/ButtonCognitiveIcon.d.ts +2 -0
  32. package/dist/types/src/features/IL-OTJ/_components/_svg/ButtonCognitiveIcon.d.ts.map +1 -0
  33. package/package.json +1 -1
  34. package/dist/cjs/features/DocumentLearning/_components/ChatComponent.css.js +0 -2
  35. package/dist/cjs/features/DocumentLearning/_components/ChatComponent.css.js.map +0 -1
  36. package/dist/cjs/features/IL-OTJ/_components/AccordionSection.js +0 -2
  37. package/dist/cjs/features/IL-OTJ/_components/AccordionSection.js.map +0 -1
  38. package/dist/cjs/features/IL-OTJ/_components/ChatComponent.css.js +0 -2
  39. package/dist/cjs/features/IL-OTJ/_components/ChatComponent.css.js.map +0 -1
  40. package/dist/cjs/features/IL-OTJ/_components/ChatComponent.js +0 -2
  41. package/dist/cjs/features/IL-OTJ/_components/ChatComponent.js.map +0 -1
  42. package/dist/cjs/features/IL-OTJ/_components/ILComponents.js +0 -2
  43. package/dist/cjs/features/IL-OTJ/_components/ILComponents.js.map +0 -1
  44. package/dist/cjs/features/IL-OTJ/_components/ILPopup.css.js +0 -2
  45. package/dist/cjs/features/IL-OTJ/_components/ILPopup.css.js.map +0 -1
  46. package/dist/cjs/features/IL-OTJ/_components/MessageRendering/CustomComponents.js +0 -2
  47. package/dist/cjs/features/IL-OTJ/_components/MessageRendering/CustomComponents.js.map +0 -1
  48. package/dist/cjs/features/IL-OTJ/_components/MessageRendering.js +0 -2
  49. package/dist/cjs/features/IL-OTJ/_components/MessageRendering.js.map +0 -1
  50. package/dist/cjs/features/IL-OTJ/_components/_svg/ResizeChat.svg.js +0 -2
  51. package/dist/cjs/features/IL-OTJ/_components/_svg/ResizeChat.svg.js.map +0 -1
  52. package/dist/cjs/features/IL-OTJ/_components/atoms/Button.js +0 -2
  53. package/dist/cjs/features/IL-OTJ/_components/atoms/Button.js.map +0 -1
  54. package/dist/cjs/features/IL-OTJ/_components/atoms/Dropdown.js +0 -2
  55. package/dist/cjs/features/IL-OTJ/_components/atoms/Dropdown.js.map +0 -1
  56. package/dist/cjs/features/IL-OTJ/_components/atoms/Textarea.js +0 -2
  57. package/dist/cjs/features/IL-OTJ/_components/atoms/Textarea.js.map +0 -1
  58. package/dist/cjs/features/IL-OTJ/_components/atoms/Toggle.js +0 -2
  59. package/dist/cjs/features/IL-OTJ/_components/atoms/Toggle.js.map +0 -1
  60. package/dist/cjs/features/IL-OTJ/_components/molecules/ConditionList.js +0 -2
  61. package/dist/cjs/features/IL-OTJ/_components/molecules/ConditionList.js.map +0 -1
  62. package/dist/cjs/features/IL-OTJ/_components/molecules/SectionWrapper.js +0 -2
  63. package/dist/cjs/features/IL-OTJ/_components/molecules/SectionWrapper.js.map +0 -1
  64. package/dist/cjs/features/IL-OTJ/_components/molecules/SeparationRow.js +0 -2
  65. package/dist/cjs/features/IL-OTJ/_components/molecules/SeparationRow.js.map +0 -1
  66. package/dist/cjs/features/IL-OTJ/_components/organisms/ConditionSection.js +0 -2
  67. package/dist/cjs/features/IL-OTJ/_components/organisms/ConditionSection.js.map +0 -1
  68. package/dist/cjs/features/IL-OTJ/_components/organisms/ConfigSection.js +0 -2
  69. package/dist/cjs/features/IL-OTJ/_components/organisms/ConfigSection.js.map +0 -1
  70. package/dist/cjs/features/IL-OTJ/_components/organisms/ScopeSection.js +0 -2
  71. package/dist/cjs/features/IL-OTJ/_components/organisms/ScopeSection.js.map +0 -1
  72. package/dist/cjs/features/IL-OTJ/_components/organisms/TextSection.js +0 -2
  73. package/dist/cjs/features/IL-OTJ/_components/organisms/TextSection.js.map +0 -1
  74. package/dist/cjs/features/IL-OTJ/_components/templates/AudactyTemplate.js +0 -2
  75. package/dist/cjs/features/IL-OTJ/_components/templates/AudactyTemplate.js.map +0 -1
  76. package/dist/cjs/features/IL-OTJ/_components/templates/Testing.js +0 -2
  77. package/dist/cjs/features/IL-OTJ/_components/templates/Testing.js.map +0 -1
  78. package/dist/cjs/features/IL-OTJ/_components/templates/TrafficManagerTemplate.js +0 -2
  79. package/dist/cjs/features/IL-OTJ/_components/templates/TrafficManagerTemplate.js.map +0 -1
  80. package/dist/cjs/features/IL-OTJ/_components/templates/templatesMap.js +0 -2
  81. package/dist/cjs/features/IL-OTJ/_components/templates/templatesMap.js.map +0 -1
  82. package/dist/cjs/features/IL-OTJ/hooks/useHeaderObserver.js +0 -2
  83. package/dist/cjs/features/IL-OTJ/hooks/useHeaderObserver.js.map +0 -1
  84. package/dist/cjs/features/InstantLearning/_components/ChatComponent.css.js +0 -2
  85. package/dist/cjs/features/InstantLearning/_components/ChatComponent.css.js.map +0 -1
  86. package/dist/cjs/features/InstantLearning/_components/TooltipComponent.js +0 -2
  87. package/dist/cjs/features/InstantLearning/_components/TooltipComponent.js.map +0 -1
  88. package/dist/cjs/node_modules/@e-llm-studio/citation/dist/node_modules/clsx/dist/clsx.m.js +0 -2
  89. package/dist/cjs/node_modules/@e-llm-studio/citation/dist/node_modules/clsx/dist/clsx.m.js.map +0 -1
  90. package/dist/cjs/node_modules/@e-llm-studio/streaming-response/node_modules/ws/browser.js +0 -2
  91. package/dist/cjs/node_modules/@e-llm-studio/streaming-response/node_modules/ws/browser.js.map +0 -1
  92. package/dist/cjs/node_modules/@emotion/serialize/node_modules/@emotion/hash/dist/emotion-hash.esm.js +0 -2
  93. package/dist/cjs/node_modules/@emotion/serialize/node_modules/@emotion/hash/dist/emotion-hash.esm.js.map +0 -1
  94. package/dist/cjs/node_modules/hast-util-from-parse5/node_modules/comma-separated-tokens/index.js +0 -2
  95. package/dist/cjs/node_modules/hast-util-from-parse5/node_modules/comma-separated-tokens/index.js.map +0 -1
  96. package/dist/cjs/node_modules/hast-util-from-parse5/node_modules/space-separated-tokens/index.js +0 -2
  97. package/dist/cjs/node_modules/hast-util-from-parse5/node_modules/space-separated-tokens/index.js.map +0 -1
  98. package/dist/cjs/node_modules/hast-util-raw/node_modules/unist-util-position/lib/index.js +0 -2
  99. package/dist/cjs/node_modules/hast-util-raw/node_modules/unist-util-position/lib/index.js.map +0 -1
  100. package/dist/cjs/node_modules/hast-util-to-parse5/node_modules/comma-separated-tokens/index.js +0 -2
  101. package/dist/cjs/node_modules/hast-util-to-parse5/node_modules/comma-separated-tokens/index.js.map +0 -1
  102. package/dist/cjs/node_modules/hast-util-to-parse5/node_modules/space-separated-tokens/index.js +0 -2
  103. package/dist/cjs/node_modules/hast-util-to-parse5/node_modules/space-separated-tokens/index.js.map +0 -1
  104. package/dist/cjs/node_modules/mdast-util-definitions/node_modules/unist-util-is/lib/index.js +0 -2
  105. package/dist/cjs/node_modules/mdast-util-definitions/node_modules/unist-util-is/lib/index.js.map +0 -1
  106. package/dist/cjs/node_modules/mdast-util-definitions/node_modules/unist-util-visit/lib/index.js +0 -2
  107. package/dist/cjs/node_modules/mdast-util-definitions/node_modules/unist-util-visit/lib/index.js.map +0 -1
  108. package/dist/cjs/node_modules/mdast-util-definitions/node_modules/unist-util-visit-parents/lib/color.browser.js +0 -2
  109. package/dist/cjs/node_modules/mdast-util-definitions/node_modules/unist-util-visit-parents/lib/color.browser.js.map +0 -1
  110. package/dist/cjs/node_modules/mdast-util-definitions/node_modules/unist-util-visit-parents/lib/index.js +0 -2
  111. package/dist/cjs/node_modules/mdast-util-definitions/node_modules/unist-util-visit-parents/lib/index.js.map +0 -1
  112. package/dist/cjs/node_modules/mdast-util-find-and-replace/node_modules/unist-util-is/lib/index.js +0 -2
  113. package/dist/cjs/node_modules/mdast-util-find-and-replace/node_modules/unist-util-is/lib/index.js.map +0 -1
  114. package/dist/cjs/node_modules/mdast-util-find-and-replace/node_modules/unist-util-visit-parents/lib/color.browser.js +0 -2
  115. package/dist/cjs/node_modules/mdast-util-find-and-replace/node_modules/unist-util-visit-parents/lib/color.browser.js.map +0 -1
  116. package/dist/cjs/node_modules/mdast-util-find-and-replace/node_modules/unist-util-visit-parents/lib/index.js +0 -2
  117. package/dist/cjs/node_modules/mdast-util-find-and-replace/node_modules/unist-util-visit-parents/lib/index.js.map +0 -1
  118. package/dist/cjs/node_modules/mdast-util-from-markdown/node_modules/unist-util-stringify-position/lib/index.js +0 -2
  119. package/dist/cjs/node_modules/mdast-util-from-markdown/node_modules/unist-util-stringify-position/lib/index.js.map +0 -1
  120. package/dist/cjs/node_modules/mdast-util-to-hast/lib/footer.js +0 -2
  121. package/dist/cjs/node_modules/mdast-util-to-hast/lib/footer.js.map +0 -1
  122. package/dist/cjs/node_modules/mdast-util-to-hast/lib/handlers/blockquote.js +0 -2
  123. package/dist/cjs/node_modules/mdast-util-to-hast/lib/handlers/blockquote.js.map +0 -1
  124. package/dist/cjs/node_modules/mdast-util-to-hast/lib/handlers/break.js +0 -2
  125. package/dist/cjs/node_modules/mdast-util-to-hast/lib/handlers/break.js.map +0 -1
  126. package/dist/cjs/node_modules/mdast-util-to-hast/lib/handlers/code.js +0 -2
  127. package/dist/cjs/node_modules/mdast-util-to-hast/lib/handlers/code.js.map +0 -1
  128. package/dist/cjs/node_modules/mdast-util-to-hast/lib/handlers/delete.js +0 -2
  129. package/dist/cjs/node_modules/mdast-util-to-hast/lib/handlers/delete.js.map +0 -1
  130. package/dist/cjs/node_modules/mdast-util-to-hast/lib/handlers/emphasis.js +0 -2
  131. package/dist/cjs/node_modules/mdast-util-to-hast/lib/handlers/emphasis.js.map +0 -1
  132. package/dist/cjs/node_modules/mdast-util-to-hast/lib/handlers/footnote-reference.js +0 -2
  133. package/dist/cjs/node_modules/mdast-util-to-hast/lib/handlers/footnote-reference.js.map +0 -1
  134. package/dist/cjs/node_modules/mdast-util-to-hast/lib/handlers/footnote.js +0 -2
  135. package/dist/cjs/node_modules/mdast-util-to-hast/lib/handlers/footnote.js.map +0 -1
  136. package/dist/cjs/node_modules/mdast-util-to-hast/lib/handlers/heading.js +0 -2
  137. package/dist/cjs/node_modules/mdast-util-to-hast/lib/handlers/heading.js.map +0 -1
  138. package/dist/cjs/node_modules/mdast-util-to-hast/lib/handlers/html.js +0 -2
  139. package/dist/cjs/node_modules/mdast-util-to-hast/lib/handlers/html.js.map +0 -1
  140. package/dist/cjs/node_modules/mdast-util-to-hast/lib/handlers/image-reference.js +0 -2
  141. package/dist/cjs/node_modules/mdast-util-to-hast/lib/handlers/image-reference.js.map +0 -1
  142. package/dist/cjs/node_modules/mdast-util-to-hast/lib/handlers/image.js +0 -2
  143. package/dist/cjs/node_modules/mdast-util-to-hast/lib/handlers/image.js.map +0 -1
  144. package/dist/cjs/node_modules/mdast-util-to-hast/lib/handlers/index.js +0 -2
  145. package/dist/cjs/node_modules/mdast-util-to-hast/lib/handlers/index.js.map +0 -1
  146. package/dist/cjs/node_modules/mdast-util-to-hast/lib/handlers/inline-code.js +0 -2
  147. package/dist/cjs/node_modules/mdast-util-to-hast/lib/handlers/inline-code.js.map +0 -1
  148. package/dist/cjs/node_modules/mdast-util-to-hast/lib/handlers/link-reference.js +0 -2
  149. package/dist/cjs/node_modules/mdast-util-to-hast/lib/handlers/link-reference.js.map +0 -1
  150. package/dist/cjs/node_modules/mdast-util-to-hast/lib/handlers/link.js +0 -2
  151. package/dist/cjs/node_modules/mdast-util-to-hast/lib/handlers/link.js.map +0 -1
  152. package/dist/cjs/node_modules/mdast-util-to-hast/lib/handlers/list-item.js +0 -2
  153. package/dist/cjs/node_modules/mdast-util-to-hast/lib/handlers/list-item.js.map +0 -1
  154. package/dist/cjs/node_modules/mdast-util-to-hast/lib/handlers/list.js +0 -2
  155. package/dist/cjs/node_modules/mdast-util-to-hast/lib/handlers/list.js.map +0 -1
  156. package/dist/cjs/node_modules/mdast-util-to-hast/lib/handlers/paragraph.js +0 -2
  157. package/dist/cjs/node_modules/mdast-util-to-hast/lib/handlers/paragraph.js.map +0 -1
  158. package/dist/cjs/node_modules/mdast-util-to-hast/lib/handlers/root.js +0 -2
  159. package/dist/cjs/node_modules/mdast-util-to-hast/lib/handlers/root.js.map +0 -1
  160. package/dist/cjs/node_modules/mdast-util-to-hast/lib/handlers/strong.js +0 -2
  161. package/dist/cjs/node_modules/mdast-util-to-hast/lib/handlers/strong.js.map +0 -1
  162. package/dist/cjs/node_modules/mdast-util-to-hast/lib/handlers/table-cell.js +0 -2
  163. package/dist/cjs/node_modules/mdast-util-to-hast/lib/handlers/table-cell.js.map +0 -1
  164. package/dist/cjs/node_modules/mdast-util-to-hast/lib/handlers/table-row.js +0 -2
  165. package/dist/cjs/node_modules/mdast-util-to-hast/lib/handlers/table-row.js.map +0 -1
  166. package/dist/cjs/node_modules/mdast-util-to-hast/lib/handlers/table.js +0 -2
  167. package/dist/cjs/node_modules/mdast-util-to-hast/lib/handlers/table.js.map +0 -1
  168. package/dist/cjs/node_modules/mdast-util-to-hast/lib/handlers/text.js +0 -2
  169. package/dist/cjs/node_modules/mdast-util-to-hast/lib/handlers/text.js.map +0 -1
  170. package/dist/cjs/node_modules/mdast-util-to-hast/lib/handlers/thematic-break.js +0 -2
  171. package/dist/cjs/node_modules/mdast-util-to-hast/lib/handlers/thematic-break.js.map +0 -1
  172. package/dist/cjs/node_modules/mdast-util-to-hast/lib/index.js +0 -2
  173. package/dist/cjs/node_modules/mdast-util-to-hast/lib/index.js.map +0 -1
  174. package/dist/cjs/node_modules/mdast-util-to-hast/lib/revert.js +0 -2
  175. package/dist/cjs/node_modules/mdast-util-to-hast/lib/revert.js.map +0 -1
  176. package/dist/cjs/node_modules/mdast-util-to-hast/lib/state.js +0 -2
  177. package/dist/cjs/node_modules/mdast-util-to-hast/lib/state.js.map +0 -1
  178. package/dist/cjs/node_modules/mdast-util-to-hast/node_modules/micromark-util-character/index.js +0 -2
  179. package/dist/cjs/node_modules/mdast-util-to-hast/node_modules/micromark-util-character/index.js.map +0 -1
  180. package/dist/cjs/node_modules/mdast-util-to-hast/node_modules/micromark-util-sanitize-uri/index.js +0 -2
  181. package/dist/cjs/node_modules/mdast-util-to-hast/node_modules/micromark-util-sanitize-uri/index.js.map +0 -1
  182. package/dist/cjs/node_modules/mdast-util-to-hast/node_modules/unist-util-is/lib/index.js +0 -2
  183. package/dist/cjs/node_modules/mdast-util-to-hast/node_modules/unist-util-is/lib/index.js.map +0 -1
  184. package/dist/cjs/node_modules/mdast-util-to-hast/node_modules/unist-util-visit/lib/index.js +0 -2
  185. package/dist/cjs/node_modules/mdast-util-to-hast/node_modules/unist-util-visit/lib/index.js.map +0 -1
  186. package/dist/cjs/node_modules/mdast-util-to-hast/node_modules/unist-util-visit-parents/lib/color.browser.js +0 -2
  187. package/dist/cjs/node_modules/mdast-util-to-hast/node_modules/unist-util-visit-parents/lib/color.browser.js.map +0 -1
  188. package/dist/cjs/node_modules/mdast-util-to-hast/node_modules/unist-util-visit-parents/lib/index.js +0 -2
  189. package/dist/cjs/node_modules/mdast-util-to-hast/node_modules/unist-util-visit-parents/lib/index.js.map +0 -1
  190. package/dist/cjs/node_modules/react-hot-toast/node_modules/goober/dist/goober.modern.js +0 -2
  191. package/dist/cjs/node_modules/react-hot-toast/node_modules/goober/dist/goober.modern.js.map +0 -1
  192. package/dist/cjs/node_modules/react-markdown/node_modules/comma-separated-tokens/index.js +0 -2
  193. package/dist/cjs/node_modules/react-markdown/node_modules/comma-separated-tokens/index.js.map +0 -1
  194. package/dist/cjs/node_modules/react-markdown/node_modules/is-plain-obj/index.js +0 -2
  195. package/dist/cjs/node_modules/react-markdown/node_modules/is-plain-obj/index.js.map +0 -1
  196. package/dist/cjs/node_modules/react-markdown/node_modules/mdast-util-from-markdown/lib/index.js +0 -2
  197. package/dist/cjs/node_modules/react-markdown/node_modules/mdast-util-from-markdown/lib/index.js.map +0 -1
  198. package/dist/cjs/node_modules/react-markdown/node_modules/mdast-util-to-string/lib/index.js +0 -2
  199. package/dist/cjs/node_modules/react-markdown/node_modules/mdast-util-to-string/lib/index.js.map +0 -1
  200. package/dist/cjs/node_modules/react-markdown/node_modules/micromark/lib/constructs.js +0 -2
  201. package/dist/cjs/node_modules/react-markdown/node_modules/micromark/lib/constructs.js.map +0 -1
  202. package/dist/cjs/node_modules/react-markdown/node_modules/micromark/lib/create-tokenizer.js +0 -2
  203. package/dist/cjs/node_modules/react-markdown/node_modules/micromark/lib/create-tokenizer.js.map +0 -1
  204. package/dist/cjs/node_modules/react-markdown/node_modules/micromark/lib/initialize/content.js +0 -2
  205. package/dist/cjs/node_modules/react-markdown/node_modules/micromark/lib/initialize/content.js.map +0 -1
  206. package/dist/cjs/node_modules/react-markdown/node_modules/micromark/lib/initialize/document.js +0 -2
  207. package/dist/cjs/node_modules/react-markdown/node_modules/micromark/lib/initialize/document.js.map +0 -1
  208. package/dist/cjs/node_modules/react-markdown/node_modules/micromark/lib/initialize/flow.js +0 -2
  209. package/dist/cjs/node_modules/react-markdown/node_modules/micromark/lib/initialize/flow.js.map +0 -1
  210. package/dist/cjs/node_modules/react-markdown/node_modules/micromark/lib/initialize/text.js +0 -2
  211. package/dist/cjs/node_modules/react-markdown/node_modules/micromark/lib/initialize/text.js.map +0 -1
  212. package/dist/cjs/node_modules/react-markdown/node_modules/micromark/lib/parse.js +0 -2
  213. package/dist/cjs/node_modules/react-markdown/node_modules/micromark/lib/parse.js.map +0 -1
  214. package/dist/cjs/node_modules/react-markdown/node_modules/micromark/lib/postprocess.js +0 -2
  215. package/dist/cjs/node_modules/react-markdown/node_modules/micromark/lib/postprocess.js.map +0 -1
  216. package/dist/cjs/node_modules/react-markdown/node_modules/micromark/lib/preprocess.js +0 -2
  217. package/dist/cjs/node_modules/react-markdown/node_modules/micromark/lib/preprocess.js.map +0 -1
  218. package/dist/cjs/node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/attention.js +0 -2
  219. package/dist/cjs/node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/attention.js.map +0 -1
  220. package/dist/cjs/node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/autolink.js +0 -2
  221. package/dist/cjs/node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/autolink.js.map +0 -1
  222. package/dist/cjs/node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/blank-line.js +0 -2
  223. package/dist/cjs/node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/blank-line.js.map +0 -1
  224. package/dist/cjs/node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/block-quote.js +0 -2
  225. package/dist/cjs/node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/block-quote.js.map +0 -1
  226. package/dist/cjs/node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/character-escape.js +0 -2
  227. package/dist/cjs/node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/character-escape.js.map +0 -1
  228. package/dist/cjs/node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/character-reference.js +0 -2
  229. package/dist/cjs/node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/character-reference.js.map +0 -1
  230. package/dist/cjs/node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/code-fenced.js +0 -2
  231. package/dist/cjs/node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/code-fenced.js.map +0 -1
  232. package/dist/cjs/node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/code-indented.js +0 -2
  233. package/dist/cjs/node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/code-indented.js.map +0 -1
  234. package/dist/cjs/node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/code-text.js +0 -2
  235. package/dist/cjs/node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/code-text.js.map +0 -1
  236. package/dist/cjs/node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/content.js +0 -2
  237. package/dist/cjs/node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/content.js.map +0 -1
  238. package/dist/cjs/node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/definition.js +0 -2
  239. package/dist/cjs/node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/definition.js.map +0 -1
  240. package/dist/cjs/node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/hard-break-escape.js +0 -2
  241. package/dist/cjs/node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/hard-break-escape.js.map +0 -1
  242. package/dist/cjs/node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/heading-atx.js +0 -2
  243. package/dist/cjs/node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/heading-atx.js.map +0 -1
  244. package/dist/cjs/node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/html-flow.js +0 -2
  245. package/dist/cjs/node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/html-flow.js.map +0 -1
  246. package/dist/cjs/node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/html-text.js +0 -2
  247. package/dist/cjs/node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/html-text.js.map +0 -1
  248. package/dist/cjs/node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/label-end.js +0 -2
  249. package/dist/cjs/node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/label-end.js.map +0 -1
  250. package/dist/cjs/node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/label-start-image.js +0 -2
  251. package/dist/cjs/node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/label-start-image.js.map +0 -1
  252. package/dist/cjs/node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/label-start-link.js +0 -2
  253. package/dist/cjs/node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/label-start-link.js.map +0 -1
  254. package/dist/cjs/node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/line-ending.js +0 -2
  255. package/dist/cjs/node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/line-ending.js.map +0 -1
  256. package/dist/cjs/node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/list.js +0 -2
  257. package/dist/cjs/node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/list.js.map +0 -1
  258. package/dist/cjs/node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/setext-underline.js +0 -2
  259. package/dist/cjs/node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/setext-underline.js.map +0 -1
  260. package/dist/cjs/node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/thematic-break.js +0 -2
  261. package/dist/cjs/node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/thematic-break.js.map +0 -1
  262. package/dist/cjs/node_modules/react-markdown/node_modules/micromark-factory-destination/index.js +0 -2
  263. package/dist/cjs/node_modules/react-markdown/node_modules/micromark-factory-destination/index.js.map +0 -1
  264. package/dist/cjs/node_modules/react-markdown/node_modules/micromark-factory-label/index.js +0 -2
  265. package/dist/cjs/node_modules/react-markdown/node_modules/micromark-factory-label/index.js.map +0 -1
  266. package/dist/cjs/node_modules/react-markdown/node_modules/micromark-factory-space/index.js +0 -2
  267. package/dist/cjs/node_modules/react-markdown/node_modules/micromark-factory-space/index.js.map +0 -1
  268. package/dist/cjs/node_modules/react-markdown/node_modules/micromark-factory-title/index.js +0 -2
  269. package/dist/cjs/node_modules/react-markdown/node_modules/micromark-factory-title/index.js.map +0 -1
  270. package/dist/cjs/node_modules/react-markdown/node_modules/micromark-factory-whitespace/index.js +0 -2
  271. package/dist/cjs/node_modules/react-markdown/node_modules/micromark-factory-whitespace/index.js.map +0 -1
  272. package/dist/cjs/node_modules/react-markdown/node_modules/micromark-util-character/index.js +0 -2
  273. package/dist/cjs/node_modules/react-markdown/node_modules/micromark-util-character/index.js.map +0 -1
  274. package/dist/cjs/node_modules/react-markdown/node_modules/micromark-util-character/lib/unicode-punctuation-regex.js +0 -2
  275. package/dist/cjs/node_modules/react-markdown/node_modules/micromark-util-character/lib/unicode-punctuation-regex.js.map +0 -1
  276. package/dist/cjs/node_modules/react-markdown/node_modules/micromark-util-chunked/index.js +0 -2
  277. package/dist/cjs/node_modules/react-markdown/node_modules/micromark-util-chunked/index.js.map +0 -1
  278. package/dist/cjs/node_modules/react-markdown/node_modules/micromark-util-classify-character/index.js +0 -2
  279. package/dist/cjs/node_modules/react-markdown/node_modules/micromark-util-classify-character/index.js.map +0 -1
  280. package/dist/cjs/node_modules/react-markdown/node_modules/micromark-util-combine-extensions/index.js +0 -2
  281. package/dist/cjs/node_modules/react-markdown/node_modules/micromark-util-combine-extensions/index.js.map +0 -1
  282. package/dist/cjs/node_modules/react-markdown/node_modules/micromark-util-decode-numeric-character-reference/index.js +0 -2
  283. package/dist/cjs/node_modules/react-markdown/node_modules/micromark-util-decode-numeric-character-reference/index.js.map +0 -1
  284. package/dist/cjs/node_modules/react-markdown/node_modules/micromark-util-decode-string/index.js +0 -2
  285. package/dist/cjs/node_modules/react-markdown/node_modules/micromark-util-decode-string/index.js.map +0 -1
  286. package/dist/cjs/node_modules/react-markdown/node_modules/micromark-util-html-tag-name/index.js +0 -2
  287. package/dist/cjs/node_modules/react-markdown/node_modules/micromark-util-html-tag-name/index.js.map +0 -1
  288. package/dist/cjs/node_modules/react-markdown/node_modules/micromark-util-normalize-identifier/index.js +0 -2
  289. package/dist/cjs/node_modules/react-markdown/node_modules/micromark-util-normalize-identifier/index.js.map +0 -1
  290. package/dist/cjs/node_modules/react-markdown/node_modules/micromark-util-resolve-all/index.js +0 -2
  291. package/dist/cjs/node_modules/react-markdown/node_modules/micromark-util-resolve-all/index.js.map +0 -1
  292. package/dist/cjs/node_modules/react-markdown/node_modules/micromark-util-subtokenize/index.js +0 -2
  293. package/dist/cjs/node_modules/react-markdown/node_modules/micromark-util-subtokenize/index.js.map +0 -1
  294. package/dist/cjs/node_modules/react-markdown/node_modules/property-information/index.js +0 -2
  295. package/dist/cjs/node_modules/react-markdown/node_modules/property-information/index.js.map +0 -1
  296. package/dist/cjs/node_modules/react-markdown/node_modules/property-information/lib/aria.js +0 -2
  297. package/dist/cjs/node_modules/react-markdown/node_modules/property-information/lib/aria.js.map +0 -1
  298. package/dist/cjs/node_modules/react-markdown/node_modules/property-information/lib/find.js +0 -2
  299. package/dist/cjs/node_modules/react-markdown/node_modules/property-information/lib/find.js.map +0 -1
  300. package/dist/cjs/node_modules/react-markdown/node_modules/property-information/lib/hast-to-react.js +0 -2
  301. package/dist/cjs/node_modules/react-markdown/node_modules/property-information/lib/hast-to-react.js.map +0 -1
  302. package/dist/cjs/node_modules/react-markdown/node_modules/property-information/lib/html.js +0 -2
  303. package/dist/cjs/node_modules/react-markdown/node_modules/property-information/lib/html.js.map +0 -1
  304. package/dist/cjs/node_modules/react-markdown/node_modules/property-information/lib/normalize.js +0 -2
  305. package/dist/cjs/node_modules/react-markdown/node_modules/property-information/lib/normalize.js.map +0 -1
  306. package/dist/cjs/node_modules/react-markdown/node_modules/property-information/lib/svg.js +0 -2
  307. package/dist/cjs/node_modules/react-markdown/node_modules/property-information/lib/svg.js.map +0 -1
  308. package/dist/cjs/node_modules/react-markdown/node_modules/property-information/lib/util/case-insensitive-transform.js +0 -2
  309. package/dist/cjs/node_modules/react-markdown/node_modules/property-information/lib/util/case-insensitive-transform.js.map +0 -1
  310. package/dist/cjs/node_modules/react-markdown/node_modules/property-information/lib/util/case-sensitive-transform.js +0 -2
  311. package/dist/cjs/node_modules/react-markdown/node_modules/property-information/lib/util/case-sensitive-transform.js.map +0 -1
  312. package/dist/cjs/node_modules/react-markdown/node_modules/property-information/lib/util/create.js +0 -2
  313. package/dist/cjs/node_modules/react-markdown/node_modules/property-information/lib/util/create.js.map +0 -1
  314. package/dist/cjs/node_modules/react-markdown/node_modules/property-information/lib/util/defined-info.js +0 -2
  315. package/dist/cjs/node_modules/react-markdown/node_modules/property-information/lib/util/defined-info.js.map +0 -1
  316. package/dist/cjs/node_modules/react-markdown/node_modules/property-information/lib/util/info.js +0 -2
  317. package/dist/cjs/node_modules/react-markdown/node_modules/property-information/lib/util/info.js.map +0 -1
  318. package/dist/cjs/node_modules/react-markdown/node_modules/property-information/lib/util/merge.js +0 -2
  319. package/dist/cjs/node_modules/react-markdown/node_modules/property-information/lib/util/merge.js.map +0 -1
  320. package/dist/cjs/node_modules/react-markdown/node_modules/property-information/lib/util/schema.js +0 -2
  321. package/dist/cjs/node_modules/react-markdown/node_modules/property-information/lib/util/schema.js.map +0 -1
  322. package/dist/cjs/node_modules/react-markdown/node_modules/property-information/lib/util/types.js +0 -2
  323. package/dist/cjs/node_modules/react-markdown/node_modules/property-information/lib/util/types.js.map +0 -1
  324. package/dist/cjs/node_modules/react-markdown/node_modules/property-information/lib/xlink.js +0 -2
  325. package/dist/cjs/node_modules/react-markdown/node_modules/property-information/lib/xlink.js.map +0 -1
  326. package/dist/cjs/node_modules/react-markdown/node_modules/property-information/lib/xml.js +0 -2
  327. package/dist/cjs/node_modules/react-markdown/node_modules/property-information/lib/xml.js.map +0 -1
  328. package/dist/cjs/node_modules/react-markdown/node_modules/property-information/lib/xmlns.js +0 -2
  329. package/dist/cjs/node_modules/react-markdown/node_modules/property-information/lib/xmlns.js.map +0 -1
  330. package/dist/cjs/node_modules/react-markdown/node_modules/remark-parse/lib/index.js +0 -2
  331. package/dist/cjs/node_modules/react-markdown/node_modules/remark-parse/lib/index.js.map +0 -1
  332. package/dist/cjs/node_modules/react-markdown/node_modules/space-separated-tokens/index.js +0 -2
  333. package/dist/cjs/node_modules/react-markdown/node_modules/space-separated-tokens/index.js.map +0 -1
  334. package/dist/cjs/node_modules/react-markdown/node_modules/unified/lib/index.js +0 -2
  335. package/dist/cjs/node_modules/react-markdown/node_modules/unified/lib/index.js.map +0 -1
  336. package/dist/cjs/node_modules/react-markdown/node_modules/unist-util-is/lib/index.js +0 -2
  337. package/dist/cjs/node_modules/react-markdown/node_modules/unist-util-is/lib/index.js.map +0 -1
  338. package/dist/cjs/node_modules/react-markdown/node_modules/unist-util-stringify-position/lib/index.js +0 -2
  339. package/dist/cjs/node_modules/react-markdown/node_modules/unist-util-stringify-position/lib/index.js.map +0 -1
  340. package/dist/cjs/node_modules/react-markdown/node_modules/unist-util-visit/lib/index.js +0 -2
  341. package/dist/cjs/node_modules/react-markdown/node_modules/unist-util-visit/lib/index.js.map +0 -1
  342. package/dist/cjs/node_modules/react-markdown/node_modules/unist-util-visit-parents/lib/color.browser.js +0 -2
  343. package/dist/cjs/node_modules/react-markdown/node_modules/unist-util-visit-parents/lib/color.browser.js.map +0 -1
  344. package/dist/cjs/node_modules/react-markdown/node_modules/unist-util-visit-parents/lib/index.js +0 -2
  345. package/dist/cjs/node_modules/react-markdown/node_modules/unist-util-visit-parents/lib/index.js.map +0 -1
  346. package/dist/cjs/node_modules/react-markdown/node_modules/vfile/lib/index.js +0 -2
  347. package/dist/cjs/node_modules/react-markdown/node_modules/vfile/lib/index.js.map +0 -1
  348. package/dist/cjs/node_modules/react-markdown/node_modules/vfile/lib/minpath.browser.js +0 -2
  349. package/dist/cjs/node_modules/react-markdown/node_modules/vfile/lib/minpath.browser.js.map +0 -1
  350. package/dist/cjs/node_modules/react-markdown/node_modules/vfile/lib/minproc.browser.js +0 -2
  351. package/dist/cjs/node_modules/react-markdown/node_modules/vfile/lib/minproc.browser.js.map +0 -1
  352. package/dist/cjs/node_modules/react-markdown/node_modules/vfile/lib/minurl.browser.js +0 -2
  353. package/dist/cjs/node_modules/react-markdown/node_modules/vfile/lib/minurl.browser.js.map +0 -1
  354. package/dist/cjs/node_modules/react-markdown/node_modules/vfile/lib/minurl.shared.js +0 -2
  355. package/dist/cjs/node_modules/react-markdown/node_modules/vfile/lib/minurl.shared.js.map +0 -1
  356. package/dist/cjs/node_modules/react-markdown/node_modules/vfile-message/lib/index.js +0 -2
  357. package/dist/cjs/node_modules/react-markdown/node_modules/vfile-message/lib/index.js.map +0 -1
  358. package/dist/cjs/node_modules/unist-util-visit/node_modules/unist-util-is/lib/index.js +0 -2
  359. package/dist/cjs/node_modules/unist-util-visit/node_modules/unist-util-is/lib/index.js.map +0 -1
  360. package/dist/cjs/node_modules/unist-util-visit/node_modules/unist-util-visit-parents/lib/color.browser.js +0 -2
  361. package/dist/cjs/node_modules/unist-util-visit/node_modules/unist-util-visit-parents/lib/color.browser.js.map +0 -1
  362. package/dist/cjs/node_modules/unist-util-visit/node_modules/unist-util-visit-parents/lib/index.js +0 -2
  363. package/dist/cjs/node_modules/unist-util-visit/node_modules/unist-util-visit-parents/lib/index.js.map +0 -1
  364. package/dist/cjs/node_modules/unist-util-visit-parents/lib/color.js +0 -2
  365. package/dist/cjs/node_modules/unist-util-visit-parents/lib/color.js.map +0 -1
  366. package/dist/cjs/node_modules/vfile-message/node_modules/unist-util-stringify-position/lib/index.js +0 -2
  367. package/dist/cjs/node_modules/vfile-message/node_modules/unist-util-stringify-position/lib/index.js.map +0 -1
  368. package/dist/features/DocumentLearning/_components/ChatComponent.css.js +0 -2
  369. package/dist/features/DocumentLearning/_components/ChatComponent.css.js.map +0 -1
  370. package/dist/features/IL-OTJ/_components/AccordionSection.js +0 -2
  371. package/dist/features/IL-OTJ/_components/AccordionSection.js.map +0 -1
  372. package/dist/features/IL-OTJ/_components/ChatComponent.css.js +0 -2
  373. package/dist/features/IL-OTJ/_components/ChatComponent.css.js.map +0 -1
  374. package/dist/features/IL-OTJ/_components/ChatComponent.js +0 -2
  375. package/dist/features/IL-OTJ/_components/ChatComponent.js.map +0 -1
  376. package/dist/features/IL-OTJ/_components/ILComponents.js +0 -2
  377. package/dist/features/IL-OTJ/_components/ILComponents.js.map +0 -1
  378. package/dist/features/IL-OTJ/_components/ILPopup.css.js +0 -2
  379. package/dist/features/IL-OTJ/_components/ILPopup.css.js.map +0 -1
  380. package/dist/features/IL-OTJ/_components/MessageRendering/CustomComponents.js +0 -2
  381. package/dist/features/IL-OTJ/_components/MessageRendering/CustomComponents.js.map +0 -1
  382. package/dist/features/IL-OTJ/_components/MessageRendering.js +0 -2
  383. package/dist/features/IL-OTJ/_components/MessageRendering.js.map +0 -1
  384. package/dist/features/IL-OTJ/_components/_svg/ResizeChat.svg.js +0 -2
  385. package/dist/features/IL-OTJ/_components/_svg/ResizeChat.svg.js.map +0 -1
  386. package/dist/features/IL-OTJ/_components/atoms/Button.js +0 -2
  387. package/dist/features/IL-OTJ/_components/atoms/Button.js.map +0 -1
  388. package/dist/features/IL-OTJ/_components/atoms/Dropdown.js +0 -2
  389. package/dist/features/IL-OTJ/_components/atoms/Dropdown.js.map +0 -1
  390. package/dist/features/IL-OTJ/_components/atoms/Textarea.js +0 -2
  391. package/dist/features/IL-OTJ/_components/atoms/Textarea.js.map +0 -1
  392. package/dist/features/IL-OTJ/_components/atoms/Toggle.js +0 -2
  393. package/dist/features/IL-OTJ/_components/atoms/Toggle.js.map +0 -1
  394. package/dist/features/IL-OTJ/_components/molecules/ConditionList.js +0 -2
  395. package/dist/features/IL-OTJ/_components/molecules/ConditionList.js.map +0 -1
  396. package/dist/features/IL-OTJ/_components/molecules/SectionWrapper.js +0 -2
  397. package/dist/features/IL-OTJ/_components/molecules/SectionWrapper.js.map +0 -1
  398. package/dist/features/IL-OTJ/_components/molecules/SeparationRow.js +0 -2
  399. package/dist/features/IL-OTJ/_components/molecules/SeparationRow.js.map +0 -1
  400. package/dist/features/IL-OTJ/_components/organisms/ConditionSection.js +0 -2
  401. package/dist/features/IL-OTJ/_components/organisms/ConditionSection.js.map +0 -1
  402. package/dist/features/IL-OTJ/_components/organisms/ConfigSection.js +0 -2
  403. package/dist/features/IL-OTJ/_components/organisms/ConfigSection.js.map +0 -1
  404. package/dist/features/IL-OTJ/_components/organisms/ScopeSection.js +0 -2
  405. package/dist/features/IL-OTJ/_components/organisms/ScopeSection.js.map +0 -1
  406. package/dist/features/IL-OTJ/_components/organisms/TextSection.js +0 -2
  407. package/dist/features/IL-OTJ/_components/organisms/TextSection.js.map +0 -1
  408. package/dist/features/IL-OTJ/_components/templates/AudactyTemplate.js +0 -2
  409. package/dist/features/IL-OTJ/_components/templates/AudactyTemplate.js.map +0 -1
  410. package/dist/features/IL-OTJ/_components/templates/Testing.js +0 -2
  411. package/dist/features/IL-OTJ/_components/templates/Testing.js.map +0 -1
  412. package/dist/features/IL-OTJ/_components/templates/TrafficManagerTemplate.js +0 -2
  413. package/dist/features/IL-OTJ/_components/templates/TrafficManagerTemplate.js.map +0 -1
  414. package/dist/features/IL-OTJ/_components/templates/templatesMap.js +0 -2
  415. package/dist/features/IL-OTJ/_components/templates/templatesMap.js.map +0 -1
  416. package/dist/features/IL-OTJ/hooks/useHeaderObserver.js +0 -2
  417. package/dist/features/IL-OTJ/hooks/useHeaderObserver.js.map +0 -1
  418. package/dist/features/InstantLearning/_components/ChatComponent.css.js +0 -2
  419. package/dist/features/InstantLearning/_components/ChatComponent.css.js.map +0 -1
  420. package/dist/features/InstantLearning/_components/TooltipComponent.js +0 -2
  421. package/dist/features/InstantLearning/_components/TooltipComponent.js.map +0 -1
  422. package/dist/node_modules/@e-llm-studio/citation/dist/node_modules/clsx/dist/clsx.m.js +0 -2
  423. package/dist/node_modules/@e-llm-studio/citation/dist/node_modules/clsx/dist/clsx.m.js.map +0 -1
  424. package/dist/node_modules/@e-llm-studio/streaming-response/node_modules/ws/browser.js +0 -2
  425. package/dist/node_modules/@e-llm-studio/streaming-response/node_modules/ws/browser.js.map +0 -1
  426. package/dist/node_modules/@emotion/serialize/node_modules/@emotion/hash/dist/emotion-hash.esm.js +0 -2
  427. package/dist/node_modules/@emotion/serialize/node_modules/@emotion/hash/dist/emotion-hash.esm.js.map +0 -1
  428. package/dist/node_modules/hast-util-from-parse5/node_modules/comma-separated-tokens/index.js +0 -2
  429. package/dist/node_modules/hast-util-from-parse5/node_modules/comma-separated-tokens/index.js.map +0 -1
  430. package/dist/node_modules/hast-util-from-parse5/node_modules/space-separated-tokens/index.js +0 -2
  431. package/dist/node_modules/hast-util-from-parse5/node_modules/space-separated-tokens/index.js.map +0 -1
  432. package/dist/node_modules/hast-util-raw/node_modules/unist-util-position/lib/index.js +0 -2
  433. package/dist/node_modules/hast-util-raw/node_modules/unist-util-position/lib/index.js.map +0 -1
  434. package/dist/node_modules/hast-util-to-parse5/node_modules/comma-separated-tokens/index.js +0 -2
  435. package/dist/node_modules/hast-util-to-parse5/node_modules/comma-separated-tokens/index.js.map +0 -1
  436. package/dist/node_modules/hast-util-to-parse5/node_modules/space-separated-tokens/index.js +0 -2
  437. package/dist/node_modules/hast-util-to-parse5/node_modules/space-separated-tokens/index.js.map +0 -1
  438. package/dist/node_modules/mdast-util-definitions/node_modules/unist-util-is/lib/index.js +0 -2
  439. package/dist/node_modules/mdast-util-definitions/node_modules/unist-util-is/lib/index.js.map +0 -1
  440. package/dist/node_modules/mdast-util-definitions/node_modules/unist-util-visit/lib/index.js +0 -2
  441. package/dist/node_modules/mdast-util-definitions/node_modules/unist-util-visit/lib/index.js.map +0 -1
  442. package/dist/node_modules/mdast-util-definitions/node_modules/unist-util-visit-parents/lib/color.browser.js +0 -2
  443. package/dist/node_modules/mdast-util-definitions/node_modules/unist-util-visit-parents/lib/color.browser.js.map +0 -1
  444. package/dist/node_modules/mdast-util-definitions/node_modules/unist-util-visit-parents/lib/index.js +0 -2
  445. package/dist/node_modules/mdast-util-definitions/node_modules/unist-util-visit-parents/lib/index.js.map +0 -1
  446. package/dist/node_modules/mdast-util-find-and-replace/node_modules/unist-util-is/lib/index.js +0 -2
  447. package/dist/node_modules/mdast-util-find-and-replace/node_modules/unist-util-is/lib/index.js.map +0 -1
  448. package/dist/node_modules/mdast-util-find-and-replace/node_modules/unist-util-visit-parents/lib/color.browser.js +0 -2
  449. package/dist/node_modules/mdast-util-find-and-replace/node_modules/unist-util-visit-parents/lib/color.browser.js.map +0 -1
  450. package/dist/node_modules/mdast-util-find-and-replace/node_modules/unist-util-visit-parents/lib/index.js +0 -2
  451. package/dist/node_modules/mdast-util-find-and-replace/node_modules/unist-util-visit-parents/lib/index.js.map +0 -1
  452. package/dist/node_modules/mdast-util-from-markdown/node_modules/unist-util-stringify-position/lib/index.js +0 -2
  453. package/dist/node_modules/mdast-util-from-markdown/node_modules/unist-util-stringify-position/lib/index.js.map +0 -1
  454. package/dist/node_modules/mdast-util-to-hast/lib/footer.js +0 -2
  455. package/dist/node_modules/mdast-util-to-hast/lib/footer.js.map +0 -1
  456. package/dist/node_modules/mdast-util-to-hast/lib/handlers/blockquote.js +0 -2
  457. package/dist/node_modules/mdast-util-to-hast/lib/handlers/blockquote.js.map +0 -1
  458. package/dist/node_modules/mdast-util-to-hast/lib/handlers/break.js +0 -2
  459. package/dist/node_modules/mdast-util-to-hast/lib/handlers/break.js.map +0 -1
  460. package/dist/node_modules/mdast-util-to-hast/lib/handlers/code.js +0 -2
  461. package/dist/node_modules/mdast-util-to-hast/lib/handlers/code.js.map +0 -1
  462. package/dist/node_modules/mdast-util-to-hast/lib/handlers/delete.js +0 -2
  463. package/dist/node_modules/mdast-util-to-hast/lib/handlers/delete.js.map +0 -1
  464. package/dist/node_modules/mdast-util-to-hast/lib/handlers/emphasis.js +0 -2
  465. package/dist/node_modules/mdast-util-to-hast/lib/handlers/emphasis.js.map +0 -1
  466. package/dist/node_modules/mdast-util-to-hast/lib/handlers/footnote-reference.js +0 -2
  467. package/dist/node_modules/mdast-util-to-hast/lib/handlers/footnote-reference.js.map +0 -1
  468. package/dist/node_modules/mdast-util-to-hast/lib/handlers/footnote.js +0 -2
  469. package/dist/node_modules/mdast-util-to-hast/lib/handlers/footnote.js.map +0 -1
  470. package/dist/node_modules/mdast-util-to-hast/lib/handlers/heading.js +0 -2
  471. package/dist/node_modules/mdast-util-to-hast/lib/handlers/heading.js.map +0 -1
  472. package/dist/node_modules/mdast-util-to-hast/lib/handlers/html.js +0 -2
  473. package/dist/node_modules/mdast-util-to-hast/lib/handlers/html.js.map +0 -1
  474. package/dist/node_modules/mdast-util-to-hast/lib/handlers/image-reference.js +0 -2
  475. package/dist/node_modules/mdast-util-to-hast/lib/handlers/image-reference.js.map +0 -1
  476. package/dist/node_modules/mdast-util-to-hast/lib/handlers/image.js +0 -2
  477. package/dist/node_modules/mdast-util-to-hast/lib/handlers/image.js.map +0 -1
  478. package/dist/node_modules/mdast-util-to-hast/lib/handlers/index.js +0 -2
  479. package/dist/node_modules/mdast-util-to-hast/lib/handlers/index.js.map +0 -1
  480. package/dist/node_modules/mdast-util-to-hast/lib/handlers/inline-code.js +0 -2
  481. package/dist/node_modules/mdast-util-to-hast/lib/handlers/inline-code.js.map +0 -1
  482. package/dist/node_modules/mdast-util-to-hast/lib/handlers/link-reference.js +0 -2
  483. package/dist/node_modules/mdast-util-to-hast/lib/handlers/link-reference.js.map +0 -1
  484. package/dist/node_modules/mdast-util-to-hast/lib/handlers/link.js +0 -2
  485. package/dist/node_modules/mdast-util-to-hast/lib/handlers/link.js.map +0 -1
  486. package/dist/node_modules/mdast-util-to-hast/lib/handlers/list-item.js +0 -2
  487. package/dist/node_modules/mdast-util-to-hast/lib/handlers/list-item.js.map +0 -1
  488. package/dist/node_modules/mdast-util-to-hast/lib/handlers/list.js +0 -2
  489. package/dist/node_modules/mdast-util-to-hast/lib/handlers/list.js.map +0 -1
  490. package/dist/node_modules/mdast-util-to-hast/lib/handlers/paragraph.js +0 -2
  491. package/dist/node_modules/mdast-util-to-hast/lib/handlers/paragraph.js.map +0 -1
  492. package/dist/node_modules/mdast-util-to-hast/lib/handlers/root.js +0 -2
  493. package/dist/node_modules/mdast-util-to-hast/lib/handlers/root.js.map +0 -1
  494. package/dist/node_modules/mdast-util-to-hast/lib/handlers/strong.js +0 -2
  495. package/dist/node_modules/mdast-util-to-hast/lib/handlers/strong.js.map +0 -1
  496. package/dist/node_modules/mdast-util-to-hast/lib/handlers/table-cell.js +0 -2
  497. package/dist/node_modules/mdast-util-to-hast/lib/handlers/table-cell.js.map +0 -1
  498. package/dist/node_modules/mdast-util-to-hast/lib/handlers/table-row.js +0 -2
  499. package/dist/node_modules/mdast-util-to-hast/lib/handlers/table-row.js.map +0 -1
  500. package/dist/node_modules/mdast-util-to-hast/lib/handlers/table.js +0 -2
  501. package/dist/node_modules/mdast-util-to-hast/lib/handlers/table.js.map +0 -1
  502. package/dist/node_modules/mdast-util-to-hast/lib/handlers/text.js +0 -2
  503. package/dist/node_modules/mdast-util-to-hast/lib/handlers/text.js.map +0 -1
  504. package/dist/node_modules/mdast-util-to-hast/lib/handlers/thematic-break.js +0 -2
  505. package/dist/node_modules/mdast-util-to-hast/lib/handlers/thematic-break.js.map +0 -1
  506. package/dist/node_modules/mdast-util-to-hast/lib/index.js +0 -2
  507. package/dist/node_modules/mdast-util-to-hast/lib/index.js.map +0 -1
  508. package/dist/node_modules/mdast-util-to-hast/lib/revert.js +0 -2
  509. package/dist/node_modules/mdast-util-to-hast/lib/revert.js.map +0 -1
  510. package/dist/node_modules/mdast-util-to-hast/lib/state.js +0 -2
  511. package/dist/node_modules/mdast-util-to-hast/lib/state.js.map +0 -1
  512. package/dist/node_modules/mdast-util-to-hast/node_modules/micromark-util-character/index.js +0 -2
  513. package/dist/node_modules/mdast-util-to-hast/node_modules/micromark-util-character/index.js.map +0 -1
  514. package/dist/node_modules/mdast-util-to-hast/node_modules/micromark-util-sanitize-uri/index.js +0 -2
  515. package/dist/node_modules/mdast-util-to-hast/node_modules/micromark-util-sanitize-uri/index.js.map +0 -1
  516. package/dist/node_modules/mdast-util-to-hast/node_modules/unist-util-is/lib/index.js +0 -2
  517. package/dist/node_modules/mdast-util-to-hast/node_modules/unist-util-is/lib/index.js.map +0 -1
  518. package/dist/node_modules/mdast-util-to-hast/node_modules/unist-util-visit/lib/index.js +0 -2
  519. package/dist/node_modules/mdast-util-to-hast/node_modules/unist-util-visit/lib/index.js.map +0 -1
  520. package/dist/node_modules/mdast-util-to-hast/node_modules/unist-util-visit-parents/lib/color.browser.js +0 -2
  521. package/dist/node_modules/mdast-util-to-hast/node_modules/unist-util-visit-parents/lib/color.browser.js.map +0 -1
  522. package/dist/node_modules/mdast-util-to-hast/node_modules/unist-util-visit-parents/lib/index.js +0 -2
  523. package/dist/node_modules/mdast-util-to-hast/node_modules/unist-util-visit-parents/lib/index.js.map +0 -1
  524. package/dist/node_modules/react-hot-toast/node_modules/goober/dist/goober.modern.js +0 -2
  525. package/dist/node_modules/react-hot-toast/node_modules/goober/dist/goober.modern.js.map +0 -1
  526. package/dist/node_modules/react-markdown/node_modules/comma-separated-tokens/index.js +0 -2
  527. package/dist/node_modules/react-markdown/node_modules/comma-separated-tokens/index.js.map +0 -1
  528. package/dist/node_modules/react-markdown/node_modules/is-plain-obj/index.js +0 -2
  529. package/dist/node_modules/react-markdown/node_modules/is-plain-obj/index.js.map +0 -1
  530. package/dist/node_modules/react-markdown/node_modules/mdast-util-from-markdown/lib/index.js +0 -2
  531. package/dist/node_modules/react-markdown/node_modules/mdast-util-from-markdown/lib/index.js.map +0 -1
  532. package/dist/node_modules/react-markdown/node_modules/mdast-util-to-string/lib/index.js +0 -2
  533. package/dist/node_modules/react-markdown/node_modules/mdast-util-to-string/lib/index.js.map +0 -1
  534. package/dist/node_modules/react-markdown/node_modules/micromark/lib/constructs.js +0 -2
  535. package/dist/node_modules/react-markdown/node_modules/micromark/lib/constructs.js.map +0 -1
  536. package/dist/node_modules/react-markdown/node_modules/micromark/lib/create-tokenizer.js +0 -2
  537. package/dist/node_modules/react-markdown/node_modules/micromark/lib/create-tokenizer.js.map +0 -1
  538. package/dist/node_modules/react-markdown/node_modules/micromark/lib/initialize/content.js +0 -2
  539. package/dist/node_modules/react-markdown/node_modules/micromark/lib/initialize/content.js.map +0 -1
  540. package/dist/node_modules/react-markdown/node_modules/micromark/lib/initialize/document.js +0 -2
  541. package/dist/node_modules/react-markdown/node_modules/micromark/lib/initialize/document.js.map +0 -1
  542. package/dist/node_modules/react-markdown/node_modules/micromark/lib/initialize/flow.js +0 -2
  543. package/dist/node_modules/react-markdown/node_modules/micromark/lib/initialize/flow.js.map +0 -1
  544. package/dist/node_modules/react-markdown/node_modules/micromark/lib/initialize/text.js +0 -2
  545. package/dist/node_modules/react-markdown/node_modules/micromark/lib/initialize/text.js.map +0 -1
  546. package/dist/node_modules/react-markdown/node_modules/micromark/lib/parse.js +0 -2
  547. package/dist/node_modules/react-markdown/node_modules/micromark/lib/parse.js.map +0 -1
  548. package/dist/node_modules/react-markdown/node_modules/micromark/lib/postprocess.js +0 -2
  549. package/dist/node_modules/react-markdown/node_modules/micromark/lib/postprocess.js.map +0 -1
  550. package/dist/node_modules/react-markdown/node_modules/micromark/lib/preprocess.js +0 -2
  551. package/dist/node_modules/react-markdown/node_modules/micromark/lib/preprocess.js.map +0 -1
  552. package/dist/node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/attention.js +0 -2
  553. package/dist/node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/attention.js.map +0 -1
  554. package/dist/node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/autolink.js +0 -2
  555. package/dist/node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/autolink.js.map +0 -1
  556. package/dist/node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/blank-line.js +0 -2
  557. package/dist/node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/blank-line.js.map +0 -1
  558. package/dist/node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/block-quote.js +0 -2
  559. package/dist/node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/block-quote.js.map +0 -1
  560. package/dist/node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/character-escape.js +0 -2
  561. package/dist/node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/character-escape.js.map +0 -1
  562. package/dist/node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/character-reference.js +0 -2
  563. package/dist/node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/character-reference.js.map +0 -1
  564. package/dist/node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/code-fenced.js +0 -2
  565. package/dist/node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/code-fenced.js.map +0 -1
  566. package/dist/node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/code-indented.js +0 -2
  567. package/dist/node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/code-indented.js.map +0 -1
  568. package/dist/node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/code-text.js +0 -2
  569. package/dist/node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/code-text.js.map +0 -1
  570. package/dist/node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/content.js +0 -2
  571. package/dist/node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/content.js.map +0 -1
  572. package/dist/node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/definition.js +0 -2
  573. package/dist/node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/definition.js.map +0 -1
  574. package/dist/node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/hard-break-escape.js +0 -2
  575. package/dist/node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/hard-break-escape.js.map +0 -1
  576. package/dist/node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/heading-atx.js +0 -2
  577. package/dist/node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/heading-atx.js.map +0 -1
  578. package/dist/node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/html-flow.js +0 -2
  579. package/dist/node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/html-flow.js.map +0 -1
  580. package/dist/node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/html-text.js +0 -2
  581. package/dist/node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/html-text.js.map +0 -1
  582. package/dist/node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/label-end.js +0 -2
  583. package/dist/node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/label-end.js.map +0 -1
  584. package/dist/node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/label-start-image.js +0 -2
  585. package/dist/node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/label-start-image.js.map +0 -1
  586. package/dist/node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/label-start-link.js +0 -2
  587. package/dist/node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/label-start-link.js.map +0 -1
  588. package/dist/node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/line-ending.js +0 -2
  589. package/dist/node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/line-ending.js.map +0 -1
  590. package/dist/node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/list.js +0 -2
  591. package/dist/node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/list.js.map +0 -1
  592. package/dist/node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/setext-underline.js +0 -2
  593. package/dist/node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/setext-underline.js.map +0 -1
  594. package/dist/node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/thematic-break.js +0 -2
  595. package/dist/node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/thematic-break.js.map +0 -1
  596. package/dist/node_modules/react-markdown/node_modules/micromark-factory-destination/index.js +0 -2
  597. package/dist/node_modules/react-markdown/node_modules/micromark-factory-destination/index.js.map +0 -1
  598. package/dist/node_modules/react-markdown/node_modules/micromark-factory-label/index.js +0 -2
  599. package/dist/node_modules/react-markdown/node_modules/micromark-factory-label/index.js.map +0 -1
  600. package/dist/node_modules/react-markdown/node_modules/micromark-factory-space/index.js +0 -2
  601. package/dist/node_modules/react-markdown/node_modules/micromark-factory-space/index.js.map +0 -1
  602. package/dist/node_modules/react-markdown/node_modules/micromark-factory-title/index.js +0 -2
  603. package/dist/node_modules/react-markdown/node_modules/micromark-factory-title/index.js.map +0 -1
  604. package/dist/node_modules/react-markdown/node_modules/micromark-factory-whitespace/index.js +0 -2
  605. package/dist/node_modules/react-markdown/node_modules/micromark-factory-whitespace/index.js.map +0 -1
  606. package/dist/node_modules/react-markdown/node_modules/micromark-util-character/index.js +0 -2
  607. package/dist/node_modules/react-markdown/node_modules/micromark-util-character/index.js.map +0 -1
  608. package/dist/node_modules/react-markdown/node_modules/micromark-util-character/lib/unicode-punctuation-regex.js +0 -2
  609. package/dist/node_modules/react-markdown/node_modules/micromark-util-character/lib/unicode-punctuation-regex.js.map +0 -1
  610. package/dist/node_modules/react-markdown/node_modules/micromark-util-chunked/index.js +0 -2
  611. package/dist/node_modules/react-markdown/node_modules/micromark-util-chunked/index.js.map +0 -1
  612. package/dist/node_modules/react-markdown/node_modules/micromark-util-classify-character/index.js +0 -2
  613. package/dist/node_modules/react-markdown/node_modules/micromark-util-classify-character/index.js.map +0 -1
  614. package/dist/node_modules/react-markdown/node_modules/micromark-util-combine-extensions/index.js +0 -2
  615. package/dist/node_modules/react-markdown/node_modules/micromark-util-combine-extensions/index.js.map +0 -1
  616. package/dist/node_modules/react-markdown/node_modules/micromark-util-decode-numeric-character-reference/index.js +0 -2
  617. package/dist/node_modules/react-markdown/node_modules/micromark-util-decode-numeric-character-reference/index.js.map +0 -1
  618. package/dist/node_modules/react-markdown/node_modules/micromark-util-decode-string/index.js +0 -2
  619. package/dist/node_modules/react-markdown/node_modules/micromark-util-decode-string/index.js.map +0 -1
  620. package/dist/node_modules/react-markdown/node_modules/micromark-util-html-tag-name/index.js +0 -2
  621. package/dist/node_modules/react-markdown/node_modules/micromark-util-html-tag-name/index.js.map +0 -1
  622. package/dist/node_modules/react-markdown/node_modules/micromark-util-normalize-identifier/index.js +0 -2
  623. package/dist/node_modules/react-markdown/node_modules/micromark-util-normalize-identifier/index.js.map +0 -1
  624. package/dist/node_modules/react-markdown/node_modules/micromark-util-resolve-all/index.js +0 -2
  625. package/dist/node_modules/react-markdown/node_modules/micromark-util-resolve-all/index.js.map +0 -1
  626. package/dist/node_modules/react-markdown/node_modules/micromark-util-subtokenize/index.js +0 -2
  627. package/dist/node_modules/react-markdown/node_modules/micromark-util-subtokenize/index.js.map +0 -1
  628. package/dist/node_modules/react-markdown/node_modules/property-information/index.js +0 -2
  629. package/dist/node_modules/react-markdown/node_modules/property-information/index.js.map +0 -1
  630. package/dist/node_modules/react-markdown/node_modules/property-information/lib/aria.js +0 -2
  631. package/dist/node_modules/react-markdown/node_modules/property-information/lib/aria.js.map +0 -1
  632. package/dist/node_modules/react-markdown/node_modules/property-information/lib/find.js +0 -2
  633. package/dist/node_modules/react-markdown/node_modules/property-information/lib/find.js.map +0 -1
  634. package/dist/node_modules/react-markdown/node_modules/property-information/lib/hast-to-react.js +0 -2
  635. package/dist/node_modules/react-markdown/node_modules/property-information/lib/hast-to-react.js.map +0 -1
  636. package/dist/node_modules/react-markdown/node_modules/property-information/lib/html.js +0 -2
  637. package/dist/node_modules/react-markdown/node_modules/property-information/lib/html.js.map +0 -1
  638. package/dist/node_modules/react-markdown/node_modules/property-information/lib/normalize.js +0 -2
  639. package/dist/node_modules/react-markdown/node_modules/property-information/lib/normalize.js.map +0 -1
  640. package/dist/node_modules/react-markdown/node_modules/property-information/lib/svg.js +0 -2
  641. package/dist/node_modules/react-markdown/node_modules/property-information/lib/svg.js.map +0 -1
  642. package/dist/node_modules/react-markdown/node_modules/property-information/lib/util/case-insensitive-transform.js +0 -2
  643. package/dist/node_modules/react-markdown/node_modules/property-information/lib/util/case-insensitive-transform.js.map +0 -1
  644. package/dist/node_modules/react-markdown/node_modules/property-information/lib/util/case-sensitive-transform.js +0 -2
  645. package/dist/node_modules/react-markdown/node_modules/property-information/lib/util/case-sensitive-transform.js.map +0 -1
  646. package/dist/node_modules/react-markdown/node_modules/property-information/lib/util/create.js +0 -2
  647. package/dist/node_modules/react-markdown/node_modules/property-information/lib/util/create.js.map +0 -1
  648. package/dist/node_modules/react-markdown/node_modules/property-information/lib/util/defined-info.js +0 -2
  649. package/dist/node_modules/react-markdown/node_modules/property-information/lib/util/defined-info.js.map +0 -1
  650. package/dist/node_modules/react-markdown/node_modules/property-information/lib/util/info.js +0 -2
  651. package/dist/node_modules/react-markdown/node_modules/property-information/lib/util/info.js.map +0 -1
  652. package/dist/node_modules/react-markdown/node_modules/property-information/lib/util/merge.js +0 -2
  653. package/dist/node_modules/react-markdown/node_modules/property-information/lib/util/merge.js.map +0 -1
  654. package/dist/node_modules/react-markdown/node_modules/property-information/lib/util/schema.js +0 -2
  655. package/dist/node_modules/react-markdown/node_modules/property-information/lib/util/schema.js.map +0 -1
  656. package/dist/node_modules/react-markdown/node_modules/property-information/lib/util/types.js +0 -2
  657. package/dist/node_modules/react-markdown/node_modules/property-information/lib/util/types.js.map +0 -1
  658. package/dist/node_modules/react-markdown/node_modules/property-information/lib/xlink.js +0 -2
  659. package/dist/node_modules/react-markdown/node_modules/property-information/lib/xlink.js.map +0 -1
  660. package/dist/node_modules/react-markdown/node_modules/property-information/lib/xml.js +0 -2
  661. package/dist/node_modules/react-markdown/node_modules/property-information/lib/xml.js.map +0 -1
  662. package/dist/node_modules/react-markdown/node_modules/property-information/lib/xmlns.js +0 -2
  663. package/dist/node_modules/react-markdown/node_modules/property-information/lib/xmlns.js.map +0 -1
  664. package/dist/node_modules/react-markdown/node_modules/remark-parse/lib/index.js +0 -2
  665. package/dist/node_modules/react-markdown/node_modules/remark-parse/lib/index.js.map +0 -1
  666. package/dist/node_modules/react-markdown/node_modules/space-separated-tokens/index.js +0 -2
  667. package/dist/node_modules/react-markdown/node_modules/space-separated-tokens/index.js.map +0 -1
  668. package/dist/node_modules/react-markdown/node_modules/unified/lib/index.js +0 -2
  669. package/dist/node_modules/react-markdown/node_modules/unified/lib/index.js.map +0 -1
  670. package/dist/node_modules/react-markdown/node_modules/unist-util-is/lib/index.js +0 -2
  671. package/dist/node_modules/react-markdown/node_modules/unist-util-is/lib/index.js.map +0 -1
  672. package/dist/node_modules/react-markdown/node_modules/unist-util-stringify-position/lib/index.js +0 -2
  673. package/dist/node_modules/react-markdown/node_modules/unist-util-stringify-position/lib/index.js.map +0 -1
  674. package/dist/node_modules/react-markdown/node_modules/unist-util-visit/lib/index.js +0 -2
  675. package/dist/node_modules/react-markdown/node_modules/unist-util-visit/lib/index.js.map +0 -1
  676. package/dist/node_modules/react-markdown/node_modules/unist-util-visit-parents/lib/color.browser.js +0 -2
  677. package/dist/node_modules/react-markdown/node_modules/unist-util-visit-parents/lib/color.browser.js.map +0 -1
  678. package/dist/node_modules/react-markdown/node_modules/unist-util-visit-parents/lib/index.js +0 -2
  679. package/dist/node_modules/react-markdown/node_modules/unist-util-visit-parents/lib/index.js.map +0 -1
  680. package/dist/node_modules/react-markdown/node_modules/vfile/lib/index.js +0 -2
  681. package/dist/node_modules/react-markdown/node_modules/vfile/lib/index.js.map +0 -1
  682. package/dist/node_modules/react-markdown/node_modules/vfile/lib/minpath.browser.js +0 -2
  683. package/dist/node_modules/react-markdown/node_modules/vfile/lib/minpath.browser.js.map +0 -1
  684. package/dist/node_modules/react-markdown/node_modules/vfile/lib/minproc.browser.js +0 -2
  685. package/dist/node_modules/react-markdown/node_modules/vfile/lib/minproc.browser.js.map +0 -1
  686. package/dist/node_modules/react-markdown/node_modules/vfile/lib/minurl.browser.js +0 -2
  687. package/dist/node_modules/react-markdown/node_modules/vfile/lib/minurl.browser.js.map +0 -1
  688. package/dist/node_modules/react-markdown/node_modules/vfile/lib/minurl.shared.js +0 -2
  689. package/dist/node_modules/react-markdown/node_modules/vfile/lib/minurl.shared.js.map +0 -1
  690. package/dist/node_modules/react-markdown/node_modules/vfile-message/lib/index.js +0 -2
  691. package/dist/node_modules/react-markdown/node_modules/vfile-message/lib/index.js.map +0 -1
  692. package/dist/node_modules/unist-util-visit/node_modules/unist-util-is/lib/index.js +0 -2
  693. package/dist/node_modules/unist-util-visit/node_modules/unist-util-is/lib/index.js.map +0 -1
  694. package/dist/node_modules/unist-util-visit/node_modules/unist-util-visit-parents/lib/color.browser.js +0 -2
  695. package/dist/node_modules/unist-util-visit/node_modules/unist-util-visit-parents/lib/color.browser.js.map +0 -1
  696. package/dist/node_modules/unist-util-visit/node_modules/unist-util-visit-parents/lib/index.js +0 -2
  697. package/dist/node_modules/unist-util-visit/node_modules/unist-util-visit-parents/lib/index.js.map +0 -1
  698. package/dist/node_modules/unist-util-visit-parents/lib/color.js +0 -2
  699. package/dist/node_modules/unist-util-visit-parents/lib/color.js.map +0 -1
  700. package/dist/node_modules/vfile-message/node_modules/unist-util-stringify-position/lib/index.js +0 -2
  701. package/dist/node_modules/vfile-message/node_modules/unist-util-stringify-position/lib/index.js.map +0 -1
  702. package/dist/types/src/features/IL-OTJ/_components/AccordionSection.d.ts +0 -7
  703. package/dist/types/src/features/IL-OTJ/_components/AccordionSection.d.ts.map +0 -1
  704. package/dist/types/src/features/IL-OTJ/_components/BookLearningForm.d.ts +0 -23
  705. package/dist/types/src/features/IL-OTJ/_components/BookLearningForm.d.ts.map +0 -1
  706. package/dist/types/src/features/IL-OTJ/_components/ChatComponent.d.ts +0 -31
  707. package/dist/types/src/features/IL-OTJ/_components/ChatComponent.d.ts.map +0 -1
  708. package/dist/types/src/features/IL-OTJ/_components/ConflictingLearningWarnCard.d.ts +0 -13
  709. package/dist/types/src/features/IL-OTJ/_components/ConflictingLearningWarnCard.d.ts.map +0 -1
  710. package/dist/types/src/features/IL-OTJ/_components/ConflictingLearningWarnCardDB.d.ts +0 -41
  711. package/dist/types/src/features/IL-OTJ/_components/ConflictingLearningWarnCardDB.d.ts.map +0 -1
  712. package/dist/types/src/features/IL-OTJ/_components/ContextualChatComponent.d.ts +0 -62
  713. package/dist/types/src/features/IL-OTJ/_components/ContextualChatComponent.d.ts.map +0 -1
  714. package/dist/types/src/features/IL-OTJ/_components/ContextualConflictDBCard.d.ts +0 -30
  715. package/dist/types/src/features/IL-OTJ/_components/ContextualConflictDBCard.d.ts.map +0 -1
  716. package/dist/types/src/features/IL-OTJ/_components/ContextualLearningForm.d.ts +0 -1
  717. package/dist/types/src/features/IL-OTJ/_components/ContextualLearningForm.d.ts.map +0 -1
  718. package/dist/types/src/features/IL-OTJ/_components/ILComponents.d.ts +0 -74
  719. package/dist/types/src/features/IL-OTJ/_components/ILComponents.d.ts.map +0 -1
  720. package/dist/types/src/features/IL-OTJ/_components/LearningDetailsForm.d.ts +0 -71
  721. package/dist/types/src/features/IL-OTJ/_components/LearningDetailsForm.d.ts.map +0 -1
  722. package/dist/types/src/features/IL-OTJ/_components/MessageRendering/CustomComponents.d.ts +0 -10
  723. package/dist/types/src/features/IL-OTJ/_components/MessageRendering/CustomComponents.d.ts.map +0 -1
  724. package/dist/types/src/features/IL-OTJ/_components/MessageRendering.d.ts +0 -14
  725. package/dist/types/src/features/IL-OTJ/_components/MessageRendering.d.ts.map +0 -1
  726. package/dist/types/src/features/IL-OTJ/_components/SimilarLearningCard.d.ts +0 -20
  727. package/dist/types/src/features/IL-OTJ/_components/SimilarLearningCard.d.ts.map +0 -1
  728. package/dist/types/src/features/IL-OTJ/_components/TemplateRetrievalForm.d.ts +0 -50
  729. package/dist/types/src/features/IL-OTJ/_components/TemplateRetrievalForm.d.ts.map +0 -1
  730. package/dist/types/src/features/IL-OTJ/_components/atoms/Button.d.ts +0 -7
  731. package/dist/types/src/features/IL-OTJ/_components/atoms/Button.d.ts.map +0 -1
  732. package/dist/types/src/features/IL-OTJ/_components/atoms/Dropdown.d.ts +0 -13
  733. package/dist/types/src/features/IL-OTJ/_components/atoms/Dropdown.d.ts.map +0 -1
  734. package/dist/types/src/features/IL-OTJ/_components/atoms/Textarea.d.ts +0 -6
  735. package/dist/types/src/features/IL-OTJ/_components/atoms/Textarea.d.ts.map +0 -1
  736. package/dist/types/src/features/IL-OTJ/_components/atoms/Toggle.d.ts +0 -7
  737. package/dist/types/src/features/IL-OTJ/_components/atoms/Toggle.d.ts.map +0 -1
  738. package/dist/types/src/features/IL-OTJ/_components/atoms/index.d.ts +0 -4
  739. package/dist/types/src/features/IL-OTJ/_components/atoms/index.d.ts.map +0 -1
  740. package/dist/types/src/features/IL-OTJ/_components/molecules/ConditionList.d.ts +0 -16
  741. package/dist/types/src/features/IL-OTJ/_components/molecules/ConditionList.d.ts.map +0 -1
  742. package/dist/types/src/features/IL-OTJ/_components/molecules/SectionWrapper.d.ts +0 -14
  743. package/dist/types/src/features/IL-OTJ/_components/molecules/SectionWrapper.d.ts.map +0 -1
  744. package/dist/types/src/features/IL-OTJ/_components/molecules/SeparationRow.d.ts +0 -10
  745. package/dist/types/src/features/IL-OTJ/_components/molecules/SeparationRow.d.ts.map +0 -1
  746. package/dist/types/src/features/IL-OTJ/_components/molecules/index.d.ts +0 -4
  747. package/dist/types/src/features/IL-OTJ/_components/molecules/index.d.ts.map +0 -1
  748. package/dist/types/src/features/IL-OTJ/_components/organisms/ConditionSection.d.ts +0 -2
  749. package/dist/types/src/features/IL-OTJ/_components/organisms/ConditionSection.d.ts.map +0 -1
  750. package/dist/types/src/features/IL-OTJ/_components/organisms/ConfigSection.d.ts +0 -2
  751. package/dist/types/src/features/IL-OTJ/_components/organisms/ConfigSection.d.ts.map +0 -1
  752. package/dist/types/src/features/IL-OTJ/_components/organisms/ScopeSection.d.ts +0 -25
  753. package/dist/types/src/features/IL-OTJ/_components/organisms/ScopeSection.d.ts.map +0 -1
  754. package/dist/types/src/features/IL-OTJ/_components/organisms/TextSection.d.ts +0 -2
  755. package/dist/types/src/features/IL-OTJ/_components/organisms/TextSection.d.ts.map +0 -1
  756. package/dist/types/src/features/IL-OTJ/_components/organisms/index.d.ts +0 -5
  757. package/dist/types/src/features/IL-OTJ/_components/organisms/index.d.ts.map +0 -1
  758. package/dist/types/src/features/IL-OTJ/_components/templates/AudactyTemplate.d.ts +0 -2
  759. package/dist/types/src/features/IL-OTJ/_components/templates/AudactyTemplate.d.ts.map +0 -1
  760. package/dist/types/src/features/IL-OTJ/_components/templates/Testing.d.ts +0 -2
  761. package/dist/types/src/features/IL-OTJ/_components/templates/Testing.d.ts.map +0 -1
  762. package/dist/types/src/features/IL-OTJ/_components/templates/TrafficManagerTemplate.d.ts +0 -2
  763. package/dist/types/src/features/IL-OTJ/_components/templates/TrafficManagerTemplate.d.ts.map +0 -1
  764. package/dist/types/src/features/IL-OTJ/_components/templates/index.d.ts +0 -3
  765. package/dist/types/src/features/IL-OTJ/_components/templates/index.d.ts.map +0 -1
  766. package/dist/types/src/features/IL-OTJ/_components/templates/templatesMap.d.ts +0 -7
  767. package/dist/types/src/features/IL-OTJ/_components/templates/templatesMap.d.ts.map +0 -1
  768. package/dist/types/src/features/IL-OTJ/hooks/useHeaderObserver.d.ts +0 -13
  769. package/dist/types/src/features/IL-OTJ/hooks/useHeaderObserver.d.ts.map +0 -1
@@ -1,2 +0,0 @@
1
- "use strict";Object.defineProperty(exports,"__esModule",{value:!0});var e=require("../../micromark-util-character/index.js"),n=require("../../micromark-util-chunked/index.js"),t=require("../../micromark-util-resolve-all/index.js");exports.createTokenizer=function(r,i,u){let o=Object.assign(u?Object.assign({},u):{line:1,column:1,offset:0},{_index:0,_bufferIndex:-1});const s={},c=[];let l=[],f=[];const a={consume:function(n){e.markdownLineEnding(n)?(o.line++,o.column=1,o.offset+=-3===n?2:1,k()):-1!==n&&(o.column++,o.offset++);o._bufferIndex<0?o._index++:(o._bufferIndex++,o._bufferIndex===l[o._index].length&&(o._bufferIndex=-1,o._index++));d.previous=n},enter:function(e,n){const t=n||{};return t.type=e,t.start=v(),d.events.push(["enter",t,d]),f.push(t),t},exit:function(e){const n=f.pop();return n.end=v(),d.events.push(["exit",n,d]),n},attempt:m((function(e,n){g(e,n.from)})),check:m(b),interrupt:m(b,{interrupt:!0})},d={previous:null,code:null,containerState:{},events:[],parser:r,sliceStream:p,sliceSerialize:function(e,n){return function(e,n){let t=-1;const r=[];let i;for(;++t<e.length;){const u=e[t];let o;if("string"==typeof u)o=u;else switch(u){case-5:o="\r";break;case-4:o="\n";break;case-3:o="\r\n";break;case-2:o=n?" 
":"\t";break;case-1:if(!n&&i)continue;o=" ";break;default:o=String.fromCharCode(u)}i=-2===u,r.push(o)}return r.join("")}(p(e),n)},now:v,defineSkip:function(e){s[e.line]=e.column,k()},write:function(e){if(l=n.push(l,e),_(),null!==l[l.length-1])return[];return g(i,0),d.events=t.resolveAll(c,d.events,d),d.events}};let x=i.tokenize.call(d,a);return i.resolveAll&&c.push(i),d;function p(e){return function(e,n){const t=n.start._index,r=n.start._bufferIndex,i=n.end._index,u=n.end._bufferIndex;let o;if(t===i)o=[e[t].slice(r,u)];else{if(o=e.slice(t,i),r>-1){const e=o[0];"string"==typeof e?o[0]=e.slice(r):o.shift()}u>0&&o.push(e[i].slice(0,u))}return o}(l,e)}function v(){const{line:e,column:n,offset:t,_index:r,_bufferIndex:i}=o;return{line:e,column:n,offset:t,_index:r,_bufferIndex:i}}function _(){let e;for(;o._index<l.length;){const n=l[o._index];if("string"==typeof n)for(e=o._index,o._bufferIndex<0&&(o._bufferIndex=0);o._index===e&&o._bufferIndex<n.length;)h(n.charCodeAt(o._bufferIndex));else h(n)}}function h(e){x=x(e)}function b(e,n){n.restore()}function m(e,n){return function(t,r,i){let u,s,c,l;return Array.isArray(t)?x(t):"tokenize"in t?x([t]):function(e){return n;function n(n){const t=null!==n&&e[n],r=null!==n&&e.null;return x([...Array.isArray(t)?t:t?[t]:[],...Array.isArray(r)?r:r?[r]:[]])(n)}}(t);function x(e){return u=e,s=0,0===e.length?i:p(e[s])}function p(e){return function(t){l=function(){const e=v(),n=d.previous,t=d.currentConstruct,r=d.events.length,i=Array.from(f);return{restore:u,from:r};function u(){o=e,d.previous=n,d.currentConstruct=t,d.events.length=r,f=i,k()}}(),c=e,e.partial||(d.currentConstruct=e);if(e.name&&d.parser.constructs.disable.null.includes(e.name))return h();return e.tokenize.call(n?Object.assign(Object.create(d),n):d,a,_,h)(t)}}function _(n){return e(c,l),r}function h(e){return l.restore(),++s<u.length?p(u[s]):i}}}function 
g(e,t){e.resolveAll&&!c.includes(e)&&c.push(e),e.resolve&&n.splice(d.events,t,d.events.length-t,e.resolve(d.events.slice(t),d)),e.resolveTo&&(d.events=e.resolveTo(d.events,d))}function k(){o.line in s&&o.column<2&&(o.column=s[o.line],o.offset+=s[o.line]-1)}};
2
- //# sourceMappingURL=create-tokenizer.js.map
@@ -1 +0,0 @@
1
- {"version":3,"file":"create-tokenizer.js","sources":["../../../../../../../node_modules/react-markdown/node_modules/micromark/lib/create-tokenizer.js"],"sourcesContent":["/**\n * @typedef {import('micromark-util-types').Chunk} Chunk\n * @typedef {import('micromark-util-types').Code} Code\n * @typedef {import('micromark-util-types').Construct} Construct\n * @typedef {import('micromark-util-types').ConstructRecord} ConstructRecord\n * @typedef {import('micromark-util-types').Effects} Effects\n * @typedef {import('micromark-util-types').InitialConstruct} InitialConstruct\n * @typedef {import('micromark-util-types').ParseContext} ParseContext\n * @typedef {import('micromark-util-types').Point} Point\n * @typedef {import('micromark-util-types').State} State\n * @typedef {import('micromark-util-types').Token} Token\n * @typedef {import('micromark-util-types').TokenType} TokenType\n * @typedef {import('micromark-util-types').TokenizeContext} TokenizeContext\n */\n\n/**\n * @callback Restore\n * @returns {void}\n *\n * @typedef Info\n * @property {Restore} restore\n * @property {number} from\n *\n * @callback ReturnHandle\n * Handle a successful run.\n * @param {Construct} construct\n * @param {Info} info\n * @returns {void}\n */\n\nimport {markdownLineEnding} from 'micromark-util-character'\nimport {push, splice} from 'micromark-util-chunked'\nimport {resolveAll} from 'micromark-util-resolve-all'\n/**\n * Create a tokenizer.\n * Tokenizers deal with one type of data (e.g., containers, flow, text).\n * The parser is the object dealing with it all.\n * `initialize` works like other constructs, except that only its `tokenize`\n * function is used, in which case it doesn’t receive an `ok` or `nok`.\n * `from` can be given to set the point before the first character, although\n * when further lines are indented, they must be set with `defineSkip`.\n *\n * @param {ParseContext} parser\n * @param {InitialConstruct} initialize\n * @param {Omit<Point, '_bufferIndex' | '_index'> 
| undefined} [from]\n * @returns {TokenizeContext}\n */\nexport function createTokenizer(parser, initialize, from) {\n /** @type {Point} */\n let point = Object.assign(\n from\n ? Object.assign({}, from)\n : {\n line: 1,\n column: 1,\n offset: 0\n },\n {\n _index: 0,\n _bufferIndex: -1\n }\n )\n /** @type {Record<string, number>} */\n const columnStart = {}\n /** @type {Array<Construct>} */\n const resolveAllConstructs = []\n /** @type {Array<Chunk>} */\n let chunks = []\n /** @type {Array<Token>} */\n let stack = []\n /** @type {boolean | undefined} */\n let consumed = true\n\n /**\n * Tools used for tokenizing.\n *\n * @type {Effects}\n */\n const effects = {\n consume,\n enter,\n exit,\n attempt: constructFactory(onsuccessfulconstruct),\n check: constructFactory(onsuccessfulcheck),\n interrupt: constructFactory(onsuccessfulcheck, {\n interrupt: true\n })\n }\n\n /**\n * State and tools for resolving and serializing.\n *\n * @type {TokenizeContext}\n */\n const context = {\n previous: null,\n code: null,\n containerState: {},\n events: [],\n parser,\n sliceStream,\n sliceSerialize,\n now,\n defineSkip,\n write\n }\n\n /**\n * The state function.\n *\n * @type {State | void}\n */\n let state = initialize.tokenize.call(context, effects)\n\n /**\n * Track which character we expect to be consumed, to catch bugs.\n *\n * @type {Code}\n */\n let expectedCode\n if (initialize.resolveAll) {\n resolveAllConstructs.push(initialize)\n }\n return context\n\n /** @type {TokenizeContext['write']} */\n function write(slice) {\n chunks = push(chunks, slice)\n main()\n\n // Exit if we’re not done, resolve might change stuff.\n if (chunks[chunks.length - 1] !== null) {\n return []\n }\n addResult(initialize, 0)\n\n // Otherwise, resolve, and exit.\n context.events = resolveAll(resolveAllConstructs, context.events, context)\n return context.events\n }\n\n //\n // Tools.\n //\n\n /** @type {TokenizeContext['sliceSerialize']} */\n function sliceSerialize(token, expandTabs) {\n return 
serializeChunks(sliceStream(token), expandTabs)\n }\n\n /** @type {TokenizeContext['sliceStream']} */\n function sliceStream(token) {\n return sliceChunks(chunks, token)\n }\n\n /** @type {TokenizeContext['now']} */\n function now() {\n // This is a hot path, so we clone manually instead of `Object.assign({}, point)`\n const {line, column, offset, _index, _bufferIndex} = point\n return {\n line,\n column,\n offset,\n _index,\n _bufferIndex\n }\n }\n\n /** @type {TokenizeContext['defineSkip']} */\n function defineSkip(value) {\n columnStart[value.line] = value.column\n accountForPotentialSkip()\n }\n\n //\n // State management.\n //\n\n /**\n * Main loop (note that `_index` and `_bufferIndex` in `point` are modified by\n * `consume`).\n * Here is where we walk through the chunks, which either include strings of\n * several characters, or numerical character codes.\n * The reason to do this in a loop instead of a call is so the stack can\n * drain.\n *\n * @returns {void}\n */\n function main() {\n /** @type {number} */\n let chunkIndex\n while (point._index < chunks.length) {\n const chunk = chunks[point._index]\n\n // If we’re in a buffer chunk, loop through it.\n if (typeof chunk === 'string') {\n chunkIndex = point._index\n if (point._bufferIndex < 0) {\n point._bufferIndex = 0\n }\n while (\n point._index === chunkIndex &&\n point._bufferIndex < chunk.length\n ) {\n go(chunk.charCodeAt(point._bufferIndex))\n }\n } else {\n go(chunk)\n }\n }\n }\n\n /**\n * Deal with one code.\n *\n * @param {Code} code\n * @returns {void}\n */\n function go(code) {\n consumed = undefined\n expectedCode = code\n state = state(code)\n }\n\n /** @type {Effects['consume']} */\n function consume(code) {\n if (markdownLineEnding(code)) {\n point.line++\n point.column = 1\n point.offset += code === -3 ? 
2 : 1\n accountForPotentialSkip()\n } else if (code !== -1) {\n point.column++\n point.offset++\n }\n\n // Not in a string chunk.\n if (point._bufferIndex < 0) {\n point._index++\n } else {\n point._bufferIndex++\n\n // At end of string chunk.\n // @ts-expect-error Points w/ non-negative `_bufferIndex` reference\n // strings.\n if (point._bufferIndex === chunks[point._index].length) {\n point._bufferIndex = -1\n point._index++\n }\n }\n\n // Expose the previous character.\n context.previous = code\n\n // Mark as consumed.\n consumed = true\n }\n\n /** @type {Effects['enter']} */\n function enter(type, fields) {\n /** @type {Token} */\n // @ts-expect-error Patch instead of assign required fields to help GC.\n const token = fields || {}\n token.type = type\n token.start = now()\n context.events.push(['enter', token, context])\n stack.push(token)\n return token\n }\n\n /** @type {Effects['exit']} */\n function exit(type) {\n const token = stack.pop()\n token.end = now()\n context.events.push(['exit', token, context])\n return token\n }\n\n /**\n * Use results.\n *\n * @type {ReturnHandle}\n */\n function onsuccessfulconstruct(construct, info) {\n addResult(construct, info.from)\n }\n\n /**\n * Discard results.\n *\n * @type {ReturnHandle}\n */\n function onsuccessfulcheck(_, info) {\n info.restore()\n }\n\n /**\n * Factory to attempt/check/interrupt.\n *\n * @param {ReturnHandle} onreturn\n * @param {{interrupt?: boolean | undefined} | undefined} [fields]\n */\n function constructFactory(onreturn, fields) {\n return hook\n\n /**\n * Handle either an object mapping codes to constructs, a list of\n * constructs, or a single construct.\n *\n * @param {Array<Construct> | Construct | ConstructRecord} constructs\n * @param {State} returnState\n * @param {State | undefined} [bogusState]\n * @returns {State}\n */\n function hook(constructs, returnState, bogusState) {\n /** @type {Array<Construct>} */\n let listOfConstructs\n /** @type {number} */\n let constructIndex\n /** 
@type {Construct} */\n let currentConstruct\n /** @type {Info} */\n let info\n return Array.isArray(constructs) /* c8 ignore next 1 */\n ? handleListOfConstructs(constructs)\n : 'tokenize' in constructs\n ? // @ts-expect-error Looks like a construct.\n handleListOfConstructs([constructs])\n : handleMapOfConstructs(constructs)\n\n /**\n * Handle a list of construct.\n *\n * @param {ConstructRecord} map\n * @returns {State}\n */\n function handleMapOfConstructs(map) {\n return start\n\n /** @type {State} */\n function start(code) {\n const def = code !== null && map[code]\n const all = code !== null && map.null\n const list = [\n // To do: add more extension tests.\n /* c8 ignore next 2 */\n ...(Array.isArray(def) ? def : def ? [def] : []),\n ...(Array.isArray(all) ? all : all ? [all] : [])\n ]\n return handleListOfConstructs(list)(code)\n }\n }\n\n /**\n * Handle a list of construct.\n *\n * @param {Array<Construct>} list\n * @returns {State}\n */\n function handleListOfConstructs(list) {\n listOfConstructs = list\n constructIndex = 0\n if (list.length === 0) {\n return bogusState\n }\n return handleConstruct(list[constructIndex])\n }\n\n /**\n * Handle a single construct.\n *\n * @param {Construct} construct\n * @returns {State}\n */\n function handleConstruct(construct) {\n return start\n\n /** @type {State} */\n function start(code) {\n // To do: not needed to store if there is no bogus state, probably?\n // Currently doesn’t work because `inspect` in document does a check\n // w/o a bogus, which doesn’t make sense. 
But it does seem to help perf\n // by not storing.\n info = store()\n currentConstruct = construct\n if (!construct.partial) {\n context.currentConstruct = construct\n }\n\n // Always populated by defaults.\n\n if (\n construct.name &&\n context.parser.constructs.disable.null.includes(construct.name)\n ) {\n return nok(code)\n }\n return construct.tokenize.call(\n // If we do have fields, create an object w/ `context` as its\n // prototype.\n // This allows a “live binding”, which is needed for `interrupt`.\n fields ? Object.assign(Object.create(context), fields) : context,\n effects,\n ok,\n nok\n )(code)\n }\n }\n\n /** @type {State} */\n function ok(code) {\n consumed = true\n onreturn(currentConstruct, info)\n return returnState\n }\n\n /** @type {State} */\n function nok(code) {\n consumed = true\n info.restore()\n if (++constructIndex < listOfConstructs.length) {\n return handleConstruct(listOfConstructs[constructIndex])\n }\n return bogusState\n }\n }\n }\n\n /**\n * @param {Construct} construct\n * @param {number} from\n * @returns {void}\n */\n function addResult(construct, from) {\n if (construct.resolveAll && !resolveAllConstructs.includes(construct)) {\n resolveAllConstructs.push(construct)\n }\n if (construct.resolve) {\n splice(\n context.events,\n from,\n context.events.length - from,\n construct.resolve(context.events.slice(from), context)\n )\n }\n if (construct.resolveTo) {\n context.events = construct.resolveTo(context.events, context)\n }\n }\n\n /**\n * Store state.\n *\n * @returns {Info}\n */\n function store() {\n const startPoint = now()\n const startPrevious = context.previous\n const startCurrentConstruct = context.currentConstruct\n const startEventsIndex = context.events.length\n const startStack = Array.from(stack)\n return {\n restore,\n from: startEventsIndex\n }\n\n /**\n * Restore state.\n *\n * @returns {void}\n */\n function restore() {\n point = startPoint\n context.previous = startPrevious\n context.currentConstruct = 
startCurrentConstruct\n context.events.length = startEventsIndex\n stack = startStack\n accountForPotentialSkip()\n }\n }\n\n /**\n * Move the current point a bit forward in the line when it’s on a column\n * skip.\n *\n * @returns {void}\n */\n function accountForPotentialSkip() {\n if (point.line in columnStart && point.column < 2) {\n point.column = columnStart[point.line]\n point.offset += columnStart[point.line] - 1\n }\n }\n}\n\n/**\n * Get the chunks from a slice of chunks in the range of a token.\n *\n * @param {Array<Chunk>} chunks\n * @param {Pick<Token, 'end' | 'start'>} token\n * @returns {Array<Chunk>}\n */\nfunction sliceChunks(chunks, token) {\n const startIndex = token.start._index\n const startBufferIndex = token.start._bufferIndex\n const endIndex = token.end._index\n const endBufferIndex = token.end._bufferIndex\n /** @type {Array<Chunk>} */\n let view\n if (startIndex === endIndex) {\n // @ts-expect-error `_bufferIndex` is used on string chunks.\n view = [chunks[startIndex].slice(startBufferIndex, endBufferIndex)]\n } else {\n view = chunks.slice(startIndex, endIndex)\n if (startBufferIndex > -1) {\n const head = view[0]\n if (typeof head === 'string') {\n view[0] = head.slice(startBufferIndex)\n } else {\n view.shift()\n }\n }\n if (endBufferIndex > 0) {\n // @ts-expect-error `_bufferIndex` is used on string chunks.\n view.push(chunks[endIndex].slice(0, endBufferIndex))\n }\n }\n return view\n}\n\n/**\n * Get the string value of a slice of chunks.\n *\n * @param {Array<Chunk>} chunks\n * @param {boolean | undefined} [expandTabs=false]\n * @returns {string}\n */\nfunction serializeChunks(chunks, expandTabs) {\n let index = -1\n /** @type {Array<string>} */\n const result = []\n /** @type {boolean | undefined} */\n let atTab\n while (++index < chunks.length) {\n const chunk = chunks[index]\n /** @type {string} */\n let value\n if (typeof chunk === 'string') {\n value = chunk\n } else\n switch (chunk) {\n case -5: {\n value = '\\r'\n break\n }\n 
case -4: {\n value = '\\n'\n break\n }\n case -3: {\n value = '\\r' + '\\n'\n break\n }\n case -2: {\n value = expandTabs ? ' ' : '\\t'\n break\n }\n case -1: {\n if (!expandTabs && atTab) continue\n value = ' '\n break\n }\n default: {\n // Currently only replacement character.\n value = String.fromCharCode(chunk)\n }\n }\n atTab = chunk === -2\n result.push(value)\n }\n return result.join('')\n}\n"],"names":["parser","initialize","from","point","Object","assign","line","column","offset","_index","_bufferIndex","columnStart","resolveAllConstructs","chunks","stack","effects","consume","code","markdownLineEnding","accountForPotentialSkip","length","context","previous","enter","type","fields","token","start","now","events","push","exit","pop","end","attempt","constructFactory","construct","info","addResult","check","onsuccessfulcheck","interrupt","containerState","sliceStream","sliceSerialize","expandTabs","index","result","atTab","chunk","value","String","fromCharCode","join","serializeChunks","defineSkip","write","slice","main","resolveAll","state","tokenize","call","startIndex","startBufferIndex","endIndex","endBufferIndex","view","head","shift","sliceChunks","chunkIndex","go","charCodeAt","_","restore","onreturn","constructs","returnState","bogusState","listOfConstructs","constructIndex","currentConstruct","Array","isArray","handleListOfConstructs","map","def","all","null","handleMapOfConstructs","list","handleConstruct","startPoint","startPrevious","startCurrentConstruct","startEventsIndex","startStack","store","partial","name","disable","includes","nok","create","ok","resolve","splice","resolveTo"],"mappings":"+PA+CO,SAAyBA,EAAQC,EAAYC,GAElD,IAAIC,EAAQC,OAAOC,OACjBH,EACIE,OAAOC,OAAO,CAAE,EAAEH,GAClB,CACEI,KAAM,EACNC,OAAQ,EACRC,OAAQ,GAEd,CACEC,OAAQ,EACRC,cAAe,IAInB,MAAMC,EAAc,CAAE,EAEhBC,EAAuB,GAE7B,IAAIC,EAAS,GAETC,EAAQ,GASZ,MAAMC,EAAU,CACdC,QAkJF,SAAiBC,GACXC,EAAAA,mBAAmBD,IACrBd,EAAMG,OACNH,EAAMI,OAAS,EACfJ,EAAMK,SAAoB,IAAVS,EAAc,EAAI,EAClCE,MACmB,IAAVF,IACTd,
EAAMI,SACNJ,EAAMK,UAIJL,EAAMO,aAAe,EACvBP,EAAMM,UAENN,EAAMO,eAKFP,EAAMO,eAAiBG,EAAOV,EAAMM,QAAQW,SAC9CjB,EAAMO,cAAgB,EACtBP,EAAMM,WAKVY,EAAQC,SAAWL,CAIpB,EAhLCM,MAmLF,SAAeC,EAAMC,GAGnB,MAAMC,EAAQD,GAAU,CAAE,EAK1B,OAJAC,EAAMF,KAAOA,EACbE,EAAMC,MAAQC,IACdP,EAAQQ,OAAOC,KAAK,CAAC,QAASJ,EAAOL,IACrCP,EAAMgB,KAAKJ,GACJA,CACR,EA3LCK,KA8LF,SAAcP,GACZ,MAAME,EAAQZ,EAAMkB,MAGpB,OAFAN,EAAMO,IAAML,IACZP,EAAQQ,OAAOC,KAAK,CAAC,OAAQJ,EAAOL,IAC7BK,CACR,EAlMCQ,QAASC,GAyMX,SAA+BC,EAAWC,GACxCC,EAAUF,EAAWC,EAAKnC,KAC3B,IA1MCqC,MAAOJ,EAAiBK,GACxBC,UAAWN,EAAiBK,EAAmB,CAC7CC,WAAW,KASTpB,EAAU,CACdC,SAAU,KACVL,KAAM,KACNyB,eAAgB,CAAE,EAClBb,OAAQ,GACR7B,SACA2C,cACAC,eA6CF,SAAwBlB,EAAOmB,GAC7B,OAsYJ,SAAyBhC,EAAQgC,GAC/B,IAAIC,GAAS,EAEb,MAAMC,EAAS,GAEf,IAAIC,EACJ,OAASF,EAAQjC,EAAOO,QAAQ,CAC9B,MAAM6B,EAAQpC,EAAOiC,GAErB,IAAII,EACJ,GAAqB,iBAAVD,EACTC,EAAQD,OAER,OAAQA,GACN,KAAM,EACJC,EAAQ,KACR,MAEF,KAAM,EACJA,EAAQ,KACR,MAEF,KAAM,EACJA,EAAQ,OACR,MAEF,KAAM,EACJA,EAAQL,EAAa,IAAM,KAC3B,MAEF,KAAM,EACJ,IAAKA,GAAcG,EAAO,SAC1BE,EAAQ,IACR,MAEF,QAEEA,EAAQC,OAAOC,aAAaH,GAGlCD,GAAmB,IAAXC,EACRF,EAAOjB,KAAKoB,EACb,CACD,OAAOH,EAAOM,KAAK,GACrB,CAlbWC,CAAgBX,EAAYjB,GAAQmB,EAC5C,EA9CCjB,MACA2B,WAkEF,SAAoBL,GAClBvC,EAAYuC,EAAM5C,MAAQ4C,EAAM3C,OAChCY,GACD,EApECqC,MAsBF,SAAeC,GAKb,GAJA5C,EAASiB,EAAAA,KAAKjB,EAAQ4C,GACtBC,IAGkC,OAA9B7C,EAAOA,EAAOO,OAAS,GACzB,MAAO,GAMT,OAJAkB,EAAUrC,EAAY,GAGtBoB,EAAQQ,OAAS8B,EAAUA,WAAC/C,EAAsBS,EAAQQ,OAAQR,GAC3DA,EAAQQ,MAChB,GA3BD,IAAI+B,EAAQ3D,EAAW4D,SAASC,KAAKzC,EAASN,GAW9C,OAHId,EAAW0D,YACb/C,EAAqBkB,KAAK7B,GAErBoB,EA4BP,SAASsB,EAAYjB,GACnB,OA8VJ,SAAqBb,EAAQa,GAC3B,MAAMqC,EAAarC,EAAMC,MAAMlB,OACzBuD,EAAmBtC,EAAMC,MAAMjB,aAC/BuD,EAAWvC,EAAMO,IAAIxB,OACrByD,EAAiBxC,EAAMO,IAAIvB,aAEjC,IAAIyD,EACJ,GAAIJ,IAAeE,EAEjBE,EAAO,CAACtD,EAAOkD,GAAYN,MAAMO,EAAkBE,QAC9C,CAEL,GADAC,EAAOtD,EAAO4C,MAAMM,EAAYE,GAC5BD,GAAoB,EAAG,CACzB,MAAMI,EAAOD,EAAK,GACE,iBAATC,EACTD,EAAK,GAAKC,EAAKX,MAAMO,GAErBG,EAAKE,OAER,CACGH,EAAiB,GAEnBC,EAAKrC,KAAKjB,EAAOoD,GAAUR,MAAM,EAAGS,GAEvC,CACD,OAAOC,CACT,CAxXWG,CAAYzD,EAA
Qa,EAC5B,CAGD,SAASE,IAEP,MAAMtB,KAACA,EAAIC,OAAEA,EAAMC,OAAEA,EAAMC,OAAEA,EAAMC,aAAEA,GAAgBP,EACrD,MAAO,CACLG,OACAC,SACAC,SACAC,SACAC,eAEH,CAsBD,SAASgD,IAEP,IAAIa,EACJ,KAAOpE,EAAMM,OAASI,EAAOO,QAAQ,CACnC,MAAM6B,EAAQpC,EAAOV,EAAMM,QAG3B,GAAqB,iBAAVwC,EAKT,IAJAsB,EAAapE,EAAMM,OACfN,EAAMO,aAAe,IACvBP,EAAMO,aAAe,GAGrBP,EAAMM,SAAW8D,GACjBpE,EAAMO,aAAeuC,EAAM7B,QAE3BoD,EAAGvB,EAAMwB,WAAWtE,EAAMO,oBAG5B8D,EAAGvB,EAEN,CACF,CAQD,SAASuB,EAAGvD,GAGV2C,EAAQA,EAAM3C,EACf,CAsED,SAASuB,EAAkBkC,EAAGrC,GAC5BA,EAAKsC,SACN,CAQD,SAASxC,EAAiByC,EAAUnD,GAClC,OAWA,SAAcoD,EAAYC,EAAaC,GAErC,IAAIC,EAEAC,EAEAC,EAEA7C,EACJ,OAAO8C,MAAMC,QAAQP,GACjBQ,EAAuBR,GACvB,aAAcA,EAEdQ,EAAuB,CAACR,IAS5B,SAA+BS,GAC7B,OAAO3D,EAGP,SAASA,EAAMV,GACb,MAAMsE,EAAe,OAATtE,GAAiBqE,EAAIrE,GAC3BuE,EAAe,OAATvE,GAAiBqE,EAAIG,KAOjC,OAAOJ,EANM,IAGPF,MAAMC,QAAQG,GAAOA,EAAMA,EAAM,CAACA,GAAO,MACzCJ,MAAMC,QAAQI,GAAOA,EAAMA,EAAM,CAACA,GAAO,IAExCH,CAA6BpE,EACrC,CACF,CAvBGyE,CAAsBb,GA+B1B,SAASQ,EAAuBM,GAG9B,OAFAX,EAAmBW,EACnBV,EAAiB,EACG,IAAhBU,EAAKvE,OACA2D,EAEFa,EAAgBD,EAAKV,GAC7B,CAQD,SAASW,EAAgBxD,GACvB,OAGA,SAAenB,GAKboB,EAwER,WACE,MAAMwD,EAAajE,IACbkE,EAAgBzE,EAAQC,SACxByE,EAAwB1E,EAAQ6D,iBAChCc,EAAmB3E,EAAQQ,OAAOT,OAClC6E,EAAad,MAAMjF,KAAKY,GAC9B,MAAO,CACL6D,UACAzE,KAAM8F,GAQR,SAASrB,IACPxE,EAAQ0F,EACRxE,EAAQC,SAAWwE,EACnBzE,EAAQ6D,iBAAmBa,EAC3B1E,EAAQQ,OAAOT,OAAS4E,EACxBlF,EAAQmF,EACR9E,GACD,CACF,CAhGc+E,GACPhB,EAAmB9C,EACdA,EAAU+D,UACb9E,EAAQ6D,iBAAmB9C,GAK7B,GACEA,EAAUgE,MACV/E,EAAQrB,OAAO6E,WAAWwB,QAAQZ,KAAKa,SAASlE,EAAUgE,MAE1D,OAAOG,IAET,OAAOnE,EAAUyB,SAASC,KAIxBrC,EAASrB,OAAOC,OAAOD,OAAOoG,OAAOnF,GAAUI,GAAUJ,EACzDN,EACA0F,EACAF,EAPKnE,CAQLnB,EACH,CACF,CAGD,SAASwF,EAAGxF,GAGV,OADA2D,EAASM,EAAkB7C,GACpByC,CACR,CAGD,SAASyB,EAAItF,GAGX,OADAoB,EAAKsC,YACCM,EAAiBD,EAAiB5D,OAC/BwE,EAAgBZ,EAAiBC,IAEnCF,CACR,CACF,CACF,CAOD,SAASzC,EAAUF,EAAWlC,GACxBkC,EAAUuB,aAAe/C,EAAqB0F,SAASlE,IACzDxB,EAAqBkB,KAAKM,GAExBA,EAAUsE,SACZC,EAAMA,OACJtF,EAAQQ,OACR3B,EACAmB,EAAQQ,OAAOT,OAASlB,EACxBkC,EAAUsE,QAAQrF,EAAQQ,OAAO4B,MAAMvD,GAAOmB,IAG9
Ce,EAAUwE,YACZvF,EAAQQ,OAASO,EAAUwE,UAAUvF,EAAQQ,OAAQR,GAExD,CAuCD,SAASF,IACHhB,EAAMG,QAAQK,GAAeR,EAAMI,OAAS,IAC9CJ,EAAMI,OAASI,EAAYR,EAAMG,MACjCH,EAAMK,QAAUG,EAAYR,EAAMG,MAAQ,EAE7C,CACH"}
@@ -1,2 +0,0 @@
1
- "use strict";Object.defineProperty(exports,"__esModule",{value:!0});var e=require("../../../micromark-factory-space/index.js"),n=require("../../../micromark-util-character/index.js");const t={tokenize:function(t){const r=t.attempt(this.parser.constructs.contentInitial,(function(n){if(null===n)return void t.consume(n);return t.enter("lineEnding"),t.consume(n),t.exit("lineEnding"),e.factorySpace(t,r,"linePrefix")}),(function(e){return t.enter("paragraph"),c(e)}));let i;return r;function c(e){const n=t.enter("chunkText",{contentType:"text",previous:i});return i&&(i.next=n),i=n,o(e)}function o(e){return null===e?(t.exit("chunkText"),t.exit("paragraph"),void t.consume(e)):n.markdownLineEnding(e)?(t.consume(e),t.exit("chunkText"),c):(t.consume(e),o)}}};exports.content=t;
2
- //# sourceMappingURL=content.js.map
@@ -1 +0,0 @@
1
- {"version":3,"file":"content.js","sources":["../../../../../../../../node_modules/react-markdown/node_modules/micromark/lib/initialize/content.js"],"sourcesContent":["/**\n * @typedef {import('micromark-util-types').InitialConstruct} InitialConstruct\n * @typedef {import('micromark-util-types').Initializer} Initializer\n * @typedef {import('micromark-util-types').State} State\n * @typedef {import('micromark-util-types').Token} Token\n * @typedef {import('micromark-util-types').TokenizeContext} TokenizeContext\n */\n\nimport {factorySpace} from 'micromark-factory-space'\nimport {markdownLineEnding} from 'micromark-util-character'\n/** @type {InitialConstruct} */\nexport const content = {\n tokenize: initializeContent\n}\n\n/**\n * @this {TokenizeContext}\n * @type {Initializer}\n */\nfunction initializeContent(effects) {\n const contentStart = effects.attempt(\n this.parser.constructs.contentInitial,\n afterContentStartConstruct,\n paragraphInitial\n )\n /** @type {Token} */\n let previous\n return contentStart\n\n /** @type {State} */\n function afterContentStartConstruct(code) {\n if (code === null) {\n effects.consume(code)\n return\n }\n effects.enter('lineEnding')\n effects.consume(code)\n effects.exit('lineEnding')\n return factorySpace(effects, contentStart, 'linePrefix')\n }\n\n /** @type {State} */\n function paragraphInitial(code) {\n effects.enter('paragraph')\n return lineStart(code)\n }\n\n /** @type {State} */\n function lineStart(code) {\n const token = effects.enter('chunkText', {\n contentType: 'text',\n previous\n })\n if (previous) {\n previous.next = token\n }\n previous = token\n return data(code)\n }\n\n /** @type {State} */\n function data(code) {\n if (code === null) {\n effects.exit('chunkText')\n effects.exit('paragraph')\n effects.consume(code)\n return\n }\n if (markdownLineEnding(code)) {\n effects.consume(code)\n effects.exit('chunkText')\n return lineStart\n }\n\n // Data.\n effects.consume(code)\n return data\n 
}\n}\n"],"names":["content","tokenize","effects","contentStart","attempt","this","parser","constructs","contentInitial","code","consume","enter","exit","factorySpace","lineStart","previous","token","contentType","next","data","markdownLineEnding"],"mappings":"uLAWY,MAACA,EAAU,CACrBC,SAOF,SAA2BC,GACzB,MAAMC,EAAeD,EAAQE,QAC3BC,KAAKC,OAAOC,WAAWC,gBASzB,SAAoCC,GAClC,GAAa,OAATA,EAEF,YADAP,EAAQQ,QAAQD,GAMlB,OAHAP,EAAQS,MAAM,cACdT,EAAQQ,QAAQD,GAChBP,EAAQU,KAAK,cACNC,eAAaX,EAASC,EAAc,aAC5C,IAGD,SAA0BM,GAExB,OADAP,EAAQS,MAAM,aACPG,EAAUL,EAClB,IAnBD,IAAIM,EACJ,OAAOZ,EAqBP,SAASW,EAAUL,GACjB,MAAMO,EAAQd,EAAQS,MAAM,YAAa,CACvCM,YAAa,OACbF,aAMF,OAJIA,IACFA,EAASG,KAAOF,GAElBD,EAAWC,EACJG,EAAKV,EACb,CAGD,SAASU,EAAKV,GACZ,OAAa,OAATA,GACFP,EAAQU,KAAK,aACbV,EAAQU,KAAK,kBACbV,EAAQQ,QAAQD,IAGdW,EAAAA,mBAAmBX,IACrBP,EAAQQ,QAAQD,GAChBP,EAAQU,KAAK,aACNE,IAITZ,EAAQQ,QAAQD,GACTU,EACR,CACH"}
@@ -1,2 +0,0 @@
1
- "use strict";Object.defineProperty(exports,"__esModule",{value:!0});var e=require("../../../micromark-factory-space/index.js"),t=require("../../../micromark-util-character/index.js"),n=require("../../../micromark-util-chunked/index.js");const r={tokenize:function(e){const r=this,i=[];let c,s,u,l=0;return a;function a(t){if(l<i.length){const n=i[l];return r.containerState=n[1],e.attempt(n[0].continuation,f,v)(t)}return v(t)}function f(e){if(l++,r.containerState._closeFlow){r.containerState._closeFlow=void 0,c&&S();const t=r.events.length;let o,i=t;for(;i--;)if("exit"===r.events[i][0]&&"chunkFlow"===r.events[i][1].type){o=r.events[i][1].end;break}x(l);let s=t;for(;s<r.events.length;)r.events[s][1].end=Object.assign({},o),s++;return n.splice(r.events,i+1,0,r.events.slice(t)),r.events.length=s,v(e)}return a(e)}function v(t){if(l===i.length){if(!c)return h(t);if(c.currentConstruct&&c.currentConstruct.concrete)return k(t);r.interrupt=Boolean(c.currentConstruct&&!c._gfmTableDynamicInterruptHack)}return r.containerState={},e.check(o,d,p)(t)}function d(e){return c&&S(),x(l),h(e)}function p(e){return r.parser.lazy[r.now().line]=l!==i.length,u=r.now().offset,k(e)}function h(t){return r.containerState={},e.attempt(o,m,k)(t)}function m(e){return l++,i.push([r.currentConstruct,r.containerState]),h(e)}function k(t){return null===t?(c&&S(),x(0),void e.consume(t)):(c=c||r.parser.flow(r.now()),e.enter("chunkFlow",{contentType:"flow",previous:s,_tokenizer:c}),g(t))}function g(n){return null===n?(w(e.exit("chunkFlow"),!0),x(0),void e.consume(n)):t.markdownLineEnding(n)?(e.consume(n),w(e.exit("chunkFlow")),l=0,r.interrupt=void 0,a):(e.consume(n),g)}function w(e,t){const o=r.sliceStream(e);if(t&&o.push(null),e.previous=s,s&&(s.next=e),s=e,c.defineSkip(e.start),c.write(o),r.parser.lazy[e.start.line]){let e=c.events.length;for(;e--;)if(c.events[e][1].start.offset<u&&(!c.events[e][1].end||c.events[e][1].end.offset>u))return;const t=r.events.length;let 
o,i,s=t;for(;s--;)if("exit"===r.events[s][0]&&"chunkFlow"===r.events[s][1].type){if(o){i=r.events[s][1].end;break}o=!0}for(x(l),e=t;e<r.events.length;)r.events[e][1].end=Object.assign({},i),e++;n.splice(r.events,s+1,0,r.events.slice(t)),r.events.length=e}}function x(t){let n=i.length;for(;n-- >t;){const t=i[n];r.containerState=t[1],t[0].exit.call(r,e)}i.length=t}function S(){c.write([null]),s=void 0,c=void 0,r.containerState._closeFlow=void 0}}},o={tokenize:function(t,n,r){return e.factorySpace(t,t.attempt(this.parser.constructs.document,n,r),"linePrefix",this.parser.constructs.disable.null.includes("codeIndented")?void 0:4)}};exports.document=r;
2
- //# sourceMappingURL=document.js.map
@@ -1 +0,0 @@
1
- {"version":3,"file":"document.js","sources":["../../../../../../../../node_modules/react-markdown/node_modules/micromark/lib/initialize/document.js"],"sourcesContent":["/**\n * @typedef {import('micromark-util-types').Construct} Construct\n * @typedef {import('micromark-util-types').ContainerState} ContainerState\n * @typedef {import('micromark-util-types').InitialConstruct} InitialConstruct\n * @typedef {import('micromark-util-types').Initializer} Initializer\n * @typedef {import('micromark-util-types').Point} Point\n * @typedef {import('micromark-util-types').State} State\n * @typedef {import('micromark-util-types').Token} Token\n * @typedef {import('micromark-util-types').TokenizeContext} TokenizeContext\n * @typedef {import('micromark-util-types').Tokenizer} Tokenizer\n */\n\n/**\n * @typedef {[Construct, ContainerState]} StackItem\n */\n\nimport {factorySpace} from 'micromark-factory-space'\nimport {markdownLineEnding} from 'micromark-util-character'\nimport {splice} from 'micromark-util-chunked'\n/** @type {InitialConstruct} */\nexport const document = {\n tokenize: initializeDocument\n}\n\n/** @type {Construct} */\nconst containerConstruct = {\n tokenize: tokenizeContainer\n}\n\n/**\n * @this {TokenizeContext}\n * @type {Initializer}\n */\nfunction initializeDocument(effects) {\n const self = this\n /** @type {Array<StackItem>} */\n const stack = []\n let continued = 0\n /** @type {TokenizeContext | undefined} */\n let childFlow\n /** @type {Token | undefined} */\n let childToken\n /** @type {number} */\n let lineStartOffset\n return start\n\n /** @type {State} */\n function start(code) {\n // First we iterate through the open blocks, starting with the root\n // document, and descending through last children down to the last open\n // block.\n // Each block imposes a condition that the line must satisfy if the block is\n // to remain open.\n // For example, a block quote requires a `>` character.\n // A paragraph requires a non-blank line.\n // In this 
phase we may match all or just some of the open blocks.\n // But we cannot close unmatched blocks yet, because we may have a lazy\n // continuation line.\n if (continued < stack.length) {\n const item = stack[continued]\n self.containerState = item[1]\n return effects.attempt(\n item[0].continuation,\n documentContinue,\n checkNewContainers\n )(code)\n }\n\n // Done.\n return checkNewContainers(code)\n }\n\n /** @type {State} */\n function documentContinue(code) {\n continued++\n\n // Note: this field is called `_closeFlow` but it also closes containers.\n // Perhaps a good idea to rename it but it’s already used in the wild by\n // extensions.\n if (self.containerState._closeFlow) {\n self.containerState._closeFlow = undefined\n if (childFlow) {\n closeFlow()\n }\n\n // Note: this algorithm for moving events around is similar to the\n // algorithm when dealing with lazy lines in `writeToChild`.\n const indexBeforeExits = self.events.length\n let indexBeforeFlow = indexBeforeExits\n /** @type {Point | undefined} */\n let point\n\n // Find the flow chunk.\n while (indexBeforeFlow--) {\n if (\n self.events[indexBeforeFlow][0] === 'exit' &&\n self.events[indexBeforeFlow][1].type === 'chunkFlow'\n ) {\n point = self.events[indexBeforeFlow][1].end\n break\n }\n }\n exitContainers(continued)\n\n // Fix positions.\n let index = indexBeforeExits\n while (index < self.events.length) {\n self.events[index][1].end = Object.assign({}, point)\n index++\n }\n\n // Inject the exits earlier (they’re still also at the end).\n splice(\n self.events,\n indexBeforeFlow + 1,\n 0,\n self.events.slice(indexBeforeExits)\n )\n\n // Discard the duplicate exits.\n self.events.length = index\n return checkNewContainers(code)\n }\n return start(code)\n }\n\n /** @type {State} */\n function checkNewContainers(code) {\n // Next, after consuming the continuation markers for existing blocks, we\n // look for new block starts (e.g. 
`>` for a block quote).\n // If we encounter a new block start, we close any blocks unmatched in\n // step 1 before creating the new block as a child of the last matched\n // block.\n if (continued === stack.length) {\n // No need to `check` whether there’s a container, of `exitContainers`\n // would be moot.\n // We can instead immediately `attempt` to parse one.\n if (!childFlow) {\n return documentContinued(code)\n }\n\n // If we have concrete content, such as block HTML or fenced code,\n // we can’t have containers “pierce” into them, so we can immediately\n // start.\n if (childFlow.currentConstruct && childFlow.currentConstruct.concrete) {\n return flowStart(code)\n }\n\n // If we do have flow, it could still be a blank line,\n // but we’d be interrupting it w/ a new container if there’s a current\n // construct.\n // To do: next major: remove `_gfmTableDynamicInterruptHack` (no longer\n // needed in micromark-extension-gfm-table@1.0.6).\n self.interrupt = Boolean(\n childFlow.currentConstruct && !childFlow._gfmTableDynamicInterruptHack\n )\n }\n\n // Check if there is a new container.\n self.containerState = {}\n return effects.check(\n containerConstruct,\n thereIsANewContainer,\n thereIsNoNewContainer\n )(code)\n }\n\n /** @type {State} */\n function thereIsANewContainer(code) {\n if (childFlow) closeFlow()\n exitContainers(continued)\n return documentContinued(code)\n }\n\n /** @type {State} */\n function thereIsNoNewContainer(code) {\n self.parser.lazy[self.now().line] = continued !== stack.length\n lineStartOffset = self.now().offset\n return flowStart(code)\n }\n\n /** @type {State} */\n function documentContinued(code) {\n // Try new containers.\n self.containerState = {}\n return effects.attempt(\n containerConstruct,\n containerContinue,\n flowStart\n )(code)\n }\n\n /** @type {State} */\n function containerContinue(code) {\n continued++\n stack.push([self.currentConstruct, self.containerState])\n // Try another.\n return documentContinued(code)\n 
}\n\n /** @type {State} */\n function flowStart(code) {\n if (code === null) {\n if (childFlow) closeFlow()\n exitContainers(0)\n effects.consume(code)\n return\n }\n childFlow = childFlow || self.parser.flow(self.now())\n effects.enter('chunkFlow', {\n contentType: 'flow',\n previous: childToken,\n _tokenizer: childFlow\n })\n return flowContinue(code)\n }\n\n /** @type {State} */\n function flowContinue(code) {\n if (code === null) {\n writeToChild(effects.exit('chunkFlow'), true)\n exitContainers(0)\n effects.consume(code)\n return\n }\n if (markdownLineEnding(code)) {\n effects.consume(code)\n writeToChild(effects.exit('chunkFlow'))\n // Get ready for the next line.\n continued = 0\n self.interrupt = undefined\n return start\n }\n effects.consume(code)\n return flowContinue\n }\n\n /**\n * @param {Token} token\n * @param {boolean | undefined} [eof]\n * @returns {void}\n */\n function writeToChild(token, eof) {\n const stream = self.sliceStream(token)\n if (eof) stream.push(null)\n token.previous = childToken\n if (childToken) childToken.next = token\n childToken = token\n childFlow.defineSkip(token.start)\n childFlow.write(stream)\n\n // Alright, so we just added a lazy line:\n //\n // ```markdown\n // > a\n // b.\n //\n // Or:\n //\n // > ~~~c\n // d\n //\n // Or:\n //\n // > | e |\n // f\n // ```\n //\n // The construct in the second example (fenced code) does not accept lazy\n // lines, so it marked itself as done at the end of its first line, and\n // then the content construct parses `d`.\n // Most constructs in markdown match on the first line: if the first line\n // forms a construct, a non-lazy line can’t “unmake” it.\n //\n // The construct in the third example is potentially a GFM table, and\n // those are *weird*.\n // It *could* be a table, from the first line, if the following line\n // matches a condition.\n // In this case, that second line is lazy, which “unmakes” the first line\n // and turns the whole into one content block.\n //\n // We’ve 
now parsed the non-lazy and the lazy line, and can figure out\n // whether the lazy line started a new flow block.\n // If it did, we exit the current containers between the two flow blocks.\n if (self.parser.lazy[token.start.line]) {\n let index = childFlow.events.length\n while (index--) {\n if (\n // The token starts before the line ending…\n childFlow.events[index][1].start.offset < lineStartOffset &&\n // …and either is not ended yet…\n (!childFlow.events[index][1].end ||\n // …or ends after it.\n childFlow.events[index][1].end.offset > lineStartOffset)\n ) {\n // Exit: there’s still something open, which means it’s a lazy line\n // part of something.\n return\n }\n }\n\n // Note: this algorithm for moving events around is similar to the\n // algorithm when closing flow in `documentContinue`.\n const indexBeforeExits = self.events.length\n let indexBeforeFlow = indexBeforeExits\n /** @type {boolean | undefined} */\n let seen\n /** @type {Point | undefined} */\n let point\n\n // Find the previous chunk (the one before the lazy line).\n while (indexBeforeFlow--) {\n if (\n self.events[indexBeforeFlow][0] === 'exit' &&\n self.events[indexBeforeFlow][1].type === 'chunkFlow'\n ) {\n if (seen) {\n point = self.events[indexBeforeFlow][1].end\n break\n }\n seen = true\n }\n }\n exitContainers(continued)\n\n // Fix positions.\n index = indexBeforeExits\n while (index < self.events.length) {\n self.events[index][1].end = Object.assign({}, point)\n index++\n }\n\n // Inject the exits earlier (they’re still also at the end).\n splice(\n self.events,\n indexBeforeFlow + 1,\n 0,\n self.events.slice(indexBeforeExits)\n )\n\n // Discard the duplicate exits.\n self.events.length = index\n }\n }\n\n /**\n * @param {number} size\n * @returns {void}\n */\n function exitContainers(size) {\n let index = stack.length\n\n // Exit open containers.\n while (index-- > size) {\n const entry = stack[index]\n self.containerState = entry[1]\n entry[0].exit.call(self, effects)\n }\n 
stack.length = size\n }\n function closeFlow() {\n childFlow.write([null])\n childToken = undefined\n childFlow = undefined\n self.containerState._closeFlow = undefined\n }\n}\n\n/**\n * @this {TokenizeContext}\n * @type {Tokenizer}\n */\nfunction tokenizeContainer(effects, ok, nok) {\n // Always populated by defaults.\n\n return factorySpace(\n effects,\n effects.attempt(this.parser.constructs.document, ok, nok),\n 'linePrefix',\n this.parser.constructs.disable.null.includes('codeIndented') ? undefined : 4\n )\n}\n"],"names":["document","tokenize","effects","self","this","stack","childFlow","childToken","lineStartOffset","continued","start","code","length","item","containerState","attempt","continuation","documentContinue","checkNewContainers","_closeFlow","undefined","closeFlow","indexBeforeExits","events","point","indexBeforeFlow","type","end","exitContainers","index","Object","assign","splice","slice","documentContinued","currentConstruct","concrete","flowStart","interrupt","Boolean","_gfmTableDynamicInterruptHack","check","containerConstruct","thereIsANewContainer","thereIsNoNewContainer","parser","lazy","now","line","offset","containerContinue","push","consume","flow","enter","contentType","previous","_tokenizer","flowContinue","writeToChild","exit","markdownLineEnding","token","eof","stream","sliceStream","next","defineSkip","write","seen","size","entry","call","ok","nok","factorySpace","constructs","disable","null","includes"],"mappings":"6OAoBY,MAACA,EAAW,CACtBC,SAYF,SAA4BC,GAC1B,MAAMC,EAAOC,KAEPC,EAAQ,GACd,IAEIC,EAEAC,EAEAC,EANAC,EAAY,EAOhB,OAAOC,EAGP,SAASA,EAAMC,GAWb,GAAIF,EAAYJ,EAAMO,OAAQ,CAC5B,MAAMC,EAAOR,EAAMI,GAEnB,OADAN,EAAKW,eAAiBD,EAAK,GACpBX,EAAQa,QACbF,EAAK,GAAGG,aACRC,EACAC,EAHKhB,CAILS,EACH,CAGD,OAAOO,EAAmBP,EAC3B,CAGD,SAASM,EAAiBN,GAMxB,GALAF,IAKIN,EAAKW,eAAeK,WAAY,CAClChB,EAAKW,eAAeK,gBAAaC,EAC7Bd,GACFe,IAKF,MAAMC,EAAmBnB,EAAKoB,OAAOX,OACrC,IAEIY,EAFAC,EAAkBH,EAKtB,KAAOG,KACL,GACsC,SAApCtB,EAAKoB,OAAOE,GAAiB,IACY,cAAzCtB,EAAKoB,OAAOE,GAAiB,GA
AGC,KAChC,CACAF,EAAQrB,EAAKoB,OAAOE,GAAiB,GAAGE,IACxC,KACD,CAEHC,EAAenB,GAGf,IAAIoB,EAAQP,EACZ,KAAOO,EAAQ1B,EAAKoB,OAAOX,QACzBT,EAAKoB,OAAOM,GAAO,GAAGF,IAAMG,OAAOC,OAAO,CAAE,EAAEP,GAC9CK,IAaF,OATAG,EAAMA,OACJ7B,EAAKoB,OACLE,EAAkB,EAClB,EACAtB,EAAKoB,OAAOU,MAAMX,IAIpBnB,EAAKoB,OAAOX,OAASiB,EACdX,EAAmBP,EAC3B,CACD,OAAOD,EAAMC,EACd,CAGD,SAASO,EAAmBP,GAM1B,GAAIF,IAAcJ,EAAMO,OAAQ,CAI9B,IAAKN,EACH,OAAO4B,EAAkBvB,GAM3B,GAAIL,EAAU6B,kBAAoB7B,EAAU6B,iBAAiBC,SAC3D,OAAOC,EAAU1B,GAQnBR,EAAKmC,UAAYC,QACfjC,EAAU6B,mBAAqB7B,EAAUkC,8BAE5C,CAID,OADArC,EAAKW,eAAiB,CAAE,EACjBZ,EAAQuC,MACbC,EACAC,EACAC,EAHK1C,CAILS,EACH,CAGD,SAASgC,EAAqBhC,GAG5B,OAFIL,GAAWe,IACfO,EAAenB,GACRyB,EAAkBvB,EAC1B,CAGD,SAASiC,EAAsBjC,GAG7B,OAFAR,EAAK0C,OAAOC,KAAK3C,EAAK4C,MAAMC,MAAQvC,IAAcJ,EAAMO,OACxDJ,EAAkBL,EAAK4C,MAAME,OACtBZ,EAAU1B,EAClB,CAGD,SAASuB,EAAkBvB,GAGzB,OADAR,EAAKW,eAAiB,CAAE,EACjBZ,EAAQa,QACb2B,EACAQ,EACAb,EAHKnC,CAILS,EACH,CAGD,SAASuC,EAAkBvC,GAIzB,OAHAF,IACAJ,EAAM8C,KAAK,CAAChD,EAAKgC,iBAAkBhC,EAAKW,iBAEjCoB,EAAkBvB,EAC1B,CAGD,SAAS0B,EAAU1B,GACjB,OAAa,OAATA,GACEL,GAAWe,IACfO,EAAe,QACf1B,EAAQkD,QAAQzC,KAGlBL,EAAYA,GAAaH,EAAK0C,OAAOQ,KAAKlD,EAAK4C,OAC/C7C,EAAQoD,MAAM,YAAa,CACzBC,YAAa,OACbC,SAAUjD,EACVkD,WAAYnD,IAEPoD,EAAa/C,GACrB,CAGD,SAAS+C,EAAa/C,GACpB,OAAa,OAATA,GACFgD,EAAazD,EAAQ0D,KAAK,cAAc,GACxChC,EAAe,QACf1B,EAAQkD,QAAQzC,IAGdkD,EAAAA,mBAAmBlD,IACrBT,EAAQkD,QAAQzC,GAChBgD,EAAazD,EAAQ0D,KAAK,cAE1BnD,EAAY,EACZN,EAAKmC,eAAYlB,EACVV,IAETR,EAAQkD,QAAQzC,GACT+C,EACR,CAOD,SAASC,EAAaG,EAAOC,GAC3B,MAAMC,EAAS7D,EAAK8D,YAAYH,GAyChC,GAxCIC,GAAKC,EAAOb,KAAK,MACrBW,EAAMN,SAAWjD,EACbA,IAAYA,EAAW2D,KAAOJ,GAClCvD,EAAauD,EACbxD,EAAU6D,WAAWL,EAAMpD,OAC3BJ,EAAU8D,MAAMJ,GAmCZ7D,EAAK0C,OAAOC,KAAKgB,EAAMpD,MAAMsC,MAAO,CACtC,IAAInB,EAAQvB,EAAUiB,OAAOX,OAC7B,KAAOiB,KACL,GAEEvB,EAAUiB,OAAOM,GAAO,GAAGnB,MAAMuC,OAASzC,KAExCF,EAAUiB,OAAOM,GAAO,GAAGF,KAE3BrB,EAAUiB,OAAOM,GAAO,GAAGF,IAAIsB,OAASzC,GAI1C,OAMJ,MAAMc,EAAmBnB,EAAKoB,OAAOX,OACrC,IAEIyD,EAEA7C,EAJAC,EAAkBH,EAOtB,KAAOG,KACL,GACsC,SAApCtB,EAAKoB,OAAOE,GAAiB,I
ACY,cAAzCtB,EAAKoB,OAAOE,GAAiB,GAAGC,KAChC,CACA,GAAI2C,EAAM,CACR7C,EAAQrB,EAAKoB,OAAOE,GAAiB,GAAGE,IACxC,KACD,CACD0C,GAAO,CACR,CAMH,IAJAzC,EAAenB,GAGfoB,EAAQP,EACDO,EAAQ1B,EAAKoB,OAAOX,QACzBT,EAAKoB,OAAOM,GAAO,GAAGF,IAAMG,OAAOC,OAAO,CAAE,EAAEP,GAC9CK,IAIFG,EAAMA,OACJ7B,EAAKoB,OACLE,EAAkB,EAClB,EACAtB,EAAKoB,OAAOU,MAAMX,IAIpBnB,EAAKoB,OAAOX,OAASiB,CACtB,CACF,CAMD,SAASD,EAAe0C,GACtB,IAAIzC,EAAQxB,EAAMO,OAGlB,KAAOiB,KAAUyC,GAAM,CACrB,MAAMC,EAAQlE,EAAMwB,GACpB1B,EAAKW,eAAiByD,EAAM,GAC5BA,EAAM,GAAGX,KAAKY,KAAKrE,EAAMD,EAC1B,CACDG,EAAMO,OAAS0D,CAChB,CACD,SAASjD,IACPf,EAAU8D,MAAM,CAAC,OACjB7D,OAAaa,EACbd,OAAYc,EACZjB,EAAKW,eAAeK,gBAAaC,CAClC,CACH,GArVMsB,EAAqB,CACzBzC,SA0VF,SAA2BC,EAASuE,EAAIC,GAGtC,OAAOC,EAAYA,aACjBzE,EACAA,EAAQa,QAAQX,KAAKyC,OAAO+B,WAAW5E,SAAUyE,EAAIC,GACrD,aACAtE,KAAKyC,OAAO+B,WAAWC,QAAQC,KAAKC,SAAS,qBAAkB3D,EAAY,EAE/E"}
@@ -1,2 +0,0 @@
1
- "use strict";Object.defineProperty(exports,"__esModule",{value:!0});var e=require("../../../micromark-core-commonmark/lib/blank-line.js"),r=require("../../../micromark-factory-space/index.js");require("../../../../../decode-named-character-reference/index.dom.js");var n=require("../../../micromark-core-commonmark/lib/content.js");require("../../../micromark-core-commonmark/lib/label-start-image.js"),require("../../../micromark-core-commonmark/lib/label-start-link.js");const t={tokenize:function(t){const i=this,o=t.attempt(e.blankLine,(function(e){if(null===e)return void t.consume(e);return t.enter("lineEndingBlank"),t.consume(e),t.exit("lineEndingBlank"),i.currentConstruct=void 0,o}),t.attempt(this.parser.constructs.flowInitial,c,r.factorySpace(t,t.attempt(this.parser.constructs.flow,c,t.attempt(n.content,c)),"linePrefix")));return o;function c(e){if(null!==e)return t.enter("lineEnding"),t.consume(e),t.exit("lineEnding"),i.currentConstruct=void 0,o;t.consume(e)}}};exports.flow=t;
2
- //# sourceMappingURL=flow.js.map
@@ -1 +0,0 @@
1
- {"version":3,"file":"flow.js","sources":["../../../../../../../../node_modules/react-markdown/node_modules/micromark/lib/initialize/flow.js"],"sourcesContent":["/**\n * @typedef {import('micromark-util-types').InitialConstruct} InitialConstruct\n * @typedef {import('micromark-util-types').Initializer} Initializer\n * @typedef {import('micromark-util-types').State} State\n * @typedef {import('micromark-util-types').TokenizeContext} TokenizeContext\n */\n\nimport {blankLine, content} from 'micromark-core-commonmark'\nimport {factorySpace} from 'micromark-factory-space'\nimport {markdownLineEnding} from 'micromark-util-character'\n/** @type {InitialConstruct} */\nexport const flow = {\n tokenize: initializeFlow\n}\n\n/**\n * @this {TokenizeContext}\n * @type {Initializer}\n */\nfunction initializeFlow(effects) {\n const self = this\n const initial = effects.attempt(\n // Try to parse a blank line.\n blankLine,\n atBlankEnding,\n // Try to parse initial flow (essentially, only code).\n effects.attempt(\n this.parser.constructs.flowInitial,\n afterConstruct,\n factorySpace(\n effects,\n effects.attempt(\n this.parser.constructs.flow,\n afterConstruct,\n effects.attempt(content, afterConstruct)\n ),\n 'linePrefix'\n )\n )\n )\n return initial\n\n /** @type {State} */\n function atBlankEnding(code) {\n if (code === null) {\n effects.consume(code)\n return\n }\n effects.enter('lineEndingBlank')\n effects.consume(code)\n effects.exit('lineEndingBlank')\n self.currentConstruct = undefined\n return initial\n }\n\n /** @type {State} */\n function afterConstruct(code) {\n if (code === null) {\n effects.consume(code)\n return\n }\n effects.enter('lineEnding')\n effects.consume(code)\n effects.exit('lineEnding')\n self.currentConstruct = undefined\n return initial\n 
}\n}\n"],"names":["flow","tokenize","effects","self","this","initial","attempt","blankLine","code","consume","enter","exit","currentConstruct","undefined","parser","constructs","flowInitial","afterConstruct","factorySpace","content"],"mappings":"ydAWY,MAACA,EAAO,CAClBC,SAOF,SAAwBC,GACtB,MAAMC,EAAOC,KACPC,EAAUH,EAAQI,QAEtBC,EAASA,WAoBX,SAAuBC,GACrB,GAAa,OAATA,EAEF,YADAN,EAAQO,QAAQD,GAOlB,OAJAN,EAAQQ,MAAM,mBACdR,EAAQO,QAAQD,GAChBN,EAAQS,KAAK,mBACbR,EAAKS,sBAAmBC,EACjBR,CACR,GA3BCH,EAAQI,QACNF,KAAKU,OAAOC,WAAWC,YACvBC,EACAC,EAAYA,aACVhB,EACAA,EAAQI,QACNF,KAAKU,OAAOC,WAAWf,KACvBiB,EACAf,EAAQI,QAAQa,EAAOA,QAAEF,IAE3B,gBAIN,OAAOZ,EAgBP,SAASY,EAAeT,GACtB,GAAa,OAATA,EAQJ,OAJAN,EAAQQ,MAAM,cACdR,EAAQO,QAAQD,GAChBN,EAAQS,KAAK,cACbR,EAAKS,sBAAmBC,EACjBR,EAPLH,EAAQO,QAAQD,EAQnB,CACH"}
@@ -1,2 +0,0 @@
1
- "use strict";Object.defineProperty(exports,"__esModule",{value:!0});const e={resolveAll:i()},t=r("string"),n=r("text");function r(e){return{tokenize:function(t){const n=this,r=this.parser.constructs[e],i=t.attempt(r,s,o);return s;function s(e){return l(e)?i(e):o(e)}function o(e){if(null!==e)return t.enter("data"),t.consume(e),f;t.consume(e)}function f(e){return l(e)?(t.exit("data"),i(e)):(t.consume(e),f)}function l(e){if(null===e)return!0;const t=r[e];let i=-1;if(t)for(;++i<t.length;){const e=t[i];if(!e.previous||e.previous.call(n,n.previous))return!0}return!1}},resolveAll:i("text"===e?s:void 0)}}function i(e){return function(t,n){let r,i=-1;for(;++i<=t.length;)void 0===r?t[i]&&"data"===t[i][1].type&&(r=i,i++):t[i]&&"data"===t[i][1].type||(i!==r+2&&(t[r][1].end=t[i-1][1].end,t.splice(r+2,i-r-2),i=r+2),r=void 0);return e?e(t,n):t}}function s(e,t){let n=0;for(;++n<=e.length;)if((n===e.length||"lineEnding"===e[n][1].type)&&"data"===e[n-1][1].type){const r=e[n-1][1],i=t.sliceStream(r);let s,o=i.length,f=-1,l=0;for(;o--;){const e=i[o];if("string"==typeof e){for(f=e.length;32===e.charCodeAt(f-1);)l++,f--;if(f)break;f=-1}else if(-2===e)s=!0,l++;else if(-1!==e){o++;break}}if(l){const i={type:n===e.length||s||l<2?"lineSuffix":"hardBreakTrailing",start:{line:r.end.line,column:r.end.column-l,offset:r.end.offset-l,_index:r.start._index+o,_bufferIndex:o?f:r.start._bufferIndex+f},end:Object.assign({},r.end)};r.end=Object.assign({},i.start),r.start.offset===r.end.offset?Object.assign(r,i):(e.splice(n,0,["enter",i,t],["exit",i,t]),n+=2)}n++}return e}exports.resolver=e,exports.string=t,exports.text=n;
2
- //# sourceMappingURL=text.js.map
@@ -1 +0,0 @@
1
- {"version":3,"file":"text.js","sources":["../../../../../../../../node_modules/react-markdown/node_modules/micromark/lib/initialize/text.js"],"sourcesContent":["/**\n * @typedef {import('micromark-util-types').Code} Code\n * @typedef {import('micromark-util-types').InitialConstruct} InitialConstruct\n * @typedef {import('micromark-util-types').Initializer} Initializer\n * @typedef {import('micromark-util-types').Resolver} Resolver\n * @typedef {import('micromark-util-types').State} State\n * @typedef {import('micromark-util-types').TokenizeContext} TokenizeContext\n */\n\nexport const resolver = {\n resolveAll: createResolver()\n}\nexport const string = initializeFactory('string')\nexport const text = initializeFactory('text')\n\n/**\n * @param {'string' | 'text'} field\n * @returns {InitialConstruct}\n */\nfunction initializeFactory(field) {\n return {\n tokenize: initializeText,\n resolveAll: createResolver(\n field === 'text' ? resolveAllLineSuffixes : undefined\n )\n }\n\n /**\n * @this {TokenizeContext}\n * @type {Initializer}\n */\n function initializeText(effects) {\n const self = this\n const constructs = this.parser.constructs[field]\n const text = effects.attempt(constructs, start, notText)\n return start\n\n /** @type {State} */\n function start(code) {\n return atBreak(code) ? 
text(code) : notText(code)\n }\n\n /** @type {State} */\n function notText(code) {\n if (code === null) {\n effects.consume(code)\n return\n }\n effects.enter('data')\n effects.consume(code)\n return data\n }\n\n /** @type {State} */\n function data(code) {\n if (atBreak(code)) {\n effects.exit('data')\n return text(code)\n }\n\n // Data.\n effects.consume(code)\n return data\n }\n\n /**\n * @param {Code} code\n * @returns {boolean}\n */\n function atBreak(code) {\n if (code === null) {\n return true\n }\n const list = constructs[code]\n let index = -1\n if (list) {\n // Always populated by defaults.\n\n while (++index < list.length) {\n const item = list[index]\n if (!item.previous || item.previous.call(self, self.previous)) {\n return true\n }\n }\n }\n return false\n }\n }\n}\n\n/**\n * @param {Resolver | undefined} [extraResolver]\n * @returns {Resolver}\n */\nfunction createResolver(extraResolver) {\n return resolveAllText\n\n /** @type {Resolver} */\n function resolveAllText(events, context) {\n let index = -1\n /** @type {number | undefined} */\n let enter\n\n // A rather boring computation (to merge adjacent `data` events) which\n // improves mm performance by 29%.\n while (++index <= events.length) {\n if (enter === undefined) {\n if (events[index] && events[index][1].type === 'data') {\n enter = index\n index++\n }\n } else if (!events[index] || events[index][1].type !== 'data') {\n // Don’t do anything if there is one data token.\n if (index !== enter + 2) {\n events[enter][1].end = events[index - 1][1].end\n events.splice(enter + 2, index - enter - 2)\n index = enter + 2\n }\n enter = undefined\n }\n }\n return extraResolver ? 
extraResolver(events, context) : events\n }\n}\n\n/**\n * A rather ugly set of instructions which again looks at chunks in the input\n * stream.\n * The reason to do this here is that it is *much* faster to parse in reverse.\n * And that we can’t hook into `null` to split the line suffix before an EOF.\n * To do: figure out if we can make this into a clean utility, or even in core.\n * As it will be useful for GFMs literal autolink extension (and maybe even\n * tables?)\n *\n * @type {Resolver}\n */\nfunction resolveAllLineSuffixes(events, context) {\n let eventIndex = 0 // Skip first.\n\n while (++eventIndex <= events.length) {\n if (\n (eventIndex === events.length ||\n events[eventIndex][1].type === 'lineEnding') &&\n events[eventIndex - 1][1].type === 'data'\n ) {\n const data = events[eventIndex - 1][1]\n const chunks = context.sliceStream(data)\n let index = chunks.length\n let bufferIndex = -1\n let size = 0\n /** @type {boolean | undefined} */\n let tabs\n while (index--) {\n const chunk = chunks[index]\n if (typeof chunk === 'string') {\n bufferIndex = chunk.length\n while (chunk.charCodeAt(bufferIndex - 1) === 32) {\n size++\n bufferIndex--\n }\n if (bufferIndex) break\n bufferIndex = -1\n }\n // Number\n else if (chunk === -2) {\n tabs = true\n size++\n } else if (chunk === -1) {\n // Empty\n } else {\n // Replacement character, exit.\n index++\n break\n }\n }\n if (size) {\n const token = {\n type:\n eventIndex === events.length || tabs || size < 2\n ? 'lineSuffix'\n : 'hardBreakTrailing',\n start: {\n line: data.end.line,\n column: data.end.column - size,\n offset: data.end.offset - size,\n _index: data.start._index + index,\n _bufferIndex: index\n ? 
bufferIndex\n : data.start._bufferIndex + bufferIndex\n },\n end: Object.assign({}, data.end)\n }\n data.end = Object.assign({}, token.start)\n if (data.start.offset === data.end.offset) {\n Object.assign(data, token)\n } else {\n events.splice(\n eventIndex,\n 0,\n ['enter', token, context],\n ['exit', token, context]\n )\n eventIndex += 2\n }\n }\n eventIndex++\n }\n }\n return events\n}\n"],"names":["resolver","resolveAll","createResolver","string","initializeFactory","text","field","tokenize","effects","self","this","constructs","parser","attempt","start","notText","code","atBreak","enter","consume","data","exit","list","index","length","item","previous","call","resolveAllLineSuffixes","undefined","extraResolver","events","context","type","end","splice","eventIndex","chunks","sliceStream","tabs","bufferIndex","size","chunk","charCodeAt","token","line","column","offset","_index","_bufferIndex","Object","assign"],"mappings":"oEASY,MAACA,EAAW,CACtBC,WAAYC,KAEDC,EAASC,EAAkB,UAC3BC,EAAOD,EAAkB,QAMtC,SAASA,EAAkBE,GACzB,MAAO,CACLC,SAUF,SAAwBC,GACtB,MAAMC,EAAOC,KACPC,EAAaD,KAAKE,OAAOD,WAAWL,GACpCD,EAAOG,EAAQK,QAAQF,EAAYG,EAAOC,GAChD,OAAOD,EAGP,SAASA,EAAME,GACb,OAAOC,EAAQD,GAAQX,EAAKW,GAAQD,EAAQC,EAC7C,CAGD,SAASD,EAAQC,GACf,GAAa,OAATA,EAMJ,OAFAR,EAAQU,MAAM,QACdV,EAAQW,QAAQH,GACTI,EALLZ,EAAQW,QAAQH,EAMnB,CAGD,SAASI,EAAKJ,GACZ,OAAIC,EAAQD,IACVR,EAAQa,KAAK,QACNhB,EAAKW,KAIdR,EAAQW,QAAQH,GACTI,EACR,CAMD,SAASH,EAAQD,GACf,GAAa,OAATA,EACF,OAAO,EAET,MAAMM,EAAOX,EAAWK,GACxB,IAAIO,GAAS,EACb,GAAID,EAGF,OAASC,EAAQD,EAAKE,QAAQ,CAC5B,MAAMC,EAAOH,EAAKC,GAClB,IAAKE,EAAKC,UAAYD,EAAKC,SAASC,KAAKlB,EAAMA,EAAKiB,UAClD,OAAO,CAEV,CAEH,OAAO,CACR,CACF,EAjECzB,WAAYC,EACA,SAAVI,EAAmBsB,OAAyBC,GAiElD,CAMA,SAAS3B,EAAe4B,GACtB,OAGA,SAAwBC,EAAQC,GAC9B,IAEId,EAFAK,GAAS,EAMb,OAASA,GAASQ,EAAOP,aACTK,IAAVX,EACEa,EAAOR,IAAoC,SAA1BQ,EAAOR,GAAO,GAAGU,OACpCf,EAAQK,EACRA,KAEQQ,EAAOR,IAAoC,SAA1BQ,EAAOR,GAAO,GAAGU,OAExCV,IAAUL,EAAQ,IACpBa,EAAOb,GAAO,GAAGgB,IAAMH,EAAOR,EAAQ,GAAG,GAAGW,IAC5CH,EAAOI,OAAOjB,EAAQ,EA
AGK,EAAQL,EAAQ,GACzCK,EAAQL,EAAQ,GAElBA,OAAQW,GAGZ,OAAOC,EAAgBA,EAAcC,EAAQC,GAAWD,CACzD,CACH,CAaA,SAASH,EAAuBG,EAAQC,GACtC,IAAII,EAAa,EAEjB,OAASA,GAAcL,EAAOP,QAC5B,IACGY,IAAeL,EAAOP,QACU,eAA/BO,EAAOK,GAAY,GAAGH,OACW,SAAnCF,EAAOK,EAAa,GAAG,GAAGH,KAC1B,CACA,MAAMb,EAAOW,EAAOK,EAAa,GAAG,GAC9BC,EAASL,EAAQM,YAAYlB,GACnC,IAIImB,EAJAhB,EAAQc,EAAOb,OACfgB,GAAe,EACfC,EAAO,EAGX,KAAOlB,KAAS,CACd,MAAMmB,EAAQL,EAAOd,GACrB,GAAqB,iBAAVmB,EAAoB,CAE7B,IADAF,EAAcE,EAAMlB,OACyB,KAAtCkB,EAAMC,WAAWH,EAAc,IACpCC,IACAD,IAEF,GAAIA,EAAa,MACjBA,GAAe,CAChB,MAEI,IAAe,IAAXE,EACPH,GAAO,EACPE,SACK,IAAe,IAAXC,EAEJ,CAELnB,IACA,KACD,CACF,CACD,GAAIkB,EAAM,CACR,MAAMG,EAAQ,CACZX,KACEG,IAAeL,EAAOP,QAAUe,GAAQE,EAAO,EAC3C,aACA,oBACN3B,MAAO,CACL+B,KAAMzB,EAAKc,IAAIW,KACfC,OAAQ1B,EAAKc,IAAIY,OAASL,EAC1BM,OAAQ3B,EAAKc,IAAIa,OAASN,EAC1BO,OAAQ5B,EAAKN,MAAMkC,OAASzB,EAC5B0B,aAAc1B,EACViB,EACApB,EAAKN,MAAMmC,aAAeT,GAEhCN,IAAKgB,OAAOC,OAAO,CAAA,EAAI/B,EAAKc,MAE9Bd,EAAKc,IAAMgB,OAAOC,OAAO,CAAE,EAAEP,EAAM9B,OAC/BM,EAAKN,MAAMiC,SAAW3B,EAAKc,IAAIa,OACjCG,OAAOC,OAAO/B,EAAMwB,IAEpBb,EAAOI,OACLC,EACA,EACA,CAAC,QAASQ,EAAOZ,GACjB,CAAC,OAAQY,EAAOZ,IAElBI,GAAc,EAEjB,CACDA,GACD,CAEH,OAAOL,CACT"}
@@ -1,2 +0,0 @@
1
- "use strict";Object.defineProperty(exports,"__esModule",{value:!0});var e=require("../../micromark-util-combine-extensions/index.js"),t=require("./initialize/content.js"),i=require("./initialize/document.js"),n=require("./initialize/flow.js"),r=require("./initialize/text.js"),o=require("./create-tokenizer.js"),s=require("./constructs.js");exports.parse=function(u){const c=u||{},a={defined:[],lazy:{},constructs:e.combineExtensions([s,...c.extensions||[]]),content:l(t.content),document:l(i.document),flow:l(n.flow),string:l(r.string),text:l(r.text)};return a;function l(e){return function(t){return o.createTokenizer(a,e,t)}}};
2
- //# sourceMappingURL=parse.js.map
@@ -1 +0,0 @@
1
- {"version":3,"file":"parse.js","sources":["../../../../../../../node_modules/react-markdown/node_modules/micromark/lib/parse.js"],"sourcesContent":["/**\n * @typedef {import('micromark-util-types').Create} Create\n * @typedef {import('micromark-util-types').FullNormalizedExtension} FullNormalizedExtension\n * @typedef {import('micromark-util-types').InitialConstruct} InitialConstruct\n * @typedef {import('micromark-util-types').ParseContext} ParseContext\n * @typedef {import('micromark-util-types').ParseOptions} ParseOptions\n */\n\nimport {combineExtensions} from 'micromark-util-combine-extensions'\nimport {content} from './initialize/content.js'\nimport {document} from './initialize/document.js'\nimport {flow} from './initialize/flow.js'\nimport {text, string} from './initialize/text.js'\nimport {createTokenizer} from './create-tokenizer.js'\nimport * as defaultConstructs from './constructs.js'\n\n/**\n * @param {ParseOptions | null | undefined} [options]\n * @returns {ParseContext}\n */\nexport function parse(options) {\n const settings = options || {}\n const constructs =\n /** @type {FullNormalizedExtension} */\n combineExtensions([defaultConstructs, ...(settings.extensions || [])])\n\n /** @type {ParseContext} */\n const parser = {\n defined: [],\n lazy: {},\n constructs,\n content: create(content),\n document: create(document),\n flow: create(flow),\n string: create(string),\n text: create(text)\n }\n return parser\n\n /**\n * @param {InitialConstruct} initial\n */\n function create(initial) {\n return creator\n /** @type {Create} */\n function creator(from) {\n return createTokenizer(parser, initial, from)\n }\n 
}\n}\n"],"names":["options","settings","parser","defined","lazy","constructs","combineExtensions","defaultConstructs","extensions","content","create","document","flow","string","text","initial","from","createTokenizer"],"mappings":"mWAoBO,SAAeA,GACpB,MAAMC,EAAWD,GAAW,CAAE,EAMxBE,EAAS,CACbC,QAAS,GACTC,KAAM,CAAE,EACZC,WANIC,EAAiBA,kBAAC,CAACC,KAAuBN,EAASO,YAAc,KAOjEC,QAASC,EAAOD,WAChBE,SAAUD,EAAOC,YACjBC,KAAMF,EAAOE,QACbC,OAAQH,EAAOG,UACfC,KAAMJ,EAAOI,SAEf,OAAOZ,EAKP,SAASQ,EAAOK,GACd,OAEA,SAAiBC,GACf,OAAOC,kBAAgBf,EAAQa,EAASC,EACzC,CACF,CACH"}
@@ -1,2 +0,0 @@
1
- "use strict";Object.defineProperty(exports,"__esModule",{value:!0});var e=require("../../micromark-util-subtokenize/index.js");exports.postprocess=function(r){for(;!e.subtokenize(r););return r};
2
- //# sourceMappingURL=postprocess.js.map
@@ -1 +0,0 @@
1
- {"version":3,"file":"postprocess.js","sources":["../../../../../../../node_modules/react-markdown/node_modules/micromark/lib/postprocess.js"],"sourcesContent":["/**\n * @typedef {import('micromark-util-types').Event} Event\n */\n\nimport {subtokenize} from 'micromark-util-subtokenize'\n\n/**\n * @param {Array<Event>} events\n * @returns {Array<Event>}\n */\nexport function postprocess(events) {\n while (!subtokenize(events)) {\n // Empty\n }\n return events\n}\n"],"names":["events","subtokenize"],"mappings":"mJAUO,SAAqBA,GAC1B,MAAQC,EAAAA,YAAYD,KAGpB,OAAOA,CACT"}
@@ -1,2 +0,0 @@
1
- "use strict";Object.defineProperty(exports,"__esModule",{value:!0});const e=/[\0\t\n\r]/g;exports.preprocess=function(){let s,t=1,r="",u=!0;return function(o,c,i){const n=[];let h,p,l,a,d;o=r+o.toString(c),l=0,r="",u&&(65279===o.charCodeAt(0)&&l++,u=void 0);for(;l<o.length;){if(e.lastIndex=l,h=e.exec(o),a=h&&void 0!==h.index?h.index:o.length,d=o.charCodeAt(a),!h){r=o.slice(l);break}if(10===d&&l===a&&s)n.push(-3),s=void 0;else switch(s&&(n.push(-5),s=void 0),l<a&&(n.push(o.slice(l,a)),t+=a-l),d){case 0:n.push(65533),t++;break;case 9:for(p=4*Math.ceil(t/4),n.push(-2);t++<p;)n.push(-1);break;case 10:n.push(-4),t=1;break;default:s=!0,t=1}l=a+1}i&&(s&&n.push(-5),r&&n.push(r),n.push(null));return n}};
2
- //# sourceMappingURL=preprocess.js.map
@@ -1 +0,0 @@
1
- {"version":3,"file":"preprocess.js","sources":["../../../../../../../node_modules/react-markdown/node_modules/micromark/lib/preprocess.js"],"sourcesContent":["/**\n * @typedef {import('micromark-util-types').Chunk} Chunk\n * @typedef {import('micromark-util-types').Code} Code\n * @typedef {import('micromark-util-types').Encoding} Encoding\n * @typedef {import('micromark-util-types').Value} Value\n */\n\n/**\n * @callback Preprocessor\n * @param {Value} value\n * @param {Encoding | null | undefined} [encoding]\n * @param {boolean | null | undefined} [end=false]\n * @returns {Array<Chunk>}\n */\n\nconst search = /[\\0\\t\\n\\r]/g\n\n/**\n * @returns {Preprocessor}\n */\nexport function preprocess() {\n let column = 1\n let buffer = ''\n /** @type {boolean | undefined} */\n let start = true\n /** @type {boolean | undefined} */\n let atCarriageReturn\n return preprocessor\n\n /** @type {Preprocessor} */\n function preprocessor(value, encoding, end) {\n /** @type {Array<Chunk>} */\n const chunks = []\n /** @type {RegExpMatchArray | null} */\n let match\n /** @type {number} */\n let next\n /** @type {number} */\n let startPosition\n /** @type {number} */\n let endPosition\n /** @type {Code} */\n let code\n\n // @ts-expect-error `Buffer` does allow an encoding.\n value = buffer + value.toString(encoding)\n startPosition = 0\n buffer = ''\n if (start) {\n // To do: `markdown-rs` actually parses BOMs (byte order mark).\n if (value.charCodeAt(0) === 65279) {\n startPosition++\n }\n start = undefined\n }\n while (startPosition < value.length) {\n search.lastIndex = startPosition\n match = search.exec(value)\n endPosition =\n match && match.index !== undefined ? 
match.index : value.length\n code = value.charCodeAt(endPosition)\n if (!match) {\n buffer = value.slice(startPosition)\n break\n }\n if (code === 10 && startPosition === endPosition && atCarriageReturn) {\n chunks.push(-3)\n atCarriageReturn = undefined\n } else {\n if (atCarriageReturn) {\n chunks.push(-5)\n atCarriageReturn = undefined\n }\n if (startPosition < endPosition) {\n chunks.push(value.slice(startPosition, endPosition))\n column += endPosition - startPosition\n }\n switch (code) {\n case 0: {\n chunks.push(65533)\n column++\n break\n }\n case 9: {\n next = Math.ceil(column / 4) * 4\n chunks.push(-2)\n while (column++ < next) chunks.push(-1)\n break\n }\n case 10: {\n chunks.push(-4)\n column = 1\n break\n }\n default: {\n atCarriageReturn = true\n column = 1\n }\n }\n }\n startPosition = endPosition + 1\n }\n if (end) {\n if (atCarriageReturn) chunks.push(-5)\n if (buffer) chunks.push(buffer)\n chunks.push(null)\n }\n return chunks\n }\n}\n"],"names":["search","atCarriageReturn","column","buffer","start","value","encoding","end","chunks","match","next","startPosition","endPosition","code","toString","charCodeAt","undefined","length","lastIndex","exec","index","slice","push","Math","ceil"],"mappings":"oEAeA,MAAMA,EAAS,iCAKR,WACL,IAKIC,EALAC,EAAS,EACTC,EAAS,GAETC,GAAQ,EAGZ,OAGA,SAAsBC,EAAOC,EAAUC,GAErC,MAAMC,EAAS,GAEf,IAAIC,EAEAC,EAEAC,EAEAC,EAEAC,EAGJR,EAAQF,EAASE,EAAMS,SAASR,GAChCK,EAAgB,EAChBR,EAAS,GACLC,IAE0B,QAAxBC,EAAMU,WAAW,IACnBJ,IAEFP,OAAQY,GAEV,KAAOL,EAAgBN,EAAMY,QAAQ,CAMnC,GALAjB,EAAOkB,UAAYP,EACnBF,EAAQT,EAAOmB,KAAKd,GACpBO,EACEH,QAAyBO,IAAhBP,EAAMW,MAAsBX,EAAMW,MAAQf,EAAMY,OAC3DJ,EAAOR,EAAMU,WAAWH,IACnBH,EAAO,CACVN,EAASE,EAAMgB,MAAMV,GACrB,KACD,CACD,GAAa,KAATE,GAAeF,IAAkBC,GAAeX,EAClDO,EAAOc,MAAM,GACbrB,OAAmBe,OAUnB,OARIf,IACFO,EAAOc,MAAM,GACbrB,OAAmBe,GAEjBL,EAAgBC,IAClBJ,EAAOc,KAAKjB,EAAMgB,MAAMV,EAAeC,IACvCV,GAAUU,EAAcD,GAElBE,GACN,KAAK,EACHL,EAAOc,KAAK,OACZpB,IACA,MAEF,KAAK,EAGH,IAFAQ,EAA+B,EAAxBa,KAAKC,KAAKtB,EAAS,GAC1BM,EAAOc,MAAM,GACNp
B,IAAWQ,GAAMF,EAAOc,MAAM,GACrC,MAEF,KAAK,GACHd,EAAOc,MAAM,GACbpB,EAAS,EACT,MAEF,QACED,GAAmB,EACnBC,EAAS,EAIfS,EAAgBC,EAAc,CAC/B,CACGL,IACEN,GAAkBO,EAAOc,MAAM,GAC/BnB,GAAQK,EAAOc,KAAKnB,GACxBK,EAAOc,KAAK,OAEd,OAAOd,CACR,CACH"}
@@ -1,2 +0,0 @@
1
- "use strict";Object.defineProperty(exports,"__esModule",{value:!0});var e=require("../../micromark-util-chunked/index.js"),t=require("../../micromark-util-classify-character/index.js"),n=require("../../micromark-util-resolve-all/index.js");const s={name:"attention",tokenize:function(e,n){const s=this.parser.constructs.attentionMarkers.null,r=this.previous,i=t.classifyCharacter(r);let a;return function(t){return a=t,e.enter("attentionSequence"),o(t)};function o(c){if(c===a)return e.consume(c),o;const f=e.exit("attentionSequence"),u=t.classifyCharacter(c),l=!u||2===u&&i||s.includes(c),d=!i||2===i&&u||s.includes(r);return f._open=Boolean(42===a?l:l&&(i||!d)),f._close=Boolean(42===a?d:d&&(u||!l)),n(c)}},resolveAll:function(t,s){let i,a,o,c,f,u,l,d,p=-1;for(;++p<t.length;)if("enter"===t[p][0]&&"attentionSequence"===t[p][1].type&&t[p][1]._close)for(i=p;i--;)if("exit"===t[i][0]&&"attentionSequence"===t[i][1].type&&t[i][1]._open&&s.sliceSerialize(t[i][1]).charCodeAt(0)===s.sliceSerialize(t[p][1]).charCodeAt(0)){if((t[i][1]._close||t[p][1]._open)&&(t[p][1].end.offset-t[p][1].start.offset)%3&&!((t[i][1].end.offset-t[i][1].start.offset+t[p][1].end.offset-t[p][1].start.offset)%3))continue;u=t[i][1].end.offset-t[i][1].start.offset>1&&t[p][1].end.offset-t[p][1].start.offset>1?2:1;const 
h=Object.assign({},t[i][1].end),g=Object.assign({},t[p][1].start);r(h,-u),r(g,u),c={type:u>1?"strongSequence":"emphasisSequence",start:h,end:Object.assign({},t[i][1].end)},f={type:u>1?"strongSequence":"emphasisSequence",start:Object.assign({},t[p][1].start),end:g},o={type:u>1?"strongText":"emphasisText",start:Object.assign({},t[i][1].end),end:Object.assign({},t[p][1].start)},a={type:u>1?"strong":"emphasis",start:Object.assign({},c.start),end:Object.assign({},f.end)},t[i][1].end=Object.assign({},c.start),t[p][1].start=Object.assign({},f.end),l=[],t[i][1].end.offset-t[i][1].start.offset&&(l=e.push(l,[["enter",t[i][1],s],["exit",t[i][1],s]])),l=e.push(l,[["enter",a,s],["enter",c,s],["exit",c,s],["enter",o,s]]),l=e.push(l,n.resolveAll(s.parser.constructs.insideSpan.null,t.slice(i+1,p),s)),l=e.push(l,[["exit",o,s],["enter",f,s],["exit",f,s],["exit",a,s]]),t[p][1].end.offset-t[p][1].start.offset?(d=2,l=e.push(l,[["enter",t[p][1],s],["exit",t[p][1],s]])):d=0,e.splice(t,i-1,p-i+3,l),p=i+l.length-d-2;break}p=-1;for(;++p<t.length;)"attentionSequence"===t[p][1].type&&(t[p][1].type="data");return t}};function r(e,t){e.column+=t,e.offset+=t,e._bufferIndex+=t}exports.attention=s;
2
- //# sourceMappingURL=attention.js.map
@@ -1 +0,0 @@
1
- {"version":3,"file":"attention.js","sources":["../../../../../../../node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/attention.js"],"sourcesContent":["/**\n * @typedef {import('micromark-util-types').Code} Code\n * @typedef {import('micromark-util-types').Construct} Construct\n * @typedef {import('micromark-util-types').Event} Event\n * @typedef {import('micromark-util-types').Point} Point\n * @typedef {import('micromark-util-types').Resolver} Resolver\n * @typedef {import('micromark-util-types').State} State\n * @typedef {import('micromark-util-types').Token} Token\n * @typedef {import('micromark-util-types').TokenizeContext} TokenizeContext\n * @typedef {import('micromark-util-types').Tokenizer} Tokenizer\n */\n\nimport {push, splice} from 'micromark-util-chunked'\nimport {classifyCharacter} from 'micromark-util-classify-character'\nimport {resolveAll} from 'micromark-util-resolve-all'\n/** @type {Construct} */\nexport const attention = {\n name: 'attention',\n tokenize: tokenizeAttention,\n resolveAll: resolveAllAttention\n}\n\n/**\n * Take all events and resolve attention to emphasis or strong.\n *\n * @type {Resolver}\n */\nfunction resolveAllAttention(events, context) {\n let index = -1\n /** @type {number} */\n let open\n /** @type {Token} */\n let group\n /** @type {Token} */\n let text\n /** @type {Token} */\n let openingSequence\n /** @type {Token} */\n let closingSequence\n /** @type {number} */\n let use\n /** @type {Array<Event>} */\n let nextEvents\n /** @type {number} */\n let offset\n\n // Walk through all events.\n //\n // Note: performance of this is fine on an mb of normal markdown, but it’s\n // a bottleneck for malicious stuff.\n while (++index < events.length) {\n // Find a token that can close.\n if (\n events[index][0] === 'enter' &&\n events[index][1].type === 'attentionSequence' &&\n events[index][1]._close\n ) {\n open = index\n\n // Now walk back to find an opener.\n while (open--) {\n // Find a token that can 
open the closer.\n if (\n events[open][0] === 'exit' &&\n events[open][1].type === 'attentionSequence' &&\n events[open][1]._open &&\n // If the markers are the same:\n context.sliceSerialize(events[open][1]).charCodeAt(0) ===\n context.sliceSerialize(events[index][1]).charCodeAt(0)\n ) {\n // If the opening can close or the closing can open,\n // and the close size *is not* a multiple of three,\n // but the sum of the opening and closing size *is* multiple of three,\n // then don’t match.\n if (\n (events[open][1]._close || events[index][1]._open) &&\n (events[index][1].end.offset - events[index][1].start.offset) % 3 &&\n !(\n (events[open][1].end.offset -\n events[open][1].start.offset +\n events[index][1].end.offset -\n events[index][1].start.offset) %\n 3\n )\n ) {\n continue\n }\n\n // Number of markers to use from the sequence.\n use =\n events[open][1].end.offset - events[open][1].start.offset > 1 &&\n events[index][1].end.offset - events[index][1].start.offset > 1\n ? 2\n : 1\n const start = Object.assign({}, events[open][1].end)\n const end = Object.assign({}, events[index][1].start)\n movePoint(start, -use)\n movePoint(end, use)\n openingSequence = {\n type: use > 1 ? 'strongSequence' : 'emphasisSequence',\n start,\n end: Object.assign({}, events[open][1].end)\n }\n closingSequence = {\n type: use > 1 ? 'strongSequence' : 'emphasisSequence',\n start: Object.assign({}, events[index][1].start),\n end\n }\n text = {\n type: use > 1 ? 'strongText' : 'emphasisText',\n start: Object.assign({}, events[open][1].end),\n end: Object.assign({}, events[index][1].start)\n }\n group = {\n type: use > 1 ? 
'strong' : 'emphasis',\n start: Object.assign({}, openingSequence.start),\n end: Object.assign({}, closingSequence.end)\n }\n events[open][1].end = Object.assign({}, openingSequence.start)\n events[index][1].start = Object.assign({}, closingSequence.end)\n nextEvents = []\n\n // If there are more markers in the opening, add them before.\n if (events[open][1].end.offset - events[open][1].start.offset) {\n nextEvents = push(nextEvents, [\n ['enter', events[open][1], context],\n ['exit', events[open][1], context]\n ])\n }\n\n // Opening.\n nextEvents = push(nextEvents, [\n ['enter', group, context],\n ['enter', openingSequence, context],\n ['exit', openingSequence, context],\n ['enter', text, context]\n ])\n\n // Always populated by defaults.\n\n // Between.\n nextEvents = push(\n nextEvents,\n resolveAll(\n context.parser.constructs.insideSpan.null,\n events.slice(open + 1, index),\n context\n )\n )\n\n // Closing.\n nextEvents = push(nextEvents, [\n ['exit', text, context],\n ['enter', closingSequence, context],\n ['exit', closingSequence, context],\n ['exit', group, context]\n ])\n\n // If there are more markers in the closing, add them after.\n if (events[index][1].end.offset - events[index][1].start.offset) {\n offset = 2\n nextEvents = push(nextEvents, [\n ['enter', events[index][1], context],\n ['exit', events[index][1], context]\n ])\n } else {\n offset = 0\n }\n splice(events, open - 1, index - open + 3, nextEvents)\n index = open + nextEvents.length - offset - 2\n break\n }\n }\n }\n }\n\n // Remove remaining sequences.\n index = -1\n while (++index < events.length) {\n if (events[index][1].type === 'attentionSequence') {\n events[index][1].type = 'data'\n }\n }\n return events\n}\n\n/**\n * @this {TokenizeContext}\n * @type {Tokenizer}\n */\nfunction tokenizeAttention(effects, ok) {\n const attentionMarkers = this.parser.constructs.attentionMarkers.null\n const previous = this.previous\n const before = classifyCharacter(previous)\n\n /** @type 
{NonNullable<Code>} */\n let marker\n return start\n\n /**\n * Before a sequence.\n *\n * ```markdown\n * > | **\n * ^\n * ```\n *\n * @type {State}\n */\n function start(code) {\n marker = code\n effects.enter('attentionSequence')\n return inside(code)\n }\n\n /**\n * In a sequence.\n *\n * ```markdown\n * > | **\n * ^^\n * ```\n *\n * @type {State}\n */\n function inside(code) {\n if (code === marker) {\n effects.consume(code)\n return inside\n }\n const token = effects.exit('attentionSequence')\n\n // To do: next major: move this to resolver, just like `markdown-rs`.\n const after = classifyCharacter(code)\n\n // Always populated by defaults.\n\n const open =\n !after || (after === 2 && before) || attentionMarkers.includes(code)\n const close =\n !before || (before === 2 && after) || attentionMarkers.includes(previous)\n token._open = Boolean(marker === 42 ? open : open && (before || !close))\n token._close = Boolean(marker === 42 ? close : close && (after || !open))\n return ok(code)\n }\n}\n\n/**\n * Move a point a bit.\n *\n * Note: `move` only works inside lines! 
It’s not possible to move past other\n * chunks (replacement characters, tabs, or line endings).\n *\n * @param {Point} point\n * @param {number} offset\n * @returns {void}\n */\nfunction movePoint(point, offset) {\n point.column += offset\n point.offset += offset\n point._bufferIndex += offset\n}\n"],"names":["attention","name","tokenize","effects","ok","attentionMarkers","this","parser","constructs","null","previous","before","classifyCharacter","marker","code","enter","inside","consume","token","exit","after","open","includes","close","_open","Boolean","_close","resolveAll","events","context","group","text","openingSequence","closingSequence","use","nextEvents","offset","index","length","type","sliceSerialize","charCodeAt","end","start","Object","assign","movePoint","push","insideSpan","slice","splice","point","column","_bufferIndex"],"mappings":"gPAgBY,MAACA,EAAY,CACvBC,KAAM,YACNC,SA4KF,SAA2BC,EAASC,GAClC,MAAMC,EAAmBC,KAAKC,OAAOC,WAAWH,iBAAiBI,KAC3DC,EAAWJ,KAAKI,SAChBC,EAASC,EAAiBA,kBAACF,GAGjC,IAAIG,EACJ,OAYA,SAAeC,GAGb,OAFAD,EAASC,EACTX,EAAQY,MAAM,qBACPC,EAAOF,EACf,EAYD,SAASE,EAAOF,GACd,GAAIA,IAASD,EAEX,OADAV,EAAQc,QAAQH,GACTE,EAET,MAAME,EAAQf,EAAQgB,KAAK,qBAGrBC,EAAQR,EAAiBA,kBAACE,GAI1BO,GACHD,GAAoB,IAAVA,GAAeT,GAAWN,EAAiBiB,SAASR,GAC3DS,GACHZ,GAAsB,IAAXA,GAAgBS,GAAUf,EAAiBiB,SAASZ,GAGlE,OAFAQ,EAAMM,MAAQC,QAAmB,KAAXZ,EAAgBQ,EAAOA,IAASV,IAAWY,IACjEL,EAAMQ,OAASD,QAAmB,KAAXZ,EAAgBU,EAAQA,IAAUH,IAAUC,IAC5DjB,EAAGU,EACX,CACH,EAlOEa,WAQF,SAA6BC,EAAQC,GACnC,IAEIR,EAEAS,EAEAC,EAEAC,EAEAC,EAEAC,EAEAC,EAEAC,EAhBAC,GAAS,EAsBb,OAASA,EAAQT,EAAOU,QAEtB,GACuB,UAArBV,EAAOS,GAAO,IACY,sBAA1BT,EAAOS,GAAO,GAAGE,MACjBX,EAAOS,GAAO,GAAGX,OAKjB,IAHAL,EAAOgB,EAGAhB,KAEL,GACsB,SAApBO,EAAOP,GAAM,IACY,sBAAzBO,EAAOP,GAAM,GAAGkB,MAChBX,EAAOP,GAAM,GAAGG,OAEhBK,EAAQW,eAAeZ,EAAOP,GAAM,IAAIoB,WAAW,KACjDZ,EAAQW,eAAeZ,EAAOS,GAAO,IAAII,WAAW,GACtD,CAKA,IACGb,EAAOP,GAAM,GAAGK,QAAUE,EAAOS,GAAO,GAAGb,SAC3CI,EAAOS,GAAO,GAAGK,IAAIN,OAASR,EAAOS,GAAO,GAAGM,MAAMP,QAAU,MAE7DR,EAAOP,GAAM,GAAGqB,IAAIN,OACn
BR,EAAOP,GAAM,GAAGsB,MAAMP,OACtBR,EAAOS,GAAO,GAAGK,IAAIN,OACrBR,EAAOS,GAAO,GAAGM,MAAMP,QACzB,GAGF,SAIFF,EACEN,EAAOP,GAAM,GAAGqB,IAAIN,OAASR,EAAOP,GAAM,GAAGsB,MAAMP,OAAS,GAC5DR,EAAOS,GAAO,GAAGK,IAAIN,OAASR,EAAOS,GAAO,GAAGM,MAAMP,OAAS,EAC1D,EACA,EACN,MAAMO,EAAQC,OAAOC,OAAO,CAAE,EAAEjB,EAAOP,GAAM,GAAGqB,KAC1CA,EAAME,OAAOC,OAAO,CAAE,EAAEjB,EAAOS,GAAO,GAAGM,OAC/CG,EAAUH,GAAQT,GAClBY,EAAUJ,EAAKR,GACfF,EAAkB,CAChBO,KAAML,EAAM,EAAI,iBAAmB,mBACnCS,QACAD,IAAKE,OAAOC,OAAO,CAAE,EAAEjB,EAAOP,GAAM,GAAGqB,MAEzCT,EAAkB,CAChBM,KAAML,EAAM,EAAI,iBAAmB,mBACnCS,MAAOC,OAAOC,OAAO,CAAE,EAAEjB,EAAOS,GAAO,GAAGM,OAC1CD,OAEFX,EAAO,CACLQ,KAAML,EAAM,EAAI,aAAe,eAC/BS,MAAOC,OAAOC,OAAO,CAAE,EAAEjB,EAAOP,GAAM,GAAGqB,KACzCA,IAAKE,OAAOC,OAAO,CAAE,EAAEjB,EAAOS,GAAO,GAAGM,QAE1Cb,EAAQ,CACNS,KAAML,EAAM,EAAI,SAAW,WAC3BS,MAAOC,OAAOC,OAAO,CAAA,EAAIb,EAAgBW,OACzCD,IAAKE,OAAOC,OAAO,CAAA,EAAIZ,EAAgBS,MAEzCd,EAAOP,GAAM,GAAGqB,IAAME,OAAOC,OAAO,CAAA,EAAIb,EAAgBW,OACxDf,EAAOS,GAAO,GAAGM,MAAQC,OAAOC,OAAO,CAAA,EAAIZ,EAAgBS,KAC3DP,EAAa,GAGTP,EAAOP,GAAM,GAAGqB,IAAIN,OAASR,EAAOP,GAAM,GAAGsB,MAAMP,SACrDD,EAAaY,EAAIA,KAACZ,EAAY,CAC5B,CAAC,QAASP,EAAOP,GAAM,GAAIQ,GAC3B,CAAC,OAAQD,EAAOP,GAAM,GAAIQ,MAK9BM,EAAaY,EAAIA,KAACZ,EAAY,CAC5B,CAAC,QAASL,EAAOD,GACjB,CAAC,QAASG,EAAiBH,GAC3B,CAAC,OAAQG,EAAiBH,GAC1B,CAAC,QAASE,EAAMF,KAMlBM,EAAaY,EAAIA,KACfZ,EACAR,EAAUA,WACRE,EAAQtB,OAAOC,WAAWwC,WAAWvC,KACrCmB,EAAOqB,MAAM5B,EAAO,EAAGgB,GACvBR,IAKJM,EAAaY,EAAIA,KAACZ,EAAY,CAC5B,CAAC,OAAQJ,EAAMF,GACf,CAAC,QAASI,EAAiBJ,GAC3B,CAAC,OAAQI,EAAiBJ,GAC1B,CAAC,OAAQC,EAAOD,KAIdD,EAAOS,GAAO,GAAGK,IAAIN,OAASR,EAAOS,GAAO,GAAGM,MAAMP,QACvDA,EAAS,EACTD,EAAaY,EAAIA,KAACZ,EAAY,CAC5B,CAAC,QAASP,EAAOS,GAAO,GAAIR,GAC5B,CAAC,OAAQD,EAAOS,GAAO,GAAIR,MAG7BO,EAAS,EAEXc,EAAMA,OAACtB,EAAQP,EAAO,EAAGgB,EAAQhB,EAAO,EAAGc,GAC3CE,EAAQhB,EAAOc,EAAWG,OAASF,EAAS,EAC5C,KACD,CAMPC,GAAS,EACT,OAASA,EAAQT,EAAOU,QACQ,sBAA1BV,EAAOS,GAAO,GAAGE,OACnBX,EAAOS,GAAO,GAAGE,KAAO,QAG5B,OAAOX,CACT,GAyEA,SAASkB,EAAUK,EAAOf,GACxBe,EAAMC,QAAUhB,EAChBe,EAAMf,QAAUA,EAChBe,EAAME,cAAgBjB,CACxB"}
@@ -1,2 +0,0 @@
1
- "use strict";Object.defineProperty(exports,"__esModule",{value:!0});var n=require("../../micromark-util-character/index.js");const e={name:"autolink",tokenize:function(e,t,i){let r=0;return function(n){return e.enter("autolink"),e.enter("autolinkMarker"),e.consume(n),e.exit("autolinkMarker"),e.enter("autolinkProtocol"),u};function u(t){return n.asciiAlpha(t)?(e.consume(t),o):l(t)}function o(e){return 43===e||45===e||46===e||n.asciiAlphanumeric(e)?(r=1,c(e)):l(e)}function c(t){return 58===t?(e.consume(t),r=0,a):(43===t||45===t||46===t||n.asciiAlphanumeric(t))&&r++<32?(e.consume(t),c):(r=0,l(t))}function a(r){return 62===r?(e.exit("autolinkProtocol"),e.enter("autolinkMarker"),e.consume(r),e.exit("autolinkMarker"),e.exit("autolink"),t):null===r||32===r||60===r||n.asciiControl(r)?i(r):(e.consume(r),a)}function l(t){return 64===t?(e.consume(t),s):n.asciiAtext(t)?(e.consume(t),l):i(t)}function s(e){return n.asciiAlphanumeric(e)?k(e):i(e)}function k(n){return 46===n?(e.consume(n),r=0,s):62===n?(e.exit("autolinkProtocol").type="autolinkEmail",e.enter("autolinkMarker"),e.consume(n),e.exit("autolinkMarker"),e.exit("autolink"),t):m(n)}function m(t){if((45===t||n.asciiAlphanumeric(t))&&r++<63){const n=45===t?m:k;return e.consume(t),n}return i(t)}}};exports.autolink=e;
2
- //# sourceMappingURL=autolink.js.map
@@ -1 +0,0 @@
1
- {"version":3,"file":"autolink.js","sources":["../../../../../../../node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/autolink.js"],"sourcesContent":["/**\n * @typedef {import('micromark-util-types').Construct} Construct\n * @typedef {import('micromark-util-types').State} State\n * @typedef {import('micromark-util-types').TokenizeContext} TokenizeContext\n * @typedef {import('micromark-util-types').Tokenizer} Tokenizer\n */\n\nimport {\n asciiAlpha,\n asciiAlphanumeric,\n asciiAtext,\n asciiControl\n} from 'micromark-util-character'\n/** @type {Construct} */\nexport const autolink = {\n name: 'autolink',\n tokenize: tokenizeAutolink\n}\n\n/**\n * @this {TokenizeContext}\n * @type {Tokenizer}\n */\nfunction tokenizeAutolink(effects, ok, nok) {\n let size = 0\n return start\n\n /**\n * Start of an autolink.\n *\n * ```markdown\n * > | a<https://example.com>b\n * ^\n * > | a<user@example.com>b\n * ^\n * ```\n *\n * @type {State}\n */\n function start(code) {\n effects.enter('autolink')\n effects.enter('autolinkMarker')\n effects.consume(code)\n effects.exit('autolinkMarker')\n effects.enter('autolinkProtocol')\n return open\n }\n\n /**\n * After `<`, at protocol or atext.\n *\n * ```markdown\n * > | a<https://example.com>b\n * ^\n * > | a<user@example.com>b\n * ^\n * ```\n *\n * @type {State}\n */\n function open(code) {\n if (asciiAlpha(code)) {\n effects.consume(code)\n return schemeOrEmailAtext\n }\n return emailAtext(code)\n }\n\n /**\n * At second byte of protocol or atext.\n *\n * ```markdown\n * > | a<https://example.com>b\n * ^\n * > | a<user@example.com>b\n * ^\n * ```\n *\n * @type {State}\n */\n function schemeOrEmailAtext(code) {\n // ASCII alphanumeric and `+`, `-`, and `.`.\n if (code === 43 || code === 45 || code === 46 || asciiAlphanumeric(code)) {\n // Count the previous alphabetical from `open` too.\n size = 1\n return schemeInsideOrEmailAtext(code)\n }\n return emailAtext(code)\n }\n\n /**\n * In ambiguous protocol or atext.\n 
*\n * ```markdown\n * > | a<https://example.com>b\n * ^\n * > | a<user@example.com>b\n * ^\n * ```\n *\n * @type {State}\n */\n function schemeInsideOrEmailAtext(code) {\n if (code === 58) {\n effects.consume(code)\n size = 0\n return urlInside\n }\n\n // ASCII alphanumeric and `+`, `-`, and `.`.\n if (\n (code === 43 || code === 45 || code === 46 || asciiAlphanumeric(code)) &&\n size++ < 32\n ) {\n effects.consume(code)\n return schemeInsideOrEmailAtext\n }\n size = 0\n return emailAtext(code)\n }\n\n /**\n * After protocol, in URL.\n *\n * ```markdown\n * > | a<https://example.com>b\n * ^\n * ```\n *\n * @type {State}\n */\n function urlInside(code) {\n if (code === 62) {\n effects.exit('autolinkProtocol')\n effects.enter('autolinkMarker')\n effects.consume(code)\n effects.exit('autolinkMarker')\n effects.exit('autolink')\n return ok\n }\n\n // ASCII control, space, or `<`.\n if (code === null || code === 32 || code === 60 || asciiControl(code)) {\n return nok(code)\n }\n effects.consume(code)\n return urlInside\n }\n\n /**\n * In email atext.\n *\n * ```markdown\n * > | a<user.name@example.com>b\n * ^\n * ```\n *\n * @type {State}\n */\n function emailAtext(code) {\n if (code === 64) {\n effects.consume(code)\n return emailAtSignOrDot\n }\n if (asciiAtext(code)) {\n effects.consume(code)\n return emailAtext\n }\n return nok(code)\n }\n\n /**\n * In label, after at-sign or dot.\n *\n * ```markdown\n * > | a<user.name@example.com>b\n * ^ ^\n * ```\n *\n * @type {State}\n */\n function emailAtSignOrDot(code) {\n return asciiAlphanumeric(code) ? 
emailLabel(code) : nok(code)\n }\n\n /**\n * In label, where `.` and `>` are allowed.\n *\n * ```markdown\n * > | a<user.name@example.com>b\n * ^\n * ```\n *\n * @type {State}\n */\n function emailLabel(code) {\n if (code === 46) {\n effects.consume(code)\n size = 0\n return emailAtSignOrDot\n }\n if (code === 62) {\n // Exit, then change the token type.\n effects.exit('autolinkProtocol').type = 'autolinkEmail'\n effects.enter('autolinkMarker')\n effects.consume(code)\n effects.exit('autolinkMarker')\n effects.exit('autolink')\n return ok\n }\n return emailValue(code)\n }\n\n /**\n * In label, where `.` and `>` are *not* allowed.\n *\n * Though, this is also used in `emailLabel` to parse other values.\n *\n * ```markdown\n * > | a<user.name@ex-ample.com>b\n * ^\n * ```\n *\n * @type {State}\n */\n function emailValue(code) {\n // ASCII alphanumeric or `-`.\n if ((code === 45 || asciiAlphanumeric(code)) && size++ < 63) {\n const next = code === 45 ? emailValue : emailLabel\n effects.consume(code)\n return next\n }\n return nok(code)\n 
}\n}\n"],"names":["autolink","name","tokenize","effects","ok","nok","size","code","enter","consume","exit","open","asciiAlpha","schemeOrEmailAtext","emailAtext","asciiAlphanumeric","schemeInsideOrEmailAtext","urlInside","asciiControl","emailAtSignOrDot","asciiAtext","emailLabel","type","emailValue","next"],"mappings":"6HAcY,MAACA,EAAW,CACtBC,KAAM,WACNC,SAOF,SAA0BC,EAASC,EAAIC,GACrC,IAAIC,EAAO,EACX,OAcA,SAAeC,GAMb,OALAJ,EAAQK,MAAM,YACdL,EAAQK,MAAM,kBACdL,EAAQM,QAAQF,GAChBJ,EAAQO,KAAK,kBACbP,EAAQK,MAAM,oBACPG,CACR,EAcD,SAASA,EAAKJ,GACZ,OAAIK,EAAAA,WAAWL,IACbJ,EAAQM,QAAQF,GACTM,GAEFC,EAAWP,EACnB,CAcD,SAASM,EAAmBN,GAE1B,OAAa,KAATA,GAAwB,KAATA,GAAwB,KAATA,GAAeQ,oBAAkBR,IAEjED,EAAO,EACAU,EAAyBT,IAE3BO,EAAWP,EACnB,CAcD,SAASS,EAAyBT,GAChC,OAAa,KAATA,GACFJ,EAAQM,QAAQF,GAChBD,EAAO,EACAW,IAKG,KAATV,GAAwB,KAATA,GAAwB,KAATA,GAAeQ,EAAiBA,kBAACR,KAChED,IAAS,IAETH,EAAQM,QAAQF,GACTS,IAETV,EAAO,EACAQ,EAAWP,GACnB,CAYD,SAASU,EAAUV,GACjB,OAAa,KAATA,GACFJ,EAAQO,KAAK,oBACbP,EAAQK,MAAM,kBACdL,EAAQM,QAAQF,GAChBJ,EAAQO,KAAK,kBACbP,EAAQO,KAAK,YACNN,GAII,OAATG,GAA0B,KAATA,GAAwB,KAATA,GAAeW,eAAaX,GACvDF,EAAIE,IAEbJ,EAAQM,QAAQF,GACTU,EACR,CAYD,SAASH,EAAWP,GAClB,OAAa,KAATA,GACFJ,EAAQM,QAAQF,GACTY,GAELC,EAAAA,WAAWb,IACbJ,EAAQM,QAAQF,GACTO,GAEFT,EAAIE,EACZ,CAYD,SAASY,EAAiBZ,GACxB,OAAOQ,EAAAA,kBAAkBR,GAAQc,EAAWd,GAAQF,EAAIE,EACzD,CAYD,SAASc,EAAWd,GAClB,OAAa,KAATA,GACFJ,EAAQM,QAAQF,GAChBD,EAAO,EACAa,GAEI,KAATZ,GAEFJ,EAAQO,KAAK,oBAAoBY,KAAO,gBACxCnB,EAAQK,MAAM,kBACdL,EAAQM,QAAQF,GAChBJ,EAAQO,KAAK,kBACbP,EAAQO,KAAK,YACNN,GAEFmB,EAAWhB,EACnB,CAcD,SAASgB,EAAWhB,GAElB,IAAc,KAATA,GAAeQ,EAAAA,kBAAkBR,KAAUD,IAAS,GAAI,CAC3D,MAAMkB,EAAgB,KAATjB,EAAcgB,EAAaF,EAExC,OADAlB,EAAQM,QAAQF,GACTiB,CACR,CACD,OAAOnB,EAAIE,EACZ,CACH"}
@@ -1,2 +0,0 @@
1
- "use strict";Object.defineProperty(exports,"__esModule",{value:!0});var e=require("../../micromark-factory-space/index.js"),r=require("../../micromark-util-character/index.js");const n={tokenize:function(n,i,t){return function(i){return r.markdownSpace(i)?e.factorySpace(n,a,"linePrefix")(i):a(i)};function a(e){return null===e||r.markdownLineEnding(e)?i(e):t(e)}},partial:!0};exports.blankLine=n;
2
- //# sourceMappingURL=blank-line.js.map
@@ -1 +0,0 @@
1
- {"version":3,"file":"blank-line.js","sources":["../../../../../../../node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/blank-line.js"],"sourcesContent":["/**\n * @typedef {import('micromark-util-types').Construct} Construct\n * @typedef {import('micromark-util-types').State} State\n * @typedef {import('micromark-util-types').TokenizeContext} TokenizeContext\n * @typedef {import('micromark-util-types').Tokenizer} Tokenizer\n */\n\nimport {factorySpace} from 'micromark-factory-space'\nimport {markdownLineEnding, markdownSpace} from 'micromark-util-character'\n/** @type {Construct} */\nexport const blankLine = {\n tokenize: tokenizeBlankLine,\n partial: true\n}\n\n/**\n * @this {TokenizeContext}\n * @type {Tokenizer}\n */\nfunction tokenizeBlankLine(effects, ok, nok) {\n return start\n\n /**\n * Start of blank line.\n *\n * > 👉 **Note**: `␠` represents a space character.\n *\n * ```markdown\n * > | ␠␠␊\n * ^\n * > | ␊\n * ^\n * ```\n *\n * @type {State}\n */\n function start(code) {\n return markdownSpace(code)\n ? factorySpace(effects, after, 'linePrefix')(code)\n : after(code)\n }\n\n /**\n * At eof/eol, after optional whitespace.\n *\n * > 👉 **Note**: `␠` represents a space character.\n *\n * ```markdown\n * > | ␠␠␊\n * ^\n * > | ␊\n * ^\n * ```\n *\n * @type {State}\n */\n function after(code) {\n return code === null || markdownLineEnding(code) ? ok(code) : nok(code)\n }\n}\n"],"names":["blankLine","tokenize","effects","ok","nok","code","markdownSpace","factorySpace","after","markdownLineEnding","partial"],"mappings":"iLAUY,MAACA,EAAY,CACvBC,SAQF,SAA2BC,EAASC,EAAIC,GACtC,OAgBA,SAAeC,GACb,OAAOC,EAAAA,cAAcD,GACjBE,EAAAA,aAAaL,EAASM,EAAO,aAA7BD,CAA2CF,GAC3CG,EAAMH,EACX,EAgBD,SAASG,EAAMH,GACb,OAAgB,OAATA,GAAiBI,EAAAA,mBAAmBJ,GAAQF,EAAGE,GAAQD,EAAIC,EACnE,CACH,EA/CEK,SAAS"}
@@ -1,2 +0,0 @@
1
- "use strict";Object.defineProperty(exports,"__esModule",{value:!0});var e=require("../../micromark-factory-space/index.js"),t=require("../../micromark-util-character/index.js");const o={name:"blockQuote",tokenize:function(e,o,n){const r=this;return function(t){if(62===t){const o=r.containerState;return o.open||(e.enter("blockQuote",{_container:!0}),o.open=!0),e.enter("blockQuotePrefix"),e.enter("blockQuoteMarker"),e.consume(t),e.exit("blockQuoteMarker"),c}return n(t)};function c(n){return t.markdownSpace(n)?(e.enter("blockQuotePrefixWhitespace"),e.consume(n),e.exit("blockQuotePrefixWhitespace"),e.exit("blockQuotePrefix"),o):(e.exit("blockQuotePrefix"),o(n))}},continuation:{tokenize:function(n,r,c){const i=this;return function(o){if(t.markdownSpace(o))return e.factorySpace(n,u,"linePrefix",i.parser.constructs.disable.null.includes("codeIndented")?void 0:4)(o);return u(o)};function u(e){return n.attempt(o,r,c)(e)}}},exit:function(e){e.exit("blockQuote")}};exports.blockQuote=o;
2
- //# sourceMappingURL=block-quote.js.map
@@ -1 +0,0 @@
1
- {"version":3,"file":"block-quote.js","sources":["../../../../../../../node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/block-quote.js"],"sourcesContent":["/**\n * @typedef {import('micromark-util-types').Construct} Construct\n * @typedef {import('micromark-util-types').Exiter} Exiter\n * @typedef {import('micromark-util-types').State} State\n * @typedef {import('micromark-util-types').TokenizeContext} TokenizeContext\n * @typedef {import('micromark-util-types').Tokenizer} Tokenizer\n */\n\nimport {factorySpace} from 'micromark-factory-space'\nimport {markdownSpace} from 'micromark-util-character'\n/** @type {Construct} */\nexport const blockQuote = {\n name: 'blockQuote',\n tokenize: tokenizeBlockQuoteStart,\n continuation: {\n tokenize: tokenizeBlockQuoteContinuation\n },\n exit\n}\n\n/**\n * @this {TokenizeContext}\n * @type {Tokenizer}\n */\nfunction tokenizeBlockQuoteStart(effects, ok, nok) {\n const self = this\n return start\n\n /**\n * Start of block quote.\n *\n * ```markdown\n * > | > a\n * ^\n * ```\n *\n * @type {State}\n */\n function start(code) {\n if (code === 62) {\n const state = self.containerState\n if (!state.open) {\n effects.enter('blockQuote', {\n _container: true\n })\n state.open = true\n }\n effects.enter('blockQuotePrefix')\n effects.enter('blockQuoteMarker')\n effects.consume(code)\n effects.exit('blockQuoteMarker')\n return after\n }\n return nok(code)\n }\n\n /**\n * After `>`, before optional whitespace.\n *\n * ```markdown\n * > | > a\n * ^\n * ```\n *\n * @type {State}\n */\n function after(code) {\n if (markdownSpace(code)) {\n effects.enter('blockQuotePrefixWhitespace')\n effects.consume(code)\n effects.exit('blockQuotePrefixWhitespace')\n effects.exit('blockQuotePrefix')\n return ok\n }\n effects.exit('blockQuotePrefix')\n return ok(code)\n }\n}\n\n/**\n * Start of block quote continuation.\n *\n * ```markdown\n * | > a\n * > | > b\n * ^\n * ```\n *\n * @this {TokenizeContext}\n * @type {Tokenizer}\n 
*/\nfunction tokenizeBlockQuoteContinuation(effects, ok, nok) {\n const self = this\n return contStart\n\n /**\n * Start of block quote continuation.\n *\n * Also used to parse the first block quote opening.\n *\n * ```markdown\n * | > a\n * > | > b\n * ^\n * ```\n *\n * @type {State}\n */\n function contStart(code) {\n if (markdownSpace(code)) {\n // Always populated by defaults.\n\n return factorySpace(\n effects,\n contBefore,\n 'linePrefix',\n self.parser.constructs.disable.null.includes('codeIndented')\n ? undefined\n : 4\n )(code)\n }\n return contBefore(code)\n }\n\n /**\n * At `>`, after optional whitespace.\n *\n * Also used to parse the first block quote opening.\n *\n * ```markdown\n * | > a\n * > | > b\n * ^\n * ```\n *\n * @type {State}\n */\n function contBefore(code) {\n return effects.attempt(blockQuote, ok, nok)(code)\n }\n}\n\n/** @type {Exiter} */\nfunction exit(effects) {\n effects.exit('blockQuote')\n}\n"],"names":["blockQuote","name","tokenize","effects","ok","nok","self","this","code","state","containerState","open","enter","_container","consume","exit","after","markdownSpace","continuation","factorySpace","contBefore","parser","constructs","disable","null","includes","undefined","attempt"],"mappings":"iLAWY,MAACA,EAAa,CACxBC,KAAM,aACNC,SAWF,SAAiCC,EAASC,EAAIC,GAC5C,MAAMC,EAAOC,KACb,OAYA,SAAeC,GACb,GAAa,KAATA,EAAa,CACf,MAAMC,EAAQH,EAAKI,eAWnB,OAVKD,EAAME,OACTR,EAAQS,MAAM,aAAc,CAC1BC,YAAY,IAEdJ,EAAME,MAAO,GAEfR,EAAQS,MAAM,oBACdT,EAAQS,MAAM,oBACdT,EAAQW,QAAQN,GAChBL,EAAQY,KAAK,oBACNC,CACR,CACD,OAAOX,EAAIG,EACZ,EAYD,SAASQ,EAAMR,GACb,OAAIS,EAAAA,cAAcT,IAChBL,EAAQS,MAAM,8BACdT,EAAQW,QAAQN,GAChBL,EAAQY,KAAK,8BACbZ,EAAQY,KAAK,oBACNX,IAETD,EAAQY,KAAK,oBACNX,EAAGI,GACX,CACH,EA/DEU,aAAc,CACZhB,SA4EJ,SAAwCC,EAASC,EAAIC,GACnD,MAAMC,EAAOC,KACb,OAeA,SAAmBC,GACjB,GAAIS,EAAAA,cAAcT,GAGhB,OAAOW,EAAYA,aACjBhB,EACAiB,EACA,aACAd,EAAKe,OAAOC,WAAWC,QAAQC,KAAKC,SAAS,qBACzCC,EACA,EANCP,CAOLX,GAEJ,OAAOY,EAAWZ,EACnB,EAeD,SAASY,EAAWZ,GAClB,OAAOL,EAAQwB,QAAQ3B,EAAYI,EAAI
C,EAAhCF,CAAqCK,EAC7C,CACH,GA3HEO,KA8HF,SAAcZ,GACZA,EAAQY,KAAK,aACf"}
@@ -1,2 +0,0 @@
1
- "use strict";Object.defineProperty(exports,"__esModule",{value:!0});var e=require("../../micromark-util-character/index.js");const r={name:"characterEscape",tokenize:function(r,c,a){return function(e){return r.enter("characterEscape"),r.enter("escapeMarker"),r.consume(e),r.exit("escapeMarker"),t};function t(t){return e.asciiPunctuation(t)?(r.enter("characterEscapeValue"),r.consume(t),r.exit("characterEscapeValue"),r.exit("characterEscape"),c):a(t)}}};exports.characterEscape=r;
2
- //# sourceMappingURL=character-escape.js.map
@@ -1 +0,0 @@
1
- {"version":3,"file":"character-escape.js","sources":["../../../../../../../node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/character-escape.js"],"sourcesContent":["/**\n * @typedef {import('micromark-util-types').Construct} Construct\n * @typedef {import('micromark-util-types').State} State\n * @typedef {import('micromark-util-types').TokenizeContext} TokenizeContext\n * @typedef {import('micromark-util-types').Tokenizer} Tokenizer\n */\n\nimport {asciiPunctuation} from 'micromark-util-character'\n/** @type {Construct} */\nexport const characterEscape = {\n name: 'characterEscape',\n tokenize: tokenizeCharacterEscape\n}\n\n/**\n * @this {TokenizeContext}\n * @type {Tokenizer}\n */\nfunction tokenizeCharacterEscape(effects, ok, nok) {\n return start\n\n /**\n * Start of character escape.\n *\n * ```markdown\n * > | a\\*b\n * ^\n * ```\n *\n * @type {State}\n */\n function start(code) {\n effects.enter('characterEscape')\n effects.enter('escapeMarker')\n effects.consume(code)\n effects.exit('escapeMarker')\n return inside\n }\n\n /**\n * After `\\`, at punctuation.\n *\n * ```markdown\n * > | a\\*b\n * ^\n * ```\n *\n * @type {State}\n */\n function inside(code) {\n // ASCII punctuation.\n if (asciiPunctuation(code)) {\n effects.enter('characterEscapeValue')\n effects.consume(code)\n effects.exit('characterEscapeValue')\n effects.exit('characterEscape')\n return ok\n }\n return nok(code)\n }\n}\n"],"names":["characterEscape","name","tokenize","effects","ok","nok","code","enter","consume","exit","inside","asciiPunctuation"],"mappings":"6HASY,MAACA,EAAkB,CAC7BC,KAAM,kBACNC,SAOF,SAAiCC,EAASC,EAAIC,GAC5C,OAYA,SAAeC,GAKb,OAJAH,EAAQI,MAAM,mBACdJ,EAAQI,MAAM,gBACdJ,EAAQK,QAAQF,GAChBH,EAAQM,KAAK,gBACNC,CACR,EAYD,SAASA,EAAOJ,GAEd,OAAIK,EAAAA,iBAAiBL,IACnBH,EAAQI,MAAM,wBACdJ,EAAQK,QAAQF,GAChBH,EAAQM,KAAK,wBACbN,EAAQM,KAAK,mBACNL,GAEFC,EAAIC,EACZ,CACH"}
@@ -1,2 +0,0 @@
1
- "use strict";Object.defineProperty(exports,"__esModule",{value:!0});var e=require("../../../../decode-named-character-reference/index.dom.js"),r=require("../../micromark-util-character/index.js");const c={name:"characterReference",tokenize:function(c,a,t){const n=this;let i,u,f=0;return function(e){return c.enter("characterReference"),c.enter("characterReferenceMarker"),c.consume(e),c.exit("characterReferenceMarker"),h};function h(e){return 35===e?(c.enter("characterReferenceMarkerNumeric"),c.consume(e),c.exit("characterReferenceMarkerNumeric"),o):(c.enter("characterReferenceValue"),i=31,u=r.asciiAlphanumeric,s(e))}function o(e){return 88===e||120===e?(c.enter("characterReferenceMarkerHexadecimal"),c.consume(e),c.exit("characterReferenceMarkerHexadecimal"),c.enter("characterReferenceValue"),i=6,u=r.asciiHexDigit,s):(c.enter("characterReferenceValue"),i=7,u=r.asciiDigit,s(e))}function s(h){if(59===h&&f){const i=c.exit("characterReferenceValue");return u!==r.asciiAlphanumeric||e.decodeNamedCharacterReference(n.sliceSerialize(i))?(c.enter("characterReferenceMarker"),c.consume(h),c.exit("characterReferenceMarker"),c.exit("characterReference"),a):t(h)}return u(h)&&f++<i?(c.consume(h),s):t(h)}}};exports.characterReference=c;
2
- //# sourceMappingURL=character-reference.js.map
@@ -1 +0,0 @@
1
- {"version":3,"file":"character-reference.js","sources":["../../../../../../../node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/character-reference.js"],"sourcesContent":["/**\n * @typedef {import('micromark-util-types').Code} Code\n * @typedef {import('micromark-util-types').Construct} Construct\n * @typedef {import('micromark-util-types').State} State\n * @typedef {import('micromark-util-types').TokenizeContext} TokenizeContext\n * @typedef {import('micromark-util-types').Tokenizer} Tokenizer\n */\n\nimport {decodeNamedCharacterReference} from 'decode-named-character-reference'\nimport {\n asciiAlphanumeric,\n asciiDigit,\n asciiHexDigit\n} from 'micromark-util-character'\n/** @type {Construct} */\nexport const characterReference = {\n name: 'characterReference',\n tokenize: tokenizeCharacterReference\n}\n\n/**\n * @this {TokenizeContext}\n * @type {Tokenizer}\n */\nfunction tokenizeCharacterReference(effects, ok, nok) {\n const self = this\n let size = 0\n /** @type {number} */\n let max\n /** @type {(code: Code) => boolean} */\n let test\n return start\n\n /**\n * Start of character reference.\n *\n * ```markdown\n * > | a&amp;b\n * ^\n * > | a&#123;b\n * ^\n * > | a&#x9;b\n * ^\n * ```\n *\n * @type {State}\n */\n function start(code) {\n effects.enter('characterReference')\n effects.enter('characterReferenceMarker')\n effects.consume(code)\n effects.exit('characterReferenceMarker')\n return open\n }\n\n /**\n * After `&`, at `#` for numeric references or alphanumeric for named\n * references.\n *\n * ```markdown\n * > | a&amp;b\n * ^\n * > | a&#123;b\n * ^\n * > | a&#x9;b\n * ^\n * ```\n *\n * @type {State}\n */\n function open(code) {\n if (code === 35) {\n effects.enter('characterReferenceMarkerNumeric')\n effects.consume(code)\n effects.exit('characterReferenceMarkerNumeric')\n return numeric\n }\n effects.enter('characterReferenceValue')\n max = 31\n test = asciiAlphanumeric\n return value(code)\n }\n\n /**\n * After `#`, at `x` for 
hexadecimals or digit for decimals.\n *\n * ```markdown\n * > | a&#123;b\n * ^\n * > | a&#x9;b\n * ^\n * ```\n *\n * @type {State}\n */\n function numeric(code) {\n if (code === 88 || code === 120) {\n effects.enter('characterReferenceMarkerHexadecimal')\n effects.consume(code)\n effects.exit('characterReferenceMarkerHexadecimal')\n effects.enter('characterReferenceValue')\n max = 6\n test = asciiHexDigit\n return value\n }\n effects.enter('characterReferenceValue')\n max = 7\n test = asciiDigit\n return value(code)\n }\n\n /**\n * After markers (`&#x`, `&#`, or `&`), in value, before `;`.\n *\n * The character reference kind defines what and how many characters are\n * allowed.\n *\n * ```markdown\n * > | a&amp;b\n * ^^^\n * > | a&#123;b\n * ^^^\n * > | a&#x9;b\n * ^\n * ```\n *\n * @type {State}\n */\n function value(code) {\n if (code === 59 && size) {\n const token = effects.exit('characterReferenceValue')\n if (\n test === asciiAlphanumeric &&\n !decodeNamedCharacterReference(self.sliceSerialize(token))\n ) {\n return nok(code)\n }\n\n // To do: `markdown-rs` uses a different name:\n // `CharacterReferenceMarkerSemi`.\n effects.enter('characterReferenceMarker')\n effects.consume(code)\n effects.exit('characterReferenceMarker')\n effects.exit('characterReference')\n return ok\n }\n if (test(code) && size++ < max) {\n effects.consume(code)\n return value\n }\n return nok(code)\n 
}\n}\n"],"names":["characterReference","name","tokenize","effects","ok","nok","self","this","max","test","size","code","enter","consume","exit","open","numeric","asciiAlphanumeric","value","asciiHexDigit","asciiDigit","token","decodeNamedCharacterReference","sliceSerialize"],"mappings":"oMAeY,MAACA,EAAqB,CAChCC,KAAM,qBACNC,SAOF,SAAoCC,EAASC,EAAIC,GAC/C,MAAMC,EAAOC,KACb,IAEIC,EAEAC,EAJAC,EAAO,EAKX,OAgBA,SAAeC,GAKb,OAJAR,EAAQS,MAAM,sBACdT,EAAQS,MAAM,4BACdT,EAAQU,QAAQF,GAChBR,EAAQW,KAAK,4BACNC,CACR,EAiBD,SAASA,EAAKJ,GACZ,OAAa,KAATA,GACFR,EAAQS,MAAM,mCACdT,EAAQU,QAAQF,GAChBR,EAAQW,KAAK,mCACNE,IAETb,EAAQS,MAAM,2BACdJ,EAAM,GACNC,EAAOQ,EAAiBA,kBACjBC,EAAMP,GACd,CAcD,SAASK,EAAQL,GACf,OAAa,KAATA,GAAwB,MAATA,GACjBR,EAAQS,MAAM,uCACdT,EAAQU,QAAQF,GAChBR,EAAQW,KAAK,uCACbX,EAAQS,MAAM,2BACdJ,EAAM,EACNC,EAAOU,EAAaA,cACbD,IAETf,EAAQS,MAAM,2BACdJ,EAAM,EACNC,EAAOW,EAAUA,WACVF,EAAMP,GACd,CAmBD,SAASO,EAAMP,GACb,GAAa,KAATA,GAAeD,EAAM,CACvB,MAAMW,EAAQlB,EAAQW,KAAK,2BAC3B,OACEL,IAASQ,EAAiBA,mBACzBK,gCAA8BhB,EAAKiB,eAAeF,KAOrDlB,EAAQS,MAAM,4BACdT,EAAQU,QAAQF,GAChBR,EAAQW,KAAK,4BACbX,EAAQW,KAAK,sBACNV,GATEC,EAAIM,EAUd,CACD,OAAIF,EAAKE,IAASD,IAASF,GACzBL,EAAQU,QAAQF,GACTO,GAEFb,EAAIM,EACZ,CACH"}
@@ -1,2 +0,0 @@
1
- "use strict";Object.defineProperty(exports,"__esModule",{value:!0});var e=require("../../micromark-factory-space/index.js"),n=require("../../micromark-util-character/index.js");const c={tokenize:function(e,n,c){const t=this;return function(n){if(null===n)return c(n);return e.enter("lineEnding"),e.consume(n),e.exit("lineEnding"),r};function r(e){return t.parser.lazy[t.now().line]?c(e):n(e)}},partial:!0},t={name:"codeFenced",tokenize:function(t,r,i){const o=this,u={tokenize:function(c,t,r){let i=0;return u;function u(e){return c.enter("lineEnding"),c.consume(e),c.exit("lineEnding"),a}function a(t){return c.enter("codeFencedFence"),n.markdownSpace(t)?e.factorySpace(c,f,"linePrefix",o.parser.constructs.disable.null.includes("codeIndented")?void 0:4)(t):f(t)}function f(e){return e===d?(c.enter("codeFencedFenceSequence"),s(e)):r(e)}function s(t){return t===d?(i++,c.consume(t),s):i>=l?(c.exit("codeFencedFenceSequence"),n.markdownSpace(t)?e.factorySpace(c,F,"whitespace")(t):F(t)):r(t)}function F(e){return null===e||n.markdownLineEnding(e)?(c.exit("codeFencedFence"),t(e)):r(e)}},partial:!0};let d,a=0,l=0;return function(e){return function(e){const n=o.events[o.events.length-1];return a=n&&"linePrefix"===n[1].type?n[2].sliceSerialize(n[1],!0).length:0,d=e,t.enter("codeFenced"),t.enter("codeFencedFence"),t.enter("codeFencedFenceSequence"),f(e)}(e)};function f(c){return c===d?(l++,t.consume(c),f):l<3?i(c):(t.exit("codeFencedFenceSequence"),n.markdownSpace(c)?e.factorySpace(t,s,"whitespace")(c):s(c))}function s(e){return null===e||n.markdownLineEnding(e)?(t.exit("codeFencedFence"),o.interrupt?r(e):t.check(c,k,h)(e)):(t.enter("codeFencedFenceInfo"),t.enter("chunkString",{contentType:"string"}),F(e))}function F(c){return null===c||n.markdownLineEnding(c)?(t.exit("chunkString"),t.exit("codeFencedFenceInfo"),s(c)):n.markdownSpace(c)?(t.exit("chunkString"),t.exit("codeFencedFenceInfo"),e.factorySpace(t,m,"whitespace")(c)):96===c&&c===d?i(c):(t.consume(c),F)}function m(e){return 
null===e||n.markdownLineEnding(e)?s(e):(t.enter("codeFencedFenceMeta"),t.enter("chunkString",{contentType:"string"}),p(e))}function p(e){return null===e||n.markdownLineEnding(e)?(t.exit("chunkString"),t.exit("codeFencedFenceMeta"),s(e)):96===e&&e===d?i(e):(t.consume(e),p)}function k(e){return t.attempt(u,h,g)(e)}function g(e){return t.enter("lineEnding"),t.consume(e),t.exit("lineEnding"),x}function x(c){return a>0&&n.markdownSpace(c)?e.factorySpace(t,S,"linePrefix",a+1)(c):S(c)}function S(e){return null===e||n.markdownLineEnding(e)?t.check(c,k,h)(e):(t.enter("codeFlowValue"),w(e))}function w(e){return null===e||n.markdownLineEnding(e)?(t.exit("codeFlowValue"),S(e)):(t.consume(e),w)}function h(e){return t.exit("codeFenced"),r(e)}},concrete:!0};exports.codeFenced=t;
2
- //# sourceMappingURL=code-fenced.js.map
@@ -1 +0,0 @@
1
- {"version":3,"file":"code-fenced.js","sources":["../../../../../../../node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/code-fenced.js"],"sourcesContent":["/**\n * @typedef {import('micromark-util-types').Code} Code\n * @typedef {import('micromark-util-types').Construct} Construct\n * @typedef {import('micromark-util-types').State} State\n * @typedef {import('micromark-util-types').TokenizeContext} TokenizeContext\n * @typedef {import('micromark-util-types').Tokenizer} Tokenizer\n */\n\nimport {factorySpace} from 'micromark-factory-space'\nimport {markdownLineEnding, markdownSpace} from 'micromark-util-character'\n/** @type {Construct} */\nconst nonLazyContinuation = {\n tokenize: tokenizeNonLazyContinuation,\n partial: true\n}\n\n/** @type {Construct} */\nexport const codeFenced = {\n name: 'codeFenced',\n tokenize: tokenizeCodeFenced,\n concrete: true\n}\n\n/**\n * @this {TokenizeContext}\n * @type {Tokenizer}\n */\nfunction tokenizeCodeFenced(effects, ok, nok) {\n const self = this\n /** @type {Construct} */\n const closeStart = {\n tokenize: tokenizeCloseStart,\n partial: true\n }\n let initialPrefix = 0\n let sizeOpen = 0\n /** @type {NonNullable<Code>} */\n let marker\n return start\n\n /**\n * Start of code.\n *\n * ```markdown\n * > | ~~~js\n * ^\n * | alert(1)\n * | ~~~\n * ```\n *\n * @type {State}\n */\n function start(code) {\n // To do: parse whitespace like `markdown-rs`.\n return beforeSequenceOpen(code)\n }\n\n /**\n * In opening fence, after prefix, at sequence.\n *\n * ```markdown\n * > | ~~~js\n * ^\n * | alert(1)\n * | ~~~\n * ```\n *\n * @type {State}\n */\n function beforeSequenceOpen(code) {\n const tail = self.events[self.events.length - 1]\n initialPrefix =\n tail && tail[1].type === 'linePrefix'\n ? 
tail[2].sliceSerialize(tail[1], true).length\n : 0\n marker = code\n effects.enter('codeFenced')\n effects.enter('codeFencedFence')\n effects.enter('codeFencedFenceSequence')\n return sequenceOpen(code)\n }\n\n /**\n * In opening fence sequence.\n *\n * ```markdown\n * > | ~~~js\n * ^\n * | alert(1)\n * | ~~~\n * ```\n *\n * @type {State}\n */\n function sequenceOpen(code) {\n if (code === marker) {\n sizeOpen++\n effects.consume(code)\n return sequenceOpen\n }\n if (sizeOpen < 3) {\n return nok(code)\n }\n effects.exit('codeFencedFenceSequence')\n return markdownSpace(code)\n ? factorySpace(effects, infoBefore, 'whitespace')(code)\n : infoBefore(code)\n }\n\n /**\n * In opening fence, after the sequence (and optional whitespace), before info.\n *\n * ```markdown\n * > | ~~~js\n * ^\n * | alert(1)\n * | ~~~\n * ```\n *\n * @type {State}\n */\n function infoBefore(code) {\n if (code === null || markdownLineEnding(code)) {\n effects.exit('codeFencedFence')\n return self.interrupt\n ? ok(code)\n : effects.check(nonLazyContinuation, atNonLazyBreak, after)(code)\n }\n effects.enter('codeFencedFenceInfo')\n effects.enter('chunkString', {\n contentType: 'string'\n })\n return info(code)\n }\n\n /**\n * In info.\n *\n * ```markdown\n * > | ~~~js\n * ^\n * | alert(1)\n * | ~~~\n * ```\n *\n * @type {State}\n */\n function info(code) {\n if (code === null || markdownLineEnding(code)) {\n effects.exit('chunkString')\n effects.exit('codeFencedFenceInfo')\n return infoBefore(code)\n }\n if (markdownSpace(code)) {\n effects.exit('chunkString')\n effects.exit('codeFencedFenceInfo')\n return factorySpace(effects, metaBefore, 'whitespace')(code)\n }\n if (code === 96 && code === marker) {\n return nok(code)\n }\n effects.consume(code)\n return info\n }\n\n /**\n * In opening fence, after info and whitespace, before meta.\n *\n * ```markdown\n * > | ~~~js eval\n * ^\n * | alert(1)\n * | ~~~\n * ```\n *\n * @type {State}\n */\n function metaBefore(code) {\n if (code === null || 
markdownLineEnding(code)) {\n return infoBefore(code)\n }\n effects.enter('codeFencedFenceMeta')\n effects.enter('chunkString', {\n contentType: 'string'\n })\n return meta(code)\n }\n\n /**\n * In meta.\n *\n * ```markdown\n * > | ~~~js eval\n * ^\n * | alert(1)\n * | ~~~\n * ```\n *\n * @type {State}\n */\n function meta(code) {\n if (code === null || markdownLineEnding(code)) {\n effects.exit('chunkString')\n effects.exit('codeFencedFenceMeta')\n return infoBefore(code)\n }\n if (code === 96 && code === marker) {\n return nok(code)\n }\n effects.consume(code)\n return meta\n }\n\n /**\n * At eol/eof in code, before a non-lazy closing fence or content.\n *\n * ```markdown\n * > | ~~~js\n * ^\n * > | alert(1)\n * ^\n * | ~~~\n * ```\n *\n * @type {State}\n */\n function atNonLazyBreak(code) {\n return effects.attempt(closeStart, after, contentBefore)(code)\n }\n\n /**\n * Before code content, not a closing fence, at eol.\n *\n * ```markdown\n * | ~~~js\n * > | alert(1)\n * ^\n * | ~~~\n * ```\n *\n * @type {State}\n */\n function contentBefore(code) {\n effects.enter('lineEnding')\n effects.consume(code)\n effects.exit('lineEnding')\n return contentStart\n }\n\n /**\n * Before code content, not a closing fence.\n *\n * ```markdown\n * | ~~~js\n * > | alert(1)\n * ^\n * | ~~~\n * ```\n *\n * @type {State}\n */\n function contentStart(code) {\n return initialPrefix > 0 && markdownSpace(code)\n ? 
factorySpace(\n effects,\n beforeContentChunk,\n 'linePrefix',\n initialPrefix + 1\n )(code)\n : beforeContentChunk(code)\n }\n\n /**\n * Before code content, after optional prefix.\n *\n * ```markdown\n * | ~~~js\n * > | alert(1)\n * ^\n * | ~~~\n * ```\n *\n * @type {State}\n */\n function beforeContentChunk(code) {\n if (code === null || markdownLineEnding(code)) {\n return effects.check(nonLazyContinuation, atNonLazyBreak, after)(code)\n }\n effects.enter('codeFlowValue')\n return contentChunk(code)\n }\n\n /**\n * In code content.\n *\n * ```markdown\n * | ~~~js\n * > | alert(1)\n * ^^^^^^^^\n * | ~~~\n * ```\n *\n * @type {State}\n */\n function contentChunk(code) {\n if (code === null || markdownLineEnding(code)) {\n effects.exit('codeFlowValue')\n return beforeContentChunk(code)\n }\n effects.consume(code)\n return contentChunk\n }\n\n /**\n * After code.\n *\n * ```markdown\n * | ~~~js\n * | alert(1)\n * > | ~~~\n * ^\n * ```\n *\n * @type {State}\n */\n function after(code) {\n effects.exit('codeFenced')\n return ok(code)\n }\n\n /**\n * @this {TokenizeContext}\n * @type {Tokenizer}\n */\n function tokenizeCloseStart(effects, ok, nok) {\n let size = 0\n return startBefore\n\n /**\n *\n *\n * @type {State}\n */\n function startBefore(code) {\n effects.enter('lineEnding')\n effects.consume(code)\n effects.exit('lineEnding')\n return start\n }\n\n /**\n * Before closing fence, at optional whitespace.\n *\n * ```markdown\n * | ~~~js\n * | alert(1)\n * > | ~~~\n * ^\n * ```\n *\n * @type {State}\n */\n function start(code) {\n // Always populated by defaults.\n\n // To do: `enter` here or in next state?\n effects.enter('codeFencedFence')\n return markdownSpace(code)\n ? factorySpace(\n effects,\n beforeSequenceClose,\n 'linePrefix',\n self.parser.constructs.disable.null.includes('codeIndented')\n ? 
undefined\n : 4\n )(code)\n : beforeSequenceClose(code)\n }\n\n /**\n * In closing fence, after optional whitespace, at sequence.\n *\n * ```markdown\n * | ~~~js\n * | alert(1)\n * > | ~~~\n * ^\n * ```\n *\n * @type {State}\n */\n function beforeSequenceClose(code) {\n if (code === marker) {\n effects.enter('codeFencedFenceSequence')\n return sequenceClose(code)\n }\n return nok(code)\n }\n\n /**\n * In closing fence sequence.\n *\n * ```markdown\n * | ~~~js\n * | alert(1)\n * > | ~~~\n * ^\n * ```\n *\n * @type {State}\n */\n function sequenceClose(code) {\n if (code === marker) {\n size++\n effects.consume(code)\n return sequenceClose\n }\n if (size >= sizeOpen) {\n effects.exit('codeFencedFenceSequence')\n return markdownSpace(code)\n ? factorySpace(effects, sequenceCloseAfter, 'whitespace')(code)\n : sequenceCloseAfter(code)\n }\n return nok(code)\n }\n\n /**\n * After closing fence sequence, after optional whitespace.\n *\n * ```markdown\n * | ~~~js\n * | alert(1)\n * > | ~~~\n * ^\n * ```\n *\n * @type {State}\n */\n function sequenceCloseAfter(code) {\n if (code === null || markdownLineEnding(code)) {\n effects.exit('codeFencedFence')\n return ok(code)\n }\n return nok(code)\n }\n }\n}\n\n/**\n * @this {TokenizeContext}\n * @type {Tokenizer}\n */\nfunction tokenizeNonLazyContinuation(effects, ok, nok) {\n const self = this\n return start\n\n /**\n *\n *\n * @type {State}\n */\n function start(code) {\n if (code === null) {\n return nok(code)\n }\n effects.enter('lineEnding')\n effects.consume(code)\n effects.exit('lineEnding')\n return lineStart\n }\n\n /**\n *\n *\n * @type {State}\n */\n function lineStart(code) {\n return self.parser.lazy[self.now().line] ? 
nok(code) : ok(code)\n }\n}\n"],"names":["nonLazyContinuation","tokenize","effects","ok","nok","self","this","code","enter","consume","exit","lineStart","parser","lazy","now","line","partial","codeFenced","name","closeStart","size","startBefore","start","markdownSpace","factorySpace","beforeSequenceClose","constructs","disable","null","includes","undefined","marker","sequenceClose","sizeOpen","sequenceCloseAfter","markdownLineEnding","initialPrefix","tail","events","length","type","sliceSerialize","sequenceOpen","beforeSequenceOpen","infoBefore","interrupt","check","atNonLazyBreak","after","contentType","info","metaBefore","meta","attempt","contentBefore","contentStart","beforeContentChunk","contentChunk","concrete"],"mappings":"iLAWA,MAAMA,EAAsB,CAC1BC,SAwbF,SAAqCC,EAASC,EAAIC,GAChD,MAAMC,EAAOC,KACb,OAOA,SAAeC,GACb,GAAa,OAATA,EACF,OAAOH,EAAIG,GAKb,OAHAL,EAAQM,MAAM,cACdN,EAAQO,QAAQF,GAChBL,EAAQQ,KAAK,cACNC,CACR,EAOD,SAASA,EAAUJ,GACjB,OAAOF,EAAKO,OAAOC,KAAKR,EAAKS,MAAMC,MAAQX,EAAIG,GAAQJ,EAAGI,EAC3D,CACH,EAldES,SAAS,GAIEC,EAAa,CACxBC,KAAM,aACNjB,SAQF,SAA4BC,EAASC,EAAIC,GACvC,MAAMC,EAAOC,KAEPa,EAAa,CACjBlB,SA+SF,SAA4BC,EAASC,EAAIC,GACvC,IAAIgB,EAAO,EACX,OAAOC,EAOP,SAASA,EAAYd,GAInB,OAHAL,EAAQM,MAAM,cACdN,EAAQO,QAAQF,GAChBL,EAAQQ,KAAK,cACNY,CACR,CAcD,SAASA,EAAMf,GAKb,OADAL,EAAQM,MAAM,mBACPe,EAAAA,cAAchB,GACjBiB,EAAYA,aACVtB,EACAuB,EACA,aACApB,EAAKO,OAAOc,WAAWC,QAAQC,KAAKC,SAAS,qBACzCC,EACA,EANNN,CAOEjB,GACFkB,EAAoBlB,EACzB,CAcD,SAASkB,EAAoBlB,GAC3B,OAAIA,IAASwB,GACX7B,EAAQM,MAAM,2BACPwB,EAAczB,IAEhBH,EAAIG,EACZ,CAcD,SAASyB,EAAczB,GACrB,OAAIA,IAASwB,GACXX,IACAlB,EAAQO,QAAQF,GACTyB,GAELZ,GAAQa,GACV/B,EAAQQ,KAAK,2BACNa,EAAAA,cAAchB,GACjBiB,EAAAA,aAAatB,EAASgC,EAAoB,aAA1CV,CAAwDjB,GACxD2B,EAAmB3B,IAElBH,EAAIG,EACZ,CAcD,SAAS2B,EAAmB3B,GAC1B,OAAa,OAATA,GAAiB4B,EAAkBA,mBAAC5B,IACtCL,EAAQQ,KAAK,mBACNP,EAAGI,IAELH,EAAIG,EACZ,CACF,EA7ZCS,SAAS,GAEX,IAGIe,EAHAK,EAAgB,EAChBH,EAAW,EAGf,OAcA,SAAe1B,GAEb,OAeF,SAA4BA,GAC1B,MAAM8B,EAAOhC,EAAKiC,OAAOjC,EAAKiC,OAAOC,OAAS,GAS9C,OARAH,EACEC
,GAAyB,eAAjBA,EAAK,GAAGG,KACZH,EAAK,GAAGI,eAAeJ,EAAK,IAAI,GAAME,OACtC,EACNR,EAASxB,EACTL,EAAQM,MAAM,cACdN,EAAQM,MAAM,mBACdN,EAAQM,MAAM,2BACPkC,EAAanC,EACrB,CA1BQoC,CAAmBpC,EAC3B,EAuCD,SAASmC,EAAanC,GACpB,OAAIA,IAASwB,GACXE,IACA/B,EAAQO,QAAQF,GACTmC,GAELT,EAAW,EACN7B,EAAIG,IAEbL,EAAQQ,KAAK,2BACNa,EAAAA,cAAchB,GACjBiB,EAAAA,aAAatB,EAAS0C,EAAY,aAAlCpB,CAAgDjB,GAChDqC,EAAWrC,GAChB,CAcD,SAASqC,EAAWrC,GAClB,OAAa,OAATA,GAAiB4B,EAAkBA,mBAAC5B,IACtCL,EAAQQ,KAAK,mBACNL,EAAKwC,UACR1C,EAAGI,GACHL,EAAQ4C,MAAM9C,EAAqB+C,EAAgBC,EAAnD9C,CAA0DK,KAEhEL,EAAQM,MAAM,uBACdN,EAAQM,MAAM,cAAe,CAC3ByC,YAAa,WAERC,EAAK3C,GACb,CAcD,SAAS2C,EAAK3C,GACZ,OAAa,OAATA,GAAiB4B,EAAkBA,mBAAC5B,IACtCL,EAAQQ,KAAK,eACbR,EAAQQ,KAAK,uBACNkC,EAAWrC,IAEhBgB,EAAAA,cAAchB,IAChBL,EAAQQ,KAAK,eACbR,EAAQQ,KAAK,uBACNc,EAAYA,aAACtB,EAASiD,EAAY,aAAlC3B,CAAgDjB,IAE5C,KAATA,GAAeA,IAASwB,EACnB3B,EAAIG,IAEbL,EAAQO,QAAQF,GACT2C,EACR,CAcD,SAASC,EAAW5C,GAClB,OAAa,OAATA,GAAiB4B,EAAkBA,mBAAC5B,GAC/BqC,EAAWrC,IAEpBL,EAAQM,MAAM,uBACdN,EAAQM,MAAM,cAAe,CAC3ByC,YAAa,WAERG,EAAK7C,GACb,CAcD,SAAS6C,EAAK7C,GACZ,OAAa,OAATA,GAAiB4B,EAAkBA,mBAAC5B,IACtCL,EAAQQ,KAAK,eACbR,EAAQQ,KAAK,uBACNkC,EAAWrC,IAEP,KAATA,GAAeA,IAASwB,EACnB3B,EAAIG,IAEbL,EAAQO,QAAQF,GACT6C,EACR,CAeD,SAASL,EAAexC,GACtB,OAAOL,EAAQmD,QAAQlC,EAAY6B,EAAOM,EAAnCpD,CAAkDK,EAC1D,CAcD,SAAS+C,EAAc/C,GAIrB,OAHAL,EAAQM,MAAM,cACdN,EAAQO,QAAQF,GAChBL,EAAQQ,KAAK,cACN6C,CACR,CAcD,SAASA,EAAahD,GACpB,OAAO6B,EAAgB,GAAKb,EAAaA,cAAChB,GACtCiB,EAAYA,aACVtB,EACAsD,EACA,aACApB,EAAgB,EAJlBZ,CAKEjB,GACFiD,EAAmBjD,EACxB,CAcD,SAASiD,EAAmBjD,GAC1B,OAAa,OAATA,GAAiB4B,EAAkBA,mBAAC5B,GAC/BL,EAAQ4C,MAAM9C,EAAqB+C,EAAgBC,EAAnD9C,CAA0DK,IAEnEL,EAAQM,MAAM,iBACPiD,EAAalD,GACrB,CAcD,SAASkD,EAAalD,GACpB,OAAa,OAATA,GAAiB4B,EAAkBA,mBAAC5B,IACtCL,EAAQQ,KAAK,iBACN8C,EAAmBjD,KAE5BL,EAAQO,QAAQF,GACTkD,EACR,CAcD,SAAST,EAAMzC,GAEb,OADAL,EAAQQ,KAAK,cACNP,EAAGI,EACX,CAsHH,EA1aEmD,UAAU"}
@@ -1,2 +0,0 @@
1
- "use strict";Object.defineProperty(exports,"__esModule",{value:!0});var e=require("../../micromark-factory-space/index.js"),n=require("../../micromark-util-character/index.js");const t={name:"codeIndented",tokenize:function(t,r,o){const c=this;return function(n){return t.enter("codeIndented"),e.factorySpace(t,u,"linePrefix",5)(n)};function u(e){const n=c.events[c.events.length-1];return n&&"linePrefix"===n[1].type&&n[2].sliceSerialize(n[1],!0).length>=4?d(e):o(e)}function d(e){return null===e?a(e):n.markdownLineEnding(e)?t.attempt(i,d,a)(e):(t.enter("codeFlowValue"),l(e))}function l(e){return null===e||n.markdownLineEnding(e)?(t.exit("codeFlowValue"),d(e)):(t.consume(e),l)}function a(e){return t.exit("codeIndented"),r(e)}}},i={tokenize:function(t,i,r){const o=this;return c;function c(i){return o.parser.lazy[o.now().line]?r(i):n.markdownLineEnding(i)?(t.enter("lineEnding"),t.consume(i),t.exit("lineEnding"),c):e.factorySpace(t,u,"linePrefix",5)(i)}function u(e){const t=o.events[o.events.length-1];return t&&"linePrefix"===t[1].type&&t[2].sliceSerialize(t[1],!0).length>=4?i(e):n.markdownLineEnding(e)?c(e):r(e)}},partial:!0};exports.codeIndented=t;
2
- //# sourceMappingURL=code-indented.js.map
@@ -1 +0,0 @@
1
- {"version":3,"file":"code-indented.js","sources":["../../../../../../../node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/code-indented.js"],"sourcesContent":["/**\n * @typedef {import('micromark-util-types').Construct} Construct\n * @typedef {import('micromark-util-types').State} State\n * @typedef {import('micromark-util-types').TokenizeContext} TokenizeContext\n * @typedef {import('micromark-util-types').Tokenizer} Tokenizer\n */\n\nimport {factorySpace} from 'micromark-factory-space'\nimport {markdownLineEnding, markdownSpace} from 'micromark-util-character'\n/** @type {Construct} */\nexport const codeIndented = {\n name: 'codeIndented',\n tokenize: tokenizeCodeIndented\n}\n\n/** @type {Construct} */\nconst furtherStart = {\n tokenize: tokenizeFurtherStart,\n partial: true\n}\n\n/**\n * @this {TokenizeContext}\n * @type {Tokenizer}\n */\nfunction tokenizeCodeIndented(effects, ok, nok) {\n const self = this\n return start\n\n /**\n * Start of code (indented).\n *\n * > **Parsing note**: it is not needed to check if this first line is a\n * > filled line (that it has a non-whitespace character), because blank lines\n * > are parsed already, so we never run into that.\n *\n * ```markdown\n * > | aaa\n * ^\n * ```\n *\n * @type {State}\n */\n function start(code) {\n // To do: manually check if interrupting like `markdown-rs`.\n\n effects.enter('codeIndented')\n // To do: use an improved `space_or_tab` function like `markdown-rs`,\n // so that we can drop the next state.\n return factorySpace(effects, afterPrefix, 'linePrefix', 4 + 1)(code)\n }\n\n /**\n * At start, after 1 or 4 spaces.\n *\n * ```markdown\n * > | aaa\n * ^\n * ```\n *\n * @type {State}\n */\n function afterPrefix(code) {\n const tail = self.events[self.events.length - 1]\n return tail &&\n tail[1].type === 'linePrefix' &&\n tail[2].sliceSerialize(tail[1], true).length >= 4\n ? 
atBreak(code)\n : nok(code)\n }\n\n /**\n * At a break.\n *\n * ```markdown\n * > | aaa\n * ^ ^\n * ```\n *\n * @type {State}\n */\n function atBreak(code) {\n if (code === null) {\n return after(code)\n }\n if (markdownLineEnding(code)) {\n return effects.attempt(furtherStart, atBreak, after)(code)\n }\n effects.enter('codeFlowValue')\n return inside(code)\n }\n\n /**\n * In code content.\n *\n * ```markdown\n * > | aaa\n * ^^^^\n * ```\n *\n * @type {State}\n */\n function inside(code) {\n if (code === null || markdownLineEnding(code)) {\n effects.exit('codeFlowValue')\n return atBreak(code)\n }\n effects.consume(code)\n return inside\n }\n\n /** @type {State} */\n function after(code) {\n effects.exit('codeIndented')\n // To do: allow interrupting like `markdown-rs`.\n // Feel free to interrupt.\n // tokenizer.interrupt = false\n return ok(code)\n }\n}\n\n/**\n * @this {TokenizeContext}\n * @type {Tokenizer}\n */\nfunction tokenizeFurtherStart(effects, ok, nok) {\n const self = this\n return furtherStart\n\n /**\n * At eol, trying to parse another indent.\n *\n * ```markdown\n * > | aaa\n * ^\n * | bbb\n * ```\n *\n * @type {State}\n */\n function furtherStart(code) {\n // To do: improve `lazy` / `pierce` handling.\n // If this is a lazy line, it can’t be code.\n if (self.parser.lazy[self.now().line]) {\n return nok(code)\n }\n if (markdownLineEnding(code)) {\n effects.enter('lineEnding')\n effects.consume(code)\n effects.exit('lineEnding')\n return furtherStart\n }\n\n // To do: the code here in `micromark-js` is a bit different from\n // `markdown-rs` because there it can attempt spaces.\n // We can’t yet.\n //\n // To do: use an improved `space_or_tab` function like `markdown-rs`,\n // so that we can drop the next state.\n return factorySpace(effects, afterPrefix, 'linePrefix', 4 + 1)(code)\n }\n\n /**\n * At start, after 1 or 4 spaces.\n *\n * ```markdown\n * > | aaa\n * ^\n * ```\n *\n * @type {State}\n */\n function afterPrefix(code) {\n const tail = 
self.events[self.events.length - 1]\n return tail &&\n tail[1].type === 'linePrefix' &&\n tail[2].sliceSerialize(tail[1], true).length >= 4\n ? ok(code)\n : markdownLineEnding(code)\n ? furtherStart(code)\n : nok(code)\n }\n}\n"],"names":["codeIndented","name","tokenize","effects","ok","nok","self","this","code","enter","factorySpace","afterPrefix","tail","events","length","type","sliceSerialize","atBreak","after","markdownLineEnding","attempt","furtherStart","inside","exit","consume","parser","lazy","now","line","partial"],"mappings":"iLAUY,MAACA,EAAe,CAC1BC,KAAM,eACNC,SAaF,SAA8BC,EAASC,EAAIC,GACzC,MAAMC,EAAOC,KACb,OAgBA,SAAeC,GAMb,OAHAL,EAAQM,MAAM,gBAGPC,EAAAA,aAAaP,EAASQ,EAAa,aAAc,EAAjDD,CAAwDF,EAChE,EAYD,SAASG,EAAYH,GACnB,MAAMI,EAAON,EAAKO,OAAOP,EAAKO,OAAOC,OAAS,GAC9C,OAAOF,GACY,eAAjBA,EAAK,GAAGG,MACRH,EAAK,GAAGI,eAAeJ,EAAK,IAAI,GAAME,QAAU,EAC9CG,EAAQT,GACRH,EAAIG,EACT,CAYD,SAASS,EAAQT,GACf,OAAa,OAATA,EACKU,EAAMV,GAEXW,EAAAA,mBAAmBX,GACdL,EAAQiB,QAAQC,EAAcJ,EAASC,EAAvCf,CAA8CK,IAEvDL,EAAQM,MAAM,iBACPa,EAAOd,GACf,CAYD,SAASc,EAAOd,GACd,OAAa,OAATA,GAAiBW,EAAkBA,mBAACX,IACtCL,EAAQoB,KAAK,iBACNN,EAAQT,KAEjBL,EAAQqB,QAAQhB,GACTc,EACR,CAGD,SAASJ,EAAMV,GAKb,OAJAL,EAAQoB,KAAK,gBAINnB,EAAGI,EACX,CACH,GAvGMa,EAAe,CACnBnB,SA4GF,SAA8BC,EAASC,EAAIC,GACzC,MAAMC,EAAOC,KACb,OAAOc,EAaP,SAASA,EAAab,GAGpB,OAAIF,EAAKmB,OAAOC,KAAKpB,EAAKqB,MAAMC,MACvBvB,EAAIG,GAETW,EAAAA,mBAAmBX,IACrBL,EAAQM,MAAM,cACdN,EAAQqB,QAAQhB,GAChBL,EAAQoB,KAAK,cACNF,GASFX,EAAAA,aAAaP,EAASQ,EAAa,aAAc,EAAjDD,CAAwDF,EAChE,CAYD,SAASG,EAAYH,GACnB,MAAMI,EAAON,EAAKO,OAAOP,EAAKO,OAAOC,OAAS,GAC9C,OAAOF,GACY,eAAjBA,EAAK,GAAGG,MACRH,EAAK,GAAGI,eAAeJ,EAAK,IAAI,GAAME,QAAU,EAC9CV,EAAGI,GACHW,EAAAA,mBAAmBX,GACnBa,EAAab,GACbH,EAAIG,EACT,CACH,EApKEqB,SAAS"}
@@ -1,2 +0,0 @@
1
- "use strict";Object.defineProperty(exports,"__esModule",{value:!0});var e=require("../../micromark-util-character/index.js");const n={name:"codeText",tokenize:function(n,t,i){let c,o,r=0;return function(e){return n.enter("codeText"),n.enter("codeTextSequence"),d(e)};function d(e){return 96===e?(n.consume(e),r++,d):(n.exit("codeTextSequence"),u(e))}function u(t){return null===t?i(t):32===t?(n.enter("space"),n.consume(t),n.exit("space"),u):96===t?(o=n.enter("codeTextSequence"),c=0,s(t)):e.markdownLineEnding(t)?(n.enter("lineEnding"),n.consume(t),n.exit("lineEnding"),u):(n.enter("codeTextData"),a(t))}function a(t){return null===t||32===t||96===t||e.markdownLineEnding(t)?(n.exit("codeTextData"),u(t)):(n.consume(t),a)}function s(e){return 96===e?(n.consume(e),c++,s):c===r?(n.exit("codeTextSequence"),n.exit("codeText"),t(e)):(o.type="codeTextData",a(e))}},resolve:function(e){let n,t,i=e.length-4,c=3;if(!("lineEnding"!==e[c][1].type&&"space"!==e[c][1].type||"lineEnding"!==e[i][1].type&&"space"!==e[i][1].type))for(n=c;++n<i;)if("codeTextData"===e[n][1].type){e[c][1].type="codeTextPadding",e[i][1].type="codeTextPadding",c+=2,i-=2;break}n=c-1,i++;for(;++n<=i;)void 0===t?n!==i&&"lineEnding"!==e[n][1].type&&(t=n):n!==i&&"lineEnding"!==e[n][1].type||(e[t][1].type="codeTextData",n!==t+2&&(e[t][1].end=e[n-1][1].end,e.splice(t+2,n-t-2),i-=n-t-2,n=t+2),t=void 0);return e},previous:function(e){return 96!==e||"characterEscape"===this.events[this.events.length-1][1].type}};exports.codeText=n;
2
- //# sourceMappingURL=code-text.js.map
@@ -1 +0,0 @@
1
- {"version":3,"file":"code-text.js","sources":["../../../../../../../node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/code-text.js"],"sourcesContent":["/**\n * @typedef {import('micromark-util-types').Construct} Construct\n * @typedef {import('micromark-util-types').Previous} Previous\n * @typedef {import('micromark-util-types').Resolver} Resolver\n * @typedef {import('micromark-util-types').State} State\n * @typedef {import('micromark-util-types').Token} Token\n * @typedef {import('micromark-util-types').TokenizeContext} TokenizeContext\n * @typedef {import('micromark-util-types').Tokenizer} Tokenizer\n */\n\nimport {markdownLineEnding} from 'micromark-util-character'\n/** @type {Construct} */\nexport const codeText = {\n name: 'codeText',\n tokenize: tokenizeCodeText,\n resolve: resolveCodeText,\n previous\n}\n\n// To do: next major: don’t resolve, like `markdown-rs`.\n/** @type {Resolver} */\nfunction resolveCodeText(events) {\n let tailExitIndex = events.length - 4\n let headEnterIndex = 3\n /** @type {number} */\n let index\n /** @type {number | undefined} */\n let enter\n\n // If we start and end with an EOL or a space.\n if (\n (events[headEnterIndex][1].type === 'lineEnding' ||\n events[headEnterIndex][1].type === 'space') &&\n (events[tailExitIndex][1].type === 'lineEnding' ||\n events[tailExitIndex][1].type === 'space')\n ) {\n index = headEnterIndex\n\n // And we have data.\n while (++index < tailExitIndex) {\n if (events[index][1].type === 'codeTextData') {\n // Then we have padding.\n events[headEnterIndex][1].type = 'codeTextPadding'\n events[tailExitIndex][1].type = 'codeTextPadding'\n headEnterIndex += 2\n tailExitIndex -= 2\n break\n }\n }\n }\n\n // Merge adjacent spaces and data.\n index = headEnterIndex - 1\n tailExitIndex++\n while (++index <= tailExitIndex) {\n if (enter === undefined) {\n if (index !== tailExitIndex && events[index][1].type !== 'lineEnding') {\n enter = index\n }\n } else if (\n index === tailExitIndex 
||\n events[index][1].type === 'lineEnding'\n ) {\n events[enter][1].type = 'codeTextData'\n if (index !== enter + 2) {\n events[enter][1].end = events[index - 1][1].end\n events.splice(enter + 2, index - enter - 2)\n tailExitIndex -= index - enter - 2\n index = enter + 2\n }\n enter = undefined\n }\n }\n return events\n}\n\n/**\n * @this {TokenizeContext}\n * @type {Previous}\n */\nfunction previous(code) {\n // If there is a previous code, there will always be a tail.\n return (\n code !== 96 ||\n this.events[this.events.length - 1][1].type === 'characterEscape'\n )\n}\n\n/**\n * @this {TokenizeContext}\n * @type {Tokenizer}\n */\nfunction tokenizeCodeText(effects, ok, nok) {\n const self = this\n let sizeOpen = 0\n /** @type {number} */\n let size\n /** @type {Token} */\n let token\n return start\n\n /**\n * Start of code (text).\n *\n * ```markdown\n * > | `a`\n * ^\n * > | \\`a`\n * ^\n * ```\n *\n * @type {State}\n */\n function start(code) {\n effects.enter('codeText')\n effects.enter('codeTextSequence')\n return sequenceOpen(code)\n }\n\n /**\n * In opening sequence.\n *\n * ```markdown\n * > | `a`\n * ^\n * ```\n *\n * @type {State}\n */\n function sequenceOpen(code) {\n if (code === 96) {\n effects.consume(code)\n sizeOpen++\n return sequenceOpen\n }\n effects.exit('codeTextSequence')\n return between(code)\n }\n\n /**\n * Between something and something else.\n *\n * ```markdown\n * > | `a`\n * ^^\n * ```\n *\n * @type {State}\n */\n function between(code) {\n // EOF.\n if (code === null) {\n return nok(code)\n }\n\n // To do: next major: don’t do spaces in resolve, but when compiling,\n // like `markdown-rs`.\n // Tabs don’t work, and virtual spaces don’t make sense.\n if (code === 32) {\n effects.enter('space')\n effects.consume(code)\n effects.exit('space')\n return between\n }\n\n // Closing fence? 
Could also be data.\n if (code === 96) {\n token = effects.enter('codeTextSequence')\n size = 0\n return sequenceClose(code)\n }\n if (markdownLineEnding(code)) {\n effects.enter('lineEnding')\n effects.consume(code)\n effects.exit('lineEnding')\n return between\n }\n\n // Data.\n effects.enter('codeTextData')\n return data(code)\n }\n\n /**\n * In data.\n *\n * ```markdown\n * > | `a`\n * ^\n * ```\n *\n * @type {State}\n */\n function data(code) {\n if (\n code === null ||\n code === 32 ||\n code === 96 ||\n markdownLineEnding(code)\n ) {\n effects.exit('codeTextData')\n return between(code)\n }\n effects.consume(code)\n return data\n }\n\n /**\n * In closing sequence.\n *\n * ```markdown\n * > | `a`\n * ^\n * ```\n *\n * @type {State}\n */\n function sequenceClose(code) {\n // More.\n if (code === 96) {\n effects.consume(code)\n size++\n return sequenceClose\n }\n\n // Done!\n if (size === sizeOpen) {\n effects.exit('codeTextSequence')\n effects.exit('codeText')\n return ok(code)\n }\n\n // More or less accents: mark as data.\n token.type = 'codeTextData'\n return data(code)\n 
}\n}\n"],"names":["codeText","name","tokenize","effects","ok","nok","size","token","sizeOpen","code","enter","sequenceOpen","consume","exit","between","sequenceClose","markdownLineEnding","data","type","resolve","events","index","tailExitIndex","length","headEnterIndex","undefined","end","splice","previous","this"],"mappings":"6HAYY,MAACA,EAAW,CACtBC,KAAM,WACNC,SA8EF,SAA0BC,EAASC,EAAIC,GAErC,IAEIC,EAEAC,EAJAC,EAAW,EAKf,OAcA,SAAeC,GAGb,OAFAN,EAAQO,MAAM,YACdP,EAAQO,MAAM,oBACPC,EAAaF,EACrB,EAYD,SAASE,EAAaF,GACpB,OAAa,KAATA,GACFN,EAAQS,QAAQH,GAChBD,IACOG,IAETR,EAAQU,KAAK,oBACNC,EAAQL,GAChB,CAYD,SAASK,EAAQL,GAEf,OAAa,OAATA,EACKJ,EAAII,GAMA,KAATA,GACFN,EAAQO,MAAM,SACdP,EAAQS,QAAQH,GAChBN,EAAQU,KAAK,SACNC,GAII,KAATL,GACFF,EAAQJ,EAAQO,MAAM,oBACtBJ,EAAO,EACAS,EAAcN,IAEnBO,EAAAA,mBAAmBP,IACrBN,EAAQO,MAAM,cACdP,EAAQS,QAAQH,GAChBN,EAAQU,KAAK,cACNC,IAITX,EAAQO,MAAM,gBACPO,EAAKR,GACb,CAYD,SAASQ,EAAKR,GACZ,OACW,OAATA,GACS,KAATA,GACS,KAATA,GACAO,EAAAA,mBAAmBP,IAEnBN,EAAQU,KAAK,gBACNC,EAAQL,KAEjBN,EAAQS,QAAQH,GACTQ,EACR,CAYD,SAASF,EAAcN,GAErB,OAAa,KAATA,GACFN,EAAQS,QAAQH,GAChBH,IACOS,GAILT,IAASE,GACXL,EAAQU,KAAK,oBACbV,EAAQU,KAAK,YACNT,EAAGK,KAIZF,EAAMW,KAAO,eACND,EAAKR,GACb,CACH,EA7NEU,QAMF,SAAyBC,GACvB,IAGIC,EAEAX,EALAY,EAAgBF,EAAOG,OAAS,EAChCC,EAAiB,EAOrB,KACsC,eAAnCJ,EAAOI,GAAgB,GAAGN,MACU,UAAnCE,EAAOI,GAAgB,GAAGN,MACO,eAAlCE,EAAOE,GAAe,GAAGJ,MACU,UAAlCE,EAAOE,GAAe,GAAGJ,MAK3B,IAHAG,EAAQG,IAGCH,EAAQC,GACf,GAA8B,iBAA1BF,EAAOC,GAAO,GAAGH,KAAyB,CAE5CE,EAAOI,GAAgB,GAAGN,KAAO,kBACjCE,EAAOE,GAAe,GAAGJ,KAAO,kBAChCM,GAAkB,EAClBF,GAAiB,EACjB,KACD,CAKLD,EAAQG,EAAiB,EACzBF,IACA,OAASD,GAASC,QACFG,IAAVf,EACEW,IAAUC,GAA2C,eAA1BF,EAAOC,GAAO,GAAGH,OAC9CR,EAAQW,GAGVA,IAAUC,GACgB,eAA1BF,EAAOC,GAAO,GAAGH,OAEjBE,EAAOV,GAAO,GAAGQ,KAAO,eACpBG,IAAUX,EAAQ,IACpBU,EAAOV,GAAO,GAAGgB,IAAMN,EAAOC,EAAQ,GAAG,GAAGK,IAC5CN,EAAOO,OAAOjB,EAAQ,EAAGW,EAAQX,EAAQ,GACzCY,GAAiBD,EAAQX,EAAQ,EACjCW,EAAQX,EAAQ,GAElBA,OAAQe,GAGZ,OAAOL,CACT,EA1DEQ,SAgEF,SAAkBnB,GAEhB,OACW,KAATA,GACgD,oBAAhDoB,KAAKT,OAAOS,KAAKT,OAAOG,OAAS,GA
AG,GAAGL,IAE3C"}
@@ -1,2 +0,0 @@
1
- "use strict";Object.defineProperty(exports,"__esModule",{value:!0});var n=require("../../micromark-factory-space/index.js"),e=require("../../micromark-util-character/index.js"),t=require("../../micromark-util-subtokenize/index.js");const r={tokenize:function(n,t){let r;return function(e){return n.enter("content"),r=n.enter("chunkContent",{contentType:"content"}),o(e)};function o(t){return null===t?c(t):e.markdownLineEnding(t)?n.check(i,u,c)(t):(n.consume(t),o)}function c(e){return n.exit("chunkContent"),n.exit("content"),t(e)}function u(e){return n.consume(e),n.exit("chunkContent"),r.next=n.enter("chunkContent",{contentType:"content",previous:r}),r=r.next,o}},resolve:function(n){return t.subtokenize(n),n}},i={tokenize:function(t,r,i){const o=this;return function(e){return t.exit("chunkContent"),t.enter("lineEnding"),t.consume(e),t.exit("lineEnding"),n.factorySpace(t,c,"linePrefix")};function c(n){if(null===n||e.markdownLineEnding(n))return i(n);const c=o.events[o.events.length-1];return!o.parser.constructs.disable.null.includes("codeIndented")&&c&&"linePrefix"===c[1].type&&c[2].sliceSerialize(c[1],!0).length>=4?r(n):t.interrupt(o.parser.constructs.flow,i,r)(n)}},partial:!0};exports.content=r;
2
- //# sourceMappingURL=content.js.map
@@ -1 +0,0 @@
1
- {"version":3,"file":"content.js","sources":["../../../../../../../node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/content.js"],"sourcesContent":["/**\n * @typedef {import('micromark-util-types').Construct} Construct\n * @typedef {import('micromark-util-types').Resolver} Resolver\n * @typedef {import('micromark-util-types').State} State\n * @typedef {import('micromark-util-types').Token} Token\n * @typedef {import('micromark-util-types').TokenizeContext} TokenizeContext\n * @typedef {import('micromark-util-types').Tokenizer} Tokenizer\n */\n\nimport {factorySpace} from 'micromark-factory-space'\nimport {markdownLineEnding} from 'micromark-util-character'\nimport {subtokenize} from 'micromark-util-subtokenize'\n/**\n * No name because it must not be turned off.\n * @type {Construct}\n */\nexport const content = {\n tokenize: tokenizeContent,\n resolve: resolveContent\n}\n\n/** @type {Construct} */\nconst continuationConstruct = {\n tokenize: tokenizeContinuation,\n partial: true\n}\n\n/**\n * Content is transparent: it’s parsed right now. 
That way, definitions are also\n * parsed right now: before text in paragraphs (specifically, media) are parsed.\n *\n * @type {Resolver}\n */\nfunction resolveContent(events) {\n subtokenize(events)\n return events\n}\n\n/**\n * @this {TokenizeContext}\n * @type {Tokenizer}\n */\nfunction tokenizeContent(effects, ok) {\n /** @type {Token | undefined} */\n let previous\n return chunkStart\n\n /**\n * Before a content chunk.\n *\n * ```markdown\n * > | abc\n * ^\n * ```\n *\n * @type {State}\n */\n function chunkStart(code) {\n effects.enter('content')\n previous = effects.enter('chunkContent', {\n contentType: 'content'\n })\n return chunkInside(code)\n }\n\n /**\n * In a content chunk.\n *\n * ```markdown\n * > | abc\n * ^^^\n * ```\n *\n * @type {State}\n */\n function chunkInside(code) {\n if (code === null) {\n return contentEnd(code)\n }\n\n // To do: in `markdown-rs`, each line is parsed on its own, and everything\n // is stitched together resolving.\n if (markdownLineEnding(code)) {\n return effects.check(\n continuationConstruct,\n contentContinue,\n contentEnd\n )(code)\n }\n\n // Data.\n effects.consume(code)\n return chunkInside\n }\n\n /**\n *\n *\n * @type {State}\n */\n function contentEnd(code) {\n effects.exit('chunkContent')\n effects.exit('content')\n return ok(code)\n }\n\n /**\n *\n *\n * @type {State}\n */\n function contentContinue(code) {\n effects.consume(code)\n effects.exit('chunkContent')\n previous.next = effects.enter('chunkContent', {\n contentType: 'content',\n previous\n })\n previous = previous.next\n return chunkInside\n }\n}\n\n/**\n * @this {TokenizeContext}\n * @type {Tokenizer}\n */\nfunction tokenizeContinuation(effects, ok, nok) {\n const self = this\n return startLookahead\n\n /**\n *\n *\n * @type {State}\n */\n function startLookahead(code) {\n effects.exit('chunkContent')\n effects.enter('lineEnding')\n effects.consume(code)\n effects.exit('lineEnding')\n return factorySpace(effects, prefixed, 'linePrefix')\n }\n\n /**\n 
*\n *\n * @type {State}\n */\n function prefixed(code) {\n if (code === null || markdownLineEnding(code)) {\n return nok(code)\n }\n\n // Always populated by defaults.\n\n const tail = self.events[self.events.length - 1]\n if (\n !self.parser.constructs.disable.null.includes('codeIndented') &&\n tail &&\n tail[1].type === 'linePrefix' &&\n tail[2].sliceSerialize(tail[1], true).length >= 4\n ) {\n return ok(code)\n }\n return effects.interrupt(self.parser.constructs.flow, nok, ok)(code)\n }\n}\n"],"names":["content","tokenize","effects","ok","previous","code","enter","contentType","chunkInside","contentEnd","markdownLineEnding","check","continuationConstruct","contentContinue","consume","exit","next","resolve","events","subtokenize","nok","self","this","factorySpace","prefixed","tail","length","parser","constructs","disable","null","includes","type","sliceSerialize","interrupt","flow","partial"],"mappings":"wOAgBY,MAACA,EAAU,CACrBC,SAyBF,SAAyBC,EAASC,GAEhC,IAAIC,EACJ,OAYA,SAAoBC,GAKlB,OAJAH,EAAQI,MAAM,WACdF,EAAWF,EAAQI,MAAM,eAAgB,CACvCC,YAAa,YAERC,EAAYH,EACpB,EAYD,SAASG,EAAYH,GACnB,OAAa,OAATA,EACKI,EAAWJ,GAKhBK,EAAAA,mBAAmBL,GACdH,EAAQS,MACbC,EACAC,EACAJ,EAHKP,CAILG,IAIJH,EAAQY,QAAQT,GACTG,EACR,CAOD,SAASC,EAAWJ,GAGlB,OAFAH,EAAQa,KAAK,gBACbb,EAAQa,KAAK,WACNZ,EAAGE,EACX,CAOD,SAASQ,EAAgBR,GAQvB,OAPAH,EAAQY,QAAQT,GAChBH,EAAQa,KAAK,gBACbX,EAASY,KAAOd,EAAQI,MAAM,eAAgB,CAC5CC,YAAa,UACbH,aAEFA,EAAWA,EAASY,KACbR,CACR,CACH,EAvGES,QAeF,SAAwBC,GAEtB,OADAC,EAAAA,YAAYD,GACLA,CACT,GAdMN,EAAwB,CAC5BX,SAwGF,SAA8BC,EAASC,EAAIiB,GACzC,MAAMC,EAAOC,KACb,OAOA,SAAwBjB,GAKtB,OAJAH,EAAQa,KAAK,gBACbb,EAAQI,MAAM,cACdJ,EAAQY,QAAQT,GAChBH,EAAQa,KAAK,cACNQ,eAAarB,EAASsB,EAAU,aACxC,EAOD,SAASA,EAASnB,GAChB,GAAa,OAATA,GAAiBK,EAAkBA,mBAACL,GACtC,OAAOe,EAAIf,GAKb,MAAMoB,EAAOJ,EAAKH,OAAOG,EAAKH,OAAOQ,OAAS,GAC9C,OACGL,EAAKM,OAAOC,WAAWC,QAAQC,KAAKC,SAAS,iBAC9CN,GACiB,eAAjBA,EAAK,GAAGO,MACRP,EAAK,GAAGQ,eAAeR,EAAK,IAAI,GAAMC,QAAU,EAEzCvB,EAAGE,GAELH,EAAQgC,UAAUb,EAAKM,OAAOC,WAAWO,KAAMf,EAAKjB,EAApDD,CAAwDG
,EAChE,CACH,EA/IE+B,SAAS"}
@@ -1,2 +0,0 @@
1
- "use strict";Object.defineProperty(exports,"__esModule",{value:!0});var i=require("../../micromark-factory-destination/index.js"),e=require("../../micromark-factory-label/index.js"),n=require("../../micromark-factory-space/index.js"),t=require("../../micromark-factory-title/index.js"),r=require("../../micromark-factory-whitespace/index.js"),o=require("../../micromark-util-character/index.js"),a=require("../../micromark-util-normalize-identifier/index.js");const c={name:"definition",tokenize:function(t,c,u){const d=this;let s;return function(i){return t.enter("definition"),function(i){return e.factoryLabel.call(d,t,l,u,"definitionLabel","definitionLabelMarker","definitionLabelString")(i)}(i)};function l(i){return s=a.normalizeIdentifier(d.sliceSerialize(d.events[d.events.length-1][1]).slice(1,-1)),58===i?(t.enter("definitionMarker"),t.consume(i),t.exit("definitionMarker"),m):u(i)}function m(i){return o.markdownLineEndingOrSpace(i)?r.factoryWhitespace(t,k)(i):k(i)}function k(e){return i.factoryDestination(t,p,u,"definitionDestination","definitionDestinationLiteral","definitionDestinationLiteralMarker","definitionDestinationRaw","definitionDestinationString")(e)}function p(i){return t.attempt(f,y,y)(i)}function y(i){return o.markdownSpace(i)?n.factorySpace(t,x,"whitespace")(i):x(i)}function x(i){return null===i||o.markdownLineEnding(i)?(t.exit("definition"),d.parser.defined.push(s),c(i)):u(i)}}},f={tokenize:function(i,e,a){return function(e){return o.markdownLineEndingOrSpace(e)?r.factoryWhitespace(i,c)(e):a(e)};function c(e){return t.factoryTitle(i,f,a,"definitionTitle","definitionTitleMarker","definitionTitleString")(e)}function f(e){return o.markdownSpace(e)?n.factorySpace(i,u,"whitespace")(e):u(e)}function u(i){return null===i||o.markdownLineEnding(i)?e(i):a(i)}},partial:!0};exports.definition=c;
2
- //# sourceMappingURL=definition.js.map
@@ -1 +0,0 @@
1
- {"version":3,"file":"definition.js","sources":["../../../../../../../node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/definition.js"],"sourcesContent":["/**\n * @typedef {import('micromark-util-types').Construct} Construct\n * @typedef {import('micromark-util-types').State} State\n * @typedef {import('micromark-util-types').TokenizeContext} TokenizeContext\n * @typedef {import('micromark-util-types').Tokenizer} Tokenizer\n */\n\nimport {factoryDestination} from 'micromark-factory-destination'\nimport {factoryLabel} from 'micromark-factory-label'\nimport {factorySpace} from 'micromark-factory-space'\nimport {factoryTitle} from 'micromark-factory-title'\nimport {factoryWhitespace} from 'micromark-factory-whitespace'\nimport {\n markdownLineEnding,\n markdownLineEndingOrSpace,\n markdownSpace\n} from 'micromark-util-character'\nimport {normalizeIdentifier} from 'micromark-util-normalize-identifier'\n/** @type {Construct} */\nexport const definition = {\n name: 'definition',\n tokenize: tokenizeDefinition\n}\n\n/** @type {Construct} */\nconst titleBefore = {\n tokenize: tokenizeTitleBefore,\n partial: true\n}\n\n/**\n * @this {TokenizeContext}\n * @type {Tokenizer}\n */\nfunction tokenizeDefinition(effects, ok, nok) {\n const self = this\n /** @type {string} */\n let identifier\n return start\n\n /**\n * At start of a definition.\n *\n * ```markdown\n * > | [a]: b \"c\"\n * ^\n * ```\n *\n * @type {State}\n */\n function start(code) {\n // Do not interrupt paragraphs (but do follow definitions).\n // To do: do `interrupt` the way `markdown-rs` does.\n // To do: parse whitespace the way `markdown-rs` does.\n effects.enter('definition')\n return before(code)\n }\n\n /**\n * After optional whitespace, at `[`.\n *\n * ```markdown\n * > | [a]: b \"c\"\n * ^\n * ```\n *\n * @type {State}\n */\n function before(code) {\n // To do: parse whitespace the way `markdown-rs` does.\n\n return factoryLabel.call(\n self,\n effects,\n labelAfter,\n // Note: we 
don’t need to reset the way `markdown-rs` does.\n nok,\n 'definitionLabel',\n 'definitionLabelMarker',\n 'definitionLabelString'\n )(code)\n }\n\n /**\n * After label.\n *\n * ```markdown\n * > | [a]: b \"c\"\n * ^\n * ```\n *\n * @type {State}\n */\n function labelAfter(code) {\n identifier = normalizeIdentifier(\n self.sliceSerialize(self.events[self.events.length - 1][1]).slice(1, -1)\n )\n if (code === 58) {\n effects.enter('definitionMarker')\n effects.consume(code)\n effects.exit('definitionMarker')\n return markerAfter\n }\n return nok(code)\n }\n\n /**\n * After marker.\n *\n * ```markdown\n * > | [a]: b \"c\"\n * ^\n * ```\n *\n * @type {State}\n */\n function markerAfter(code) {\n // Note: whitespace is optional.\n return markdownLineEndingOrSpace(code)\n ? factoryWhitespace(effects, destinationBefore)(code)\n : destinationBefore(code)\n }\n\n /**\n * Before destination.\n *\n * ```markdown\n * > | [a]: b \"c\"\n * ^\n * ```\n *\n * @type {State}\n */\n function destinationBefore(code) {\n return factoryDestination(\n effects,\n destinationAfter,\n // Note: we don’t need to reset the way `markdown-rs` does.\n nok,\n 'definitionDestination',\n 'definitionDestinationLiteral',\n 'definitionDestinationLiteralMarker',\n 'definitionDestinationRaw',\n 'definitionDestinationString'\n )(code)\n }\n\n /**\n * After destination.\n *\n * ```markdown\n * > | [a]: b \"c\"\n * ^\n * ```\n *\n * @type {State}\n */\n function destinationAfter(code) {\n return effects.attempt(titleBefore, after, after)(code)\n }\n\n /**\n * After definition.\n *\n * ```markdown\n * > | [a]: b\n * ^\n * > | [a]: b \"c\"\n * ^\n * ```\n *\n * @type {State}\n */\n function after(code) {\n return markdownSpace(code)\n ? 
factorySpace(effects, afterWhitespace, 'whitespace')(code)\n : afterWhitespace(code)\n }\n\n /**\n * After definition, after optional whitespace.\n *\n * ```markdown\n * > | [a]: b\n * ^\n * > | [a]: b \"c\"\n * ^\n * ```\n *\n * @type {State}\n */\n function afterWhitespace(code) {\n if (code === null || markdownLineEnding(code)) {\n effects.exit('definition')\n\n // Note: we don’t care about uniqueness.\n // It’s likely that that doesn’t happen very frequently.\n // It is more likely that it wastes precious time.\n self.parser.defined.push(identifier)\n\n // To do: `markdown-rs` interrupt.\n // // You’d be interrupting.\n // tokenizer.interrupt = true\n return ok(code)\n }\n return nok(code)\n }\n}\n\n/**\n * @this {TokenizeContext}\n * @type {Tokenizer}\n */\nfunction tokenizeTitleBefore(effects, ok, nok) {\n return titleBefore\n\n /**\n * After destination, at whitespace.\n *\n * ```markdown\n * > | [a]: b\n * ^\n * > | [a]: b \"c\"\n * ^\n * ```\n *\n * @type {State}\n */\n function titleBefore(code) {\n return markdownLineEndingOrSpace(code)\n ? factoryWhitespace(effects, beforeMarker)(code)\n : nok(code)\n }\n\n /**\n * At title.\n *\n * ```markdown\n * | [a]: b\n * > | \"c\"\n * ^\n * ```\n *\n * @type {State}\n */\n function beforeMarker(code) {\n return factoryTitle(\n effects,\n titleAfter,\n nok,\n 'definitionTitle',\n 'definitionTitleMarker',\n 'definitionTitleString'\n )(code)\n }\n\n /**\n * After title.\n *\n * ```markdown\n * > | [a]: b \"c\"\n * ^\n * ```\n *\n * @type {State}\n */\n function titleAfter(code) {\n return markdownSpace(code)\n ? factorySpace(effects, titleAfterOptionalWhitespace, 'whitespace')(code)\n : titleAfterOptionalWhitespace(code)\n }\n\n /**\n * After title, after optional whitespace.\n *\n * ```markdown\n * > | [a]: b \"c\"\n * ^\n * ```\n *\n * @type {State}\n */\n function titleAfterOptionalWhitespace(code) {\n return code === null || markdownLineEnding(code) ? 
ok(code) : nok(code)\n }\n}\n"],"names":["definition","name","tokenize","effects","ok","nok","self","this","identifier","code","enter","factoryLabel","call","labelAfter","before","normalizeIdentifier","sliceSerialize","events","length","slice","consume","exit","markerAfter","markdownLineEndingOrSpace","factoryWhitespace","destinationBefore","factoryDestination","destinationAfter","attempt","titleBefore","after","markdownSpace","factorySpace","afterWhitespace","markdownLineEnding","parser","defined","push","beforeMarker","factoryTitle","titleAfter","titleAfterOptionalWhitespace","partial"],"mappings":"4cAmBY,MAACA,EAAa,CACxBC,KAAM,aACNC,SAaF,SAA4BC,EAASC,EAAIC,GACvC,MAAMC,EAAOC,KAEb,IAAIC,EACJ,OAYA,SAAeC,GAKb,OADAN,EAAQO,MAAM,cAchB,SAAgBD,GAGd,OAAOE,EAAYA,aAACC,KAClBN,EACAH,EACAU,EAEAR,EACA,kBACA,wBACA,wBARKM,CASLF,EACH,CA1BQK,CAAOL,EACf,EAqCD,SAASI,EAAWJ,GAIlB,OAHAD,EAAaO,EAAmBA,oBAC9BT,EAAKU,eAAeV,EAAKW,OAAOX,EAAKW,OAAOC,OAAS,GAAG,IAAIC,MAAM,GAAI,IAE3D,KAATV,GACFN,EAAQO,MAAM,oBACdP,EAAQiB,QAAQX,GAChBN,EAAQkB,KAAK,oBACNC,GAEFjB,EAAII,EACZ,CAYD,SAASa,EAAYb,GAEnB,OAAOc,EAAAA,0BAA0Bd,GAC7Be,EAAAA,kBAAkBrB,EAASsB,EAA3BD,CAA8Cf,GAC9CgB,EAAkBhB,EACvB,CAYD,SAASgB,EAAkBhB,GACzB,OAAOiB,EAAkBA,mBACvBvB,EACAwB,EAEAtB,EACA,wBACA,+BACA,qCACA,2BACA,8BATKqB,CAULjB,EACH,CAYD,SAASkB,EAAiBlB,GACxB,OAAON,EAAQyB,QAAQC,EAAaC,EAAOA,EAApC3B,CAA2CM,EACnD,CAcD,SAASqB,EAAMrB,GACb,OAAOsB,EAAAA,cAActB,GACjBuB,EAAAA,aAAa7B,EAAS8B,EAAiB,aAAvCD,CAAqDvB,GACrDwB,EAAgBxB,EACrB,CAcD,SAASwB,EAAgBxB,GACvB,OAAa,OAATA,GAAiByB,EAAkBA,mBAACzB,IACtCN,EAAQkB,KAAK,cAKbf,EAAK6B,OAAOC,QAAQC,KAAK7B,GAKlBJ,EAAGK,IAELJ,EAAII,EACZ,CACH,GAtLMoB,EAAc,CAClB3B,SA2LF,SAA6BC,EAASC,EAAIC,GACxC,OAcA,SAAqBI,GACnB,OAAOc,EAAAA,0BAA0Bd,GAC7Be,EAAAA,kBAAkBrB,EAASmC,EAA3Bd,CAAyCf,GACzCJ,EAAII,EACT,EAaD,SAAS6B,EAAa7B,GACpB,OAAO8B,EAAYA,aACjBpC,EACAqC,EACAnC,EACA,kBACA,wBACA,wBANKkC,CAOL9B,EACH,CAYD,SAAS+B,EAAW/B,GAClB,OAAOsB,EAAAA,cAActB,GACjBuB,EAAAA,aAAa7B,EAASsC,EAA8B,aAApDT,CAAkEvB,GAClEgC,EAA6BhC,EAClC,CAYD,SAASgC,EAA6BhC,GA
CpC,OAAgB,OAATA,GAAiByB,EAAAA,mBAAmBzB,GAAQL,EAAGK,GAAQJ,EAAII,EACnE,CACH,EAlQEiC,SAAS"}
@@ -1,2 +0,0 @@
1
- "use strict";Object.defineProperty(exports,"__esModule",{value:!0});var e=require("../../micromark-util-character/index.js");const r={name:"hardBreakEscape",tokenize:function(r,n,a){return function(e){return r.enter("hardBreakEscape"),r.consume(e),t};function t(t){return e.markdownLineEnding(t)?(r.exit("hardBreakEscape"),n(t)):a(t)}}};exports.hardBreakEscape=r;
2
- //# sourceMappingURL=hard-break-escape.js.map
@@ -1 +0,0 @@
1
- {"version":3,"file":"hard-break-escape.js","sources":["../../../../../../../node_modules/react-markdown/node_modules/micromark-core-commonmark/lib/hard-break-escape.js"],"sourcesContent":["/**\n * @typedef {import('micromark-util-types').Construct} Construct\n * @typedef {import('micromark-util-types').State} State\n * @typedef {import('micromark-util-types').TokenizeContext} TokenizeContext\n * @typedef {import('micromark-util-types').Tokenizer} Tokenizer\n */\n\nimport {markdownLineEnding} from 'micromark-util-character'\n/** @type {Construct} */\nexport const hardBreakEscape = {\n name: 'hardBreakEscape',\n tokenize: tokenizeHardBreakEscape\n}\n\n/**\n * @this {TokenizeContext}\n * @type {Tokenizer}\n */\nfunction tokenizeHardBreakEscape(effects, ok, nok) {\n return start\n\n /**\n * Start of a hard break (escape).\n *\n * ```markdown\n * > | a\\\n * ^\n * | b\n * ```\n *\n * @type {State}\n */\n function start(code) {\n effects.enter('hardBreakEscape')\n effects.consume(code)\n return after\n }\n\n /**\n * After `\\`, at eol.\n *\n * ```markdown\n * > | a\\\n * ^\n * | b\n * ```\n *\n * @type {State}\n */\n function after(code) {\n if (markdownLineEnding(code)) {\n effects.exit('hardBreakEscape')\n return ok(code)\n }\n return nok(code)\n }\n}\n"],"names":["hardBreakEscape","name","tokenize","effects","ok","nok","code","enter","consume","after","markdownLineEnding","exit"],"mappings":"6HASY,MAACA,EAAkB,CAC7BC,KAAM,kBACNC,SAOF,SAAiCC,EAASC,EAAIC,GAC5C,OAaA,SAAeC,GAGb,OAFAH,EAAQI,MAAM,mBACdJ,EAAQK,QAAQF,GACTG,CACR,EAaD,SAASA,EAAMH,GACb,OAAII,EAAAA,mBAAmBJ,IACrBH,EAAQQ,KAAK,mBACNP,EAAGE,IAELD,EAAIC,EACZ,CACH"}
@@ -1,2 +0,0 @@
1
- "use strict";Object.defineProperty(exports,"__esModule",{value:!0});var e=require("../../micromark-factory-space/index.js"),n=require("../../micromark-util-character/index.js"),t=require("../../micromark-util-chunked/index.js");const r={name:"headingAtx",tokenize:function(t,r,i){let a=0;return function(e){return t.enter("atxHeading"),function(e){return t.enter("atxHeadingSequence"),c(e)}(e)};function c(e){return 35===e&&a++<6?(t.consume(e),c):null===e||n.markdownLineEndingOrSpace(e)?(t.exit("atxHeadingSequence"),u(e)):i(e)}function u(i){return 35===i?(t.enter("atxHeadingSequence"),d(i)):null===i||n.markdownLineEnding(i)?(t.exit("atxHeading"),r(i)):n.markdownSpace(i)?e.factorySpace(t,u,"whitespace")(i):(t.enter("atxHeadingText"),o(i))}function d(e){return 35===e?(t.consume(e),d):(t.exit("atxHeadingSequence"),u(e))}function o(e){return null===e||35===e||n.markdownLineEndingOrSpace(e)?(t.exit("atxHeadingText"),u(e)):(t.consume(e),o)}},resolve:function(e,n){let r,i,a=e.length-2,c=3;"whitespace"===e[c][1].type&&(c+=2);a-2>c&&"whitespace"===e[a][1].type&&(a-=2);"atxHeadingSequence"===e[a][1].type&&(c===a-1||a-4>c&&"whitespace"===e[a-2][1].type)&&(a-=c+1===a?2:4);a>c&&(r={type:"atxHeadingText",start:e[c][1].start,end:e[a][1].end},i={type:"chunkText",start:e[c][1].start,end:e[a][1].end,contentType:"text"},t.splice(e,c,a-c+1,[["enter",r,n],["enter",i,n],["exit",i,n],["exit",r,n]]));return e}};exports.headingAtx=r;
2
- //# sourceMappingURL=heading-atx.js.map