securemark 0.294.9 → 0.294.11

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (44)
  1. package/.eslintrc.json +0 -5
  2. package/CHANGELOG.md +8 -0
  3. package/dist/index.js +138 -122
  4. package/markdown.d.ts +1 -1
  5. package/package.json +1 -1
  6. package/src/combinator/control/manipulation/convert.ts +9 -19
  7. package/src/combinator/control/manipulation/scope.ts +23 -7
  8. package/src/parser/api/body.test.ts +1 -1
  9. package/src/parser/api/header.test.ts +2 -2
  10. package/src/parser/api/normalize.test.ts +2 -0
  11. package/src/parser/api/normalize.ts +1 -1
  12. package/src/parser/block/blockquote.ts +4 -4
  13. package/src/parser/block/extension/fig.ts +2 -3
  14. package/src/parser/block/extension/figure.ts +1 -1
  15. package/src/parser/block/extension/table.ts +2 -2
  16. package/src/parser/block/heading.ts +3 -3
  17. package/src/parser/block/paragraph.test.ts +1 -1
  18. package/src/parser/block/reply/quote.ts +2 -4
  19. package/src/parser/block/sidefence.ts +2 -2
  20. package/src/parser/block/table.ts +4 -4
  21. package/src/parser/block/ulist.ts +3 -3
  22. package/src/parser/context.ts +9 -6
  23. package/src/parser/header.test.ts +2 -2
  24. package/src/parser/header.ts +5 -5
  25. package/src/parser/inline/annotation.ts +1 -1
  26. package/src/parser/inline/autolink/account.ts +7 -4
  27. package/src/parser/inline/autolink/anchor.ts +1 -1
  28. package/src/parser/inline/autolink/email.ts +1 -1
  29. package/src/parser/inline/autolink/hashnum.ts +1 -1
  30. package/src/parser/inline/autolink/hashtag.ts +1 -1
  31. package/src/parser/inline/autolink/url.ts +11 -11
  32. package/src/parser/inline/bracket.ts +11 -8
  33. package/src/parser/inline/extension/index.ts +3 -3
  34. package/src/parser/inline/extension/label.ts +1 -1
  35. package/src/parser/inline/extension/placeholder.ts +1 -1
  36. package/src/parser/inline/htmlentity.ts +1 -1
  37. package/src/parser/inline/link.ts +11 -13
  38. package/src/parser/inline/math.ts +2 -2
  39. package/src/parser/inline/media.ts +15 -17
  40. package/src/parser/inline/reference.ts +3 -3
  41. package/src/parser/inline/ruby.ts +3 -3
  42. package/src/parser/inline/shortmedia.ts +3 -5
  43. package/src/parser/inline/template.ts +6 -10
  44. package/src/parser/visibility.ts +8 -26
package/markdown.d.ts CHANGED
@@ -1112,7 +1112,7 @@ export namespace MarkdownParser {
1112
1112
  SourceParser.StrParser,
1113
1113
  Parser<string | HTMLElement, Context, [
1114
1114
  Parser<HTMLAnchorElement, Context, []>,
1115
- InlineParser,
1115
+ SourceParser.StrParser,
1116
1116
  ]>,
1117
1117
  ]> {
1118
1118
  }
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "securemark",
3
- "version": "0.294.9",
3
+ "version": "0.294.11",
4
4
  "description": "Secure markdown renderer working on browsers for user input data.",
5
5
  "private": false,
6
6
  "homepage": "https://github.com/falsandtru/securemark",
@@ -1,7 +1,7 @@
1
1
  import { Parser, List, Ctx, Context, subinput, failsafe } from '../../data/parser';
2
2
 
3
- export function convert<P extends Parser<unknown>>(conv: (source: string, context: Context<P>) => string, parser: P, continuous: boolean, empty?: boolean): P;
4
- export function convert<N>(conv: (source: string, context: Ctx) => string, parser: Parser<N>, continuous: boolean, empty = false): Parser<N> {
3
+ export function convert<P extends Parser<unknown>>(conv: (source: string, context: Context<P>) => string, parser: P, empty?: boolean): P;
4
+ export function convert<N>(conv: (source: string, context: Ctx) => string, parser: Parser<N>, empty = false): Parser<N> {
5
5
  assert(parser);
6
6
  return failsafe(input => {
7
7
  const { context } = input;
@@ -14,22 +14,12 @@ export function convert<N>(conv: (source: string, context: Ctx) => string, parse
14
14
  context.position = source.length;
15
15
  return new List();
16
16
  }
17
- assert(source.endsWith(src) || src.endsWith(source, position) || !continuous);
18
- if (continuous) {
19
- context.position += source.length - position - src.length;
20
- const result = parser(input);
21
- assert(context.position > position || !result);
22
- context.source = source;
23
- return result;
24
- }
25
- else {
26
- const { offset, backtracks } = context;
27
- const result = parser(subinput(src, context));
28
- context.position = context.source.length
29
- assert(context.offset === offset);
30
- assert(context.source === source);
31
- assert(context.backtracks === backtracks);
32
- return result;
33
- }
17
+ const { offset, backtracks } = context;
18
+ const result = parser(subinput(src, context));
19
+ context.position = context.source.length
20
+ assert(context.offset === offset);
21
+ assert(context.source === source);
22
+ assert(context.backtracks === backtracks);
23
+ return result;
34
24
  });
35
25
  }
@@ -1,17 +1,24 @@
1
1
  import { Parser, Context, input, failsafe } from '../../data/parser';
2
2
  import { matcher } from '../../../combinator';
3
3
 
4
- export function focus<P extends Parser<unknown>>(scope: string | RegExp, parser: P): P;
5
- export function focus<N>(scope: string | RegExp, parser: Parser<N>): Parser<N> {
4
+ export function focus<P extends Parser<unknown>>(scope: string | RegExp, parser: P, slice?: boolean): P;
5
+ export function focus<N>(scope: string | RegExp, parser: Parser<N>, slice = true): Parser<N> {
6
6
  assert(parser);
7
7
  const match = matcher(scope, false);
8
- return failsafe(({ context }) => {
8
+ return failsafe(arg => {
9
+ const { context } = arg;
9
10
  const { source, position } = context;
10
11
  if (position === source.length) return;
11
12
  const src = match({ context })?.head?.value ?? '';
12
13
  assert(source.startsWith(src, position));
13
14
  if (src === '') return;
14
- context.range = src.length;
15
+ const range = context.range = src.length;
16
+ if (!slice) {
17
+ const result = parser(arg);
18
+ context.position += result && context.position === position ? range : 0;
19
+ assert(context.position > position || !result);
20
+ return result;
21
+ }
15
22
  context.offset ??= 0;
16
23
  context.offset += position;
17
24
  const result = parser(input(src, context));
@@ -25,16 +32,25 @@ export function focus<N>(scope: string | RegExp, parser: Parser<N>): Parser<N> {
25
32
  }
26
33
 
27
34
  //export function rewrite<N, C extends Ctx, D extends Parser<unknown, C>[]>(scope: Parser<unknown, C, D>, parser: Parser<N, C, never>): Parser<N, C, D>;
28
- export function rewrite<P extends Parser<unknown>>(scope: Parser<unknown, Context<P>>, parser: P): P;
29
- export function rewrite<N>(scope: Parser<unknown>, parser: Parser<N>): Parser<N> {
35
+ export function rewrite<P extends Parser<unknown>>(scope: Parser<unknown, Context<P>>, parser: P, slice?: boolean): P;
36
+ export function rewrite<N>(scope: Parser<unknown>, parser: Parser<N>, slice = true): Parser<N> {
30
37
  assert(scope);
31
38
  assert(parser);
32
- return failsafe(({ context }) => {
39
+ return failsafe(arg => {
40
+ const { context } = arg;
33
41
  const { source, position } = context;
34
42
  if (position === source.length) return;
35
43
  const res1 = scope({ context });
36
44
  assert(context.position > position || !res1);
37
45
  if (res1 === undefined || context.position < position) return;
46
+ const range = context.range = context.position - position;
47
+ if (!slice) {
48
+ context.position = position;
49
+ const res2 = parser(arg);
50
+ context.position += res2 && context.position === position ? range : 0;
51
+ assert(context.position > position || !res2);
52
+ return res2;
53
+ }
38
54
  const src = source.slice(position, context.position);
39
55
  assert(src !== '');
40
56
  assert(source.startsWith(src, position));
@@ -13,7 +13,7 @@ describe('Unit: parser/api/body', () => {
13
13
  assert.deepStrictEqual(body('---\na: b\n---\n\n'), '');
14
14
  assert.deepStrictEqual(body('---\na: b\n---\n\n\n'), '\n');
15
15
  assert.deepStrictEqual(body('---\na: b\n---\n\n\na'), '\na');
16
- assert.deepStrictEqual(body('--- \r\na: b \r\n--- \r\n \r\n \r\na'), ' \r\na');
16
+ //assert.deepStrictEqual(body('--- \r\na: b \r\n--- \r\n \r\n \r\na'), ' \r\na');
17
17
  });
18
18
 
19
19
  });
@@ -13,13 +13,13 @@ describe('Unit: parser/api/header', () => {
13
13
  assert.deepStrictEqual(headers('---\na: b\n---\nc'), []);
14
14
  assert.deepStrictEqual(headers('---\r \na: b\n---'), []);
15
15
  assert.deepStrictEqual(headers('---\na:\rb\n---'), []);
16
- assert.deepStrictEqual(headers('---\na: b\r \n---'), []);
16
+ //assert.deepStrictEqual(headers('---\na: b\r \n---'), []);
17
17
  assert.deepStrictEqual(headers('---\n\n---'), []);
18
18
  assert.deepStrictEqual(headers('---\n \n---'), []);
19
19
  assert.deepStrictEqual(headers('---\n-\n---'), []);
20
20
  assert.deepStrictEqual(headers('---\na: b\n---'), ['a: b']);
21
21
  assert.deepStrictEqual(headers('---\na: b\nC: D e\n---\n'), ['a: b', 'C: D e']);
22
- assert.deepStrictEqual(headers('--- \r\na: b \r\n--- \r\n \r\n'), ['a: b']);
22
+ //assert.deepStrictEqual(headers('--- \r\na: b \r\n--- \r\n \r\n'), ['a: b']);
23
23
  });
24
24
 
25
25
  });
@@ -6,6 +6,8 @@ describe('Unit: parser/normalize', () => {
6
6
  assert(normalize('\r') === '\n');
7
7
  assert(normalize('\r\n') === '\n');
8
8
  assert(normalize('\n\r') === '\n\n');
9
+ assert(normalize('\u2028') === '\n');
10
+ assert(normalize('\u2029') === '\n');
9
11
  assert(normalize('\x00') === '\uFFFD');
10
12
  assert(normalize('\x01') === '\uFFFD');
11
13
  assert(normalize('\x02') === '\uFFFD');
@@ -9,7 +9,7 @@ export function normalize(source: string): string {
9
9
  }
10
10
 
11
11
  function format(source: string): string {
12
- return source.replace(/\r\n?/g, '\n');
12
+ return source.replace(/\r\n?|[\u2028\u2029]/g, '\n');
13
13
  }
14
14
 
15
15
  const invalid = new RegExp([
@@ -25,10 +25,10 @@ const source: BlockquoteParser.SourceParser = lazy(() => fmap(
25
25
  some(recursion(Recursion.blockquote, union([
26
26
  rewrite(
27
27
  indent,
28
- convert(unindent, source, false, true)),
28
+ convert(unindent, source, true)),
29
29
  rewrite(
30
30
  some(contentline, opener),
31
- convert(unindent, fmap(autolink, ns => new List([new Data(html('pre', defrag(unwrap(ns))))])), false, true)),
31
+ convert(unindent, fmap(autolink, ns => new List([new Data(html('pre', defrag(unwrap(ns))))])), true)),
32
32
  ]))),
33
33
  ns => new List([new Data(html('blockquote', unwrap(ns)))])));
34
34
 
@@ -36,7 +36,7 @@ const markdown: BlockquoteParser.MarkdownParser = lazy(() => fmap(
36
36
  some(recursion(Recursion.blockquote, union([
37
37
  rewrite(
38
38
  indent,
39
- convert(unindent, markdown, false, true)),
39
+ convert(unindent, markdown, true)),
40
40
  rewrite(
41
41
  some(contentline, opener),
42
42
  convert(unindent, ({ context }) => {
@@ -51,6 +51,6 @@ const markdown: BlockquoteParser.MarkdownParser = lazy(() => fmap(
51
51
  }, context);
52
52
  context.position = source.length;
53
53
  return new List([new Data(html('section', [document, html('h2', 'References'), references]))]);
54
- }, false, true)),
54
+ }, true)),
55
55
  ]))),
56
56
  ns => new List([new Data(html('blockquote', unwrap(ns)))])));
@@ -28,7 +28,7 @@ export const segment: FigParser.SegmentParser = block(
28
28
  export const fig: FigParser = block(rewrite(segment, verify(convert(
29
29
  (source, context) => {
30
30
  // Bug: TypeScript
31
- const fence = (/^[^\n]*\n!?>+ /.test(source) && source.match(/^~{3,}(?=[^\S\n]*$)/mg) as string[] || [])
31
+ const fence = (/^[^\n]*\n!?>+ /.test(source) && source.match(/^~{3,}(?=[^\S\n]*$)/gm) as string[] || [])
32
32
  .reduce((max, fence) => fence > max ? fence : max, '~~') + '~';
33
33
  const { position } = context;
34
34
  const result = parser({ context });
@@ -37,8 +37,7 @@ export const fig: FigParser = block(rewrite(segment, verify(convert(
37
37
  ? `${fence}figure ${source.replace(/^(.+\n.+\n)([\S\s]+?)\n?$/, '$1\n$2')}\n${fence}`
38
38
  : `${fence}figure ${source}\n\n${fence}`;
39
39
  },
40
- union([figure]),
41
- false),
40
+ union([figure])),
42
41
  ([{ value: el }]) => el.tagName === 'FIGURE')));
43
42
 
44
43
  const parser = sequence([
@@ -67,7 +67,7 @@ export const figure: FigureParser = block(fallback(rewrite(segment, fmap(
67
67
  emptyline,
68
68
  block(visualize(trimBlank(some(inline)))),
69
69
  ]),
70
- ]), false),
70
+ ])),
71
71
  nodes => {
72
72
  const [label, param, content, ...caption] = unwrap(nodes) as [HTMLAnchorElement, string, ...HTMLElement[]];
73
73
  return new List([
@@ -133,8 +133,8 @@ const dataline: CellParser.DatalineParser = line(
133
133
  rewrite(
134
134
  contentline,
135
135
  union([
136
- validate(/!+ /y, convert(source => `:${source}`, data, false)),
137
- convert(source => `: ${source}`, data, false),
136
+ validate(/!+ /y, convert(source => `:${source}`, data)),
137
+ convert(source => `: ${source}`, data),
138
138
  ])));
139
139
 
140
140
  function attributes(source: string): Record<string, string | undefined> {
@@ -12,15 +12,15 @@ export const segment: HeadingParser.SegmentParser = block(focus(
12
12
  /#+ +\S[^\n]*(?:\n#+(?=$|[ \n])[^\n]*)*(?:$|\n)/y,
13
13
  input => {
14
14
  const { context } = input;
15
- const { source } = context;
15
+ const { source, range = 0 } = context;
16
16
  const acc = new List<Data<string>>();
17
- for (; context.position < source.length;) {
17
+ for (const len = context.position + range; context.position < len;) {
18
18
  const line = firstline(source, context.position);
19
19
  acc.push(new Data(line));
20
20
  context.position += line.length;
21
21
  }
22
22
  return acc;
23
- }));
23
+ }, false));
24
24
 
25
25
  export const heading: HeadingParser = block(rewrite(segment,
26
26
  // その他の表示制御は各所のCSSで行う。
@@ -23,7 +23,7 @@ describe('Unit: parser/block/paragraph', () => {
23
23
  assert.deepStrictEqual(inspect(parser('a\\\n'), ctx), [['<p>a</p>'], '']);
24
24
  assert.deepStrictEqual(inspect(parser('a\\\nb'), ctx), [['<p>a<br>b</p>'], '']);
25
25
  assert.deepStrictEqual(inspect(parser('a&NewLine;b'), ctx), [['<p>a b</p>'], '']);
26
- assert.deepStrictEqual(inspect(parser('&Tab;&NewLine;'), ctx), [['<p>&amp;NewLine;</p>'], '']);
26
+ assert.deepStrictEqual(inspect(parser('&Tab;&NewLine;'), ctx), [['<p>&amp;Tab;</p>'], '']);
27
27
  assert.deepStrictEqual(inspect(parser('<wbr>'), ctx), [['<p>&lt;wbr&gt;</p>'], '']);
28
28
  assert.deepStrictEqual(inspect(parser('<wbr>\n'), ctx), [['<p>&lt;wbr&gt;</p>'], '']);
29
29
  assert.deepStrictEqual(inspect(parser('<wbr>\na'), ctx), [['<p>&lt;wbr&gt;<br>a</p>'], '']);
@@ -13,15 +13,13 @@ export const quote: ReplyParser.QuoteParser = lazy(() => block(fmap(
13
13
  rewrite(
14
14
  some(validate(syntax, anyline)),
15
15
  convert(
16
- // TODO: インデント数を渡してインデント数前の行頭確認を行う実装に置き換える
17
- source => source.replace(/(?<=^>+ )/mg, '\r'),
16
+ source => source.replace(/(?<=^>+ )/gm, '\r'),
18
17
  some(union([
19
18
  // quote補助関数が残した数式をパースする。
20
19
  math,
21
20
  autolink,
22
21
  unescsource,
23
- ])),
24
- false)),
22
+ ])))),
25
23
  (ns, { source, position }) => new List([
26
24
  new Data(source[position - 1] === '\n' ? ns.pop()!.value as HTMLBRElement : html('br')),
27
25
  new Data(html('span', { class: 'quote' }, defrag(unwrap(ns)))),
@@ -24,9 +24,9 @@ const source: SidefenceParser.SourceParser = lazy(() => fmap(
24
24
  some(recursion(Recursion.block, union([
25
25
  focus(
26
26
  /(?:\|\|+(?=$|[ \n])[^\n]*(?:$|\n))+/y,
27
- convert(unindent, source, false, true)),
27
+ convert(unindent, source, true)),
28
28
  rewrite(
29
29
  some(contentline, opener),
30
- convert(unindent, fmap(autolink, ns => new List([new Data(html('pre', defrag(unwrap(ns))))])), false, true)),
30
+ convert(unindent, fmap(autolink, ns => new List([new Data(html('pre', defrag(unwrap(ns))))])), true)),
31
31
  ]))),
32
32
  ns => new List([new Data(html('blockquote', unwrap(ns)))])));
@@ -40,10 +40,10 @@ const row = <P extends CellParser | AlignParser>(parser: P, optional: boolean):
40
40
  const align: AlignParser = fmap(open(
41
41
  '|',
42
42
  union([
43
- focus(/:-+:?/y, ({ context: { source } }) =>
44
- new List([new Data(source.at(-1) === ':' ? 'center' : 'start')])),
45
- focus(/-+:?/y, ({ context: { source } }) =>
46
- new List([new Data(source.at(-1) === ':' ? 'end' : '')])),
43
+ focus(/:-+:?/y, ({ context: { source, position, range = 0 } }) =>
44
+ new List([new Data(source[position + range - 1] === ':' ? 'center' : 'start')]), false),
45
+ focus(/-+:?/y, ({ context: { source, position, range = 0 } }) =>
46
+ new List([new Data(source[position + range - 1] === ':' ? 'end' : '')]), false),
47
47
  ])),
48
48
  ns => new List([new Data(html('td', defrag(unwrap(ns))))]));
49
49
 
@@ -31,9 +31,9 @@ export const ulist_: UListParser = lazy(() => block(fmap(validate(
31
31
 
32
32
  export const checkbox = focus(
33
33
  /\[[xX ]\](?=$|[ \n])/y,
34
- ({ context: { source } }) => new List([
35
- new Data(html('span', { class: 'checkbox' }, source[1].trimStart() ? '☑' : '☐')),
36
- ]));
34
+ ({ context: { source, position } }) => new List([
35
+ new Data(html('span', { class: 'checkbox' }, source[position + 1].trimStart() ? '☑' : '☐')),
36
+ ]), false);
37
37
 
38
38
  export function fillFirstLine(nodes: List<Data<string | HTMLElement>>): List<Data<string | HTMLElement>> {
39
39
  const node = nodes.head?.value;
@@ -28,12 +28,15 @@ export const enum Recursion {
28
28
  }
29
29
 
30
30
  export const enum Backtrack {
31
- bracket = 1 << 6,
32
- doublebracket = 1 << 5,
33
- link = 1 << 4,
34
- ruby = 1 << 3,
35
- escbracket = 1 << 2,
36
- autolink = 0 << 2,
31
+ // 構文
32
+ doublebracket = 1 << 6,
33
+ link = 1 << 5,
34
+ ruby = 1 << 4,
35
+ // 特殊構造
36
+ escapable = 1 << 3,
37
+ unescapable = 1 << 2,
38
+ // 共通構造
39
+ common = 0 << 2,
37
40
  }
38
41
 
39
42
  export const enum Command {
@@ -17,7 +17,7 @@ describe('Unit: parser/header', () => {
17
17
  assert.deepStrictEqual(inspect(parser('---\na: b\n---\nc'), ctx), undefined);
18
18
  assert.deepStrictEqual(inspect(parser('---\r \na: b\n---'), ctx), undefined);
19
19
  assert.deepStrictEqual(inspect(parser('---\na:\rb\n---'), ctx), [['<pre class="invalid" translate="no">---\na:\nb\n---</pre>'], '']);
20
- assert.deepStrictEqual(inspect(parser('---\na: b\r \n---'), ctx), [['<pre class="invalid" translate="no">---\na: b\n \n---</pre>'], '']);
20
+ //assert.deepStrictEqual(inspect(parser('---\na: b\r \n---'), ctx), [['<pre class="invalid" translate="no">---\na: b\n \n---</pre>'], '']);
21
21
  assert.deepStrictEqual(inspect(parser('---\n\n---'), ctx), undefined);
22
22
  assert.deepStrictEqual(inspect(parser('---\n \n---'), ctx), undefined);
23
23
  assert.deepStrictEqual(inspect(parser('---\n-\n---'), ctx), [['<pre class="invalid" translate="no">---\n-\n---</pre>'], '']);
@@ -30,7 +30,7 @@ describe('Unit: parser/header', () => {
30
30
  assert.deepStrictEqual(inspect(parser('---\na: b\n---'), ctx), [['<aside class="header"><details open=""><summary>Header</summary><span class="field" data-name="a" data-value="b"><span class="field-name">a</span>: <span class="field-value">b</span>\n</span></details></aside>'], '']);
31
31
  assert.deepStrictEqual(inspect(parser('---\na: b\n---\n'), ctx), [['<aside class="header"><details open=""><summary>Header</summary><span class="field" data-name="a" data-value="b"><span class="field-name">a</span>: <span class="field-value">b</span>\n</span></details></aside>'], '']);
32
32
  assert.deepStrictEqual(inspect(parser('---\na: b\nC: D e\n---\n'), ctx), [['<aside class="header"><details open=""><summary>Header</summary><span class="field" data-name="a" data-value="b"><span class="field-name">a</span>: <span class="field-value">b</span>\n</span><span class="field" data-name="c" data-value="D e"><span class="field-name">C</span>: <span class="field-value">D e</span>\n</span></details></aside>'], '']);
33
- assert.deepStrictEqual(inspect(parser('--- \r\na: b \r\n--- \r\n \r\n \r\na'), ctx), [['<aside class="header"><details open=""><summary>Header</summary><span class="field" data-name="a" data-value="b"><span class="field-name">a</span>: <span class="field-value">b</span>\n</span></details></aside>'], ' \r\na']);
33
+ //assert.deepStrictEqual(inspect(parser('--- \r\na: b \r\n--- \r\n \r\n \r\na'), ctx), [['<aside class="header"><details open=""><summary>Header</summary><span class="field" data-name="a" data-value="b"><span class="field-name">a</span>: <span class="field-value">b</span>\n</span></details></aside>'], ' \r\na']);
34
34
  assert.deepStrictEqual(inspect(parser('----\na: b\n----'), ctx), [['<aside class="header"><details open=""><summary>Header</summary><span class="field" data-name="a" data-value="b"><span class="field-name">a</span>: <span class="field-value">b</span>\n</span></details></aside>'], '']);
35
35
  });
36
36
 
@@ -7,14 +7,14 @@ import { normalize } from './api/normalize';
7
7
  import { html, defrag } from 'typed-dom/dom';
8
8
 
9
9
  export const header: MarkdownParser.HeaderParser = lazy(() => validate(
10
- /---+ *\r?\n(?=\S)/y,
10
+ /---+ *\n(?=\S)/y,
11
11
  inits([
12
12
  block(
13
13
  union([
14
14
  validate(({ context }) => context.header ?? true,
15
- focus(/(---+) *\r?\n(?:[A-Za-z][0-9A-Za-z]*(?:-[0-9A-Za-z]+)*:[ \t]+\S[^\r\n]*\r?\n){1,100}\1 *(?:$|\r?\n)/y,
15
+ focus(/(---+) *\n(?:[A-Za-z][0-9A-Za-z]*(?:-[0-9A-Za-z]+)*:[ \t]+\S[^\n]*\n){1,100}\1 *(?:$|\n)/y,
16
16
  convert(source =>
17
- normalize(source.slice(source.indexOf('\n') + 1, source.trimEnd().lastIndexOf('\n'))).replace(/(\S)\s+$/mg, '$1'),
17
+ normalize(source.slice(source.indexOf('\n') + 1, source.trimEnd().lastIndexOf('\n'))),
18
18
  fmap(
19
19
  some(union([field])),
20
20
  ns => new List([
@@ -23,7 +23,7 @@ export const header: MarkdownParser.HeaderParser = lazy(() => validate(
23
23
  { open: '' },
24
24
  defrag(unwrap(ns.unshift(new Data(html('summary', 'Header'))) && ns))),
25
25
  ])),
26
- ])), false))),
26
+ ]))))),
27
27
  ({ context }) => {
28
28
  const { source, position } = context;
29
29
  context.position += source.length;
@@ -36,7 +36,7 @@ export const header: MarkdownParser.HeaderParser = lazy(() => validate(
36
36
  ]);
37
37
  },
38
38
  ])),
39
- clear(str(/ *\r?\n/y)),
39
+ clear(str(/ *\n/y)),
40
40
  ])));
41
41
 
42
42
  const field: MarkdownParser.HeaderParser.FieldParser = line(({ context: { source, position } }) => {
@@ -13,7 +13,7 @@ export const annotation: AnnotationParser = lazy(() => constraint(State.annotati
13
13
  trimBlankStart(some(union([inline]), ')', [[')', 1]])))),
14
14
  '))',
15
15
  false,
16
- [1 | Backtrack.bracket, 3 | Backtrack.doublebracket],
16
+ [1 | Backtrack.common, 3 | Backtrack.doublebracket],
17
17
  ([, ns], context) =>
18
18
  context.linebreak === 0
19
19
  ? new List([new Data(html('sup', { class: 'annotation' }, [html('span', defrag(unwrap(trimBlankNodeEnd(ns))))]))])
@@ -1,7 +1,7 @@
1
1
  import { AutolinkParser } from '../../inline';
2
2
  import { State, Backtrack } from '../../context';
3
3
  import { List, Data } from '../../../combinator/data/parser';
4
- import { some, state, constraint, verify, surround, lazy } from '../../../combinator';
4
+ import { some, state, constraint, verify, surround, setBacktrack, lazy } from '../../../combinator';
5
5
  import { parse } from '../link';
6
6
  import { emoji } from './hashtag';
7
7
  import { str } from '../../source';
@@ -17,7 +17,7 @@ export const account: AutolinkParser.AccountParser = lazy(() => constraint(State
17
17
  str(/[0-9a-z](?:[.-](?=[0-9a-z])|[0-9a-z]){0,254}\/|/yi),
18
18
  str(/[a-z][0-9a-z]*(?:[-.][0-9a-z]+)*(?![-.]?[0-9a-z@]|>>|:\S)/yi),
19
19
  false,
20
- [3 | Backtrack.autolink]),
20
+ [3 | Backtrack.unescapable]),
21
21
  some(surround(
22
22
  '#',
23
23
  verify(
@@ -29,7 +29,7 @@ export const account: AutolinkParser.AccountParser = lazy(() => constraint(State
29
29
  /(?![0-9a-z@]|>>|:\S|[^\p{C}\p{S}\p{P}\s]|emoji)/yu.source,
30
30
  ].join('|').replace(/emoji/g, emoji.source), 'yu'),
31
31
  false,
32
- [3 | Backtrack.autolink])),
32
+ [3 | Backtrack.unescapable])),
33
33
  '',
34
34
  false, [],
35
35
  ([[{ value: host }, { value: account }], nodes], context) => {
@@ -45,7 +45,10 @@ export const account: AutolinkParser.AccountParser = lazy(() => constraint(State
45
45
  ]);
46
46
  },
47
47
  ([[{ value: host }, { value: account }]], context) => {
48
- if (context.source[context.position] === '#') return;
48
+ if (context.source[context.position] === '#') {
49
+ assert(context.source[context.position - context.range!] === '@');
50
+ return void setBacktrack(context, [2 | Backtrack.unescapable], context.position - context.range!);
51
+ }
49
52
  return new List([
50
53
  new Data(define(
51
54
  parse(
@@ -22,7 +22,7 @@ export const anchor: AutolinkParser.AnchorParser = lazy(() => constraint(State.a
22
22
  str(/[0-9a-z]+(?:-[0-9a-z]+)*(?!-?[0-9a-z@#]|>>|:\S)/yi),
23
23
  '',
24
24
  false,
25
- [3 | Backtrack.autolink],
25
+ [3 | Backtrack.unescapable],
26
26
  ([, [{ value }]], context) =>
27
27
  new List([
28
28
  new Data(define(parse(
@@ -15,6 +15,6 @@ export const email: AutolinkParser.EmailParser = constraint(State.autolink, stat
15
15
  ([{ value }]) => value.length <= 254),
16
16
  '',
17
17
  false,
18
- [3 | Backtrack.autolink],
18
+ [3 | Backtrack.unescapable],
19
19
  ([, [{ value }]]) =>
20
20
  new List([new Data(html('a', { class: 'email', href: `mailto:${value}` }, value))]))));
@@ -17,7 +17,7 @@ export const hashnum: AutolinkParser.HashnumParser = lazy(() => constraint(State
17
17
  ].join('|').replace(/emoji/g, emoji.source), 'yu')),
18
18
  '',
19
19
  false,
20
- [1 | Backtrack.autolink],
20
+ [1 | Backtrack.unescapable],
21
21
  ([, [{ value }]], context) =>
22
22
  new List([
23
23
  new Data(define(parse(
@@ -25,7 +25,7 @@ export const hashtag: AutolinkParser.HashtagParser = lazy(() => constraint(State
25
25
  /(?![0-9a-z@#]|>>|:\S|[^\p{C}\p{S}\p{P}\s]|emoji)/yu.source,
26
26
  ].join('|').replace(/emoji/g, emoji.source), 'yu'),
27
27
  false,
28
- [3 | Backtrack.autolink],
28
+ [3 | Backtrack.unescapable],
29
29
  ([, [{ value }]], context) =>
30
30
  new List([
31
31
  new Data(define(parse(
@@ -14,7 +14,7 @@ export const url: AutolinkParser.UrlParser = lazy(() => rewrite(
14
14
  precedence(1, verify(bracket, ns => ns.length > 0)),
15
15
  ]), undefined, [[/[^\x21-\x7E]|\$/y, 9]])),
16
16
  false,
17
- [3 | Backtrack.autolink]),
17
+ [3 | Backtrack.unescapable]),
18
18
  union([
19
19
  constraint(State.autolink, state(State.autolink, ({ context }) =>
20
20
  new List([new Data(parse(new List(), new List([new Data(context.source)]), context))]))),
@@ -27,27 +27,27 @@ export const lineurl: AutolinkParser.UrlParser.LineUrlParser = lazy(() => focus(
27
27
  str('!'),
28
28
  union([
29
29
  constraint(State.autolink, state(State.autolink, ({ context }) => {
30
- const { source, position } = context;
31
- context.position -= source[0] === '!' ? 1 : 0;
32
- context.position += source.length;
30
+ const { source, position, range = 0 } = context;
31
+ context.position -= position > 0 && source[position - 1] === '!' ? 1 : 0;
32
+ context.position += range;
33
33
  return new List([
34
34
  new Data(parse(
35
35
  new List(),
36
- new List([new Data(source.slice(position))]),
36
+ new List([new Data(source.slice(position, context.position))]),
37
37
  context))
38
38
  ]);
39
39
  })),
40
- open(str(/[^:]+/y), some(inline)),
40
+ str(/[^:]+/y),
41
41
  ]),
42
- ])));
42
+ ]), false));
43
43
 
44
44
  const bracket: AutolinkParser.UrlParser.BracketParser = lazy(() => union([
45
45
  surround(str('('), recursion(Recursion.terminal, some(union([bracket, unescsource]), ')')), str(')'),
46
- true, [3 | Backtrack.autolink], undefined, () => new List()),
46
+ true, [3 | Backtrack.unescapable], undefined, () => new List()),
47
47
  surround(str('['), recursion(Recursion.terminal, some(union([bracket, unescsource]), ']')), str(']'),
48
- true, [3 | Backtrack.autolink], undefined, () => new List()),
48
+ true, [3 | Backtrack.unescapable], undefined, () => new List()),
49
49
  surround(str('{'), recursion(Recursion.terminal, some(union([bracket, unescsource]), '}')), str('}'),
50
- true, [3 | Backtrack.autolink], undefined, () => new List()),
50
+ true, [3 | Backtrack.unescapable], undefined, () => new List()),
51
51
  surround(str('"'), precedence(2, recursion(Recursion.terminal, some(unescsource, '"'))), str('"'),
52
- true, [3 | Backtrack.autolink], undefined, () => new List()),
52
+ true, [3 | Backtrack.unescapable], undefined, () => new List()),
53
53
  ]));
@@ -39,7 +39,7 @@ const p1 = lazy(() => surround(
39
39
  precedence(1, recursion(Recursion.bracket, some(inline, ')', [[')', 1]]))),
40
40
  str(')'),
41
41
  true,
42
- [2 | Backtrack.bracket],
42
+ [2 | Backtrack.common],
43
43
  ([as, bs = new List(), cs], { source, position, range = 0 }) => {
44
44
  const str = source.slice(position - range + 1, position - 1);
45
45
  return indexA.test(str)
@@ -53,7 +53,7 @@ const p2 = lazy(() => surround(
53
53
  precedence(1, recursion(Recursion.bracket, some(inline, ')', [[')', 1]]))),
54
54
  str(')'),
55
55
  true,
56
- [2 | Backtrack.bracket],
56
+ [2 | Backtrack.common],
57
57
  ([as, bs = [], cs], { source, position, range = 0 }) => {
58
58
  const str = source.slice(position - range + 1, position - 1);
59
59
  return indexF.test(str)
@@ -67,12 +67,15 @@ const s1 = lazy(() => surround(
67
67
  precedence(1, recursion(Recursion.bracket, some(inline, ']', [[']', 1]]))),
68
68
  str(']'),
69
69
  true,
70
- [2 | Backtrack.bracket],
70
+ [2 | Backtrack.common],
71
71
  ([as, bs = new List(), cs], context) => {
72
72
  if (context.state! & State.link) {
73
73
  const { source, position, range = 0 } = context;
74
74
  const head = position - range;
75
- if (context.linebreak !== 0 || source[position] !== '{') {
75
+ if (context.linebreak !== 0) {
76
+ setBacktrack(context, [2 | Backtrack.link, 2 | Backtrack.ruby], head);
77
+ }
78
+ else if (source[position] !== '{') {
76
79
  setBacktrack(context, [2 | Backtrack.link], head);
77
80
  }
78
81
  else {
@@ -97,7 +100,7 @@ const s2 = lazy(() => surround(
97
100
  precedence(1, recursion(Recursion.bracket, some(inline, ']', [[']', 1]]))),
98
101
  str(']'),
99
102
  true,
100
- [2 | Backtrack.bracket],
103
+ [2 | Backtrack.common],
101
104
  undefined,
102
105
  ([as, bs = new List()]) => as.import(bs as List<Data<string>>)));
103
106
 
@@ -106,7 +109,7 @@ const c1 = lazy(() => surround(
106
109
  precedence(1, recursion(Recursion.bracket, some(inline, '}', [['}', 1]]))),
107
110
  str('}'),
108
111
  true,
109
- [2 | Backtrack.bracket],
112
+ [2 | Backtrack.common],
110
113
  undefined,
111
114
  ([as, bs = new List()]) => as.import(bs as List<Data<string>>)));
112
115
 
@@ -115,7 +118,7 @@ const c2 = lazy(() => surround(
115
118
  precedence(1, recursion(Recursion.bracket, some(inline, '}', [['}', 1]]))),
116
119
  str('}'),
117
120
  true,
118
- [2 | Backtrack.bracket],
121
+ [2 | Backtrack.common],
119
122
  undefined,
120
123
  ([as, bs = new List()]) => as.import(bs as List<Data<string>>)));
121
124
 
@@ -125,6 +128,6 @@ const d1 = lazy(() => surround(
125
128
  precedence(2, recursion(Recursion.bracket, some(inline, /["\n]/y, [['"', 2], ['\n', 3]]))),
126
129
  str('"'),
127
130
  true,
128
- [2 | Backtrack.bracket],
131
+ [2 | Backtrack.common],
129
132
  undefined,
130
133
  ([as, bs = new List()]) => as.import(bs as List<Data<string>>)));