securemark 0.294.4 → 0.294.6

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (52)
  1. package/CHANGELOG.md +8 -0
  2. package/dist/index.js +159 -114
  3. package/markdown.d.ts +3 -1
  4. package/package.json +3 -3
  5. package/src/combinator/control/manipulation/fence.ts +2 -0
  6. package/src/combinator/control/manipulation/indent.ts +1 -1
  7. package/src/combinator/control/manipulation/match.ts +11 -8
  8. package/src/combinator/data/parser.ts +3 -0
  9. package/src/parser/api/normalize.test.ts +9 -1
  10. package/src/parser/api/normalize.ts +17 -10
  11. package/src/parser/api/parse.test.ts +3 -3
  12. package/src/parser/block/blockquote.test.ts +3 -9
  13. package/src/parser/block/blockquote.ts +4 -4
  14. package/src/parser/block/dlist.ts +4 -4
  15. package/src/parser/block/extension/example.ts +1 -3
  16. package/src/parser/block/extension/fig.test.ts +0 -1
  17. package/src/parser/block/extension/fig.ts +6 -6
  18. package/src/parser/block/extension/figbase.ts +1 -1
  19. package/src/parser/block/extension/figure.test.ts +1 -1
  20. package/src/parser/block/extension/figure.ts +6 -6
  21. package/src/parser/block/extension/message.ts +1 -1
  22. package/src/parser/block/extension/table.ts +4 -4
  23. package/src/parser/block/heading.ts +4 -4
  24. package/src/parser/block/reply/cite.ts +1 -1
  25. package/src/parser/block/reply/quote.ts +2 -2
  26. package/src/parser/block/sidefence.test.ts +1 -3
  27. package/src/parser/block/sidefence.ts +4 -4
  28. package/src/parser/block/table.ts +2 -2
  29. package/src/parser/block.ts +1 -1
  30. package/src/parser/header.ts +3 -3
  31. package/src/parser/inline/autolink/account.ts +5 -7
  32. package/src/parser/inline/autolink/channel.ts +15 -15
  33. package/src/parser/inline/autolink/hashnum.ts +2 -2
  34. package/src/parser/inline/autolink/hashtag.test.ts +6 -2
  35. package/src/parser/inline/autolink/hashtag.ts +12 -10
  36. package/src/parser/inline/autolink.ts +1 -1
  37. package/src/parser/inline/code.ts +12 -18
  38. package/src/parser/inline/deletion.ts +3 -3
  39. package/src/parser/inline/emstrong.ts +3 -3
  40. package/src/parser/inline/extension/indexer.ts +1 -1
  41. package/src/parser/inline/html.ts +1 -1
  42. package/src/parser/inline/htmlentity.ts +13 -16
  43. package/src/parser/inline/insertion.ts +3 -3
  44. package/src/parser/inline/italic.ts +3 -3
  45. package/src/parser/inline/link.ts +3 -3
  46. package/src/parser/inline/mark.ts +3 -3
  47. package/src/parser/inline/remark.ts +3 -3
  48. package/src/parser/inline/ruby.ts +7 -2
  49. package/src/parser/inline.ts +2 -0
  50. package/src/parser/source/text.ts +11 -5
  51. package/src/parser/util.ts +1 -1
  52. package/src/parser/visibility.ts +1 -1
@@ -8,7 +8,7 @@ import { normalize } from './api/normalize';
8
8
  import { html, defrag } from 'typed-dom/dom';
9
9
 
10
10
  export const header: MarkdownParser.HeaderParser = lazy(() => validate(
11
- /---+[^\S\v\f\r\n]*\r?\n(?=\S)/y,
11
+ /---+ *\r?\n(?=\S)/y,
12
12
  inits([
13
13
  rewrite(
14
14
  ({ context }) => {
@@ -24,7 +24,7 @@ export const header: MarkdownParser.HeaderParser = lazy(() => validate(
24
24
  block(
25
25
  union([
26
26
  validate(({ context }) => context.header ?? true,
27
- focus(/(---+)[^\S\v\f\r\n]*\r?\n(?:[A-Za-z][0-9A-Za-z]*(?:-[A-Za-z][0-9A-Za-z]*)*:[ \t]+\S[^\v\f\r\n]*\r?\n){1,100}\1[^\S\v\f\r\n]*(?:$|\r?\n)/y,
27
+ focus(/(---+) *\r?\n(?:[A-Za-z][0-9A-Za-z]*(?:-[0-9A-Za-z]+)*:[ \t]+\S[^\r\n]*\r?\n){1,100}\1 *(?:$|\r?\n)/y,
28
28
  convert(source =>
29
29
  normalize(source.slice(source.indexOf('\n') + 1, source.trimEnd().lastIndexOf('\n'))).replace(/(\S)\s+$/mg, '$1'),
30
30
  fmap(
@@ -48,7 +48,7 @@ export const header: MarkdownParser.HeaderParser = lazy(() => validate(
48
48
  ]);
49
49
  },
50
50
  ]))),
51
- clear(str(/[^\S\v\f\r\n]*\r?\n/y)),
51
+ clear(str(/ *\r?\n/y)),
52
52
  ])));
53
53
 
54
54
  const field: MarkdownParser.HeaderParser.FieldParser = line(({ context: { source, position } }) => {
@@ -1,7 +1,7 @@
1
1
  import { AutolinkParser } from '../../inline';
2
2
  import { State, Backtrack } from '../../context';
3
3
  import { List, Data } from '../../../combinator/data/parser';
4
- import { union, tails, state, constraint, rewrite, open, convert, fmap, lazy } from '../../../combinator';
4
+ import { union, state, constraint, rewrite, surround, convert, fmap, lazy } from '../../../combinator';
5
5
  import { unsafelink } from '../link';
6
6
  import { str } from '../../source';
7
7
  import { define } from 'typed-dom/dom';
@@ -9,13 +9,11 @@ import { define } from 'typed-dom/dom';
9
9
  // https://example/@user must be a user page or a redirect page going there.
10
10
 
11
11
  export const account: AutolinkParser.AccountParser = lazy(() => rewrite(
12
- open(
12
+ surround(
13
13
  /(?<![0-9a-z])@/yi,
14
- tails([
15
- str(/[0-9a-z](?:(?:[0-9a-z]|-(?=[0-9a-z])){0,61}[0-9a-z])?(?:\.[0-9a-z](?:(?:[0-9a-z]|-(?=[0-9a-z])){0,61}[0-9a-z])?)*\//yi),
16
- str(/[a-z][0-9a-z]*(?:[-.][0-9a-z]+)*(?![-.]?[0-9a-z@#]|>>|:\S)/yi),
17
- ]),
18
- false,
14
+ str(/[0-9a-z](?:(?:[0-9a-z]|-(?=[0-9a-z])){0,61}[0-9a-z])?(?:\.[0-9a-z](?:(?:[0-9a-z]|-(?=[0-9a-z])){0,61}[0-9a-z])?)*\//yi),
15
+ str(/[a-z][0-9a-z]*(?:[-.][0-9a-z]+)*(?![-.]?[0-9a-z@#]|>>|:\S)/yi),
16
+ true, undefined, undefined,
19
17
  [3 | Backtrack.autolink]),
20
18
  constraint(State.autolink, state(State.autolink, fmap(convert(
21
19
  source =>
@@ -1,7 +1,7 @@
1
1
  import { AutolinkParser } from '../../inline';
2
2
  import { State, Backtrack } from '../../context';
3
3
  import { List, Data } from '../../../combinator/data/parser';
4
- import { union, tails, sequence, some, state, constraint, verify, rewrite, open, convert, fmap, lazy } from '../../../combinator';
4
+ import { union, sequence, some, state, constraint, verify, rewrite, surround, convert, fmap, lazy } from '../../../combinator';
5
5
  import { unsafelink } from '../link';
6
6
  import { emoji } from './hashtag';
7
7
  import { str } from '../../source';
@@ -11,23 +11,23 @@ import { define } from 'typed-dom/dom';
11
11
 
12
12
  export const channel: AutolinkParser.ChannelParser = lazy(() => rewrite(
13
13
  sequence([
14
- open(
14
+ surround(
15
15
  /(?<![0-9a-z])@/yi,
16
- tails([
17
- str(/[0-9a-z](?:(?:[0-9a-z]|-(?=[0-9a-z])){0,61}[0-9a-z])?(?:\.[0-9a-z](?:(?:[0-9a-z]|-(?=[0-9a-z])){0,61}[0-9a-z])?)*\//yi),
18
- str(/[a-z][0-9a-z]*(?:[-.][0-9a-z]+)*(?![-.]?[0-9a-z@]|>>|:\S)/yi),
19
- ]),
20
- false,
16
+ str(/[0-9a-z](?:(?:[0-9a-z]|-(?=[0-9a-z])){0,61}[0-9a-z])?(?:\.[0-9a-z](?:(?:[0-9a-z]|-(?=[0-9a-z])){0,61}[0-9a-z])?)*\//yi),
17
+ str(/[a-z][0-9a-z]*(?:[-.][0-9a-z]+)*(?![-.]?[0-9a-z@]|>>|:\S)/yi),
18
+ true, undefined, undefined,
21
19
  [3 | Backtrack.autolink]),
22
- some(open(
20
+ some(verify(surround(
23
21
  '#',
24
- verify(
25
- str(new RegExp([
26
- /(?!['_])(?:[^\p{C}\p{S}\p{P}\s]|emoji|'(?=[0-9A-Za-z])|_(?=[^'\p{C}\p{S}\p{P}\s]|emoji))+(?![0-9a-z@]|>>|:\S|[^\p{C}\p{S}\p{P}\s]|emoji)/yu.source,
27
- ].join('').replace(/emoji/g, emoji), 'yu')),
28
- ([{ value }]) => !/^[0-9]{1,4}$|^[0-9]{5}/.test(value)),
29
- false,
30
- [3 | Backtrack.autolink])),
22
+ str(new RegExp([
23
+ /(?!['_])(?:[^\p{C}\p{S}\p{P}\s]|emoji|'(?=[0-9A-Za-z])|_(?=[^\p{C}\p{S}\p{P}\s]|emoji))+/yu.source,
24
+ ].join('').replace(/emoji/g, emoji.source), 'yu')),
25
+ str(new RegExp([
26
+ /(?![0-9a-z@]|>>|:\S|[^\p{C}\p{S}\p{P}\s]|emoji)/yu.source,
27
+ ].join('').replace(/emoji/g, emoji.source), 'yu')),
28
+ false, undefined, undefined,
29
+ [3 | Backtrack.autolink]),
30
+ ([{ value }]) => !/^[0-9]{1,4}$|^[0-9]{5}/.test(value as string))),
31
31
  ]),
32
32
  constraint(State.autolink, state(State.autolink, fmap(convert(
33
33
  source =>
@@ -11,10 +11,10 @@ export const hashnum: AutolinkParser.HashnumParser = lazy(() => rewrite(
11
11
  open(
12
12
  new RegExp([
13
13
  /(?<![^\p{C}\p{S}\p{P}\s]|emoji)#/yiu.source,
14
- ].join('').replace(/emoji/g, emoji), 'yu'),
14
+ ].join('').replace(/emoji/g, emoji.source), 'yu'),
15
15
  str(new RegExp([
16
16
  /[0-9]{1,9}(?![0-9a-z@#]|>>|:\S|[^\p{C}\p{S}\p{P}\s]|emoji)/yu.source,
17
- ].join('').replace(/emoji/g, emoji), 'yu')),
17
+ ].join('').replace(/emoji/g, emoji.source), 'yu')),
18
18
  false,
19
19
  [1 | Backtrack.autolink]),
20
20
  constraint(State.autolink, state(State.autolink, fmap(convert(
@@ -54,10 +54,14 @@ describe('Unit: parser/inline/autolink/hashtag', () => {
54
54
  assert.deepStrictEqual(inspect(parser('#a_b'), ctx), [['<a class="hashtag" href="/hashtags/a_b">#a_b</a>'], '']);
55
55
  assert.deepStrictEqual(inspect(parser('#a__b'), ctx), [['<a class="hashtag" href="/hashtags/a">#a</a>'], '__b']);
56
56
  assert.deepStrictEqual(inspect(parser('#あ'), ctx), [['<a class="hashtag" href="/hashtags/あ">#あ</a>'], '']);
57
- assert.deepStrictEqual(inspect(parser('#👩'), ctx), [['<a class="hashtag" href="/hashtags/👩">#👩</a>'], '']);
57
+ assert.deepStrictEqual(inspect(parser('#😀'), ctx), [['<a class="hashtag" href="/hashtags/😀">#😀</a>'], '']);
58
+ assert.deepStrictEqual(inspect(parser('#🤚🏽'), ctx), [['<a class="hashtag" href="/hashtags/🤚🏽">#🤚🏽</a>'], '']);
59
+ assert.deepStrictEqual(inspect(parser('#👨‍👩‍👧'), ctx), [['<a class="hashtag" href="/hashtags/👨‍👩‍👧">#👨‍👩‍👧</a>'], '']);
60
+ assert.deepStrictEqual(inspect(parser('#🇺🇳'), ctx), [['<a class="hashtag" href="/hashtags/🇺🇳">#🇺🇳</a>'], '']);
61
+ assert.deepStrictEqual(inspect(parser('##️⃣*️⃣0️⃣1️⃣2️⃣3️⃣4️⃣5️⃣6️⃣7️⃣8️⃣9️⃣'), ctx), [['<a class="hashtag" href="/hashtags/#️⃣*️⃣0️⃣1️⃣2️⃣3️⃣4️⃣5️⃣6️⃣7️⃣8️⃣9️⃣">##️⃣*️⃣0️⃣1️⃣2️⃣3️⃣4️⃣5️⃣6️⃣7️⃣8️⃣9️⃣</a>'], '']);
58
62
  assert.deepStrictEqual(inspect(parser('#1a'), ctx), [['<a class="hashtag" href="/hashtags/1a">#1a</a>'], '']);
59
63
  assert.deepStrictEqual(inspect(parser('#1あ'), ctx), [['<a class="hashtag" href="/hashtags/1あ">#1あ</a>'], '']);
60
- assert.deepStrictEqual(inspect(parser('#1👩'), ctx), [['<a class="hashtag" href="/hashtags/1👩">#1👩</a>'], '']);
64
+ assert.deepStrictEqual(inspect(parser('#1😀'), ctx), [['<a class="hashtag" href="/hashtags/1😀">#1😀</a>'], '']);
61
65
  assert.deepStrictEqual(inspect(parser(`#a'`), ctx), [[`<a class="hashtag" href="/hashtags/a">#a</a>`], `'`]);
62
66
  assert.deepStrictEqual(inspect(parser(`#a''`), ctx), [[`<a class="hashtag" href="/hashtags/a">#a</a>`], `''`]);
63
67
  assert.deepStrictEqual(inspect(parser(`#a'b`), ctx), [[`<a class="hashtag" href="/hashtags/a'b">#a'b</a>`], '']);
@@ -1,7 +1,7 @@
1
1
  import { AutolinkParser } from '../../inline';
2
2
  import { State, Backtrack } from '../../context';
3
3
  import { List, Data } from '../../../combinator/data/parser';
4
- import { union, state, constraint, verify, rewrite, open, convert, fmap, lazy } from '../../../combinator';
4
+ import { union, state, constraint, verify, rewrite, surround, convert, fmap, lazy } from '../../../combinator';
5
5
  import { unsafelink } from '../link';
6
6
  import { str } from '../../source';
7
7
  import { define } from 'typed-dom/dom';
@@ -9,20 +9,22 @@ import { define } from 'typed-dom/dom';
9
9
  // https://example/hashtags/a must be a hashtag page or a redirect page going there.
10
10
 
11
11
  // https://github.com/tc39/proposal-regexp-unicode-property-escapes#matching-emoji
12
- export const emoji = String.raw`\p{Emoji_Modifier_Base}\p{Emoji_Modifier}?|\p{Emoji_Presentation}|\p{Emoji}\uFE0F`;
12
+ export const emoji = /\p{Emoji_Modifier_Base}\p{Emoji_Modifier}?|\p{Emoji_Presentation}|\p{Emoji}\uFE0F|\u200D/u;
13
13
 
14
14
  export const hashtag: AutolinkParser.HashtagParser = lazy(() => rewrite(
15
- open(
15
+ verify(surround(
16
16
  new RegExp([
17
17
  /(?<![^\p{C}\p{S}\p{P}\s]|emoji)#/yiu.source,
18
- ].join('').replace(/emoji/g, emoji), 'yu'),
19
- verify(
20
- str(new RegExp([
21
- /(?!['_])(?:[^\p{C}\p{S}\p{P}\s]|emoji|'(?=[0-9A-Za-z])|_(?=[^'\p{C}\p{S}\p{P}\s]|emoji))+(?![0-9a-z@#]|>>|:\S|[^\p{C}\p{S}\p{P}\s]|emoji)/yu.source,
22
- ].join('').replace(/emoji/g, emoji), 'yu')),
23
- ([{ value }]) => !/^[0-9]{1,4}$|^[0-9]{5}/.test(value)),
24
- false,
18
+ ].join('').replace(/emoji/g, emoji.source), 'yu'),
19
+ str(new RegExp([
20
+ /(?!['_])(?:[^\p{C}\p{S}\p{P}\s]|emoji|'(?=[0-9A-Za-z])|_(?=[^\p{C}\p{S}\p{P}\s]|emoji))+/yu.source,
21
+ ].join('').replace(/emoji/g, emoji.source), 'yu')),
22
+ str(new RegExp([
23
+ /(?![0-9a-z@#]|>>|:\S|[^\p{C}\p{S}\p{P}\s]|emoji)/yu.source,
24
+ ].join('').replace(/emoji/g, emoji.source), 'yu')),
25
+ false, undefined, undefined,
25
26
  [3 | Backtrack.autolink]),
27
+ ([{ value }]) => !/^[0-9]{1,4}$|^[0-9]{5}/.test(value)),
26
28
  constraint(State.autolink, state(State.autolink, fmap(convert(
27
29
  source => `[${source}]{ ${`/hashtags/${source.slice(1)}`} }`,
28
30
  union([unsafelink]),
@@ -15,7 +15,7 @@ export const autolink: AutolinkParser = lazy(() =>
15
15
  /(?<![^\p{C}\p{S}\p{P}\s]|emoji)#/yiu.source,
16
16
  /(?<![0-9a-z])>>/yi.source,
17
17
  /(?<![0-9a-z][.+-]?|[@#])!?[0-9a-z]/yi.source,
18
- ].join('|').replace(/emoji/g, emoji), 'yiu'),
18
+ ].join('|').replace(/emoji/g, emoji.source), 'yiu'),
19
19
  state(~State.autolink,
20
20
  union([
21
21
  lineurl,
@@ -1,26 +1,20 @@
1
1
  import { CodeParser } from '../inline';
2
2
  import { List, Data } from '../../combinator/data/parser';
3
- import { open, match } from '../../combinator';
4
- import { Backtrack } from '../context';
3
+ import { match } from '../../combinator';
5
4
  import { invalid } from '../util';
6
5
  import { html } from 'typed-dom/dom';
7
6
 
8
- export const code: CodeParser = open(
9
- /(?=`)/y,
10
- match(
11
- /(`+)(?!`)([^\n]*?)(?:((?<!`)\1(?!`))|(?=$|\n))/y,
12
- ([whole, opener, body, closer]) => () =>
13
- closer
14
- ? new List([new Data(html('code', { 'data-src': whole }, format(body)))])
15
- : body
16
- ? new List([new Data(html('code', {
17
- class: 'invalid',
18
- ...invalid('code', 'syntax', `Missing the closing symbol "${opener}"`)
19
- }, whole))])
20
- : new List([new Data(opener)]),
21
- true),
22
- false,
23
- [3 | Backtrack.bracket]);
7
+ export const code: CodeParser = match(
8
+ /(`+)(?!`)([^\n]*?)(?:((?<!`)\1(?!`))|(?=$|\n))/y,
9
+ ([whole, opener, body, closer]) => () =>
10
+ closer
11
+ ? new List([new Data(html('code', { 'data-src': whole }, format(body)))])
12
+ : body
13
+ ? new List([new Data(html('code', {
14
+ class: 'invalid',
15
+ ...invalid('code', 'syntax', `Missing the closing symbol "${opener}"`)
16
+ }, whole))])
17
+ : new List([new Data(opener)]));
24
18
 
25
19
  function format(text: string): string {
26
20
  return text.length > 2
@@ -1,13 +1,13 @@
1
1
  import { DeletionParser } from '../inline';
2
2
  import { Recursion, Command } from '../context';
3
3
  import { List, Data } from '../../combinator/data/parser';
4
- import { union, some, recursion, precedence, validate, surround, open, lazy } from '../../combinator';
4
+ import { union, some, recursion, precedence, surround, open, lazy } from '../../combinator';
5
5
  import { inline } from '../inline';
6
6
  import { blankWith } from '../visibility';
7
7
  import { unwrap, repeat } from '../util';
8
8
  import { html, defrag } from 'typed-dom/dom';
9
9
 
10
- export const deletion: DeletionParser = lazy(() => validate('~~',
10
+ export const deletion: DeletionParser = lazy(() =>
11
11
  precedence(0, repeat('~~', surround(
12
12
  '',
13
13
  recursion(Recursion.inline,
@@ -18,4 +18,4 @@ export const deletion: DeletionParser = lazy(() => validate('~~',
18
18
  '~~', false,
19
19
  ([, bs], { buffer }) => buffer!.import(bs),
20
20
  ([, bs], { buffer }) => bs && buffer!.import(bs).push(new Data(Command.Cancel)) && buffer!),
21
- nodes => new List([new Data(html('del', defrag(unwrap(nodes))))])))));
21
+ nodes => new List([new Data(html('del', defrag(unwrap(nodes))))]))));
@@ -1,7 +1,7 @@
1
1
  import { EmStrongParser, EmphasisParser, StrongParser } from '../inline';
2
2
  import { Recursion, Command } from '../context';
3
3
  import { Result, List, Data, Node, Context, IntermediateParser } from '../../combinator/data/parser';
4
- import { union, some, recursion, precedence, validate, surround, open, lazy, bind } from '../../combinator';
4
+ import { union, some, recursion, precedence, surround, open, lazy, bind } from '../../combinator';
5
5
  import { inline } from '../inline';
6
6
  import { strong } from './strong';
7
7
  import { emphasis } from './emphasis';
@@ -24,7 +24,7 @@ const subemphasis: IntermediateParser<EmphasisParser> = lazy(() => some(union([
24
24
  // 開閉が明示的でない構文は開閉の不明確な記号による再帰的適用を行わず
25
25
  // 可能な限り早く閉じるよう解析しなければならない。
26
26
  // このため終端記号の後ろを見て終端を中止し同じ構文を再帰的に適用してはならない。
27
- export const emstrong: EmStrongParser = lazy(() => validate('***',
27
+ export const emstrong: EmStrongParser = lazy(() =>
28
28
  precedence(0, repeat('***', surround(
29
29
  '',
30
30
  recursion(Recursion.inline,
@@ -142,7 +142,7 @@ export const emstrong: EmStrongParser = lazy(() => validate('***',
142
142
  nodes = prepend('*'.repeat(prefix - postfix), nodes);
143
143
  }
144
144
  return nodes;
145
- }))));
145
+ })));
146
146
 
147
147
  function prepend<N>(prefix: string, nodes: List<Data<N>>): List<Data<N>> {
148
148
  if (typeof nodes.head?.value === 'string') {
@@ -11,7 +11,7 @@ import { html } from 'typed-dom/dom';
11
11
  // 継続的編集において最も簡便となる。
12
12
 
13
13
  export const indexer: ExtensionParser.IndexerParser = surround(
14
- /\s\[(?=\|\S)/y,
14
+ / \[(?=\|\S)/y,
15
15
  union([
16
16
  signature,
17
17
  focus(/\|(?=\])/y, () => new List([new Data(html('span', { class: 'indexer', 'data-index': '' }))])),
@@ -32,7 +32,7 @@ export const html: HTMLParser = lazy(() => validate(/<[a-z]+(?=[ >])/yi,
32
32
  ([as, bs = new List()], context) =>
33
33
  new List([new Data(elem(as.head!.value.slice(1), false, [...unwrap(as.import(bs))], new List(), new List(), context))])),
34
34
  match(
35
- new RegExp(String.raw`<(${TAGS.join('|')})(?=[^\S\n]|>)`, 'y'),
35
+ new RegExp(String.raw`<(${TAGS.join('|')})(?=[ >])`, 'y'),
36
36
  memoize(
37
37
  ([, tag]) =>
38
38
  surround<HTMLParser.TagParser, string>(
@@ -1,19 +1,19 @@
1
1
  import { HTMLEntityParser, UnsafeHTMLEntityParser } from '../inline';
2
+ import { Backtrack } from '../context';
2
3
  import { List, Data } from '../../combinator/data/parser';
3
- import { union, focus, fmap } from '../../combinator';
4
+ import { union, surround, fmap } from '../../combinator';
5
+ import { str } from '../source';
4
6
  import { invalid } from '../util';
5
7
  import { html } from 'typed-dom/dom';
6
8
 
7
- export const unsafehtmlentity: UnsafeHTMLEntityParser = focus(
8
- /&(?:[0-9A-Za-z]+;?)?/y,
9
- //({ source }) => [[parser(source) ?? `${Command.Error}${source}`], '']));
10
- ({ context }) => {
11
- const { source } = context;
12
- context.position += source.length;
13
- return source.length > 1 && source.at(-1) === ';'
14
- ? new List([new Data(parser(source) ?? source)])
15
- : new List([new Data(source)]);
16
- });
9
+ export const unsafehtmlentity: UnsafeHTMLEntityParser = surround(
10
+ str('&'), str(/[0-9A-Za-z]+/y), str(';'),
11
+ false,
12
+ ([as, bs, cs]) =>
13
+ new List([new Data(parser(as.head!.value + bs.head!.value + cs.head!.value))]),
14
+ ([as, bs]) =>
15
+ new List([new Data(as.head!.value + (bs?.head?.value ?? ''))]),
16
+ [3 | Backtrack.bracket]);
17
17
 
18
18
  export const htmlentity: HTMLEntityParser = fmap(
19
19
  union([unsafehtmlentity]),
@@ -26,11 +26,8 @@ export const htmlentity: HTMLEntityParser = fmap(
26
26
  }, value))
27
27
  ]));
28
28
 
29
- const parser = (el => (entity: string): string | undefined => {
29
+ const parser = (el => (entity: string): string => {
30
30
  if (entity === '&NewLine;') return ' ';
31
31
  el.innerHTML = entity;
32
- const text = el.textContent!;
33
- return entity === text
34
- ? undefined
35
- : text;
32
+ return el.textContent!;
36
33
  })(html('span'));
@@ -1,13 +1,13 @@
1
1
  import { InsertionParser } from '../inline';
2
2
  import { Recursion, Command } from '../context';
3
3
  import { List, Data } from '../../combinator/data/parser';
4
- import { union, some, recursion, precedence, validate, surround, open, lazy } from '../../combinator';
4
+ import { union, some, recursion, precedence, surround, open, lazy } from '../../combinator';
5
5
  import { inline } from '../inline';
6
6
  import { blankWith } from '../visibility';
7
7
  import { unwrap, repeat } from '../util';
8
8
  import { html, defrag } from 'typed-dom/dom';
9
9
 
10
- export const insertion: InsertionParser = lazy(() => validate('++',
10
+ export const insertion: InsertionParser = lazy(() =>
11
11
  precedence(0, repeat('++', surround(
12
12
  '',
13
13
  recursion(Recursion.inline,
@@ -18,4 +18,4 @@ export const insertion: InsertionParser = lazy(() => validate('++',
18
18
  '++', false,
19
19
  ([, bs], { buffer }) => buffer!.import(bs),
20
20
  ([, bs], { buffer }) => bs && buffer!.import(bs).push(new Data(Command.Cancel)) && buffer!),
21
- nodes => new List([new Data(html('ins', defrag(unwrap(nodes))))])))));
21
+ nodes => new List([new Data(html('ins', defrag(unwrap(nodes))))]))));
@@ -1,7 +1,7 @@
1
1
  import { ItalicParser } from '../inline';
2
2
  import { Recursion, Command } from '../context';
3
3
  import { List, Data } from '../../combinator/data/parser';
4
- import { union, some, recursion, precedence, validate, surround, open, lazy } from '../../combinator';
4
+ import { union, some, recursion, precedence, surround, open, lazy } from '../../combinator';
5
5
  import { inline } from '../inline';
6
6
  import { tightStart, blankWith } from '../visibility';
7
7
  import { unwrap, repeat } from '../util';
@@ -10,7 +10,7 @@ import { html, defrag } from 'typed-dom/dom';
10
10
  // 可読性のため実際にはオブリーク体を指定する。
11
11
  // 斜体は単語に使うとかえって見づらく読み飛ばしやすくなるため使わないべきであり
12
12
  // ある程度の長さのある文に使うのが望ましい。
13
- export const italic: ItalicParser = lazy(() => validate('///',
13
+ export const italic: ItalicParser = lazy(() =>
14
14
  precedence(0, repeat('///', surround(
15
15
  '',
16
16
  recursion(Recursion.inline,
@@ -21,4 +21,4 @@ export const italic: ItalicParser = lazy(() => validate('///',
21
21
  '///', false,
22
22
  ([, bs], { buffer }) => buffer!.import(bs),
23
23
  ([, bs], { buffer }) => bs && buffer!.import(bs).push(new Data(Command.Cancel)) && buffer!),
24
- nodes => new List([new Data(html('i', defrag(unwrap(nodes))))])))));
24
+ nodes => new List([new Data(html('i', defrag(unwrap(nodes))))]))));
@@ -2,7 +2,7 @@ import { MarkdownParser } from '../../../markdown';
2
2
  import { LinkParser } from '../inline';
3
3
  import { State, Backtrack, Command } from '../context';
4
4
  import { List, Data } from '../../combinator/data/parser';
5
- import { union, inits, tails, sequence, subsequence, some, creation, precedence, state, constraint, validate, surround, open, setBacktrack, dup, reverse, lazy, fmap, bind } from '../../combinator';
5
+ import { union, inits, tails, sequence, subsequence, some, creation, precedence, state, constraint, surround, open, setBacktrack, dup, reverse, lazy, fmap, bind } from '../../combinator';
6
6
  import { inline, media, shortmedia } from '../inline';
7
7
  import { attributes } from './html';
8
8
  import { unescsource, str } from '../source';
@@ -75,7 +75,7 @@ export const textlink: LinkParser.TextLinkParser = lazy(() => constraint(State.l
75
75
  return new List([new Data(parse(content, params as List<Data<string>>, context))]);
76
76
  }))))));
77
77
 
78
- export const medialink: LinkParser.MediaLinkParser = lazy(() => constraint(State.link | State.media, validate(/[[{]/y, creation(10,
78
+ export const medialink: LinkParser.MediaLinkParser = lazy(() => constraint(State.link | State.media, creation(10,
79
79
  state(State.linkers,
80
80
  bind(sequence([
81
81
  dup(surround(
@@ -85,7 +85,7 @@ export const medialink: LinkParser.MediaLinkParser = lazy(() => constraint(State
85
85
  dup(surround(/{(?![{}])/y, inits([uri, some(option)]), / ?}/y)),
86
86
  ]),
87
87
  ([{ value: content }, { value: params }], context) =>
88
- new List([new Data(parse(content, params as List<Data<string>>, context))])))))));
88
+ new List([new Data(parse(content, params as List<Data<string>>, context))]))))));
89
89
 
90
90
  export const unsafelink: LinkParser.UnsafeLinkParser = lazy(() =>
91
91
  creation(10,
@@ -1,14 +1,14 @@
1
1
  import { MarkParser } from '../inline';
2
2
  import { State, Recursion, Command } from '../context';
3
3
  import { List, Data } from '../../combinator/data/parser';
4
- import { union, some, recursion, precedence, state, constraint, validate, surround, open, lazy } from '../../combinator';
4
+ import { union, some, recursion, precedence, state, constraint, surround, open, lazy } from '../../combinator';
5
5
  import { inline } from '../inline';
6
6
  import { identity, signature } from './extension/indexee';
7
7
  import { tightStart, blankWith } from '../visibility';
8
8
  import { unwrap, repeat } from '../util';
9
9
  import { html, define, defrag } from 'typed-dom/dom';
10
10
 
11
- export const mark: MarkParser = lazy(() => constraint(State.linkers & ~State.mark, validate('==',
11
+ export const mark: MarkParser = lazy(() => constraint(State.linkers & ~State.mark,
12
12
  precedence(0, state(State.mark, repeat('==', surround(
13
13
  '',
14
14
  recursion(Recursion.inline,
@@ -25,4 +25,4 @@ export const mark: MarkParser = lazy(() => constraint(State.linkers & ~State.mar
25
25
  return el.id
26
26
  ? new List([new Data(el), new Data(html('a', { href: `#${el.id}` }))])
27
27
  : new List([new Data(el)]);
28
- }))))));
28
+ })))));
@@ -8,9 +8,9 @@ import { unwrap, invalid } from '../util';
8
8
  import { html, defrag } from 'typed-dom/dom';
9
9
 
10
10
  export const remark: RemarkParser = lazy(() => fallback(surround(
11
- str(/\[%(?=\s)/y),
11
+ str(/\[%(?=[ \n])/y),
12
12
  precedence(3, recursion(Recursion.inline,
13
- some(union([inline]), /\s%\]/y, [[/\s%\]/y, 3]]))),
13
+ some(union([inline]), /[ \n]%\]/y, [[/[ \n]%\]/y, 3]]))),
14
14
  close(text, str(`%]`)), true,
15
15
  ([as, bs = new List(), cs]) => new List([
16
16
  new Data(html('span', { class: 'remark' }, [
@@ -19,6 +19,6 @@ export const remark: RemarkParser = lazy(() => fallback(surround(
19
19
  ])),
20
20
  ]),
21
21
  ([as, bs]) => bs && as.import(bs as List<Data<string>>)),
22
- focus(/\[%+(?=\s)/y, ({ context: { source } }) => new List([
22
+ focus(/\[%+(?=[ \n])/y, ({ context: { source } }) => new List([
23
23
  new Data(html('span', { class: 'invalid', ...invalid('remark', 'syntax', 'Invalid start symbol') }, source))
24
24
  ]))));
@@ -63,6 +63,8 @@ export const ruby: RubyParser = lazy(() => bind(
63
63
  }
64
64
  }));
65
65
 
66
+ const delimiter = /[$"`\[\](){}<>()[]{}]|\\?\n/y;
67
+
66
68
  const text: RubyParser.TextParser = input => {
67
69
  const { context } = input;
68
70
  const { source } = context;
@@ -70,11 +72,14 @@ const text: RubyParser.TextParser = input => {
70
72
  let state = false;
71
73
  context.sequential = true;
72
74
  for (let { position } = context; position < source.length; position = context.position) {
73
- if (/[$"`\[\](){}<>()[]{}]|\\?\n/yi.test(source.slice(position, position + 2))) break;
75
+ delimiter.lastIndex = position;
76
+ if (delimiter.test(source)) break;
74
77
  assert(source[position] !== '\n');
75
78
  switch (source[position]) {
76
79
  case '&': {
77
- const result = unsafehtmlentity(input) ?? txt(input)!;
80
+ const result = source[position + 1] !== ' '
81
+ ? unsafehtmlentity(input) ?? txt(input)!
82
+ : txt(input)!;
78
83
  assert(result);
79
84
  acc.last!.value += result.head!.value;
80
85
  continue;
@@ -66,6 +66,7 @@ export const inline: InlineParser = lazy(() => union([
66
66
  case '%':
67
67
  return remark(input)
68
68
  || textlink(input)
69
+ || ruby(input)
69
70
  || bracket(input);
70
71
  case '#':
71
72
  case '$':
@@ -74,6 +75,7 @@ export const inline: InlineParser = lazy(() => union([
74
75
  case '|':
75
76
  return extension(input)
76
77
  || textlink(input)
78
+ || ruby(input)
77
79
  || bracket(input);
78
80
  }
79
81
  return textlink(input)
@@ -103,7 +103,6 @@ export function next(source: string, position: number, delimiter?: RegExp): numb
103
103
  const char = source[index];
104
104
  switch (char) {
105
105
  case '$':
106
- case '%':
107
106
  case '*':
108
107
  case '+':
109
108
  case '~':
@@ -111,10 +110,15 @@ export function next(source: string, position: number, delimiter?: RegExp): numb
111
110
  case '/':
112
111
  index = backToWhitespace(source, position, index);
113
112
  break;
113
+ case '%':
114
+ index += index - 1 > position && source.startsWith(' %]', index - 1)
115
+ ? -1
116
+ : 0;
117
+ break;
114
118
  case '[':
115
- index = source[index + 1] === '|'
116
- ? backToWhitespace(source, position, index)
117
- : index;
119
+ index += index - 1 > position && source.startsWith(' [|', index - 1)
120
+ ? -1
121
+ : 0;
118
122
  break;
119
123
  case ':':
120
124
  index = source.startsWith('//', index + 1)
@@ -234,7 +238,6 @@ function seek(source: string, position: number): number {
234
238
  case '@':
235
239
  case '#':
236
240
  case '$':
237
- case '&':
238
241
  case '"':
239
242
  case '`':
240
243
  case '[':
@@ -270,6 +273,9 @@ function seek(source: string, position: number): number {
270
273
  case ':':
271
274
  if (source[i + 1] === '/' && source[i + 2] === '/') return i;
272
275
  continue;
276
+ case '&':
277
+ if (source[i + 1] !== ' ') return i;
278
+ continue;
273
279
  case ' ':
274
280
  case '\t':
275
281
  case ' ':
@@ -28,7 +28,7 @@ export function repeat<N extends HTMLElement | string>(symbol: string, parser: P
28
28
  return failsafe(input => {
29
29
  const { context } = input;
30
30
  const { source, position } = context;
31
- assert(source.startsWith(symbol, context.position));
31
+ if (!source.startsWith(symbol, context.position)) return;
32
32
  let nodes = new List<Data<N>>();
33
33
  let i = symbol.length;
34
34
  for (; source[context.position + i] === source[context.position];) ++i;
@@ -8,7 +8,7 @@ import { invisibleHTMLEntityNames } from './api/normalize';
8
8
  export namespace blank {
9
9
  export const line = new RegExp(
10
10
  // TODO: 行全体をエスケープ
11
- /^(?:[^\S\r\n])*(?!\s)(\\?[^\S\r\n]|&IHN;|<wbr ?>|\\$)+$/mg.source
11
+ /^(\\?[^\S\r\n]|&IHN;|<wbr ?>|\\$)+$/mg.source
12
12
  .replace('IHN', `(?:${invisibleHTMLEntityNames.join('|')})`),
13
13
  'gm');
14
14
  export const start = new RegExp(