securemark 0.258.0 → 0.258.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +12 -0
- package/dist/index.js +200 -196
- package/package.json +1 -1
- package/src/combinator/control/manipulation/convert.ts +7 -7
- package/src/combinator/control/manipulation/scope.ts +4 -4
- package/src/combinator/data/parser/context/memo.ts +20 -11
- package/src/combinator/data/parser/context.test.ts +3 -3
- package/src/combinator/data/parser/context.ts +22 -31
- package/src/combinator/data/parser/inits.ts +3 -2
- package/src/combinator/data/parser/sequence.ts +3 -2
- package/src/combinator/data/parser/some.ts +4 -2
- package/src/combinator/data/parser/subsequence.ts +3 -3
- package/src/combinator/data/parser/tails.ts +3 -3
- package/src/combinator/data/parser.ts +1 -2
- package/src/parser/api/bind.ts +1 -1
- package/src/parser/api/parse.test.ts +15 -12
- package/src/parser/api/parse.ts +1 -1
- package/src/parser/block/blockquote.ts +4 -4
- package/src/parser/block/dlist.ts +3 -3
- package/src/parser/block/extension/table.ts +4 -4
- package/src/parser/block/ilist.ts +2 -2
- package/src/parser/block/olist.ts +2 -2
- package/src/parser/block/reply/cite.ts +2 -2
- package/src/parser/block/reply/quote.ts +2 -2
- package/src/parser/block/sidefence.ts +2 -2
- package/src/parser/block/table.ts +5 -5
- package/src/parser/block/ulist.ts +2 -2
- package/src/parser/block.ts +2 -2
- package/src/parser/context.ts +7 -7
- package/src/parser/inline/annotation.test.ts +5 -5
- package/src/parser/inline/annotation.ts +6 -5
- package/src/parser/inline/autolink/email.ts +2 -2
- package/src/parser/inline/autolink/url.ts +2 -2
- package/src/parser/inline/autolink.ts +2 -2
- package/src/parser/inline/bracket.ts +13 -13
- package/src/parser/inline/code.ts +2 -2
- package/src/parser/inline/comment.ts +2 -2
- package/src/parser/inline/deletion.ts +5 -4
- package/src/parser/inline/emphasis.ts +5 -4
- package/src/parser/inline/emstrong.ts +5 -4
- package/src/parser/inline/extension/index.ts +8 -7
- package/src/parser/inline/extension/indexer.ts +2 -2
- package/src/parser/inline/extension/label.ts +2 -2
- package/src/parser/inline/extension/placeholder.ts +5 -4
- package/src/parser/inline/html.ts +2 -2
- package/src/parser/inline/htmlentity.ts +2 -2
- package/src/parser/inline/insertion.ts +5 -4
- package/src/parser/inline/link.test.ts +2 -1
- package/src/parser/inline/link.ts +26 -17
- package/src/parser/inline/mark.ts +5 -4
- package/src/parser/inline/math.ts +3 -3
- package/src/parser/inline/media.test.ts +1 -0
- package/src/parser/inline/media.ts +7 -6
- package/src/parser/inline/reference.test.ts +5 -5
- package/src/parser/inline/reference.ts +7 -6
- package/src/parser/inline/ruby.test.ts +1 -0
- package/src/parser/inline/ruby.ts +4 -4
- package/src/parser/inline/strong.ts +5 -4
- package/src/parser/inline/template.ts +6 -6
- package/src/parser/inline.test.ts +4 -1
- package/src/parser/source/escapable.ts +2 -2
- package/src/parser/source/str.ts +5 -5
- package/src/parser/source/text.ts +2 -2
- package/src/parser/source/unescapable.ts +2 -2
package/package.json
CHANGED

package/src/combinator/control/manipulation/convert.ts
CHANGED

@@ -1,17 +1,17 @@
-import {
-import { Parser } from '../../data/parser';
+import { Parser, check } from '../../data/parser';
 
 export function convert<P extends Parser<unknown>>(conv: (source: string) => string, parser: P): P;
 export function convert<T>(conv: (source: string) => string, parser: Parser<T>): Parser<T> {
   assert(parser);
   return (source, context = {}) => {
     if (source === '') return;
-
-    if (
+    const src = conv(source);
+    if (src === '') return [[], ''];
     const memo = context.memo;
-
-    const result = parser(
-
+    memo && (memo.offset += source.length - src.length);
+    const result = parser(src, context);
+    assert(check(src, result));
+    memo && (memo.offset -= source.length - src.length);
     return result;
   };
 }

package/src/combinator/control/manipulation/scope.ts
CHANGED

@@ -14,10 +14,10 @@ export function focus<T>(scope: string | RegExp, parser: Parser<T>): Parser<T> {
     assert(source.startsWith(src));
     if (src === '') return;
     const memo = context.memo;
-
+    memo && (memo.offset += source.length - src.length);
     const result = parser(src, context);
     assert(check(src, result));
-
+    memo && (memo.offset -= source.length - src.length);
     if (!result) return;
     assert(exec(result).length < src.length);
     return exec(result).length < src.length
@@ -42,10 +42,10 @@ export function rewrite<T>(scope: Parser<unknown>, parser: Parser<T>): Parser<T>
     const src = source.slice(0, source.length - exec(res1).length);
     assert(src !== '');
     assert(source.startsWith(src));
-
+    memo && (memo.offset += source.length - src.length);
     const res2 = parser(src, context);
     assert(check(src, res2));
-
+    memo && (memo.offset -= source.length - src.length);
     if (!res2) return;
     assert(exec(res2) === '');
     return exec(res2).length < src.length

package/src/combinator/data/parser/context/memo.ts
CHANGED

@@ -1,30 +1,39 @@
-import { splice } from 'spica/array';
-
 export class Memo {
-  private memory: Record<string, readonly [any[], number]>[/* pos */] = [];
+  private readonly memory: Record<string, readonly [any[], number] | readonly []>[/* pos */] = [];
   public get length(): number {
     return this.memory.length;
   }
+  public offset = 0;
   public get(
     position: number,
-    rule: number,
     syntax: number,
     state: number,
-  ): readonly [any[], number] | undefined {
-
+  ): readonly [any[], number] | readonly [] | undefined {
+    //console.log('get', position + this.offset, syntax, state, this.memory[position + this.offset - 1]?.[`${syntax}:${state}`]);;
+    const cache = this.memory[position + this.offset - 1]?.[`${syntax}:${state}`];
+    return cache?.length === 2
+      ? [cache[0].slice(), cache[1]]
+      : cache;
   }
   public set(
     position: number,
-    rule: number,
     syntax: number,
     state: number,
-    nodes: any[],
+    nodes: any[] | undefined,
    offset: number,
   ): void {
-    const record = this.memory[position - 1] ??= {};
-    record[`${
+    const record = this.memory[position + this.offset - 1] ??= {};
+    assert(!record[`${syntax}:${state}`]);
+    record[`${syntax}:${state}`] = nodes
+      ? [nodes.slice(), offset]
+      : [];
+    //console.log('set', position + this.offset, syntax, state, record[`${syntax}:${state}`]);
   }
   public clear(position: number): void {
-
+    const memory = this.memory;
+    for (let i = position + this.offset, len = memory.length; i < len; ++i) {
+      memory.pop();
+    }
+    //console.log('clear', position);
   }
 }
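
The Memo.offset field added above compensates for combinators such as convert, focus, and rewrite that run the inner parser on a shortened source: cache positions are keyed by the remaining source length, so the wrapper widens the offset around the inner call and restores it afterwards. A minimal TypeScript sketch of that bookkeeping, using a simplified map-based table rather than the package's Memo class (all names below are illustrative):

// Sketch only: a simplified position-keyed memo table with an offset that
// wrappers adjust while the inner parser runs on a shortened source.
class PositionMemo<T> {
  private readonly memory = new Map<number, T>();
  public offset = 0;
  public get(position: number): T | undefined {
    // Adding the offset maps a position measured against the shortened
    // source back onto the original source's coordinate space.
    return this.memory.get(position + this.offset);
  }
  public set(position: number, value: T): void {
    this.memory.set(position + this.offset, value);
  }
}

// Mirrors `memo.offset += source.length - src.length` around the inner call.
function withShortenedSource<T>(memo: PositionMemo<unknown>, source: string, src: string, run: (src: string) => T): T {
  memo.offset += source.length - src.length;
  const result = run(src);
  memo.offset -= source.length - src.length;
  return result;
}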

package/src/combinator/data/parser/context.test.ts
CHANGED

@@ -1,7 +1,7 @@
 import { Parser, Ctx } from '../parser';
 import { some } from './some';
 import { reset, context } from './context';
-import {
+import { creation } from './context';
 
 describe('Unit: combinator/data/parser/context', () => {
   interface Context extends Ctx {
@@ -9,7 +9,7 @@ describe('Unit: combinator/data/parser/context', () => {
   }
 
   describe('reset', () => {
-    const parser: Parser<number> = some(
+    const parser: Parser<number> = some(creation(
       (s, context) => [[context.resources?.budget ?? NaN], s.slice(1)]));
 
     it('root', () => {
@@ -36,7 +36,7 @@ describe('Unit: combinator/data/parser/context', () => {
   });
 
   describe('context', () => {
-    const parser: Parser<boolean, Context> = some(
+    const parser: Parser<boolean, Context> = some(creation(
       (s, context) => [[context.status!], s.slice(1)]));
 
     it('', () => {

package/src/combinator/data/parser/context.ts
CHANGED

@@ -56,19 +56,12 @@ function apply<T>(parser: Parser<T>, source: string, context: Ctx, changes: [str
   return result;
 }
 
-export function syntax<P extends Parser<unknown>>(syntax: number, precedence: number, parser: P): P;
 export function syntax<P extends Parser<unknown>>(syntax: number, precedence: number, cost: number, parser: P): P;
-export function syntax<T>(syntax: number, precedence: number, cost: number
-  if (typeof cost === 'function') {
-    parser = cost;
-    cost = 1;
-  }
+export function syntax<T>(syntax: number, precedence: number, cost: number, parser?: Parser<T>): Parser<T> {
   return (source, context) => {
     if (source === '') return;
-    const
-    context.
-    context.backtrackable ??= ~0;
-    context.state ??= 0;
+    const memo = context.memo ??= new Memo();
+    context.memorable ??= ~0;
     const p = context.precedence;
     context.precedence = precedence;
     const { resources = { budget: 1, recursion: 1 } } = context;
@@ -76,38 +69,36 @@ export function syntax<T>(syntax: number, precedence: number, cost: number | Par
     if (resources.recursion <= 0) throw new Error('Too much recursion');
     --resources.recursion;
     const pos = source.length;
-    const
+    const state = context.state ?? 0;
+    const cache = syntax && memo.get(pos, syntax, state);
     const result: Result<T> = cache
-      ?
+      ? cache.length === 0
+        ? undefined
+        : [cache[0], source.slice(cache[1])]
       : parser!(source, context);
     ++resources.recursion;
-    if (result) {
-
-
-
+    if (result && !cache) {
+      resources.budget -= cost;
+    }
+    if (syntax) {
+      if (state & context.memorable!) {
+        cache ?? memo.set(pos, syntax, state, eval(result), source.length - exec(result, '').length);
+        assert.deepStrictEqual(cache && cache, cache && memo.get(pos, syntax, state));
      }
-      if (
-
-
-      cache ?? context.memo.set(pos, context.rule, syntax, context.state, eval(result), source.length - exec(result).length);
-      assert.deepStrictEqual(cache && cache, cache && context.memo.get(pos, context.rule, syntax, context.state));
-      }
-      else if (context.memo?.length! >= pos) {
-        assert(!(r & context.backtrackable));
-        context.memo!.clear(pos);
-      }
+      else if (result && memo.length! >= pos) {
+        assert(!(state & context.memorable!));
+        memo.clear(pos);
      }
    }
     context.precedence = p;
-    context.rule = r;
     return result;
   };
 }
 
-export function
-export function
-export function
-  if (typeof cost === 'function') return
+export function creation<P extends Parser<unknown>>(parser: P): P;
+export function creation<P extends Parser<unknown>>(cost: number, parser: P): P;
+export function creation(cost: number | Parser<unknown>, parser?: Parser<unknown>): Parser<unknown> {
+  if (typeof cost === 'function') return creation(1, cost);
   assert(cost >= 0);
   return (source, context) => {
     const { resources = { budget: 1, recursion: 1 } } = context;
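
The creation combinator introduced above normalizes its two overloads by recursing with a default cost of 1 before doing any budget accounting. A rough TypeScript sketch of that overload pattern, using a simplified parser signature rather than the package's Parser type (names below are illustrative):

// Sketch only: the overload-normalization pattern of creation(parser) /
// creation(cost, parser), shown with a simplified parser signature.
type SimpleParser<T> = (source: string) => [T[], string] | undefined;

function creation<T>(parser: SimpleParser<T>): SimpleParser<T>;
function creation<T>(cost: number, parser: SimpleParser<T>): SimpleParser<T>;
function creation<T>(cost: number | SimpleParser<T>, parser?: SimpleParser<T>): SimpleParser<T> {
  // A single-argument call is rewritten to the two-argument form with cost 1,
  // as in the diff above.
  if (typeof cost === 'function') return creation(1, cost);
  return source => {
    // The real combinator also checks and debits a shared resource budget by
    // `cost`; that bookkeeping is omitted here.
    return parser!(source);
  };
}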

package/src/combinator/data/parser/inits.ts
CHANGED

@@ -2,8 +2,8 @@ import { undefined } from 'spica/global';
 import { Parser, Ctx, Tree, Context, SubParsers, SubTree, eval, exec, check } from '../parser';
 import { push } from 'spica/array';
 
-export function inits<P extends Parser<unknown>>(parsers: SubParsers<P>): SubTree<P> extends Tree<P> ? P : Parser<SubTree<P>, Context<P>, SubParsers<P>>;
-export function inits<T, D extends Parser<T>[]>(parsers: D): Parser<T, Ctx, D> {
+export function inits<P extends Parser<unknown>>(parsers: SubParsers<P>, resume?: (nodes: SubTree<P>[], rest: string) => boolean): SubTree<P> extends Tree<P> ? P : Parser<SubTree<P>, Context<P>, SubParsers<P>>;
+export function inits<T, D extends Parser<T>[]>(parsers: D, resume?: (nodes: T[], rest: string) => boolean): Parser<T, Ctx, D> {
   assert(parsers.every(f => f));
   if (parsers.length === 1) return parsers[0];
   return (source, context) => {
@@ -19,6 +19,7 @@ export function inits<T, D extends Parser<T>[]>(parsers: D): Parser<T, Ctx, D> {
         ? push(nodes, eval(result))
         : eval(result);
       rest = exec(result);
+      if (resume?.(eval(result), exec(result)) === false) break;
     }
     assert(rest.length <= source.length);
     return nodes && rest.length < source.length

package/src/combinator/data/parser/sequence.ts
CHANGED

@@ -2,8 +2,8 @@ import { undefined } from 'spica/global';
 import { Parser, Ctx, Tree, Context, SubParsers, SubTree, eval, exec, check } from '../parser';
 import { push } from 'spica/array';
 
-export function sequence<P extends Parser<unknown>>(parsers: SubParsers<P>): SubTree<P> extends Tree<P> ? P : Parser<SubTree<P>, Context<P>, SubParsers<P>>;
-export function sequence<T, D extends Parser<T>[]>(parsers: D): Parser<T, Ctx, D> {
+export function sequence<P extends Parser<unknown>>(parsers: SubParsers<P>, resume?: (nodes: SubTree<P>[], rest: string) => boolean): SubTree<P> extends Tree<P> ? P : Parser<SubTree<P>, Context<P>, SubParsers<P>>;
+export function sequence<T, D extends Parser<T>[]>(parsers: D, resume?: (nodes: T[], rest: string) => boolean): Parser<T, Ctx, D> {
   assert(parsers.every(f => f));
   if (parsers.length === 1) return parsers[0];
   return (source, context) => {
@@ -19,6 +19,7 @@ export function sequence<T, D extends Parser<T>[]>(parsers: D): Parser<T, Ctx, D
         ? push(nodes, eval(result))
         : eval(result);
       rest = exec(result);
+      if (resume?.(eval(result), exec(result)) === false) return;
     }
     assert(rest.length <= source.length);
     return nodes && rest.length < source.length

package/src/combinator/data/parser/some.ts
CHANGED

@@ -1,7 +1,7 @@
 import { undefined } from 'spica/global';
 import { Parser, eval, exec, check } from '../parser';
 import { Delimiters } from './context/delimiter';
-import { push } from 'spica/array';
+import { unshift, push } from 'spica/array';
 
 type DelimiterOption = readonly [delimiter: string | RegExp, precedence: number];
 
@@ -32,7 +32,9 @@ export function some<T>(parser: Parser<T>, end?: string | RegExp | number, delim
     assert.doesNotThrow(() => limit < 0 && check(rest, result));
     if (!result) break;
     nodes = nodes
-      ?
+      ? nodes.length < eval(result).length
+        ? unshift(nodes, eval(result))
+        : push(nodes, eval(result))
       : eval(result);
     rest = exec(result);
     if (limit >= 0 && source.length - rest.length > limit) break;

package/src/combinator/data/parser/subsequence.ts
CHANGED

@@ -2,12 +2,12 @@ import { Parser, Ctx, Tree, Context, SubParsers, SubTree } from '../parser';
 import { union } from './union';
 import { inits } from './inits';
 
-export function subsequence<P extends Parser<unknown>>(parsers: SubParsers<P>): SubTree<P> extends Tree<P> ? P : Parser<SubTree<P>, Context<P>, SubParsers<P>>;
-export function subsequence<T, D extends Parser<T>[]>(parsers: D): Parser<T, Ctx, D> {
+export function subsequence<P extends Parser<unknown>>(parsers: SubParsers<P>, resume?: (nodes: SubTree<P>[], rest: string) => boolean): SubTree<P> extends Tree<P> ? P : Parser<SubTree<P>, Context<P>, SubParsers<P>>;
+export function subsequence<T, D extends Parser<T>[]>(parsers: D, resume?: (nodes: T[], rest: string) => boolean): Parser<T, Ctx, D> {
   assert(parsers.every(f => f));
   return union(
     parsers.map((_, i) =>
       i + 1 < parsers.length
-        ? inits([parsers[i], subsequence(parsers.slice(i + 1))])
+        ? inits([parsers[i], subsequence(parsers.slice(i + 1), resume)], resume)
         : parsers[i]) as D);
 }

package/src/combinator/data/parser/tails.ts
CHANGED

@@ -2,7 +2,7 @@ import { Parser, Ctx, Tree, Context, SubParsers, SubTree } from '../parser';
 import { union } from './union';
 import { sequence } from './sequence';
 
-export function tails<P extends Parser<unknown>>(parsers: SubParsers<P>): SubTree<P> extends Tree<P> ? P : Parser<SubTree<P>, Context<P>, SubParsers<P>>;
-export function tails<T, D extends Parser<T>[]>(parsers: D): Parser<T, Ctx, D> {
-  return union(parsers.map((_, i) => sequence(parsers.slice(i))) as D);
+export function tails<P extends Parser<unknown>>(parsers: SubParsers<P>, resume?: (nodes: SubTree<P>[], rest: string) => boolean): SubTree<P> extends Tree<P> ? P : Parser<SubTree<P>, Context<P>, SubParsers<P>>;
+export function tails<T, D extends Parser<T>[]>(parsers: D, resume?: (nodes: T[], rest: string) => boolean): Parser<T, Ctx, D> {
+  return union(parsers.map((_, i) => sequence(parsers.slice(i), resume)) as D);
 }
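
The resume callback threaded through inits, sequence, subsequence, and tails above lets a caller inspect the accumulated nodes and remaining input after each step and stop early by returning false. A minimal sketch of that control flow with a simplified parser type, not the package's combinators (names below are illustrative):

// Sketch only: an inits-style combinator that stops consuming further parsers
// when `resume` returns false, echoing the checks added in the diff above.
type SimpleParser<T> = (source: string) => [T[], string] | undefined;

function initsSketch<T>(parsers: SimpleParser<T>[], resume?: (nodes: T[], rest: string) => boolean): SimpleParser<T> {
  return source => {
    let nodes: T[] = [];
    let rest = source;
    for (const parser of parsers) {
      const result = parser(rest);
      if (!result) break;
      nodes = nodes.concat(result[0]);
      rest = result[1];
      // Returning false from `resume` cuts the sequence short at this step.
      if (resume?.(result[0], rest) === false) break;
    }
    return nodes.length > 0 && rest.length < source.length ? [nodes, rest] : undefined;
  };
}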

package/src/combinator/data/parser.ts
CHANGED

@@ -15,8 +15,7 @@ export interface Ctx {
   precedence?: number;
   delimiters?: Delimiters;
   state?: number;
-
-  backtrackable?: number;
+  memorable?: number;
   memo?: Memo;
 }
 export type Tree<P extends Parser<unknown>> = P extends Parser<infer T> ? T : never;
package/src/parser/api/bind.ts
CHANGED

@@ -24,7 +24,7 @@ export function bind(target: DocumentFragment | HTMLElement | ShadowRoot, settin
   let context: MarkdownParser.Context = {
     ...settings,
     host: settings.host ?? new ReadonlyURL(location.pathname, location.origin),
-    backtrackable,
+    memorable: backtrackable,
   };
   if (context.host?.origin === 'null') throw new Error(`Invalid host: ${context.host.href}`);
   assert(!settings.id);

package/src/parser/api/parse.test.ts
CHANGED

@@ -235,32 +235,35 @@ describe('Unit: parser/api/parse', () => {
 
   it('recursion', () => {
     assert.deepStrictEqual(
-      [...parse('
-      [`<p>${'
+      [...parse('{'.repeat(20)).children].map(el => el.outerHTML),
+      [`<p>${'{'.repeat(20)}</p>`]);
     assert.deepStrictEqual(
-      [...parse('
+      [...parse('{'.repeat(21)).children].map(el => el.outerHTML.replace(/:\w+/, ':rnd')),
       [
         '<h1 id="error:rnd" class="error">Error: Too much recursion</h1>',
-        `<pre class="error" translate="no">${'
+        `<pre class="error" translate="no">${'{'.repeat(21)}</pre>`,
       ]);
     assert.deepStrictEqual(
-      [...parse('
-      [`<p>${'
+      [...parse('('.repeat(20)).children].map(el => el.outerHTML),
+      [`<p>${'('.repeat(20)}</p>`]);
     assert.deepStrictEqual(
-      [...parse('
+      [...parse('('.repeat(22)).children].map(el => el.outerHTML.replace(/:\w+/, ':rnd')),
       [
         '<h1 id="error:rnd" class="error">Error: Too much recursion</h1>',
-        `<pre class="error" translate="no">${'
+        `<pre class="error" translate="no">${'('.repeat(22)}</pre>`,
       ]);
     assert.deepStrictEqual(
-      [...parse('
-      [`<p>${'
+      [...parse('['.repeat(20)).children].map(el => el.outerHTML),
+      [`<p>${'['.repeat(20)}</p>`]);
     assert.deepStrictEqual(
-      [...parse('
+      [...parse('['.repeat(22)).children].map(el => el.outerHTML.replace(/:\w+/, ':rnd')),
       [
         '<h1 id="error:rnd" class="error">Error: Too much recursion</h1>',
-        `<pre class="error" translate="no">${'
+        `<pre class="error" translate="no">${'['.repeat(22)}</pre>`,
       ]);
+    assert.deepStrictEqual(
+      [...parse('['.repeat(17) + '\na').children].map(el => el.outerHTML),
+      [`<p>${'['.repeat(17)}<br>a</p>`]);
   });
 
   if (!navigator.userAgent.includes('Chrome')) return;
package/src/parser/api/parse.ts
CHANGED

@@ -30,7 +30,7 @@ export function parse(source: string, opts: Options = {}, context?: MarkdownPars
     ...context?.resources && {
       resources: context.resources,
     },
-    backtrackable,
+    memorable: backtrackable,
   };
   if (context.host?.origin === 'null') throw new Error(`Invalid host: ${context.host.href}`);
   const node = frag();

package/src/parser/block/blockquote.ts
CHANGED

@@ -1,5 +1,5 @@
 import { BlockquoteParser } from '../block';
-import { union, some,
+import { union, some, creation, block, validate, rewrite, open, convert, lazy, fmap } from '../../combinator';
 import { autolink } from '../autolink';
 import { contentline } from '../source';
 import { parse } from '../api/parse';
@@ -19,7 +19,7 @@ const indent = block(open(opener, some(contentline, /^>(?:$|\s)/)), false);
 const unindent = (source: string) => source.replace(/(^|\n)>(?:[^\S\n]|(?=>*(?:$|\s)))|\n$/g, '$1');
 
 const source: BlockquoteParser.SourceParser = lazy(() => fmap(
-  some(
+  some(creation(union([
     rewrite(
       indent,
       convert(unindent, source)),
@@ -30,11 +30,11 @@ const source: BlockquoteParser.SourceParser = lazy(() => fmap(
   ns => [html('blockquote', ns)]));
 
 const markdown: BlockquoteParser.MarkdownParser = lazy(() => fmap(
-  some(
+  some(creation(union([
     rewrite(
       indent,
       convert(unindent, markdown)),
-
+    creation(99,
       rewrite(
         some(contentline, opener),
         convert(unindent, (source, context) => {

package/src/parser/block/dlist.ts
CHANGED

@@ -1,5 +1,5 @@
 import { DListParser } from '../block';
-import { union, inits, some,
+import { union, inits, some, creation, state, block, line, validate, rewrite, open, trimEnd, lazy, fmap } from '../../combinator';
 import { inline, indexee, indexer } from '../inline';
 import { anyline } from '../source';
 import { State } from '../context';
@@ -17,13 +17,13 @@ export const dlist: DListParser = lazy(() => block(localize(fmap(validate(
   ]))),
   es => [html('dl', fillTrailingDescription(es))]))));
 
-const term: DListParser.TermParser =
+const term: DListParser.TermParser = creation(line(indexee(fmap(open(
   /^~[^\S\n]+(?=\S)/,
   visualize(trimBlank(some(union([indexer, inline])))),
   true),
   ns => [html('dt', defrag(ns))]))));
 
-const desc: DListParser.DescriptionParser =
+const desc: DListParser.DescriptionParser = creation(block(fmap(open(
   /^:[^\S\n]+(?=\S)|/,
   rewrite(
     some(anyline, /^[~:][^\S\n]+\S/),

package/src/parser/block/extension/table.ts
CHANGED

@@ -2,7 +2,7 @@ import { undefined, BigInt, Array } from 'spica/global';
 import { max, min, isArray } from 'spica/alias';
 import { ExtensionParser } from '../../block';
 import { Tree, eval } from '../../../combinator/data/parser';
-import { union, subsequence, inits, some,
+import { union, subsequence, inits, some, creation, block, line, validate, fence, rewrite, open, clear, convert, trim, dup, lazy, fmap } from '../../../combinator';
 import { inline } from '../../inline';
 import { str, anyline, emptyline, contentline } from '../../source';
 import { localize } from '../../locale';
@@ -79,7 +79,7 @@ const align: AlignParser = line(fmap(
 
 const delimiter = /^[-=<>]+(?:\/[-=^v]*)?(?=[^\S\n]*\n)|^[#:](?:(?!:\D|0)\d*:(?!0)\d*)?!*(?=\s)/;
 
-const head: CellParser.HeadParser =
+const head: CellParser.HeadParser = creation(block(fmap(open(
   str(/^#(?:(?!:\D|0)\d*:(?!0)\d*)?!*(?=\s)/),
   rewrite(
     inits([
@@ -91,7 +91,7 @@ const head: CellParser.HeadParser = creator(block(fmap(open(
   ns => [html('th', attributes(ns.shift()! as string), defrag(ns))]),
   false));
 
-const data: CellParser.DataParser =
+const data: CellParser.DataParser = creation(block(fmap(open(
   str(/^:(?:(?!:\D|0)\d*:(?!0)\d*)?!*(?=\s)/),
   rewrite(
     inits([
@@ -103,7 +103,7 @@ const data: CellParser.DataParser = creator(block(fmap(open(
   ns => [html('td', attributes(ns.shift()! as string), defrag(ns))]),
   false));
 
-const dataline: CellParser.DatalineParser =
+const dataline: CellParser.DatalineParser = creation(line(
   rewrite(
     contentline,
     union([

package/src/parser/block/ilist.ts
CHANGED

@@ -1,5 +1,5 @@
 import { IListParser } from '../block';
-import { union, inits, some,
+import { union, inits, some, creation, state, block, line, validate, indent, open, fallback, lazy, fmap } from '../../combinator';
 import { ulist_, fillFirstLine } from './ulist';
 import { olist_, invalid } from './olist';
 import { inline } from '../inline';
@@ -13,7 +13,7 @@ export const ilist: IListParser = lazy(() => block(validate(
 
 export const ilist_: IListParser = lazy(() => block(fmap(validate(
   /^[-+*](?:$|\s)/,
-  some(
+  some(creation(union([
     fmap(fallback(
       inits([
         line(open(/^[-+*](?:$|\s)/, some(inline), true)),

package/src/parser/block/olist.ts
CHANGED

@@ -1,6 +1,6 @@
 import { undefined } from 'spica/global';
 import { OListParser } from '../block';
-import { union, inits, subsequence, some,
+import { union, inits, subsequence, some, creation, state, block, line, validate, indent, focus, rewrite, open, match, fallback, lazy, fmap } from '../../combinator';
 import { checkbox, ulist_, fillFirstLine } from './ulist';
 import { ilist_ } from './ilist';
 import { inline, indexee, indexer } from '../inline';
@@ -36,7 +36,7 @@ export const olist_: OListParser = lazy(() => block(union([
 ])));
 
 const list = (type: string, form: string): OListParser.ListParser => fmap(
-  some(
+  some(creation(union([
     indexee(fmap(fallback(
       inits([
        line(open(heads[form], subsequence([checkbox, trimBlank(some(union([indexer, inline])))]), true)),

package/src/parser/block/reply/cite.ts
CHANGED

@@ -1,10 +1,10 @@
 import { ReplyParser } from '../../block';
-import { union, tails,
+import { union, tails, creation, line, validate, focus, reverse, fmap } from '../../../combinator';
 import { anchor } from '../../inline/autolink/anchor';
 import { str } from '../../source';
 import { html, define, defrag } from 'typed-dom/dom';
 
-export const cite: ReplyParser.CiteParser =
+export const cite: ReplyParser.CiteParser = creation(line(fmap(validate(
   '>>',
   reverse(tails([
     str(/^>*(?=>>[^>\s]+[^\S\n]*(?:$|\n))/),

package/src/parser/block/reply/quote.ts
CHANGED

@@ -1,6 +1,6 @@
 import { ReplyParser } from '../../block';
 import { eval } from '../../../combinator/data/parser';
-import { union, some,
+import { union, some, creation, block, line, validate, rewrite, lazy, fmap } from '../../../combinator';
 import { math } from '../../inline/math';
 import { str, anyline } from '../../source';
 import { autolink } from '../../autolink';
@@ -8,7 +8,7 @@ import { html, defrag } from 'typed-dom/dom';
 
 export const syntax = /^>+(?=[^\S\n])|^>(?=[^\s>])|^>+(?=[^\s>])(?![0-9a-z]+(?:-[0-9a-z]+)*(?![0-9A-Za-z@#:]))/;
 
-export const quote: ReplyParser.QuoteParser = lazy(() =>
+export const quote: ReplyParser.QuoteParser = lazy(() => creation(block(fmap(validate(
   '>',
   union([
     rewrite(

package/src/parser/block/sidefence.ts
CHANGED

@@ -1,5 +1,5 @@
 import { SidefenceParser } from '../block';
-import { union, some,
+import { union, some, creation, block, focus, rewrite, convert, lazy, fmap } from '../../combinator';
 import { autolink } from '../autolink';
 import { contentline } from '../source';
 import { html, define, defrag } from 'typed-dom/dom';
@@ -20,7 +20,7 @@ const opener = /^(?=\|\|+(?:$|\s))/;
 const unindent = (source: string) => source.replace(/(^|\n)\|(?:[^\S\n]|(?=\|*(?:$|\s)))|\n$/g, '$1');
 
 const source: SidefenceParser.SourceParser = lazy(() => fmap(
-  some(
+  some(creation(union([
     focus(
       /^(?:\|\|+(?:[^\S\n][^\n]*)?(?:$|\n))+/,
       convert(unindent, source)),

package/src/parser/block/table.ts
CHANGED

@@ -1,5 +1,5 @@
 import { TableParser } from '../block';
-import { union, sequence, some,
+import { union, sequence, some, creation, block, line, validate, focus, rewrite, surround, open, fallback, lazy, fmap } from '../../combinator';
 import { inline } from '../inline';
 import { contentline } from '../source';
 import { trimNode } from '../visibility';
@@ -25,7 +25,7 @@ export const table: TableParser = lazy(() => block(fmap(validate(
   ]),
 ])));
 
-const row = <P extends CellParser | AlignParser>(parser: P, optional: boolean): RowParser<P> =>
+const row = <P extends CellParser | AlignParser>(parser: P, optional: boolean): RowParser<P> => creation(fallback(fmap(
   line(surround(/^(?=\|)/, some(union([parser])), /^[|\\]?\s*$/, optional)),
   es => [html('tr', es)]),
   rewrite(contentline, source => [[
@@ -37,7 +37,7 @@ const row = <P extends CellParser | AlignParser>(parser: P, optional: boolean):
     }, [html('td', source.replace('\n', ''))])
   ], ''])));
 
-const align: AlignParser =
+const align: AlignParser = creation(fmap(open(
   '|',
   union([
     focus(/^:-+:/, () => [['center'], '']),
@@ -52,11 +52,11 @@ const cell: CellParser = surround(
   some(union([inline]), /^\|/, [[/^[|\\]?\s*$/, 9]]),
   /^[^|]*/, true);
 
-const head: CellParser.HeadParser =
+const head: CellParser.HeadParser = creation(fmap(
   cell,
   ns => [html('th', trimNode(defrag(ns)))]));
 
-const data: CellParser.DataParser =
+const data: CellParser.DataParser = creation(fmap(
   cell,
   ns => [html('td', trimNode(defrag(ns)))]));
 

package/src/parser/block/ulist.ts
CHANGED

@@ -1,5 +1,5 @@
 import { UListParser } from '../block';
-import { union, inits, subsequence, some,
+import { union, inits, subsequence, some, creation, state, block, line, validate, indent, focus, open, fallback, lazy, fmap } from '../../combinator';
 import { olist_, invalid } from './olist';
 import { ilist_ } from './ilist';
 import { inline, indexer, indexee } from '../inline';
@@ -15,7 +15,7 @@ export const ulist: UListParser = lazy(() => block(validate(
 
 export const ulist_: UListParser = lazy(() => block(fmap(validate(
   /^-(?=$|\s)/,
-  some(
+  some(creation(union([
     indexee(fmap(fallback(
       inits([
        line(open(/^-(?:$|\s)/, subsequence([checkbox, trimBlank(some(union([indexer, inline])))]), true)),