@yozora/tokenizer-emphasis 1.3.0 → 2.0.0-alpha.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +4 -6
- package/lib/cjs/index.js +38 -24
- package/lib/esm/index.js +37 -25
- package/lib/types/index.d.ts +4 -4
- package/lib/types/match.d.ts +7 -0
- package/lib/types/parse.d.ts +3 -0
- package/lib/types/tokenizer.d.ts +6 -26
- package/lib/types/types.d.ts +7 -6
- package/package.json +5 -5
package/README.md
CHANGED
@@ -84,14 +84,14 @@ so you can use `YozoraParser` / `GfmExParser` / `GfmParser` directly.
 registered in *YastParser* as a plugin-in before it can be used.

 ```typescript {4,9}
-import {
+import { DefaultParser } from '@yozora/core-parser'
 import ParagraphTokenizer from '@yozora/tokenizer-paragraph'
 import TextTokenizer from '@yozora/tokenizer-text'
 import EmphasisTokenizer from '@yozora/tokenizer-emphasis'

-const parser = new
-.
-.
+const parser = new DefaultParser()
+  .useFallbackTokenizer(new ParagraphTokenizer())
+  .useFallbackTokenizer(new TextTokenizer())
   .useTokenizer(new EmphasisTokenizer())

 // parse source markdown content
@@ -265,7 +265,6 @@ Name | Type | Required | Default
 [@yozora/tokenizer-link]: https://github.com/yozorajs/yozora/tree/main/tokenizers/link#readme
 [@yozora/tokenizer-link-reference]: https://github.com/yozorajs/yozora/tree/main/tokenizers/link-reference#readme
 [@yozora/tokenizer-list]: https://github.com/yozorajs/yozora/tree/main/tokenizers/list#readme
-[@yozora/tokenizer-list-item]: https://github.com/yozorajs/yozora/tree/main/tokenizers/list-item#readme
 [@yozora/tokenizer-math]: https://github.com/yozorajs/yozora/tree/main/tokenizers/math#readme
 [@yozora/tokenizer-paragraph]: https://github.com/yozorajs/yozora/tree/main/tokenizers/paragraph#readme
 [@yozora/tokenizer-setext-heading]: https://github.com/yozorajs/yozora/tree/main/tokenizers/setext-heading#readme
@@ -325,7 +324,6 @@ Name | Type | Required | Default
 [doc-@yozora/tokenizer-definition]: https://yozora.guanghechen.com/docs/package/tokenizer-definition
 [doc-@yozora/tokenizer-link-reference]: https://yozora.guanghechen.com/docs/package/tokenizer-link-reference
 [doc-@yozora/tokenizer-list]: https://yozora.guanghechen.com/docs/package/tokenizer-list
-[doc-@yozora/tokenizer-list-item]: https://yozora.guanghechen.com/docs/package/tokenizer-list-item
 [doc-@yozora/tokenizer-math]: https://yozora.guanghechen.com/docs/package/tokenizer-math
 [doc-@yozora/tokenizer-paragraph]: https://yozora.guanghechen.com/docs/package/tokenizer-paragraph
 [doc-@yozora/tokenizer-setext-heading]: https://yozora.guanghechen.com/docs/package/tokenizer-setext-heading
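For context, the new README snippet assembles into the following usage. The final `parser.parse(...)` call is an assumption following the pattern of other yozora package READMEs; it is not part of this diff:

```typescript
import { DefaultParser } from '@yozora/core-parser'
import ParagraphTokenizer from '@yozora/tokenizer-paragraph'
import TextTokenizer from '@yozora/tokenizer-text'
import EmphasisTokenizer from '@yozora/tokenizer-emphasis'

const parser = new DefaultParser()
  .useFallbackTokenizer(new ParagraphTokenizer())
  .useFallbackTokenizer(new TextTokenizer())
  .useTokenizer(new EmphasisTokenizer())

// parse source markdown content (call shape assumed from sibling READMEs)
const ast = parser.parse('*emphasis* and **strong**')
```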
package/lib/cjs/index.js
CHANGED
@@ -6,17 +6,13 @@ var ast = require('@yozora/ast');
 var character = require('@yozora/character');
 var coreTokenizer = require('@yozora/core-tokenizer');

-const
-
-
-
-
-
-
-        priority: (_b = props.priority) !== null && _b !== void 0 ? _b : coreTokenizer.TokenizerPriority.CONTAINING_INLINE,
-    });
-}
-_findDelimiter(startIndex, endIndex, api) {
+const match = function (api) {
+    return {
+        findDelimiter: () => coreTokenizer.genFindDelimiter(_findDelimiter),
+        isDelimiterPair,
+        processDelimiterPair,
+    };
+    function _findDelimiter(startIndex, endIndex) {
     const nodePoints = api.getNodePoints();
     const blockStartIndex = api.getBlockStartIndex();
     const blockEndIndex = api.getBlockEndIndex();
@@ -94,21 +90,18 @@ class EmphasisTokenizer extends coreTokenizer.BaseInlineTokenizer {
     }
     return null;
 }
-isDelimiterPair(openerDelimiter, closerDelimiter
+    function isDelimiterPair(openerDelimiter, closerDelimiter) {
     const nodePoints = api.getNodePoints();
     if (nodePoints[openerDelimiter.startIndex].codePoint !==
         nodePoints[closerDelimiter.startIndex].codePoint ||
         ((openerDelimiter.type === 'both' || closerDelimiter.type === 'both') &&
-            (openerDelimiter.originalThickness +
-                closerDelimiter.originalThickness) %
-                3 ===
-                0 &&
+            (openerDelimiter.originalThickness + closerDelimiter.originalThickness) % 3 === 0 &&
             openerDelimiter.originalThickness % 3 !== 0)) {
         return { paired: false, opener: true, closer: true };
     }
     return { paired: true };
 }
-processDelimiterPair(openerDelimiter, closerDelimiter, internalTokens
+    function processDelimiterPair(openerDelimiter, closerDelimiter, internalTokens) {
     let thickness = 1;
     if (openerDelimiter.thickness > 1 && closerDelimiter.thickness > 1) {
         thickness = 2;
@@ -145,15 +138,36 @@ class EmphasisTokenizer extends coreTokenizer.BaseInlineTokenizer {
         remainCloserDelimiter,
     };
 }
-
-
-
-
-
-
+};
+
+const parse = function (api) {
+    return {
+        parse: tokens => tokens.map(token => {
+            const children = api.parseInlineTokens(token.children);
+            const node = api.shouldReservePosition
+                ? { type: token.nodeType, position: api.calcPosition(token), children }
+                : { type: token.nodeType, children };
+            return node;
+        }),
+    };
+};
+
+const uniqueName = '@yozora/tokenizer-emphasis';
+
+class EmphasisTokenizer extends coreTokenizer.BaseInlineTokenizer {
+    constructor(props = {}) {
+        var _a, _b;
+        super({
+            name: (_a = props.name) !== null && _a !== void 0 ? _a : uniqueName,
+            priority: (_b = props.priority) !== null && _b !== void 0 ? _b : coreTokenizer.TokenizerPriority.CONTAINING_INLINE,
+        });
+        this.match = match;
+        this.parse = parse;
     }
 }

 exports.EmphasisTokenizer = EmphasisTokenizer;
 exports.EmphasisTokenizerName = uniqueName;
-exports[
+exports["default"] = EmphasisTokenizer;
+exports.emphasisMatch = match;
+exports.emphasisParse = parse;
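The reflowed condition in `isDelimiterPair` is GFM's "multiple of three" rule for emphasis delimiter runs. A standalone sketch of the same predicate, using hypothetical names (`DelimiterLike`, `canPair`) that are not part of this package; only the arithmetic mirrors the code above (the real hook also compares delimiter code points):

```typescript
// Hypothetical types/names; only the rule-of-three check is taken from the diff.
interface DelimiterLike {
  type: 'opener' | 'closer' | 'both'
  originalThickness: number // full length of the delimiter run
}

function canPair(opener: DelimiterLike, closer: DelimiterLike): boolean {
  // When either run can both open and close, pairing is forbidden if the
  // combined run length is a multiple of 3 while the opener's own length
  // is not a multiple of 3 (GFM, emphasis rules 9-10).
  if (
    (opener.type === 'both' || closer.type === 'both') &&
    (opener.originalThickness + closer.originalThickness) % 3 === 0 &&
    opener.originalThickness % 3 !== 0
  ) {
    return false
  }
  return true
}

// e.g. an opener run of length 1 against a 'both' run of length 2:
// (1 + 2) % 3 === 0 and 1 % 3 !== 0, so the pair is rejected.
```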
package/lib/esm/index.js
CHANGED
@@ -1,18 +1,14 @@
 import { EmphasisType, StrongType } from '@yozora/ast';
 import { AsciiCodePoint, isPunctuationCharacter, isUnicodeWhitespaceCharacter } from '@yozora/character';
-import { BaseInlineTokenizer, TokenizerPriority
+import { genFindDelimiter, eatOptionalCharacters, BaseInlineTokenizer, TokenizerPriority } from '@yozora/core-tokenizer';

-const
-
-
-
-
-
-
-        priority: (_b = props.priority) !== null && _b !== void 0 ? _b : TokenizerPriority.CONTAINING_INLINE,
-    });
-}
-_findDelimiter(startIndex, endIndex, api) {
+const match = function (api) {
+    return {
+        findDelimiter: () => genFindDelimiter(_findDelimiter),
+        isDelimiterPair,
+        processDelimiterPair,
+    };
+    function _findDelimiter(startIndex, endIndex) {
     const nodePoints = api.getNodePoints();
     const blockStartIndex = api.getBlockStartIndex();
     const blockEndIndex = api.getBlockEndIndex();
@@ -90,21 +86,18 @@ class EmphasisTokenizer extends BaseInlineTokenizer {
     }
     return null;
 }
-isDelimiterPair(openerDelimiter, closerDelimiter
+    function isDelimiterPair(openerDelimiter, closerDelimiter) {
     const nodePoints = api.getNodePoints();
     if (nodePoints[openerDelimiter.startIndex].codePoint !==
         nodePoints[closerDelimiter.startIndex].codePoint ||
         ((openerDelimiter.type === 'both' || closerDelimiter.type === 'both') &&
-            (openerDelimiter.originalThickness +
-                closerDelimiter.originalThickness) %
-                3 ===
-                0 &&
+            (openerDelimiter.originalThickness + closerDelimiter.originalThickness) % 3 === 0 &&
             openerDelimiter.originalThickness % 3 !== 0)) {
         return { paired: false, opener: true, closer: true };
     }
     return { paired: true };
 }
-processDelimiterPair(openerDelimiter, closerDelimiter, internalTokens
+    function processDelimiterPair(openerDelimiter, closerDelimiter, internalTokens) {
     let thickness = 1;
     if (openerDelimiter.thickness > 1 && closerDelimiter.thickness > 1) {
         thickness = 2;
@@ -141,13 +134,32 @@ class EmphasisTokenizer extends BaseInlineTokenizer {
         remainCloserDelimiter,
     };
 }
-
-
-
-
-
-
+};
+
+const parse = function (api) {
+    return {
+        parse: tokens => tokens.map(token => {
+            const children = api.parseInlineTokens(token.children);
+            const node = api.shouldReservePosition
+                ? { type: token.nodeType, position: api.calcPosition(token), children }
+                : { type: token.nodeType, children };
+            return node;
+        }),
+    };
+};
+
+const uniqueName = '@yozora/tokenizer-emphasis';
+
+class EmphasisTokenizer extends BaseInlineTokenizer {
+    constructor(props = {}) {
+        var _a, _b;
+        super({
+            name: (_a = props.name) !== null && _a !== void 0 ? _a : uniqueName,
+            priority: (_b = props.priority) !== null && _b !== void 0 ? _b : TokenizerPriority.CONTAINING_INLINE,
+        });
+        this.match = match;
+        this.parse = parse;
     }
 }

-export { EmphasisTokenizer, uniqueName as EmphasisTokenizerName, EmphasisTokenizer as default };
+export { EmphasisTokenizer, uniqueName as EmphasisTokenizerName, EmphasisTokenizer as default, match as emphasisMatch, parse as emphasisParse };
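The ESM build now exposes the hook creators as named exports alongside the class. A minimal consumption sketch based on the export statement above:

```typescript
import EmphasisTokenizer, {
  EmphasisTokenizerName,
  emphasisMatch,
  emphasisParse,
} from '@yozora/tokenizer-emphasis'

// The class wires the same creators internally (this.match = match, etc.),
// so emphasisMatch / emphasisParse are presumably there for composing
// custom tokenizers.
const tokenizer = new EmphasisTokenizer()
console.log(EmphasisTokenizerName) // '@yozora/tokenizer-emphasis'
```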
package/lib/types/index.d.ts
CHANGED
@@ -1,5 +1,5 @@
-
-export {
+export { match as emphasisMatch } from './match';
+export { parse as emphasisParse } from './parse';
+export { EmphasisTokenizer, EmphasisTokenizer as default } from './tokenizer';
 export { uniqueName as EmphasisTokenizerName } from './types';
-export type {
-export default EmphasisTokenizer;
+export type { IThis as IEmphasisHookContext, IToken as IEmphasisToken, ITokenizerProps as IEmphasisTokenizerProps, } from './types';

package/lib/types/match.d.ts
ADDED
@@ -0,0 +1,7 @@
+import type { IMatchInlineHookCreator } from '@yozora/core-tokenizer';
+import type { IDelimiter, IThis, IToken, T } from './types';
+/**
+ * @see https://github.com/syntax-tree/mdast#strong
+ * @see https://github.github.com/gfm/#emphasis-and-strong-emphasis
+ */
+export declare const match: IMatchInlineHookCreator<T, IDelimiter, IToken, IThis>;
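A type-level sketch using the renamed aliases re-exported from index.d.ts; the empty props object is valid because `ITokenizerProps` is `Partial<IBaseInlineTokenizerProps>` (see types.d.ts below):

```typescript
import type {
  IEmphasisToken,
  IEmphasisTokenizerProps,
} from '@yozora/tokenizer-emphasis'

const props: IEmphasisTokenizerProps = {} // all fields optional
const runLength = (token: IEmphasisToken): number => token.thickness
```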
package/lib/types/tokenizer.d.ts
CHANGED
@@ -1,33 +1,13 @@
-import type {
-import type { MatchInlinePhaseApi, ResultOfIsDelimiterPair, ResultOfProcessDelimiterPair, Tokenizer, TokenizerMatchInlineHook, TokenizerParseInlineHook, YastInlineToken } from '@yozora/core-tokenizer';
+import type { IInlineTokenizer, IMatchInlineHookCreator, IParseInlineHookCreator } from '@yozora/core-tokenizer';
 import { BaseInlineTokenizer } from '@yozora/core-tokenizer';
-import type {
+import type { IDelimiter, INode, IThis, IToken, ITokenizerProps, T } from './types';
 /**
  * Lexical Analyzer for Emphasis and Strong Emphasis.
- *
  * @see https://github.com/syntax-tree/mdast#strong
 * @see https://github.github.com/gfm/#emphasis-and-strong-emphasis
 */
-export declare class EmphasisTokenizer extends BaseInlineTokenizer<
-    constructor(props?:
-
-
-     * @see BaseInlineTokenizer
-     */
-    protected _findDelimiter(startIndex: number, endIndex: number, api: Readonly<MatchInlinePhaseApi>): Delimiter | null;
-    /**
-     * @override
-     * @see TokenizerMatchInlineHook
-     */
-    isDelimiterPair(openerDelimiter: Delimiter, closerDelimiter: Delimiter, internalTokens: ReadonlyArray<YastInlineToken>, api: Readonly<MatchInlinePhaseApi>): ResultOfIsDelimiterPair;
-    /**
-     * @override
-     * @see TokenizerMatchInlineHook
-     */
-    processDelimiterPair(openerDelimiter: Delimiter, closerDelimiter: Delimiter, internalTokens: ReadonlyArray<YastInlineToken>, api: Readonly<MatchInlinePhaseApi>): ResultOfProcessDelimiterPair<T, Token, Delimiter>;
-    /**
-     * @override
-     * @see TokenizerParseInlineHook
-     */
-    parseInline(token: Token, children: YastNode[]): Node;
+export declare class EmphasisTokenizer extends BaseInlineTokenizer<T, IDelimiter, IToken, INode, IThis> implements IInlineTokenizer<T, IDelimiter, IToken, INode, IThis> {
+    constructor(props?: ITokenizerProps);
+    readonly match: IMatchInlineHookCreator<T, IDelimiter, IToken, IThis>;
+    readonly parse: IParseInlineHookCreator<T, IToken, INode, IThis>;
 }
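The class surface shrinks from five overridable methods to a constructor plus two readonly hook creators. Constructing it with explicit props, as a sketch; the defaults noted in the comments come from the constructor shown in lib/cjs/index.js:

```typescript
import { TokenizerPriority } from '@yozora/core-tokenizer'
import EmphasisTokenizer from '@yozora/tokenizer-emphasis'

const tokenizer = new EmphasisTokenizer({
  name: '@yozora/tokenizer-emphasis',            // default: uniqueName
  priority: TokenizerPriority.CONTAINING_INLINE, // default: same value
})
// v1-style overrides (_findDelimiter, isDelimiterPair, processDelimiterPair,
// parseInline) are gone; customization now goes through the match/parse creators.
```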
package/lib/types/types.d.ts
CHANGED
@@ -1,15 +1,15 @@
 import type { Emphasis, EmphasisType, Strong, StrongType } from '@yozora/ast';
-import type {
+import type { IBaseInlineTokenizerProps, IPartialYastInlineToken, ITokenizer, IYastTokenDelimiter } from '@yozora/core-tokenizer';
 export declare type T = EmphasisType | StrongType;
-export declare type
+export declare type INode = Emphasis | Strong;
 export declare const uniqueName = "@yozora/tokenizer-emphasis";
-export interface
+export interface IToken extends IPartialYastInlineToken<T> {
     /**
-     *
+     * IDelimiter thickness.
     */
    thickness: number;
 }
-export interface
+export interface IDelimiter extends IYastTokenDelimiter {
     /**
      * Thickness of the delimiter.
      */
@@ -19,4 +19,5 @@ export interface Delimiter extends YastTokenDelimiter {
      */
     originalThickness: number;
 }
-export declare type
+export declare type IThis = ITokenizer;
+export declare type ITokenizerProps = Partial<IBaseInlineTokenizerProps>;
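How `IToken['thickness']` relates to the `INode = Emphasis | Strong` union, as a sketch inferred from `processDelimiterPair` above (thickness becomes 2 only when both delimiter runs are longer than 1); `nodeTypeFor` is a hypothetical helper, not part of the package:

```typescript
import { EmphasisType, StrongType } from '@yozora/ast'

// Hypothetical helper: thickness 2 => strong, thickness 1 => emphasis.
const nodeTypeFor = (thickness: number) =>
  thickness === 2 ? StrongType : EmphasisType
```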
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@yozora/tokenizer-emphasis",
-  "version": "1.3.0",
+  "version": "2.0.0-alpha.3",
   "author": {
     "name": "guanghechen",
     "url": "https://github.com/guanghechen/"
@@ -35,9 +35,9 @@
     "test": "cross-env TS_NODE_FILES=true jest --config ../../jest.config.js --rootDir ."
   },
   "dependencies": {
-    "@yozora/ast": "^
-    "@yozora/character": "^
-    "@yozora/core-tokenizer": "^
+    "@yozora/ast": "^2.0.0-alpha.3",
+    "@yozora/character": "^2.0.0-alpha.3",
+    "@yozora/core-tokenizer": "^2.0.0-alpha.3"
   },
-  "gitHead": "
+  "gitHead": "9f274fc7487a8c1dd213405d92508f9a7621f730"
 }