@yozora/tokenizer-definition 2.0.0-alpha.0 → 2.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -84,14 +84,14 @@ so you can use `YozoraParser` / `GfmExParser` / `GfmParser` directly.
84
84
  registered in *YastParser* as a plug-in before it can be used.
85
85
 
86
86
  ```typescript {4,9}
87
- import { DefaultYastParser } from '@yozora/core-parser'
87
+ import { DefaultParser } from '@yozora/core-parser'
88
88
  import ParagraphTokenizer from '@yozora/tokenizer-paragraph'
89
89
  import TextTokenizer from '@yozora/tokenizer-text'
90
90
  import DefinitionTokenizer from '@yozora/tokenizer-definition'
91
91
 
92
- const parser = new DefaultYastParser()
93
- .useBlockFallbackTokenizer(new ParagraphTokenizer())
94
- .useInlineFallbackTokenizer(new TextTokenizer())
92
+ const parser = new DefaultParser()
93
+ .useFallbackTokenizer(new ParagraphTokenizer())
94
+ .useFallbackTokenizer(new TextTokenizer())
95
95
  .useTokenizer(new DefinitionTokenizer())
96
96
 
97
97
  // parse source markdown content
@@ -242,7 +242,6 @@ Name | Type | Required | Default
242
242
  [@yozora/tokenizer-link]: https://github.com/yozorajs/yozora/tree/main/tokenizers/link#readme
243
243
  [@yozora/tokenizer-link-reference]: https://github.com/yozorajs/yozora/tree/main/tokenizers/link-reference#readme
244
244
  [@yozora/tokenizer-list]: https://github.com/yozorajs/yozora/tree/main/tokenizers/list#readme
245
- [@yozora/tokenizer-list-item]: https://github.com/yozorajs/yozora/tree/main/tokenizers/list-item#readme
246
245
  [@yozora/tokenizer-math]: https://github.com/yozorajs/yozora/tree/main/tokenizers/math#readme
247
246
  [@yozora/tokenizer-paragraph]: https://github.com/yozorajs/yozora/tree/main/tokenizers/paragraph#readme
248
247
  [@yozora/tokenizer-setext-heading]: https://github.com/yozorajs/yozora/tree/main/tokenizers/setext-heading#readme
@@ -302,7 +301,6 @@ Name | Type | Required | Default
302
301
  [doc-@yozora/tokenizer-definition]: https://yozora.guanghechen.com/docs/package/tokenizer-definition
303
302
  [doc-@yozora/tokenizer-link-reference]: https://yozora.guanghechen.com/docs/package/tokenizer-link-reference
304
303
  [doc-@yozora/tokenizer-list]: https://yozora.guanghechen.com/docs/package/tokenizer-list
305
- [doc-@yozora/tokenizer-list-item]: https://yozora.guanghechen.com/docs/package/tokenizer-list-item
306
304
  [doc-@yozora/tokenizer-math]: https://yozora.guanghechen.com/docs/package/tokenizer-math
307
305
  [doc-@yozora/tokenizer-paragraph]: https://yozora.guanghechen.com/docs/package/tokenizer-paragraph
308
306
  [doc-@yozora/tokenizer-setext-heading]: https://yozora.guanghechen.com/docs/package/tokenizer-setext-heading
package/lib/cjs/index.js CHANGED
@@ -241,8 +241,8 @@ const match = function (api) {
241
241
  const token = {
242
242
  nodeType: ast.DefinitionType,
243
243
  position: {
244
- start: coreTokenizer.calcStartYastNodePoint(nodePoints, startIndex),
245
- end: coreTokenizer.calcEndYastNodePoint(nodePoints, endIndex - 1),
244
+ start: coreTokenizer.calcStartPoint(nodePoints, startIndex),
245
+ end: coreTokenizer.calcEndPoint(nodePoints, endIndex - 1),
246
246
  },
247
247
  label: labelState,
248
248
  destination: null,
@@ -350,7 +350,7 @@ const match = function (api) {
350
350
  }
351
351
  const lastLine = token.lines[token.lines.length - 1];
352
352
  token.title = null;
353
- token.position.end = coreTokenizer.calcEndYastNodePoint(lastLine.nodePoints, lastLine.endIndex - 1);
353
+ token.position.end = coreTokenizer.calcEndPoint(lastLine.nodePoints, lastLine.endIndex - 1);
354
354
  return {
355
355
  status: 'closingAndRollback',
356
356
  lines: token.lines.slice(token.lineNoOfTitle - 1),
@@ -376,7 +376,7 @@ const match = function (api) {
376
376
  const lines = token.lines.splice(token.lineNoOfTitle - 1);
377
377
  const lastLine = token.lines[token.lines.length - 1];
378
378
  token.title = null;
379
- token.position.end = coreTokenizer.calcEndYastNodePoint(lastLine.nodePoints, lastLine.endIndex - 1);
379
+ token.position.end = coreTokenizer.calcEndPoint(lastLine.nodePoints, lastLine.endIndex - 1);
380
380
  result = { status: 'closingAndRollback', lines };
381
381
  }
382
382
  }
@@ -390,9 +390,9 @@ const match = function (api) {
390
390
  }
391
391
  };
392
392
 
393
- const parse = function () {
393
+ const parse = function (api) {
394
394
  return {
395
- parse: token => {
395
+ parse: tokens => tokens.map(token => {
396
396
  const label = token._label;
397
397
  const identifier = token._identifier;
398
398
  const destinationPoints = token.destination.nodePoints;
@@ -403,15 +403,11 @@ const parse = function () {
403
403
  const title = token.title == null
404
404
  ? undefined
405
405
  : character.calcEscapedStringFromNodePoints(token.title.nodePoints, 1, token.title.nodePoints.length - 1);
406
- const node = {
407
- type: ast.DefinitionType,
408
- identifier,
409
- label,
410
- url,
411
- title,
412
- };
406
+ const node = api.shouldReservePosition
407
+ ? { type: ast.DefinitionType, position: token.position, identifier, label, url, title }
408
+ : { type: ast.DefinitionType, identifier, label, url, title };
413
409
  return node;
414
- },
410
+ }),
415
411
  };
416
412
  };
417
413
 
package/lib/esm/index.js CHANGED
@@ -1,5 +1,5 @@
1
1
  import { AsciiCodePoint, VirtualCodePoint, isWhitespaceCharacter, isAsciiControlCharacter, calcStringFromNodePoints, calcEscapedStringFromNodePoints } from '@yozora/character';
2
- import { eatOptionalWhitespaces, calcEndYastNodePoint, resolveLabelToIdentifier, calcStartYastNodePoint, encodeLinkDestination, BaseBlockTokenizer, TokenizerPriority } from '@yozora/core-tokenizer';
2
+ import { eatOptionalWhitespaces, calcEndPoint, resolveLabelToIdentifier, calcStartPoint, encodeLinkDestination, BaseBlockTokenizer, TokenizerPriority } from '@yozora/core-tokenizer';
3
3
  import { DefinitionType } from '@yozora/ast';
4
4
 
5
5
  function eatAndCollectLinkDestination(nodePoints, startIndex, endIndex, state) {
@@ -237,8 +237,8 @@ const match = function (api) {
237
237
  const token = {
238
238
  nodeType: DefinitionType,
239
239
  position: {
240
- start: calcStartYastNodePoint(nodePoints, startIndex),
241
- end: calcEndYastNodePoint(nodePoints, endIndex - 1),
240
+ start: calcStartPoint(nodePoints, startIndex),
241
+ end: calcEndPoint(nodePoints, endIndex - 1),
242
242
  },
243
243
  label: labelState,
244
244
  destination: null,
@@ -346,7 +346,7 @@ const match = function (api) {
346
346
  }
347
347
  const lastLine = token.lines[token.lines.length - 1];
348
348
  token.title = null;
349
- token.position.end = calcEndYastNodePoint(lastLine.nodePoints, lastLine.endIndex - 1);
349
+ token.position.end = calcEndPoint(lastLine.nodePoints, lastLine.endIndex - 1);
350
350
  return {
351
351
  status: 'closingAndRollback',
352
352
  lines: token.lines.slice(token.lineNoOfTitle - 1),
@@ -372,7 +372,7 @@ const match = function (api) {
372
372
  const lines = token.lines.splice(token.lineNoOfTitle - 1);
373
373
  const lastLine = token.lines[token.lines.length - 1];
374
374
  token.title = null;
375
- token.position.end = calcEndYastNodePoint(lastLine.nodePoints, lastLine.endIndex - 1);
375
+ token.position.end = calcEndPoint(lastLine.nodePoints, lastLine.endIndex - 1);
376
376
  result = { status: 'closingAndRollback', lines };
377
377
  }
378
378
  }
@@ -386,9 +386,9 @@ const match = function (api) {
386
386
  }
387
387
  };
388
388
 
389
- const parse = function () {
389
+ const parse = function (api) {
390
390
  return {
391
- parse: token => {
391
+ parse: tokens => tokens.map(token => {
392
392
  const label = token._label;
393
393
  const identifier = token._identifier;
394
394
  const destinationPoints = token.destination.nodePoints;
@@ -399,15 +399,11 @@ const parse = function () {
399
399
  const title = token.title == null
400
400
  ? undefined
401
401
  : calcEscapedStringFromNodePoints(token.title.nodePoints, 1, token.title.nodePoints.length - 1);
402
- const node = {
403
- type: DefinitionType,
404
- identifier,
405
- label,
406
- url,
407
- title,
408
- };
402
+ const node = api.shouldReservePosition
403
+ ? { type: DefinitionType, position: token.position, identifier, label, url, title }
404
+ : { type: DefinitionType, identifier, label, url, title };
409
405
  return node;
410
- },
406
+ }),
411
407
  };
412
408
  };
413
409
 
@@ -5,4 +5,4 @@ export { match as definitionMatch } from './match';
5
5
  export { parse as definitionParse } from './parse';
6
6
  export { DefinitionTokenizer, DefinitionTokenizer as default } from './tokenizer';
7
7
  export { uniqueName as DefinitionTokenizerName } from './types';
8
- export type { IHookContext as IDefinitionHookContext, IToken as IDefinitionToken, ITokenizerProps as IDefinitionTokenizerProps, } from './types';
8
+ export type { IThis as IDefinitionHookContext, IToken as IDefinitionToken, ITokenizerProps as IDefinitionTokenizerProps, } from './types';
@@ -1,5 +1,5 @@
1
1
  import type { IMatchBlockHookCreator } from '@yozora/core-tokenizer';
2
- import type { IHookContext, IToken, T } from './types';
2
+ import type { IThis, IToken, T } from './types';
3
3
  /**
4
4
  * A link reference definition consists of a link label, indented up to three
5
5
  * spaces, followed by a colon (:), optional whitespace (including up to one
@@ -15,4 +15,4 @@ import type { IHookContext, IToken, T } from './types';
15
15
  *
16
16
  * @see https://github.github.com/gfm/#link-reference-definition
17
17
  */
18
- export declare const match: IMatchBlockHookCreator<T, IToken, IHookContext>;
18
+ export declare const match: IMatchBlockHookCreator<T, IToken, IThis>;
@@ -1,3 +1,3 @@
1
1
  import type { IParseBlockHookCreator } from '@yozora/core-tokenizer';
2
- import type { IHookContext, INode, IToken, T } from './types';
3
- export declare const parse: IParseBlockHookCreator<T, IToken, INode, IHookContext>;
2
+ import type { INode, IThis, IToken, T } from './types';
3
+ export declare const parse: IParseBlockHookCreator<T, IToken, INode, IThis>;
@@ -1,12 +1,12 @@
1
1
  import type { IBlockTokenizer, IMatchBlockHookCreator, IParseBlockHookCreator } from '@yozora/core-tokenizer';
2
2
  import { BaseBlockTokenizer } from '@yozora/core-tokenizer';
3
- import type { IHookContext, INode, IToken, ITokenizerProps, T } from './types';
3
+ import type { INode, IThis, IToken, ITokenizerProps, T } from './types';
4
4
  /**
5
5
  * Lexical Analyzer for Definition.
6
6
  * @see https://github.github.com/gfm/#link-reference-definition
7
7
  */
8
- export declare class DefinitionTokenizer extends BaseBlockTokenizer<T, IToken, INode, IHookContext> implements IBlockTokenizer<T, IToken, INode, IHookContext> {
8
+ export declare class DefinitionTokenizer extends BaseBlockTokenizer<T, IToken, INode, IThis> implements IBlockTokenizer<T, IToken, INode, IThis> {
9
9
  constructor(props?: ITokenizerProps);
10
- readonly match: IMatchBlockHookCreator<T, IToken, IHookContext>;
11
- readonly parse: IParseBlockHookCreator<T, IToken, INode, IHookContext>;
10
+ readonly match: IMatchBlockHookCreator<T, IToken, IThis>;
11
+ readonly parse: IParseBlockHookCreator<T, IToken, INode, IThis>;
12
12
  }
@@ -1,10 +1,10 @@
1
- import type { DefinitionType, IDefinition } from '@yozora/ast';
1
+ import type { Definition, DefinitionType } from '@yozora/ast';
2
2
  import type { IBaseBlockTokenizerProps, IPartialYastBlockToken, IPhrasingContentLine, ITokenizer } from '@yozora/core-tokenizer';
3
- import type { LinkDestinationCollectingState } from './util/link-destination';
4
- import type { LinkLabelCollectingState } from './util/link-label';
5
- import type { LinkTitleCollectingState } from './util/link-title';
3
+ import type { ILinkDestinationCollectingState } from './util/link-destination';
4
+ import type { ILinkLabelCollectingState } from './util/link-label';
5
+ import type { ILinkTitleCollectingState } from './util/link-title';
6
6
  export declare type T = DefinitionType;
7
- export declare type INode = IDefinition;
7
+ export declare type INode = Definition;
8
8
  export declare const uniqueName = "@yozora/tokenizer-definition";
9
9
  export interface IToken extends IPartialYastBlockToken<T> {
10
10
  /**
@@ -15,15 +15,15 @@ export interface IToken extends IPartialYastBlockToken<T> {
15
15
  * Link label
16
16
  * Trimmed, Case-Insensitive
17
17
  */
18
- label: LinkLabelCollectingState;
18
+ label: ILinkLabelCollectingState;
19
19
  /**
20
20
  * Link destination
21
21
  */
22
- destination: LinkDestinationCollectingState | null;
22
+ destination: ILinkDestinationCollectingState | null;
23
23
  /**
24
24
  * Link title
25
25
  */
26
- title: LinkTitleCollectingState | null;
26
+ title: ILinkTitleCollectingState | null;
27
27
  /**
28
28
  * The line number of the first matched character of the link label
29
29
  */
@@ -45,5 +45,5 @@ export interface IToken extends IPartialYastBlockToken<T> {
45
45
  */
46
46
  _identifier?: string;
47
47
  }
48
- export declare type IHookContext = ITokenizer;
48
+ export declare type IThis = ITokenizer;
49
49
  export declare type ITokenizerProps = Partial<IBaseBlockTokenizerProps>;
@@ -5,7 +5,7 @@ import type { INodePoint } from '@yozora/character';
5
5
  *
6
6
  * @see https://github.github.com/gfm/#link-destination
7
7
  */
8
- export interface LinkDestinationCollectingState {
8
+ export interface ILinkDestinationCollectingState {
9
9
  /**
10
10
  * Whether the current token has collected a legal LinkDestination
11
11
  */
@@ -31,7 +31,7 @@ export interface LinkDestinationCollectingState {
31
31
  * @param state
32
32
  * @see https://github.github.com/gfm/#link-destination
33
33
  */
34
- export declare function eatAndCollectLinkDestination(nodePoints: ReadonlyArray<INodePoint>, startIndex: number, endIndex: number, state: LinkDestinationCollectingState | null): {
34
+ export declare function eatAndCollectLinkDestination(nodePoints: ReadonlyArray<INodePoint>, startIndex: number, endIndex: number, state: ILinkDestinationCollectingState | null): {
35
35
  nextIndex: number;
36
- state: LinkDestinationCollectingState;
36
+ state: ILinkDestinationCollectingState;
37
37
  };
@@ -5,7 +5,7 @@ import type { INodePoint } from '@yozora/character';
5
5
  *
6
6
  * @see https://github.github.com/gfm/#link-label
7
7
  */
8
- export interface LinkLabelCollectingState {
8
+ export interface ILinkLabelCollectingState {
9
9
  /**
10
10
  * Whether the current token has collected a legal LinkDestination
11
11
  */
@@ -38,7 +38,7 @@ export interface LinkLabelCollectingState {
38
38
  * @param state
39
39
  * @see https://github.github.com/gfm/#link-label
40
40
  */
41
- export declare function eatAndCollectLinkLabel(nodePoints: ReadonlyArray<INodePoint>, startIndex: number, endIndex: number, state: LinkLabelCollectingState | null): {
41
+ export declare function eatAndCollectLinkLabel(nodePoints: ReadonlyArray<INodePoint>, startIndex: number, endIndex: number, state: ILinkLabelCollectingState | null): {
42
42
  nextIndex: number;
43
- state: LinkLabelCollectingState;
43
+ state: ILinkLabelCollectingState;
44
44
  };
@@ -5,7 +5,7 @@ import type { INodePoint } from '@yozora/character';
5
5
  *
6
6
  * @see https://github.github.com/gfm/#link-title
7
7
  */
8
- export interface LinkTitleCollectingState {
8
+ export interface ILinkTitleCollectingState {
9
9
  /**
10
10
  * Whether the current token has collected a legal LinkDestination
11
11
  */
@@ -27,7 +27,7 @@ export interface LinkTitleCollectingState {
27
27
  * @param state
28
28
  * @see https://github.github.com/gfm/#link-title
29
29
  */
30
- export declare function eatAndCollectLinkTitle(nodePoints: ReadonlyArray<INodePoint>, startIndex: number, endIndex: number, state: LinkTitleCollectingState | null): {
30
+ export declare function eatAndCollectLinkTitle(nodePoints: ReadonlyArray<INodePoint>, startIndex: number, endIndex: number, state: ILinkTitleCollectingState | null): {
31
31
  nextIndex: number;
32
- state: LinkTitleCollectingState;
32
+ state: ILinkTitleCollectingState;
33
33
  };
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@yozora/tokenizer-definition",
3
- "version": "2.0.0-alpha.0",
3
+ "version": "2.0.0",
4
4
  "author": {
5
5
  "name": "guanghechen",
6
6
  "url": "https://github.com/guanghechen/"
@@ -35,9 +35,9 @@
35
35
  "test": "cross-env TS_NODE_FILES=true jest --config ../../jest.config.js --rootDir ."
36
36
  },
37
37
  "dependencies": {
38
- "@yozora/ast": "^2.0.0-alpha.0",
39
- "@yozora/character": "^2.0.0-alpha.0",
40
- "@yozora/core-tokenizer": "^2.0.0-alpha.0"
38
+ "@yozora/ast": "^2.0.0",
39
+ "@yozora/character": "^2.0.0",
40
+ "@yozora/core-tokenizer": "^2.0.0"
41
41
  },
42
- "gitHead": "0171501339c49ffd02ed16a63447fa20a47a29a7"
42
+ "gitHead": "65e99d1709fdd1c918465dce6b1e91de96bdab5e"
43
43
  }