@yozora/tokenizer-ecma-import 2.0.0-alpha.0 → 2.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -105,14 +105,14 @@ so you can use `YozoraParser` directly.
105
105
  registered in *YastParser* as a plug-in before it can be used.
106
106
 
107
107
  ```typescript {4,9}
108
- import { DefaultYastParser } from '@yozora/core-parser'
108
+ import { DefaultParser } from '@yozora/core-parser'
109
109
  import ParagraphTokenizer from '@yozora/tokenizer-paragraph'
110
110
  import TextTokenizer from '@yozora/tokenizer-text'
111
111
  import EcmaImportTokenizer from '@yozora/tokenizer-ecma-import'
112
112
 
113
- const parser = new DefaultYastParser()
114
- .useBlockFallbackTokenizer(new ParagraphTokenizer())
115
- .useInlineFallbackTokenizer(new TextTokenizer())
113
+ const parser = new DefaultParser()
114
+ .useFallbackTokenizer(new ParagraphTokenizer())
115
+ .useFallbackTokenizer(new TextTokenizer())
116
116
  .useTokenizer(new EcmaImportTokenizer())
117
117
 
118
118
  // parse source markdown content
@@ -248,7 +248,6 @@ Name | Type | Required | Default
248
248
  [@yozora/tokenizer-link]: https://github.com/yozorajs/yozora/tree/main/tokenizers/link#readme
249
249
  [@yozora/tokenizer-link-reference]: https://github.com/yozorajs/yozora/tree/main/tokenizers/link-reference#readme
250
250
  [@yozora/tokenizer-list]: https://github.com/yozorajs/yozora/tree/main/tokenizers/list#readme
251
- [@yozora/tokenizer-list-item]: https://github.com/yozorajs/yozora/tree/main/tokenizers/list-item#readme
252
251
  [@yozora/tokenizer-math]: https://github.com/yozorajs/yozora/tree/main/tokenizers/math#readme
253
252
  [@yozora/tokenizer-paragraph]: https://github.com/yozorajs/yozora/tree/main/tokenizers/paragraph#readme
254
253
  [@yozora/tokenizer-setext-heading]: https://github.com/yozorajs/yozora/tree/main/tokenizers/setext-heading#readme
@@ -308,7 +307,6 @@ Name | Type | Required | Default
308
307
  [doc-@yozora/tokenizer-definition]: https://yozora.guanghechen.com/docs/package/tokenizer-definition
309
308
  [doc-@yozora/tokenizer-link-reference]: https://yozora.guanghechen.com/docs/package/tokenizer-link-reference
310
309
  [doc-@yozora/tokenizer-list]: https://yozora.guanghechen.com/docs/package/tokenizer-list
311
- [doc-@yozora/tokenizer-list-item]: https://yozora.guanghechen.com/docs/package/tokenizer-list-item
312
310
  [doc-@yozora/tokenizer-math]: https://yozora.guanghechen.com/docs/package/tokenizer-math
313
311
  [doc-@yozora/tokenizer-paragraph]: https://yozora.guanghechen.com/docs/package/tokenizer-paragraph
314
312
  [doc-@yozora/tokenizer-setext-heading]: https://yozora.guanghechen.com/docs/package/tokenizer-setext-heading
package/lib/cjs/index.js CHANGED
@@ -52,8 +52,8 @@ const match = function () {
52
52
  let m;
53
53
  let token = null;
54
54
  const position = () => ({
55
- start: coreTokenizer.calcStartYastNodePoint(nodePoints, startIndex),
56
- end: coreTokenizer.calcEndYastNodePoint(nodePoints, endIndex - 1),
55
+ start: coreTokenizer.calcStartPoint(nodePoints, startIndex),
56
+ end: coreTokenizer.calcEndPoint(nodePoints, endIndex - 1),
57
57
  });
58
58
  if ((m = regex1.exec(text)) != null) {
59
59
  token = {
@@ -86,17 +86,25 @@ const match = function () {
86
86
  }
87
87
  };
88
88
 
89
- const parse = function () {
89
+ const parse = function (api) {
90
90
  return {
91
- parse: token => {
92
- const node = {
93
- type: ast.EcmaImportType,
94
- moduleName: token.moduleName,
95
- defaultImport: token.defaultImport,
96
- namedImports: token.namedImports,
97
- };
91
+ parse: tokens => tokens.map(token => {
92
+ const node = api.shouldReservePosition
93
+ ? {
94
+ type: ast.EcmaImportType,
95
+ position: token.position,
96
+ moduleName: token.moduleName,
97
+ defaultImport: token.defaultImport,
98
+ namedImports: token.namedImports,
99
+ }
100
+ : {
101
+ type: ast.EcmaImportType,
102
+ moduleName: token.moduleName,
103
+ defaultImport: token.defaultImport,
104
+ namedImports: token.namedImports,
105
+ };
98
106
  return node;
99
- },
107
+ }),
100
108
  };
101
109
  };
102
110
 
package/lib/esm/index.js CHANGED
@@ -1,6 +1,6 @@
1
1
  import { EcmaImportType } from '@yozora/ast';
2
2
  import { AsciiCodePoint, calcTrimBoundaryOfCodePoints, calcStringFromNodePoints } from '@yozora/character';
3
- import { calcStartYastNodePoint, calcEndYastNodePoint, BaseBlockTokenizer, TokenizerPriority } from '@yozora/core-tokenizer';
3
+ import { calcStartPoint, calcEndPoint, BaseBlockTokenizer, TokenizerPriority } from '@yozora/core-tokenizer';
4
4
 
5
5
  const namedImportItemRegex = /^(\w+)(?:\s+as\s+(\w+))?$/;
6
6
  const namedImportRegex = /\{\s*((?:[\w]+(?:\s+as\s+[\w]+)?\s*,\s*)*[\w]+(?:\s+as\s+[\w]+)?)\s*\}\s*/;
@@ -48,8 +48,8 @@ const match = function () {
48
48
  let m;
49
49
  let token = null;
50
50
  const position = () => ({
51
- start: calcStartYastNodePoint(nodePoints, startIndex),
52
- end: calcEndYastNodePoint(nodePoints, endIndex - 1),
51
+ start: calcStartPoint(nodePoints, startIndex),
52
+ end: calcEndPoint(nodePoints, endIndex - 1),
53
53
  });
54
54
  if ((m = regex1.exec(text)) != null) {
55
55
  token = {
@@ -82,17 +82,25 @@ const match = function () {
82
82
  }
83
83
  };
84
84
 
85
- const parse = function () {
85
+ const parse = function (api) {
86
86
  return {
87
- parse: token => {
88
- const node = {
89
- type: EcmaImportType,
90
- moduleName: token.moduleName,
91
- defaultImport: token.defaultImport,
92
- namedImports: token.namedImports,
93
- };
87
+ parse: tokens => tokens.map(token => {
88
+ const node = api.shouldReservePosition
89
+ ? {
90
+ type: EcmaImportType,
91
+ position: token.position,
92
+ moduleName: token.moduleName,
93
+ defaultImport: token.defaultImport,
94
+ namedImports: token.namedImports,
95
+ }
96
+ : {
97
+ type: EcmaImportType,
98
+ moduleName: token.moduleName,
99
+ defaultImport: token.defaultImport,
100
+ namedImports: token.namedImports,
101
+ };
94
102
  return node;
95
- },
103
+ }),
96
104
  };
97
105
  };
98
106
 
@@ -2,4 +2,4 @@ export { match as ecmaImportMatch } from './match';
2
2
  export { parse as ecmaImportParse } from './parse';
3
3
  export { EcmaImportTokenizer, EcmaImportTokenizer as default } from './tokenizer';
4
4
  export { uniqueName as EcmaImportTokenizerName } from './types';
5
- export type { IHookContext as ecmaImportHookContext, IToken as IEcmaImportToken, ITokenizerProps as IEcmaImportProps, } from './types';
5
+ export type { IThis as ecmaImportHookContext, IToken as IEcmaImportToken, ITokenizerProps as IEcmaImportProps, } from './types';
@@ -1,5 +1,5 @@
1
1
  import type { IMatchBlockHookCreator } from '@yozora/core-tokenizer';
2
- import type { IHookContext, IToken, T } from './types';
2
+ import type { IThis, IToken, T } from './types';
3
3
  /**
4
4
  * Examples
5
5
  *
@@ -12,4 +12,4 @@ import type { IHookContext, IToken, T } from './types';
12
12
  * @see https://github.com/syntax-tree/mdast#strong
13
13
  * @see https://github.github.com/gfm/#emphasis-and-strong-emphasis
14
14
  */
15
- export declare const match: IMatchBlockHookCreator<T, IToken, IHookContext>;
15
+ export declare const match: IMatchBlockHookCreator<T, IToken, IThis>;
@@ -1,3 +1,3 @@
1
1
  import type { IParseBlockHookCreator } from '@yozora/core-tokenizer';
2
- import type { IHookContext, INode, IToken, T } from './types';
3
- export declare const parse: IParseBlockHookCreator<T, IToken, INode, IHookContext>;
2
+ import type { INode, IThis, IToken, T } from './types';
3
+ export declare const parse: IParseBlockHookCreator<T, IToken, INode, IThis>;
@@ -1,13 +1,13 @@
1
1
  import type { IBlockTokenizer, IMatchBlockHookCreator, IParseBlockHookCreator } from '@yozora/core-tokenizer';
2
2
  import { BaseBlockTokenizer } from '@yozora/core-tokenizer';
3
- import type { IHookContext, INode, IToken, ITokenizerProps, T } from './types';
3
+ import type { INode, IThis, IToken, ITokenizerProps, T } from './types';
4
4
  /**
5
5
  * Lexical Analyzer for Ecma Import statement
6
6
  * @see https://github.com/syntax-tree/mdast#strong
7
7
  * @see https://github.github.com/gfm/#emphasis-and-strong-emphasis
8
8
  */
9
- export declare class EcmaImportTokenizer extends BaseBlockTokenizer<T, IToken, INode, IHookContext> implements IBlockTokenizer<T, IToken, INode, IHookContext> {
9
+ export declare class EcmaImportTokenizer extends BaseBlockTokenizer<T, IToken, INode, IThis> implements IBlockTokenizer<T, IToken, INode, IThis> {
10
10
  constructor(props?: ITokenizerProps);
11
- readonly match: IMatchBlockHookCreator<T, IToken, IHookContext>;
12
- readonly parse: IParseBlockHookCreator<T, IToken, INode, IHookContext>;
11
+ readonly match: IMatchBlockHookCreator<T, IToken, IThis>;
12
+ readonly parse: IParseBlockHookCreator<T, IToken, INode, IThis>;
13
13
  }
@@ -1,8 +1,8 @@
1
- import type { EcmaImportType, IEcmaImport } from '@yozora/ast';
1
+ import type { EcmaImport, EcmaImportType } from '@yozora/ast';
2
2
  import type { IBaseBlockTokenizerProps, IPartialYastBlockToken, ITokenizer } from '@yozora/core-tokenizer';
3
3
  export declare type T = EcmaImportType;
4
- export declare type INode = IEcmaImport;
4
+ export declare type INode = EcmaImport;
5
5
  export declare const uniqueName = "@yozora/tokenizer-ecma-import";
6
- export declare type IToken = IPartialYastBlockToken<T> & Omit<IEcmaImport, 'type'>;
7
- export declare type IHookContext = ITokenizer;
6
+ export declare type IToken = IPartialYastBlockToken<T> & Omit<EcmaImport, 'type'>;
7
+ export declare type IThis = ITokenizer;
8
8
  export declare type ITokenizerProps = Partial<IBaseBlockTokenizerProps>;
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@yozora/tokenizer-ecma-import",
3
- "version": "2.0.0-alpha.0",
3
+ "version": "2.0.0",
4
4
  "author": {
5
5
  "name": "guanghechen",
6
6
  "url": "https://github.com/guanghechen/"
@@ -35,9 +35,9 @@
35
35
  "test": "cross-env TS_NODE_FILES=true jest --config ../../jest.config.js --rootDir ."
36
36
  },
37
37
  "dependencies": {
38
- "@yozora/ast": "^2.0.0-alpha.0",
39
- "@yozora/character": "^2.0.0-alpha.0",
40
- "@yozora/core-tokenizer": "^2.0.0-alpha.0"
38
+ "@yozora/ast": "^2.0.0",
39
+ "@yozora/character": "^2.0.0",
40
+ "@yozora/core-tokenizer": "^2.0.0"
41
41
  },
42
- "gitHead": "0171501339c49ffd02ed16a63447fa20a47a29a7"
42
+ "gitHead": "65e99d1709fdd1c918465dce6b1e91de96bdab5e"
43
43
  }