comment-parser 1.2.1 → 1.3.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +13 -0
- package/browser/index.js +53 -36
- package/es6/index.d.ts +7 -0
- package/es6/index.js +2 -0
- package/es6/parser/index.d.ts +3 -2
- package/es6/parser/index.js +4 -3
- package/es6/parser/source-parser.d.ts +3 -2
- package/es6/parser/source-parser.js +12 -12
- package/es6/parser/tokenizers/description.d.ts +4 -3
- package/es6/parser/tokenizers/description.js +7 -6
- package/es6/primitives.d.ts +7 -0
- package/es6/primitives.js +1 -0
- package/es6/transforms/align.d.ts +2 -1
- package/es6/transforms/align.js +7 -7
- package/lib/index.cjs +9 -1
- package/lib/index.cjs.map +1 -1
- package/lib/index.d.ts +7 -0
- package/lib/parser/index.cjs +6 -2
- package/lib/parser/index.cjs.map +1 -1
- package/lib/parser/index.d.ts +3 -2
- package/lib/parser/source-parser.cjs +11 -10
- package/lib/parser/source-parser.cjs.map +1 -1
- package/lib/parser/source-parser.d.ts +3 -2
- package/lib/parser/tokenizers/description.cjs +7 -6
- package/lib/parser/tokenizers/description.cjs.map +1 -1
- package/lib/parser/tokenizers/description.d.ts +4 -3
- package/lib/primitives.cjs +2 -0
- package/lib/primitives.cjs.map +1 -1
- package/lib/primitives.d.ts +7 -0
- package/lib/transforms/align.cjs +7 -7
- package/lib/transforms/align.cjs.map +1 -1
- package/lib/transforms/align.d.ts +2 -1
- package/package.json +6 -5
- package/src/index.ts +3 -0
- package/src/parser/index.ts +6 -3
- package/src/parser/source-parser.ts +14 -12
- package/src/parser/tokenizers/description.ts +11 -8
- package/src/primitives.ts +8 -0
- package/src/transforms/align.ts +14 -12
- package/tests/e2e/examples.js +1 -1
- package/tests/unit/source-parser.spec.ts +150 -0
package/CHANGELOG.md
CHANGED
@@ -1,3 +1,16 @@
+ # v1.3.0
+ - add support for custom block markers
+
+ # v1.2.4
+ - reverting engine constraint back to ^12.0.0
+
+ # v1.2.3
+ - publishing missing fix: point package's main to .cjs file
+
+ # v1.2.2
+ - re-export ./util on the top-level for compatibility with older Node
+ - point package's main to .cjs file
+
  # v1.2.1
  - bump `engines` per `exports` issues in earlier Node versions
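
The v1.3.0 entry above is the headline change for this release: the parser now takes a `markers` option, wired through the parser, tokenizer, and transform diffs below. A minimal sketch of how a consumer might use it; the option name and the BlockMarkers shape come from this diff, while the marker strings and the sample comment are invented for illustration:

    import { parse } from 'comment-parser';

    // Hypothetical marker set; any object with start/nostart/delim/end strings
    // should satisfy the BlockMarkers interface added in primitives.d.ts below.
    const markers = { start: '/*!', nostart: '/*!!', delim: '*', end: '*/' };

    const blocks = parse(
      ['/*!', ' * Summary text', ' * @param {string} name argument description', ' */'].join('\n'),
      { markers }
    );

    console.log(blocks[0].tags[0].tag); // expected: 'param'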
package/browser/index.js
CHANGED
@@ -1,6 +1,15 @@
  var CommentParser = (function (exports) {
  'use strict';

+ /** @deprecated */
+ exports.Markers = void 0;
+ (function (Markers) {
+ Markers["start"] = "/**";
+ Markers["nostart"] = "/***";
+ Markers["delim"] = "*";
+ Markers["end"] = "*/";
+ })(exports.Markers || (exports.Markers = {}));
+
  function isSpace(source) {
  return /^\s+$/.test(source);
  }
@@ -19,6 +28,9 @@ var CommentParser = (function (exports) {
  function splitLines(source) {
  return source.split(/\n/);
  }
+ function seedBlock(block = {}) {
+ return Object.assign({ description: '', tags: [], source: [], problems: [] }, block);
+ }
  function seedSpec(spec = {}) {
  return Object.assign({ tag: '', name: '', type: '', optional: false, description: '', problems: [], source: [] }, spec);
  }
@@ -37,6 +49,16 @@ var CommentParser = (function (exports) {
  }
  return block;
  }
+ /**
+ * Assures Block.source contains references to the Block.tags[].source items,
+ * using Block.tags[].source as a source of truth. This is a counterpart of rewireSource
+ * @param block parsed coments block
+ */
+ function rewireSpecs(block) {
+ const source = block.tags.reduce((acc, spec) => spec.source.reduce((acc, line) => acc.set(line.number, line), acc), new Map());
+ block.source = block.source.map((line) => source.get(line.number) || line);
+ return block;
+ }

  const reTag = /^@\S+/;
  /**
@@ -68,15 +90,7 @@ var CommentParser = (function (exports) {
  return fence;
  }

- exports.Markers = void 0;
- (function (Markers) {
- Markers["start"] = "/**";
- Markers["nostart"] = "/***";
- Markers["delim"] = "*";
- Markers["end"] = "*/";
- })(exports.Markers || (exports.Markers = {}));
-
- function getParser$2({ startLine = 0, } = {}) {
+ function getParser$2({ startLine = 0, markers = exports.Markers, } = {}) {
  let block = null;
  let num = startLine;
  return function parseSource(source) {
@@ -85,29 +99,29 @@ var CommentParser = (function (exports) {
  [tokens.lineEnd, rest] = splitCR(rest);
  [tokens.start, rest] = splitSpace(rest);
  if (block === null &&
- rest.startsWith(exports.Markers.start) &&
- !rest.startsWith(exports.Markers.nostart)) {
+ rest.startsWith(markers.start) &&
+ !rest.startsWith(markers.nostart)) {
  block = [];
- tokens.delimiter = rest.slice(0, exports.Markers.start.length);
- rest = rest.slice(exports.Markers.start.length);
+ tokens.delimiter = rest.slice(0, markers.start.length);
+ rest = rest.slice(markers.start.length);
  [tokens.postDelimiter, rest] = splitSpace(rest);
  }
  if (block === null) {
  num++;
  return null;
  }
- const isClosed = rest.trimRight().endsWith(exports.Markers.end);
+ const isClosed = rest.trimRight().endsWith(markers.end);
  if (tokens.delimiter === '' &&
- rest.startsWith(exports.Markers.delim) &&
- !rest.startsWith(exports.Markers.end)) {
- tokens.delimiter = exports.Markers.delim;
- rest = rest.slice(exports.Markers.delim.length);
+ rest.startsWith(markers.delim) &&
+ !rest.startsWith(markers.end)) {
+ tokens.delimiter = markers.delim;
+ rest = rest.slice(markers.delim.length);
  [tokens.postDelimiter, rest] = splitSpace(rest);
  }
  if (isClosed) {
  const trimmed = rest.trimRight();
- tokens.end = rest.slice(trimmed.length - exports.Markers.end.length);
- rest = trimmed.slice(0, -exports.Markers.end.length);
+ tokens.end = rest.slice(trimmed.length - markers.end.length);
+ rest = trimmed.slice(0, -markers.end.length);
  }
  tokens.description = rest;
  block.push({ number: num, source, tokens });
@@ -319,11 +333,12 @@ var CommentParser = (function (exports) {
  * Makes no changes to `spec.lines[].tokens` but joins them into `spec.description`
  * following given spacing srtategy
  * @param {Spacing} spacing tells how to handle the whitespace
+ * @param {BlockMarkers} markers tells how to handle comment block delimitation
  */
- function descriptionTokenizer(spacing = 'compact') {
+ function descriptionTokenizer(spacing = 'compact', markers = exports.Markers) {
  const join = getJoiner(spacing);
  return (spec) => {
- spec.description = join(spec.source);
+ spec.description = join(spec.source, markers);
  return spec;
  };
  }
@@ -334,7 +349,7 @@ var CommentParser = (function (exports) {
  return preserveJoiner;
  return spacing;
  }
- function compactJoiner(lines) {
+ function compactJoiner(lines, markers = exports.Markers) {
  return lines
  .map(({ tokens: { description } }) => description.trim())
  .filter((description) => description !== '')
@@ -343,25 +358,25 @@ var CommentParser = (function (exports) {
  const lineNo = (num, { tokens }, i) => tokens.type === '' ? num : i;
  const getDescription = ({ tokens }) => (tokens.delimiter === '' ? tokens.start : tokens.postDelimiter.slice(1)) +
  tokens.description;
- function preserveJoiner(lines) {
+ function preserveJoiner(lines, markers = exports.Markers) {
  if (lines.length === 0)
  return '';
  // skip the opening line with no description
  if (lines[0].tokens.description === '' &&
- lines[0].tokens.delimiter === exports.Markers.start)
+ lines[0].tokens.delimiter === markers.start)
  lines = lines.slice(1);
  // skip the closing line with no description
  const lastLine = lines[lines.length - 1];
  if (lastLine !== undefined &&
  lastLine.tokens.description === '' &&
- lastLine.tokens.end.endsWith(exports.Markers.end))
+ lastLine.tokens.end.endsWith(markers.end))
  lines = lines.slice(0, -1);
  // description starts at the last line of type definition
  lines = lines.slice(lines.reduce(lineNo, 0));
  return lines.map(getDescription).join('\n');
  }

- function getParser({ startLine = 0, fence = '```', spacing = 'compact', tokenizers = [
+ function getParser({ startLine = 0, fence = '```', spacing = 'compact', markers = exports.Markers, tokenizers = [
  tagTokenizer(),
  typeTokenizer(spacing),
  nameTokenizer(),
@@ -369,7 +384,7 @@ var CommentParser = (function (exports) {
  ], } = {}) {
  if (startLine < 0 || startLine % 1 > 0)
  throw new Error('Invalid startLine');
- const parseSource = getParser$2({ startLine });
+ const parseSource = getParser$2({ startLine, markers });
  const parseBlock = getParser$3({ fence });
  const parseSpec = getParser$1({ tokenizers });
  const joinDescription = getJoiner(spacing);
@@ -385,7 +400,7 @@ var CommentParser = (function (exports) {
  const sections = parseBlock(lines);
  const specs = sections.slice(1).map(parseSpec);
  blocks.push({
- description: joinDescription(sections[0]),
+ description: joinDescription(sections[0], markers),
  tags: specs,
  source: lines,
  problems: specs.reduce((acc, spec) => acc.concat(spec.problems), []),
@@ -430,14 +445,14 @@ var CommentParser = (function (exports) {
  type: 0,
  name: 0,
  };
- const getWidth = (w, { tokens: t }) => ({
- start: t.delimiter === exports.Markers.start ? t.start.length : w.start,
+ const getWidth = (markers = exports.Markers) => (w, { tokens: t }) => ({
+ start: t.delimiter === markers.start ? t.start.length : w.start,
  tag: Math.max(w.tag, t.tag.length),
  type: Math.max(w.type, t.type.length),
  name: Math.max(w.name, t.name.length),
  });
  const space = (len) => ''.padStart(len, ' ');
- function align$1() {
+ function align$1(markers = exports.Markers) {
  let intoTags = false;
  let w;
  function update(line) {
@@ -449,15 +464,15 @@ var CommentParser = (function (exports) {
  tokens.type === '' &&
  tokens.description === '';
  // dangling '*/'
- if (tokens.end === exports.Markers.end && isEmpty) {
+ if (tokens.end === markers.end && isEmpty) {
  tokens.start = space(w.start + 1);
  return Object.assign(Object.assign({}, line), { tokens });
  }
  switch (tokens.delimiter) {
- case exports.Markers.start:
+ case markers.start:
  tokens.start = space(w.start);
  break;
- case exports.Markers.delim:
+ case markers.delim:
  tokens.start = space(w.start + 1);
  break;
  default:
@@ -500,7 +515,7 @@ var CommentParser = (function (exports) {
  }
  return (_a) => {
  var { source } = _a, fields = __rest$2(_a, ["source"]);
- w = source.reduce(getWidth, Object.assign({}, zeroWidth$1));
+ w = source.reduce(getWidth(markers), Object.assign({}, zeroWidth$1));
  return rewireSource(Object.assign(Object.assign({}, fields), { source: source.map(update) }));
  };
  }
@@ -622,12 +637,14 @@ var CommentParser = (function (exports) {
  name: nameTokenizer,
  description: descriptionTokenizer,
  };
+ const util = { rewireSpecs, rewireSource, seedBlock, seedTokens };

  exports.inspect = inspect;
  exports.parse = parse;
  exports.stringify = stringify;
  exports.tokenizers = tokenizers;
  exports.transforms = transforms;
+ exports.util = util;

  Object.defineProperty(exports, '__esModule', { value: true });
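
Besides the markers plumbing, the bundle now exposes `util` (rewireSpecs, rewireSource, seedBlock, seedTokens) on the top level, matching the v1.2.2 changelog note. A rough sketch of what that enables; the export names are from the diff, the override values are invented:

    import { util } from 'comment-parser';

    // seedBlock()/seedTokens() return fully populated empty objects and accept
    // partial overrides, which is handy when building Block/Line structures by hand.
    const tokens = util.seedTokens({ delimiter: '*', description: 'hand-made line' });
    const block = util.seedBlock({ description: 'hand-made block' });

    console.log(tokens.postDelimiter); // '' (seeded default)
    console.log(block.problems);       // [] (seeded default)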
package/es6/index.d.ts
CHANGED
@@ -7,6 +7,7 @@ import alignTransform from './transforms/align';
  import indentTransform from './transforms/indent';
  import crlfTransform from './transforms/crlf';
  import { flow as flowTransform } from './transforms/index';
+ import { rewireSpecs, rewireSource, seedBlock, seedTokens } from './util';
  export * from './primitives';
  export declare function parse(source: string, options?: Partial<ParserOptions>): import("./primitives").Block[];
  export declare const stringify: import("./stringifier/index").Stringifier;
@@ -23,3 +24,9 @@ export declare const tokenizers: {
  name: typeof nameTokenizer;
  description: typeof descriptionTokenizer;
  };
+ export declare const util: {
+ rewireSpecs: typeof rewireSpecs;
+ rewireSource: typeof rewireSource;
+ seedBlock: typeof seedBlock;
+ seedTokens: typeof seedTokens;
+ };
package/es6/index.js
CHANGED
@@ -8,6 +8,7 @@ import alignTransform from './transforms/align.js';
  import indentTransform from './transforms/indent.js';
  import crlfTransform from './transforms/crlf.js';
  import { flow as flowTransform } from './transforms/index.js';
+ import { rewireSpecs, rewireSource, seedBlock, seedTokens } from './util.js';
  export * from './primitives.js';
  export function parse(source, options = {}) {
  return getParser(options)(source);
@@ -26,3 +27,4 @@ export const tokenizers = {
  name: nameTokenizer,
  description: descriptionTokenizer,
  };
+ export const util = { rewireSpecs, rewireSource, seedBlock, seedTokens };
package/es6/parser/index.d.ts
CHANGED
@@ -1,10 +1,11 @@
- import { Block } from '../primitives';
+ import { Block, BlockMarkers } from '../primitives';
  import { Tokenizer } from './tokenizers/index';
  export interface Options {
  startLine: number;
  fence: string;
  spacing: 'compact' | 'preserve';
+ markers: BlockMarkers;
  tokenizers: Tokenizer[];
  }
  export declare type Parser = (source: string) => Block[];
- export default function getParser({ startLine, fence, spacing, tokenizers, }?: Partial<Options>): Parser;
+ export default function getParser({ startLine, fence, spacing, markers, tokenizers, }?: Partial<Options>): Parser;
package/es6/parser/index.js
CHANGED
@@ -1,3 +1,4 @@
+ import { Markers } from '../primitives.js';
  import { splitLines } from '../util.js';
  import blockParser from './block-parser.js';
  import sourceParser from './source-parser.js';
@@ -6,7 +7,7 @@ import tokenizeTag from './tokenizers/tag.js';
  import tokenizeType from './tokenizers/type.js';
  import tokenizeName from './tokenizers/name.js';
  import tokenizeDescription, { getJoiner as getDescriptionJoiner, } from './tokenizers/description.js';
- export default function getParser({ startLine = 0, fence = '```', spacing = 'compact', tokenizers = [
+ export default function getParser({ startLine = 0, fence = '```', spacing = 'compact', markers = Markers, tokenizers = [
  tokenizeTag(),
  tokenizeType(spacing),
  tokenizeName(),
@@ -14,7 +15,7 @@ export default function getParser({ startLine = 0, fence = '```', spacing = 'com
  ], } = {}) {
  if (startLine < 0 || startLine % 1 > 0)
  throw new Error('Invalid startLine');
- const parseSource = sourceParser({ startLine });
+ const parseSource = sourceParser({ startLine, markers });
  const parseBlock = blockParser({ fence });
  const parseSpec = specParser({ tokenizers });
  const joinDescription = getDescriptionJoiner(spacing);
@@ -30,7 +31,7 @@ export default function getParser({ startLine = 0, fence = '```', spacing = 'com
  const sections = parseBlock(lines);
  const specs = sections.slice(1).map(parseSpec);
  blocks.push({
- description: joinDescription(sections[0]),
+ description: joinDescription(sections[0], markers),
  tags: specs,
  source: lines,
  problems: specs.reduce((acc, spec) => acc.concat(spec.problems), []),

package/es6/parser/source-parser.d.ts
CHANGED
@@ -1,6 +1,7 @@
- import { Line } from '../primitives';
+ import { Line, BlockMarkers } from '../primitives';
  export interface Options {
  startLine: number;
+ markers: BlockMarkers;
  }
  export declare type Parser = (source: string) => Line[] | null;
- export default function getParser({ startLine, }?: Partial<Options>): Parser;
+ export default function getParser({ startLine, markers, }?: Partial<Options>): Parser;

package/es6/parser/source-parser.js
CHANGED
@@ -1,6 +1,6 @@
  import { Markers } from '../primitives.js';
  import { seedTokens, splitSpace, splitCR } from '../util.js';
- export default function getParser({ startLine = 0, } = {}) {
+ export default function getParser({ startLine = 0, markers = Markers, } = {}) {
  let block = null;
  let num = startLine;
  return function parseSource(source) {
@@ -9,29 +9,29 @@ export default function getParser({ startLine = 0, } = {}) {
  [tokens.lineEnd, rest] = splitCR(rest);
  [tokens.start, rest] = splitSpace(rest);
  if (block === null &&
- rest.startsWith(Markers.start) &&
- !rest.startsWith(Markers.nostart)) {
+ rest.startsWith(markers.start) &&
+ !rest.startsWith(markers.nostart)) {
  block = [];
- tokens.delimiter = rest.slice(0, Markers.start.length);
- rest = rest.slice(Markers.start.length);
+ tokens.delimiter = rest.slice(0, markers.start.length);
+ rest = rest.slice(markers.start.length);
  [tokens.postDelimiter, rest] = splitSpace(rest);
  }
  if (block === null) {
  num++;
  return null;
  }
- const isClosed = rest.trimRight().endsWith(Markers.end);
+ const isClosed = rest.trimRight().endsWith(markers.end);
  if (tokens.delimiter === '' &&
- rest.startsWith(Markers.delim) &&
- !rest.startsWith(Markers.end)) {
- tokens.delimiter = Markers.delim;
- rest = rest.slice(Markers.delim.length);
+ rest.startsWith(markers.delim) &&
+ !rest.startsWith(markers.end)) {
+ tokens.delimiter = markers.delim;
+ rest = rest.slice(markers.delim.length);
  [tokens.postDelimiter, rest] = splitSpace(rest);
  }
  if (isClosed) {
  const trimmed = rest.trimRight();
- tokens.end = rest.slice(trimmed.length - Markers.end.length);
- rest = trimmed.slice(0, -Markers.end.length);
+ tokens.end = rest.slice(trimmed.length - markers.end.length);
+ rest = trimmed.slice(0, -markers.end.length);
  }
  tokens.description = rest;
  block.push({ number: num, source, tokens });

package/es6/parser/tokenizers/description.d.ts
CHANGED
@@ -1,9 +1,9 @@
- import { Line } from '../../primitives';
+ import { Line, BlockMarkers, Markers } from '../../primitives';
  import { Tokenizer } from './index';
  /**
  * Walks over provided lines joining description token into a single string.
  * */
- export declare type Joiner = (lines: Line[]) => string;
+ export declare type Joiner = (lines: Line[], markers?: BlockMarkers) => string;
  /**
  * Shortcut for standard Joiners
  * compact - strip surrounding whitespace and concat lines using a single string
@@ -14,6 +14,7 @@ export declare type Spacing = 'compact' | 'preserve' | Joiner;
  * Makes no changes to `spec.lines[].tokens` but joins them into `spec.description`
  * following given spacing srtategy
  * @param {Spacing} spacing tells how to handle the whitespace
+ * @param {BlockMarkers} markers tells how to handle comment block delimitation
  */
- export default function descriptionTokenizer(spacing?: Spacing): Tokenizer;
+ export default function descriptionTokenizer(spacing?: Spacing, markers?: typeof Markers): Tokenizer;
  export declare function getJoiner(spacing: Spacing): Joiner;

package/es6/parser/tokenizers/description.js
CHANGED
@@ -3,11 +3,12 @@ import { Markers } from '../../primitives.js';
  * Makes no changes to `spec.lines[].tokens` but joins them into `spec.description`
  * following given spacing srtategy
  * @param {Spacing} spacing tells how to handle the whitespace
+ * @param {BlockMarkers} markers tells how to handle comment block delimitation
  */
- export default function descriptionTokenizer(spacing = 'compact') {
+ export default function descriptionTokenizer(spacing = 'compact', markers = Markers) {
  const join = getJoiner(spacing);
  return (spec) => {
- spec.description = join(spec.source);
+ spec.description = join(spec.source, markers);
  return spec;
  };
  }
@@ -18,7 +19,7 @@ export function getJoiner(spacing) {
  return preserveJoiner;
  return spacing;
  }
- function compactJoiner(lines) {
+ function compactJoiner(lines, markers = Markers) {
  return lines
  .map(({ tokens: { description } }) => description.trim())
  .filter((description) => description !== '')
@@ -27,18 +28,18 @@ function compactJoiner(lines) {
  const lineNo = (num, { tokens }, i) => tokens.type === '' ? num : i;
  const getDescription = ({ tokens }) => (tokens.delimiter === '' ? tokens.start : tokens.postDelimiter.slice(1)) +
  tokens.description;
- function preserveJoiner(lines) {
+ function preserveJoiner(lines, markers = Markers) {
  if (lines.length === 0)
  return '';
  // skip the opening line with no description
  if (lines[0].tokens.description === '' &&
- lines[0].tokens.delimiter === Markers.start)
+ lines[0].tokens.delimiter === markers.start)
  lines = lines.slice(1);
  // skip the closing line with no description
  const lastLine = lines[lines.length - 1];
  if (lastLine !== undefined &&
  lastLine.tokens.description === '' &&
- lastLine.tokens.end.endsWith(Markers.end))
+ lastLine.tokens.end.endsWith(markers.end))
  lines = lines.slice(0, -1);
  // description starts at the last line of type definition
  lines = lines.slice(lines.reduce(lineNo, 0));
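
descriptionTokenizer now accepts markers as a second argument, while the default tokenizer list in parser/index.js still constructs it with spacing only, so a caller who overrides both markers and tokenizers would forward the same object. A sketch under that reading; marker values and the sample comment are invented:

    import { parse, tokenizers } from 'comment-parser';

    const markers = { start: '/*!', nostart: '/*!!', delim: '*', end: '*/' };
    const source = ['/*!', ' * @param {number} n the value', ' */'].join('\n');

    // Reproduce the default tokenizer chain, but hand the custom markers to the
    // description tokenizer so its joiner recognizes the custom start/end strings.
    const blocks = parse(source, {
      markers,
      tokenizers: [
        tokenizers.tag(),
        tokenizers.type('compact'),
        tokenizers.name(),
        tokenizers.description('preserve', markers),
      ],
    });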
package/es6/primitives.d.ts
CHANGED
@@ -1,9 +1,16 @@
+ /** @deprecated */
  export declare enum Markers {
  start = "/**",
  nostart = "/***",
  delim = "*",
  end = "*/"
  }
+ export interface BlockMarkers {
+ start: string;
+ nostart: string;
+ delim: string;
+ end: string;
+ }
  export interface Block {
  description: string;
  tags: Spec[];
package/es6/primitives.js
CHANGED
package/es6/transforms/align.js
CHANGED
@@ -17,14 +17,14 @@ const zeroWidth = {
  type: 0,
  name: 0,
  };
- const getWidth = (w, { tokens: t }) => ({
- start: t.delimiter === Markers.start ? t.start.length : w.start,
+ const getWidth = (markers = Markers) => (w, { tokens: t }) => ({
+ start: t.delimiter === markers.start ? t.start.length : w.start,
  tag: Math.max(w.tag, t.tag.length),
  type: Math.max(w.type, t.type.length),
  name: Math.max(w.name, t.name.length),
  });
  const space = (len) => ''.padStart(len, ' ');
- export default function align() {
+ export default function align(markers = Markers) {
  let intoTags = false;
  let w;
  function update(line) {
@@ -36,15 +36,15 @@ export default function align() {
  tokens.type === '' &&
  tokens.description === '';
  // dangling '*/'
- if (tokens.end === Markers.end && isEmpty) {
+ if (tokens.end === markers.end && isEmpty) {
  tokens.start = space(w.start + 1);
  return Object.assign(Object.assign({}, line), { tokens });
  }
  switch (tokens.delimiter) {
- case Markers.start:
+ case markers.start:
  tokens.start = space(w.start);
  break;
- case Markers.delim:
+ case markers.delim:
  tokens.start = space(w.start + 1);
  break;
  default:
@@ -87,7 +87,7 @@ export default function align() {
  }
  return (_a) => {
  var { source } = _a, fields = __rest(_a, ["source"]);
- w = source.reduce(getWidth, Object.assign({}, zeroWidth));
+ w = source.reduce(getWidth(markers), Object.assign({}, zeroWidth));
  return rewireSource(Object.assign(Object.assign({}, fields), { source: source.map(update) }));
  };
  }
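
The align transform gets the same treatment: align() now takes an optional markers argument, so the column-width scan compares delimiters against the custom set instead of the hard-coded enum. A sketch combining it with parse and stringify; marker values and input are invented:

    import { parse, stringify, transforms } from 'comment-parser';

    const markers = { start: '/*!', nostart: '/*!!', delim: '*', end: '*/' };
    const source = ['/*!', ' * @param {string}   name   the name', ' */'].join('\n');

    // transforms.align(markers) still returns a Block -> Block transform.
    const [block] = parse(source, { markers });
    console.log(stringify(transforms.align(markers)(block)));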
package/lib/index.cjs
CHANGED
@@ -20,7 +20,7 @@ var __exportStar = this && this.__exportStar || function (m, exports) {
  Object.defineProperty(exports, "__esModule", {
  value: true
  });
- exports.tokenizers = exports.transforms = exports.inspect = exports.stringify = exports.parse = void 0;
+ exports.util = exports.tokenizers = exports.transforms = exports.inspect = exports.stringify = exports.parse = void 0;

  const index_1 = require("./parser/index.cjs");

@@ -42,6 +42,8 @@ const crlf_1 = require("./transforms/crlf.cjs");

  const index_3 = require("./transforms/index.cjs");

+ const util_1 = require("./util.cjs");
+
  __exportStar(require("./primitives.cjs"), exports);

  function parse(source, options = {}) {
@@ -71,4 +73,10 @@ exports.tokenizers = {
  name: name_1.default,
  description: description_1.default
  };
+ exports.util = {
+ rewireSpecs: util_1.rewireSpecs,
+ rewireSource: util_1.rewireSource,
+ seedBlock: util_1.seedBlock,
+ seedTokens: util_1.seedTokens
+ };
  //# sourceMappingURL=index.cjs.map
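
The CommonJS build mirrors the ES build, so the same helpers come through require(). A sketch of util.rewireSpecs, whose purpose the comment in the browser bundle above describes (point Block.source back at the Block.tags[].source line objects); the token edit here is invented:

    const { parse, stringify, util } = require('comment-parser');

    const [block] = parse('/** @param {string} name argument description */');

    // Replace a tag's lines with fresh copies, then let rewireSpecs make
    // block.source reference those same updated objects again.
    block.tags[0].source = block.tags[0].source.map((line) => ({
      ...line,
      tokens: { ...line.tokens, name: 'userName' },
    }));

    console.log(stringify(util.rewireSpecs(block)));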
package/lib/index.cjs.map
CHANGED
@@ -1 +1 @@
(regenerated source map for lib/index.cjs: a single-line JSON blob whose "names", "mappings", and embedded "sourcesContent" now cover the util, rewireSpecs, rewireSource, seedBlock, and seedTokens exports)
package/lib/index.d.ts
CHANGED
@@ -7,6 +7,7 @@ import alignTransform from './transforms/align';
  import indentTransform from './transforms/indent';
  import crlfTransform from './transforms/crlf';
  import { flow as flowTransform } from './transforms/index';
+ import { rewireSpecs, rewireSource, seedBlock, seedTokens } from './util';
  export * from './primitives';
  export declare function parse(source: string, options?: Partial<ParserOptions>): import("./primitives").Block[];
  export declare const stringify: import("./stringifier/index").Stringifier;
@@ -23,3 +24,9 @@ export declare const tokenizers: {
  name: typeof nameTokenizer;
  description: typeof descriptionTokenizer;
  };
+ export declare const util: {
+ rewireSpecs: typeof rewireSpecs;
+ rewireSource: typeof rewireSource;
+ seedBlock: typeof seedBlock;
+ seedTokens: typeof seedTokens;
+ };
package/lib/parser/index.cjs
CHANGED
@@ -4,6 +4,8 @@ Object.defineProperty(exports, "__esModule", {
  value: true
  });

+ const primitives_1 = require("../primitives.cjs");
+
  const util_1 = require("../util.cjs");

  const block_parser_1 = require("./block-parser.cjs");
@@ -24,11 +26,13 @@ function getParser({
  startLine = 0,
  fence = '```',
  spacing = 'compact',
+ markers = primitives_1.Markers,
  tokenizers = [tag_1.default(), type_1.default(spacing), name_1.default(), description_1.default(spacing)]
  } = {}) {
  if (startLine < 0 || startLine % 1 > 0) throw new Error('Invalid startLine');
  const parseSource = source_parser_1.default({
- startLine
+ startLine,
+ markers
  });
  const parseBlock = block_parser_1.default({
  fence
@@ -50,7 +54,7 @@ function getParser({
  const sections = parseBlock(lines);
  const specs = sections.slice(1).map(parseSpec);
  blocks.push({
- description: joinDescription(sections[0]),
+ description: joinDescription(sections[0], markers),
  tags: specs,
  source: lines,
  problems: specs.reduce((acc, spec) => acc.concat(spec.problems), [])