comment-parser 1.4.2 → 1.4.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +6 -0
- package/browser/index.js +671 -0
- package/lib/index.cjs +68 -0
- package/lib/index.cjs.map +1 -0
- package/lib/index.d.ts +32 -0
- package/lib/parser/block-parser.cjs +36 -0
- package/lib/parser/block-parser.cjs.map +1 -0
- package/lib/parser/block-parser.d.ts +24 -0
- package/lib/parser/index.cjs +52 -0
- package/lib/parser/index.cjs.map +1 -0
- package/lib/parser/index.d.ts +11 -0
- package/lib/parser/source-parser.cjs +56 -0
- package/lib/parser/source-parser.cjs.map +1 -0
- package/lib/parser/source-parser.d.ts +7 -0
- package/lib/parser/spec-parser.cjs +23 -0
- package/lib/parser/spec-parser.cjs.map +1 -0
- package/lib/parser/spec-parser.d.ts +7 -0
- package/lib/parser/tokenizers/description.cjs +51 -0
- package/lib/parser/tokenizers/description.cjs.map +1 -0
- package/lib/parser/tokenizers/description.d.ts +20 -0
- package/lib/parser/tokenizers/index.cjs +6 -0
- package/lib/parser/tokenizers/index.cjs.map +1 -0
- package/lib/parser/tokenizers/index.d.ts +7 -0
- package/lib/parser/tokenizers/name.cjs +103 -0
- package/lib/parser/tokenizers/name.cjs.map +1 -0
- package/lib/parser/tokenizers/name.d.ts +6 -0
- package/lib/parser/tokenizers/tag.cjs +36 -0
- package/lib/parser/tokenizers/tag.cjs.map +1 -0
- package/lib/parser/tokenizers/tag.d.ts +6 -0
- package/lib/parser/tokenizers/type.cjs +75 -0
- package/lib/parser/tokenizers/type.cjs.map +1 -0
- package/lib/parser/tokenizers/type.d.ts +27 -0
- package/lib/primitives.cjs +15 -0
- package/lib/primitives.cjs.map +1 -0
- package/lib/primitives.d.ts +54 -0
- package/lib/stringifier/index.cjs +15 -0
- package/lib/stringifier/index.cjs.map +1 -0
- package/lib/stringifier/index.d.ts +3 -0
- package/lib/stringifier/inspect.cjs +56 -0
- package/lib/stringifier/inspect.cjs.map +1 -0
- package/lib/stringifier/inspect.d.ts +2 -0
- package/lib/transforms/align.cjs +104 -0
- package/lib/transforms/align.cjs.map +1 -0
- package/lib/transforms/align.d.ts +3 -0
- package/lib/transforms/crlf.cjs +35 -0
- package/lib/transforms/crlf.cjs.map +1 -0
- package/lib/transforms/crlf.d.ts +3 -0
- package/lib/transforms/indent.cjs +45 -0
- package/lib/transforms/indent.cjs.map +1 -0
- package/lib/transforms/indent.d.ts +2 -0
- package/lib/transforms/index.cjs +10 -0
- package/lib/transforms/index.cjs.map +1 -0
- package/lib/transforms/index.d.ts +3 -0
- package/lib/util.cjs +90 -0
- package/lib/util.cjs.map +1 -0
- package/lib/util.d.ts +21 -0
- package/package.json +4 -2
package/CHANGELOG.md
CHANGED
package/browser/index.js
ADDED
|
@@ -0,0 +1,671 @@
|
|
|
1
|
+
// Browser (IIFE) bundle of comment-parser: the whole API is built inside this
// closure and attached to the global `CommentParser`.
var CommentParser = (function (exports) {
    'use strict';

    /** @deprecated */
    exports.Markers = void 0;
    // Comment block delimiters: "/**" opens a block, "*" prefixes continuation
    // lines, "*/" closes the block; "/***" is explicitly NOT a block opener.
    (function (Markers) {
        Markers["start"] = "/**";
        Markers["nostart"] = "/***";
        Markers["delim"] = "*";
        Markers["end"] = "*/";
    })(exports.Markers || (exports.Markers = {}));
|
|
12
|
+
|
|
13
|
+
// Matches strings made of at least one whitespace character and nothing else.
const BLANK = /^\s+$/;

/** Returns true when `source` is non-empty and contains only whitespace. */
function isSpace(source) {
    return BLANK.test(source);
}
|
|
16
|
+
/**
 * Splits trailing carriage returns off `source`.
 * @returns {[string, string]} `[crRun, remainder]`; `crRun` is '' when there are none.
 */
function splitCR(source) {
    let cut = source.length;
    while (cut > 0 && source[cut - 1] === '\r') {
        cut -= 1;
    }
    if (cut === source.length) {
        return ['', source];
    }
    return [source.slice(cut), source.slice(0, cut)];
}
|
|
22
|
+
/**
 * Splits leading whitespace off `source`.
 * @returns {[string, string]} `[leadingWhitespace, remainder]`.
 */
function splitSpace(source) {
    let width = 0;
    while (width < source.length && /\s/.test(source.charAt(width))) {
        width += 1;
    }
    return [source.slice(0, width), source.slice(width)];
}
|
|
28
|
+
/** Splits `source` on LF characters; CRs are handled separately by splitCR. */
function splitLines(source) {
    return source.split('\n');
}
|
|
31
|
+
/** Builds a Block with empty defaults, overridden by any fields in `block`. */
function seedBlock(block = {}) {
    return {
        description: '',
        tags: [],
        source: [],
        problems: [],
        ...block,
    };
}
|
|
34
|
+
/** Builds a Spec with empty defaults, overridden by any fields in `spec`. */
function seedSpec(spec = {}) {
    return {
        tag: '',
        name: '',
        type: '',
        optional: false,
        description: '',
        problems: [],
        source: [],
        ...spec,
    };
}
|
|
37
|
+
/** Builds a Tokens record with every field defaulted to '', overridden by `tokens`. */
function seedTokens(tokens = {}) {
    return {
        start: '',
        delimiter: '',
        postDelimiter: '',
        tag: '',
        postTag: '',
        name: '',
        postName: '',
        type: '',
        postType: '',
        description: '',
        end: '',
        lineEnd: '',
        ...tokens,
    };
}
|
|
40
|
+
/**
 * Assures Block.tags[].source contains references to the Block.source items,
 * using Block.source as a source of truth. This is a counterpart of rewireSpecs.
 * @param block parsed comments block
 */
function rewireSource(block) {
    const byNumber = new Map(block.source.map((line) => [line.number, line]));
    for (const spec of block.tags) {
        spec.source = spec.source.map((line) => byNumber.get(line.number));
    }
    return block;
}
|
|
52
|
+
/**
 * Assures Block.source contains references to the Block.tags[].source items,
 * using Block.tags[].source as a source of truth. This is a counterpart of rewireSource.
 * @param block parsed comments block
 */
function rewireSpecs(block) {
    const byNumber = new Map();
    for (const spec of block.tags) {
        for (const line of spec.source) {
            byNumber.set(line.number, line);
        }
    }
    block.source = block.source.map((line) => byNumber.get(line.number) || line);
    return block;
}
|
|
62
|
+
|
|
63
|
+
// A line whose description starts with `@word` begins a new tag section.
const reTag = /^@\S+/;

/**
 * Creates a configured block parser that splits a comment's lines into
 * sections: index 0 is the description, every later section is one tag.
 * Lines inside a code fence never start a new section.
 * @param {Partial<Options>} options
 */
function getParser$3({ fence = '```', } = {}) {
    const fencer = getFencer(fence);
    const toggleFence = (source, isFenced) => (fencer(source) ? !isFenced : isFenced);
    return function parseBlock(source) {
        // start with description section
        const sections = [[]];
        let isFenced = false;
        for (const line of source) {
            const text = line.tokens.description;
            if (!isFenced && reTag.test(text)) {
                sections.push([line]);
            }
            else {
                sections[sections.length - 1].push(line);
            }
            isFenced = toggleFence(text, isFenced);
        }
        return sections;
    };
}

// Turns a fence marker string into a predicate ("does this line flip the
// fence state?"); a custom function is passed through unchanged.
function getFencer(fence) {
    if (typeof fence === 'string') {
        return (source) => source.split(fence).length % 2 === 0;
    }
    return fence;
}
|
|
92
|
+
|
|
93
|
+
// Stateful line-by-line source parser: call the returned function once per raw
// line. Lines between a `/**` opener and a `*/` closer are buffered; the
// buffered block is returned on the closing line, otherwise null.
function getParser$2({ startLine = 0, markers = exports.Markers, } = {}) {
    let block = null; // lines of the comment currently being collected; null when outside a comment
    let num = startLine; // running line number
    return function parseSource(source) {
        let rest = source;
        const tokens = seedTokens();
        // peel off trailing CR(s) and leading indentation into their own tokens
        [tokens.lineEnd, rest] = splitCR(rest);
        [tokens.start, rest] = splitSpace(rest);
        // a new block opens at markers.start ("/**") but not markers.nostart ("/***")
        if (block === null &&
            rest.startsWith(markers.start) &&
            !rest.startsWith(markers.nostart)) {
            block = [];
            tokens.delimiter = rest.slice(0, markers.start.length);
            rest = rest.slice(markers.start.length);
            [tokens.postDelimiter, rest] = splitSpace(rest);
        }
        // outside a comment block: just advance the line counter
        if (block === null) {
            num++;
            return null;
        }
        const isClosed = rest.trimRight().endsWith(markers.end);
        // continuation lines may carry a "*" delimiter — but "*/" is the
        // closer, not a delimiter, so it is excluded here
        if (tokens.delimiter === '' &&
            rest.startsWith(markers.delim) &&
            !rest.startsWith(markers.end)) {
            tokens.delimiter = markers.delim;
            rest = rest.slice(markers.delim.length);
            [tokens.postDelimiter, rest] = splitSpace(rest);
        }
        if (isClosed) {
            const trimmed = rest.trimRight();
            // `end` captures the closing marker plus whatever trailed it
            tokens.end = rest.slice(trimmed.length - markers.end.length);
            rest = trimmed.slice(0, -markers.end.length);
        }
        tokens.description = rest;
        block.push({ number: num, source, tokens });
        num++;
        // closing line: hand back a copy of the buffered block and reset state
        if (isClosed) {
            const result = block.slice();
            block = null;
            return result;
        }
        return null;
    };
}
|
|
137
|
+
|
|
138
|
+
/**
 * Creates a spec parser that runs each tokenizer over the section's lines in
 * order, stopping early if a tokenizer reports a critical problem.
 */
function getParser$1({ tokenizers }) {
    return function parseSpec(source) {
        let spec = seedSpec({ source });
        for (const tokenize of tokenizers) {
            spec = tokenize(spec);
            const lastProblem = spec.problems[spec.problems.length - 1];
            if (lastProblem != null && lastProblem.critical) {
                break;
            }
        }
        return spec;
    };
}
|
|
150
|
+
|
|
151
|
+
/**
 * Splits the `@prefix` from the remaining `Spec.lines[].token.description` into
 * the `tag` token and populates `spec.tag`. Reports a critical problem when the
 * first line carries no `@tag`; a tag containing "/" is skipped without error.
 */
function tagTokenizer() {
    return (spec) => {
        const { tokens } = spec.source[0];
        const parsed = tokens.description.match(/\s*(@(\S+))(\s*)/);
        if (parsed === null) {
            spec.problems.push({
                code: 'spec:tag:prefix',
                message: 'tag should start with "@" symbol',
                line: spec.source[0].number,
                critical: true,
            });
            return spec;
        }
        const [consumed, rawTag, tagName, trailing] = parsed;
        if (rawTag.includes('/')) {
            return spec;
        }
        tokens.tag = rawTag;
        tokens.postTag = trailing;
        tokens.description = tokens.description.slice(consumed.length);
        spec.tag = tagName;
        return spec;
    };
}
|
|
178
|
+
|
|
179
|
+
/**
 * Splits the `{type}` portion of the remaining `Spec.lines[].tokens.description`
 * into `type` tokens and populates `spec.type`.
 *
 * @param {Spacing} spacing tells how to deal with a whitespace
 * for type values going over multiple lines
 */
function typeTokenizer(spacing = 'compact') {
    const join = getJoiner$1(spacing);
    return (spec) => {
        let curlies = 0; // curly-brace nesting depth while scanning the type
        let lines = []; // [tokens, consumed-type-text] pairs, one per source line
        let descriptionBegun = false;
        let firstTypeIteration = true;
        for (const { tokens } of spec.source.values()) {
            let type = '';
            // skip blank lines before any text appears
            if (!descriptionBegun && tokens.description.trim()) {
                descriptionBegun = true;
            }
            else if (!descriptionBegun) {
                continue;
            }
            // a type must open with "{" on the first non-blank line; otherwise there is no type
            if (firstTypeIteration && tokens.description[0] !== '{')
                return spec;
            firstTypeIteration = false;
            // consume characters until the braces balance out
            for (const ch of tokens.description) {
                if (ch === '{')
                    curlies++;
                if (ch === '}')
                    curlies--;
                type += ch;
                if (curlies === 0)
                    break;
            }
            lines.push([tokens, type]);
            if (curlies === 0)
                break;
        }
        if (!descriptionBegun) {
            return spec;
        }
        // braces never balanced across the whole section: report and bail out
        if (curlies !== 0) {
            spec.problems.push({
                code: 'spec:type:unpaired-curlies',
                message: 'unpaired curlies',
                line: spec.source[0].number,
                critical: true,
            });
            return spec;
        }
        const parts = [];
        // indentation width of the first type line, used to re-split continuation lines
        const offset = lines[0][0].postDelimiter.length;
        for (const [i, [tokens, type]] of lines.entries()) {
            tokens.type = type;
            if (i > 0) {
                // continuation lines: indentation beyond the first line's offset belongs to the type
                tokens.type = tokens.postDelimiter.slice(offset) + type;
                tokens.postDelimiter = tokens.postDelimiter.slice(0, offset);
            }
            [tokens.postType, tokens.description] = splitSpace(tokens.description.slice(type.length));
            parts.push(tokens.type);
        }
        // strip the wrapping "{" and "}" before joining the pieces
        parts[0] = parts[0].slice(1);
        parts[parts.length - 1] = parts[parts.length - 1].slice(0, -1);
        spec.type = join(parts);
        return spec;
    };
}
const trim = (x) => x.trim();
// Resolves a spacing strategy into a join function for multi-line types;
// a custom function is passed through unchanged.
function getJoiner$1(spacing) {
    if (spacing === 'compact')
        return (t) => t.map(trim).join('');
    else if (spacing === 'preserve')
        return (t) => t.join('\n');
    else
        return spacing;
}
|
|
255
|
+
|
|
256
|
+
// True when `s` is wrapped in double quotes (and truthy).
const isQuoted = (s) => s && s.startsWith('"') && s.endsWith('"');
/**
 * Splits remaining `spec.lines[].tokens.description` into `name` and `description` tokens,
 * and populates `spec.name` (plus `spec.optional` / `spec.default` for `[name=value]` forms).
 */
function nameTokenizer() {
    // reducer: index of the last line that still carries type tokens
    const typeEnd = (num, { tokens }, i) => tokens.type === '' ? num : i;
    return (spec) => {
        // look for the name starting in the line where {type} ends
        let finalTypeLine = spec.source.reduce(typeEnd, 0);
        let tokens;
        // advance past blank lines until a line with description text is found
        do {
            ({ tokens } = spec.source[finalTypeLine]);
            if (tokens.description.trim()) {
                break;
            }
            finalTypeLine++;
        } while (spec.source[finalTypeLine]);
        const source = tokens.description.trimStart();
        const quotedGroups = source.split('"');
        // if it starts with quoted group, assume it is a literal
        if (quotedGroups.length > 1 &&
            quotedGroups[0] === '' &&
            quotedGroups.length % 2 === 1) {
            spec.name = quotedGroups[1];
            tokens.name = `"${quotedGroups[1]}"`;
            [tokens.postName, tokens.description] = splitSpace(source.slice(tokens.name.length));
            return spec;
        }
        let brackets = 0; // square-bracket nesting depth
        let name = '';
        let optional = false;
        let defaultValue;
        // assume name is non-space string or anything wrapped into brackets
        for (const ch of source) {
            if (brackets === 0 && isSpace(ch))
                break;
            if (ch === '[')
                brackets++;
            if (ch === ']')
                brackets--;
            name += ch;
        }
        if (brackets !== 0) {
            spec.problems.push({
                code: 'spec:name:unpaired-brackets',
                message: 'unpaired brackets',
                line: spec.source[0].number,
                critical: true,
            });
            return spec;
        }
        const nameToken = name;
        // `[name]` marks an optional parameter; `[name=value]` also carries a default
        if (name[0] === '[' && name[name.length - 1] === ']') {
            optional = true;
            name = name.slice(1, -1);
            const parts = name.split('=');
            name = parts[0].trim();
            // join handles defaults that themselves contain "=" (e.g. arrow functions)
            if (parts[1] !== undefined)
                defaultValue = parts.slice(1).join('=').trim();
            if (name === '') {
                spec.problems.push({
                    code: 'spec:name:empty-name',
                    message: 'empty name',
                    line: spec.source[0].number,
                    critical: true,
                });
                return spec;
            }
            if (defaultValue === '') {
                spec.problems.push({
                    code: 'spec:name:empty-default',
                    message: 'empty default value',
                    line: spec.source[0].number,
                    critical: true,
                });
                return spec;
            }
            // has "=" and is not a string, except for "=>"
            if (!isQuoted(defaultValue) && /=(?!>)/.test(defaultValue)) {
                spec.problems.push({
                    code: 'spec:name:invalid-default',
                    message: 'invalid default value syntax',
                    line: spec.source[0].number,
                    critical: true,
                });
                return spec;
            }
        }
        spec.optional = optional;
        spec.name = name;
        tokens.name = nameToken;
        if (defaultValue !== undefined)
            spec.default = defaultValue;
        [tokens.postName, tokens.description] = splitSpace(source.slice(tokens.name.length));
        return spec;
    };
}
|
|
354
|
+
|
|
355
|
+
/**
 * Makes no changes to `spec.lines[].tokens` but joins them into `spec.description`
 * following the given spacing strategy.
 * @param {Spacing} spacing tells how to handle the whitespace
 * @param {BlockMarkers} markers tells how to handle comment block delimitation
 */
function descriptionTokenizer(spacing = 'compact', markers = exports.Markers) {
    const join = getJoiner(spacing);
    return (spec) => {
        spec.description = join(spec.source, markers);
        return spec;
    };
}

// Resolves a spacing strategy into a joiner; a custom function passes through.
function getJoiner(spacing) {
    switch (spacing) {
        case 'compact':
            return compactJoiner;
        case 'preserve':
            return preserveJoiner;
        default:
            return spacing;
    }
}

// Trims each line's description and glues the non-empty ones with single spaces.
function compactJoiner(lines, markers = exports.Markers) {
    const pieces = [];
    for (const { tokens } of lines) {
        const text = tokens.description.trim();
        if (text !== '') {
            pieces.push(text);
        }
    }
    return pieces.join(' ');
}

// Reducer: index of the last line that still carries type tokens.
const lineNo = (num, { tokens }, i) => (tokens.type === '' ? num : i);

// Recovers a line's visible text; for delimiter-less lines the raw `start`
// token is kept, otherwise one character of post-delimiter spacing is dropped.
const getDescription = ({ tokens }) => (tokens.delimiter === '' ? tokens.start : tokens.postDelimiter.slice(1)) +
    tokens.description;

// Joins descriptions preserving line breaks and inner spacing, dropping the
// empty opener/closer lines and any lines still occupied by the type.
function preserveJoiner(lines, markers = exports.Markers) {
    if (lines.length === 0) {
        return '';
    }
    // skip the opening line with no description
    const [first] = lines;
    if (first.tokens.description === '' && first.tokens.delimiter === markers.start) {
        lines = lines.slice(1);
    }
    // skip the closing line with no description
    const last = lines[lines.length - 1];
    if (last !== undefined &&
        last.tokens.description === '' &&
        last.tokens.end.endsWith(markers.end)) {
        lines = lines.slice(0, -1);
    }
    // description starts at the last line of the type definition
    lines = lines.slice(lines.reduce(lineNo, 0));
    return lines.map(getDescription).join('\n');
}
|
|
401
|
+
|
|
402
|
+
// Builds the top-level `parse` function by wiring the source parser, block
// parser and spec parser together with the configured tokenizer pipeline.
function getParser({ startLine = 0, fence = '```', spacing = 'compact', markers = exports.Markers, tokenizers = [
    tagTokenizer(),
    typeTokenizer(spacing),
    nameTokenizer(),
    descriptionTokenizer(spacing),
], } = {}) {
    // startLine must be a non-negative integer
    if (startLine < 0 || startLine % 1 > 0)
        throw new Error('Invalid startLine');
    const parseSource = getParser$2({ startLine, markers });
    const parseBlock = getParser$3({ fence });
    const parseSpec = getParser$1({ tokenizers });
    const joinDescription = getJoiner(spacing);
    return function (source) {
        const blocks = [];
        for (const line of splitLines(source)) {
            const lines = parseSource(line);
            // parseSource returns null until a full comment block has closed
            if (lines === null)
                continue;
            const sections = parseBlock(lines);
            // section 0 is the description; each later section is one tag
            const specs = sections.slice(1).map(parseSpec);
            blocks.push({
                description: joinDescription(sections[0], markers),
                tags: specs,
                source: lines,
                // block-level problems aggregate every spec's problems
                problems: specs.reduce((acc, spec) => acc.concat(spec.problems), []),
            });
        }
        return blocks;
    };
}
|
|
432
|
+
|
|
433
|
+
// Token fields in the order they appear in a rendered line.
const TOKEN_ORDER = [
    'start',
    'delimiter',
    'postDelimiter',
    'tag',
    'postTag',
    'type',
    'postType',
    'name',
    'postName',
    'description',
    'end',
    'lineEnd',
];

/** Reassembles one line's tokens back into its original text. */
function join(tokens) {
    let text = '';
    for (const field of TOKEN_ORDER) {
        text += tokens[field];
    }
    return text;
}

/** Returns a stringifier that rebuilds a block verbatim from its tokens. */
function getStringifier() {
    return (block) => block.source.map(({ tokens }) => join(tokens)).join('\n');
}
|
|
450
|
+
|
|
451
|
+
// TypeScript `__rest` helper: copies own enumerable properties of `s` except
// those named in `e` (symbols included). Fixed: the original read the bare
// `window` global, which throws a ReferenceError in non-browser hosts — a
// `typeof` guard makes the lookup safe everywhere.
var __rest$2 = (typeof window !== 'undefined' && window.__rest) || function (s, e) {
    var t = {};
    for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0)
        t[p] = s[p];
    if (s != null && typeof Object.getOwnPropertySymbols === "function")
        for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) {
            if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i]))
                t[p[i]] = s[p[i]];
        }
    return t;
};
|
|
462
|
+
// Seed widths for measuring the widest start/tag/type/name across a block.
const zeroWidth$1 = {
    start: 0,
    tag: 0,
    type: 0,
    name: 0,
};
// Reducer collecting maximum token widths; `start` is taken from the
// indentation of the opening `/**` line rather than the running maximum.
const getWidth = (markers = exports.Markers) => (w, { tokens: t }) => ({
    start: t.delimiter === markers.start ? t.start.length : w.start,
    tag: Math.max(w.tag, t.tag.length),
    type: Math.max(w.type, t.type.length),
    name: Math.max(w.name, t.name.length),
});
const space = (len) => ''.padStart(len, ' ');
// Transform: pads the tag/type/name columns so they line up across the block.
function align$1(markers = exports.Markers) {
    let intoTags = false; // set once the first tag line is seen
    let w; // measured widths; assigned per block in the returned transform
    function update(line) {
        const tokens = Object.assign({}, line.tokens);
        if (tokens.tag !== '')
            intoTags = true;
        const isEmpty = tokens.tag === '' &&
            tokens.name === '' &&
            tokens.type === '' &&
            tokens.description === '';
        // dangling '*/'
        if (tokens.end === markers.end && isEmpty) {
            tokens.start = space(w.start + 1);
            return Object.assign(Object.assign({}, line), { tokens });
        }
        // re-indent according to which delimiter the line carries
        switch (tokens.delimiter) {
            case markers.start:
                tokens.start = space(w.start);
                break;
            case markers.delim:
                tokens.start = space(w.start + 1);
                break;
            default:
                tokens.delimiter = '';
                tokens.start = space(w.start + 2); // compensate delimiter
        }
        // description lines before the first tag get no column padding
        if (!intoTags) {
            tokens.postDelimiter = tokens.description === '' ? '' : ' ';
            return Object.assign(Object.assign({}, line), { tokens });
        }
        // track which trailing columns are empty so no padding is appended after them
        const nothingAfter = {
            delim: false,
            tag: false,
            type: false,
            name: false,
        };
        if (tokens.description === '') {
            nothingAfter.name = true;
            tokens.postName = '';
            if (tokens.name === '') {
                nothingAfter.type = true;
                tokens.postType = '';
                if (tokens.type === '') {
                    nothingAfter.tag = true;
                    tokens.postTag = '';
                    if (tokens.tag === '') {
                        nothingAfter.delim = true;
                    }
                }
            }
        }
        tokens.postDelimiter = nothingAfter.delim ? '' : ' ';
        if (!nothingAfter.tag)
            tokens.postTag = space(w.tag - tokens.tag.length + 1);
        if (!nothingAfter.type)
            tokens.postType = space(w.type - tokens.type.length + 1);
        if (!nothingAfter.name)
            tokens.postName = space(w.name - tokens.name.length + 1);
        return Object.assign(Object.assign({}, line), { tokens });
    }
    return (_a) => {
        var { source } = _a, fields = __rest$2(_a, ["source"]);
        // measure first, then rewrite every line against the measured widths
        w = source.reduce(getWidth(markers), Object.assign({}, zeroWidth$1));
        return rewireSource(Object.assign(Object.assign({}, fields), { source: source.map(update) }));
    };
}
|
|
542
|
+
|
|
543
|
+
// TypeScript `__rest` helper (duplicate emitted per module by the bundler).
// Fixed: guard the `window` lookup with `typeof` so the bundle does not throw
// a ReferenceError in hosts where `window` is undefined.
var __rest$1 = (typeof window !== 'undefined' && window.__rest) || function (s, e) {
    var t = {};
    for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0)
        t[p] = s[p];
    if (s != null && typeof Object.getOwnPropertySymbols === "function")
        for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) {
            if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i]))
                t[p[i]] = s[p[i]];
        }
    return t;
};
|
|
554
|
+
// Removes `offset` characters from the front of a string.
const pull = (offset) => (str) => str.slice(offset);
// Appends `offset` spaces to the end of a string.
const push = (offset) => {
    const padding = ' '.repeat(offset);
    return (str) => str + padding;
};
/**
 * Transform: shifts every line's leading `start` token so the block is
 * indented to column `pos`. The shift direction and size are computed once,
 * from the first line the transform sees, and reused for all later lines.
 */
function indent(pos) {
    let shift;
    const pad = (start) => {
        if (shift === undefined) {
            const offset = pos - start.length;
            shift = offset > 0 ? push(offset) : pull(-offset);
        }
        return shift(start);
    };
    const update = (line) => ({
        ...line,
        tokens: { ...line.tokens, start: pad(line.tokens.start) },
    });
    return (_a) => {
        var { source } = _a, fields = __rest$1(_a, ["source"]);
        return rewireSource({ ...fields, source: source.map(update) });
    };
}
|
|
574
|
+
|
|
575
|
+
// TypeScript `__rest` helper (duplicate emitted per module by the bundler).
// Fixed: guard the `window` lookup with `typeof` so the bundle does not throw
// a ReferenceError in hosts where `window` is undefined.
var __rest = (typeof window !== 'undefined' && window.__rest) || function (s, e) {
    var t = {};
    for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0)
        t[p] = s[p];
    if (s != null && typeof Object.getOwnPropertySymbols === "function")
        for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) {
            if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i]))
                t[p[i]] = s[p[i]];
        }
    return t;
};
|
|
586
|
+
/**
 * Transform: normalizes line endings across the block.
 * @param ending 'LF' clears the `lineEnd` token, anything else sets it to '\r'.
 */
function crlf(ending) {
    const lineEnd = ending === 'LF' ? '' : '\r';
    const update = (line) => ({
        ...line,
        tokens: { ...line.tokens, lineEnd },
    });
    return (_a) => {
        var { source } = _a, fields = __rest(_a, ["source"]);
        return rewireSource({ ...fields, source: source.map(update) });
    };
}
|
|
595
|
+
|
|
596
|
+
/** Composes block transforms left-to-right into a single transform. */
function flow(...transforms) {
    return (block) => {
        let result = block;
        for (const transform of transforms) {
            result = transform(result);
        }
        return result;
    };
}
|
|
599
|
+
|
|
600
|
+
// Seed column widths for the inspection table, one entry per token field.
const zeroWidth = {
    line: 0,
    start: 0,
    delimiter: 0,
    postDelimiter: 0,
    tag: 0,
    postTag: 0,
    name: 0,
    postName: 0,
    type: 0,
    postType: 0,
    description: 0,
    end: 0,
    lineEnd: 0,
};
// Display overrides for column headers ('lineEnd' renders as 'CR').
const headers = { lineEnd: 'CR' };
const fields = Object.keys(zeroWidth);
// Whitespace-only values render as `{length}` so they remain visible.
const repr = (x) => (isSpace(x) ? `{${x.length}}` : x);
const frame = (line) => '|' + line.join('|') + '|';
// Pads each token value to its column width.
const align = (width, tokens) => Object.keys(tokens).map((k) => repr(tokens[k]).padEnd(width[k]));
// Renders a parsed block's tokens as an ASCII table, one row per source line.
function inspect({ source }) {
    var _a, _b;
    if (source.length === 0)
        return '';
    const width = Object.assign({}, zeroWidth);
    // start each column at its header text width...
    for (const f of fields)
        width[f] = ((_a = headers[f]) !== null && _a !== void 0 ? _a : f).length;
    // ...then grow it to fit the widest value in that column
    for (const { number, tokens } of source) {
        width.line = Math.max(width.line, number.toString().length);
        for (const k in tokens)
            width[k] = Math.max(width[k], repr(tokens[k]).length);
    }
    // rows 0 and 1 are the header and the separator
    const lines = [[], []];
    for (const f of fields)
        lines[0].push(((_b = headers[f]) !== null && _b !== void 0 ? _b : f).padEnd(width[f]));
    for (const f of fields)
        lines[1].push('-'.padEnd(width[f], '-'));
    for (const { number, tokens } of source) {
        const line = number.toString().padStart(width.line);
        lines.push([line, ...align(width, tokens)]);
    }
    return lines.map(frame).join('\n');
}
|
|
643
|
+
|
|
644
|
+
// Parses a source string and returns an array of parsed comment blocks.
function parse(source, options = {}) {
    return getParser(options)(source);
}
// Default stringifier: reassembles blocks verbatim from their tokens.
const stringify = getStringifier();
// Built-in block transforms exposed on the public API.
const transforms = {
    flow: flow,
    align: align$1,
    indent: indent,
    crlf: crlf,
};
// Tokenizer factories for assembling custom tokenizer pipelines.
const tokenizers = {
    tag: tagTokenizer,
    type: typeTokenizer,
    name: nameTokenizer,
    description: descriptionTokenizer,
};
// Low-level helpers re-exported for custom tooling.
const util = { rewireSpecs, rewireSource, seedBlock, seedTokens };
|
|
661
|
+
|
|
662
|
+
// Public API of the browser bundle.
exports.inspect = inspect;
exports.parse = parse;
exports.stringify = stringify;
exports.tokenizers = tokenizers;
exports.transforms = transforms;
exports.util = util;

return exports;

// Invoked immediately with a fresh object as `exports`.
})({});
|