typedoc 0.26.0-beta.1 → 0.26.0-beta.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +2 -0
- package/dist/lib/application.d.ts +2 -0
- package/dist/lib/application.js +7 -3
- package/dist/lib/converter/comments/declarationReference.d.ts +1 -0
- package/dist/lib/converter/comments/declarationReference.js +14 -0
- package/dist/lib/converter/comments/declarationReferenceResolver.js +4 -2
- package/dist/lib/converter/comments/discovery.js +10 -2
- package/dist/lib/converter/comments/index.d.ts +6 -5
- package/dist/lib/converter/comments/index.js +21 -21
- package/dist/lib/converter/comments/parser.d.ts +3 -2
- package/dist/lib/converter/comments/parser.js +26 -17
- package/dist/lib/converter/comments/textParser.d.ts +19 -0
- package/dist/lib/converter/comments/textParser.js +154 -0
- package/dist/lib/converter/context.js +14 -7
- package/dist/lib/converter/converter.d.ts +3 -2
- package/dist/lib/converter/converter.js +11 -10
- package/dist/lib/converter/plugins/CategoryPlugin.js +3 -5
- package/dist/lib/converter/plugins/CommentPlugin.js +20 -0
- package/dist/lib/converter/plugins/PackagePlugin.js +1 -1
- package/dist/lib/converter/symbols.js +5 -7
- package/dist/lib/converter/types.js +11 -3
- package/dist/lib/internationalization/internationalization.js +2 -2
- package/dist/lib/internationalization/locales/ko.cjs +79 -0
- package/dist/lib/internationalization/locales/ko.d.cts +78 -0
- package/dist/lib/internationalization/translatable.d.ts +6 -1
- package/dist/lib/internationalization/translatable.js +6 -1
- package/dist/lib/models/FileRegistry.d.ts +26 -0
- package/dist/lib/models/FileRegistry.js +136 -0
- package/dist/lib/models/comments/comment.d.ts +30 -8
- package/dist/lib/models/comments/comment.js +26 -71
- package/dist/lib/models/comments/index.d.ts +1 -1
- package/dist/lib/models/index.d.ts +1 -0
- package/dist/lib/models/index.js +1 -0
- package/dist/lib/models/reflections/ReflectionSymbolId.js +1 -1
- package/dist/lib/models/reflections/abstract.js +1 -1
- package/dist/lib/models/reflections/project.d.ts +7 -2
- package/dist/lib/models/reflections/project.js +51 -40
- package/dist/lib/models/reflections/signature.js +2 -0
- package/dist/lib/models/types.js +2 -2
- package/dist/lib/output/plugins/AssetsPlugin.js +5 -0
- package/dist/lib/output/plugins/JavascriptIndexPlugin.js +2 -2
- package/dist/lib/output/plugins/NavigationPlugin.js +1 -1
- package/dist/lib/output/plugins/SitemapPlugin.js +1 -1
- package/dist/lib/output/renderer.d.ts +8 -14
- package/dist/lib/output/renderer.js +17 -12
- package/dist/lib/output/theme.d.ts +0 -7
- package/dist/lib/output/theme.js +0 -8
- package/dist/lib/output/themes/MarkedPlugin.d.ts +1 -0
- package/dist/lib/output/themes/MarkedPlugin.js +83 -1
- package/dist/lib/output/themes/default/DefaultTheme.js +2 -2
- package/dist/lib/output/themes/default/DefaultThemeRenderContext.js +1 -0
- package/dist/lib/output/themes/default/partials/footer.js +1 -1
- package/dist/lib/output/themes/default/partials/typeAndParent.js +2 -4
- package/dist/lib/output/themes/default/partials/typeParameters.js +1 -1
- package/dist/lib/serialization/deserializer.d.ts +8 -2
- package/dist/lib/serialization/deserializer.js +17 -8
- package/dist/lib/serialization/schema.d.ts +27 -3
- package/dist/lib/utils/array.js +1 -1
- package/dist/lib/utils/component.js +3 -0
- package/dist/lib/utils/entry-point.js +1 -1
- package/dist/lib/utils/events.d.ts +1 -1
- package/dist/lib/utils/events.js +2 -1
- package/dist/lib/utils/highlighter.d.ts +4 -2
- package/dist/lib/utils/highlighter.js +16 -5
- package/dist/lib/utils/html.js +1 -0
- package/dist/lib/utils/loggers.d.ts +0 -4
- package/dist/lib/utils/loggers.js +1 -7
- package/dist/lib/utils/options/declaration.d.ts +1 -0
- package/dist/lib/utils/options/help.js +2 -1
- package/dist/lib/utils/options/options.js +8 -6
- package/dist/lib/utils/options/readers/typedoc.js +0 -1
- package/dist/lib/utils/options/sources/typedoc.js +24 -0
- package/dist/lib/utils/options/tsdoc-defaults.d.ts +2 -2
- package/dist/lib/utils/options/tsdoc-defaults.js +2 -0
- package/dist/lib/utils/plugins.js +0 -1
- package/dist/lib/utils/set.d.ts +2 -1
- package/dist/lib/utils/set.js +8 -0
- package/package.json +8 -9
- package/tsdoc.json +8 -0
package/README.md
CHANGED
@@ -2,6 +2,8 @@
 
 Documentation generator for TypeScript projects.
 
+Plugins: [plugins](./internal-docs/plugins.md)
+
 [](https://github.com/TypeStrong/typedoc/actions)
 [](https://www.npmjs.com/package/typedoc)
 
package/dist/lib/application.d.ts
CHANGED
@@ -8,6 +8,7 @@ import { Options } from "./utils";
 import type { TypeDocOptions } from "./utils/options/declaration";
 import { type DocumentationEntryPoint, EntryPointStrategy } from "./utils/entry-point";
 import { Internationalization } from "./internationalization/internationalization";
+import { FileRegistry } from "./models/FileRegistry";
 export declare function createAppForTesting(): Application;
 /**
  * The default TypeDoc main application class.
@@ -53,6 +54,7 @@ export declare class Application extends ChildableComponent<Application, Abstrac
      */
     i18n: import("./internationalization/internationalization").TranslationProxy;
     options: Options;
+    files: FileRegistry;
     /** @internal */
     accessor lang: string;
     /** @internal */
package/dist/lib/application.js
CHANGED
@@ -100,6 +100,7 @@ const abstract_1 = require("./models/reflections/abstract");
 const ReflectionSymbolId_1 = require("./models/reflections/ReflectionSymbolId");
 const internationalization_1 = require("./internationalization/internationalization");
 const highlighter_1 = require("./utils/highlighter");
+const FileRegistry_1 = require("./models/FileRegistry");
 // eslint-disable-next-line @typescript-eslint/no-var-requires
 const packageInfo = require("../../package.json");
 const supportedVersionMajorMinor = packageInfo.peerDependencies.typescript
@@ -108,7 +109,9 @@ const supportedVersionMajorMinor = packageInfo.peerDependencies.typescript
 const DETECTOR = Symbol();
 function createAppForTesting() {
     // @ts-expect-error private constructor
-    return new Application(DETECTOR);
+    const app = new Application(DETECTOR);
+    app.files = new FileRegistry_1.FileRegistry();
+    return app;
 }
 const DEFAULT_READERS = [
     new index_2.TypeDocReader(),
@@ -190,6 +193,7 @@ let Application = (() => {
          */
         this.i18n = this.internationalization.proxy;
         this.options = new utils_1.Options(this.i18n);
+        this.files = new FileRegistry_1.ValidatingFileRegistry();
         _Application_lang_accessor_storage.set(this, __runInitializers(this, _lang_initializers, void 0));
         _Application_skipErrorChecking_accessor_storage.set(this, (__runInitializers(this, _lang_extraInitializers), __runInitializers(this, _skipErrorChecking_initializers, void 0)));
         _Application_entryPointStrategy_accessor_storage.set(this, (__runInitializers(this, _skipErrorChecking_extraInitializers), __runInitializers(this, _entryPointStrategy_initializers, void 0)));
@@ -525,7 +529,7 @@ let Application = (() => {
             return;
         }
         this.logger.info(this.i18n.merging_converted_projects());
-        const result = this.deserializer.reviveProjects(this.options.getValue("name") || "Documentation", projects);
+        const result = this.deserializer.reviveProjects(this.options.getValue("name") || "Documentation", projects, process.cwd(), this.files);
         this.trigger(application_events_1.ApplicationEvents.REVIVE, result);
         return result;
     }
@@ -559,7 +563,7 @@ let Application = (() => {
         });
         if (this.logger.hasErrors())
             return;
-        const result = this.deserializer.reviveProjects(this.options.getValue("name"), jsonProjects);
+        const result = this.deserializer.reviveProjects(this.options.getValue("name"), jsonProjects, process.cwd(), this.files);
         this.logger.verbose(`Reviving projects took ${Date.now() - start}ms`);
         // If we only revived one project, the project documents were set for
         // it when it was created. If we revived more than one project then
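Since `Application` now owns a `FileRegistry` and the revive call sites pass two extra arguments, external callers of the packages-merge API need to do the same. A minimal sketch of a caller after this change, assuming an already bootstrapped `app: Application` and a hypothetical `mergeSerializedProjects` helper (neither is part of the diff):

    import type { Application } from "typedoc";
    import fs from "fs";

    // Sketch only: revive previously serialized projects into a single project tree.
    function mergeSerializedProjects(app: Application, paths: string[]) {
        const jsonProjects = paths.map((p) => JSON.parse(fs.readFileSync(p, "utf-8")));
        // New in 0.26.0-beta.2: a base directory and the application's FileRegistry
        // are passed so that registered relative file paths can be revived.
        return app.deserializer.reviveProjects(
            "Documentation",
            jsonProjects,
            process.cwd(),
            app.files,
        );
    }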
package/dist/lib/converter/comments/declarationReference.js
CHANGED
@@ -8,6 +8,7 @@
  */
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.MeaningKeywords = void 0;
+exports.meaningToString = meaningToString;
 exports.parseString = parseString;
 exports.parseModuleSource = parseModuleSource;
 exports.parseSymbolReference = parseSymbolReference;
@@ -34,6 +35,19 @@ exports.MeaningKeywords = [
     "getter",
     "setter",
 ];
+function meaningToString(meaning) {
+    let result = "";
+    if (meaning.keyword) {
+        result += meaning.keyword;
+    }
+    else if (meaning.label) {
+        result += meaning.label;
+    }
+    if (typeof meaning.index === "number") {
+        result += `(${meaning.index})`;
+    }
+    return result;
+}
 // <TAB> <VT> <FF> <SP> <NBSP> <ZWNBSP> <USP>
 const WhiteSpace = /[\t\u2B7F\u240C \u00A0\uFEFF\p{White_Space}]/u;
 const LineTerminator = "\r\n\u2028\u2029";
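The new `meaningToString` helper turns the parsed meaning portion of a declaration reference back into a string. Its behavior follows directly from the code above; illustrative calls (this is an internal dist module, so the deep import is an assumption and may not be permitted by the package's export map):

    // Hypothetical deep import of the internal module shown above.
    import { meaningToString } from "typedoc/dist/lib/converter/comments/declarationReference";

    // A keyword or label forms the base; a numeric index is appended in parentheses.
    meaningToString({ keyword: "class" });          // "class"
    meaningToString({ label: "CUSTOM_LABEL" });     // "CUSTOM_LABEL"
    meaningToString({ keyword: "call", index: 2 }); // "call(2)"
    meaningToString({});                            // ""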
package/dist/lib/converter/comments/declarationReferenceResolver.js
CHANGED
@@ -22,7 +22,9 @@ function resolveDeclarationReference(reflection, ref) {
         high.push(reflection.project);
     }
     else {
-        (0, assert_1.ok)(ref.resolutionStart === "local");
+        // Work around no-unnecessary-condition, should be unnecessary... want a trap if it ever becomes false.
+        (0, assert_1.ok)(ref.resolutionStart.startsWith("local") &&
+            ref.resolutionStart.length === 5);
         // TypeScript's behavior is to first try to resolve links via variable scope, and then
         // if the link still hasn't been found, check either siblings (if comment belongs to a member)
         // or otherwise children.
@@ -128,7 +130,7 @@ function resolveKeyword(refl, kw) {
             if (refl.kindOf(models_1.ReflectionKind.ClassOrInterface |
                 models_1.ReflectionKind.TypeLiteral)) {
                 const ctor = refl.children?.find((c) => c.kindOf(models_1.ReflectionKind.Constructor));
-                return ctor
+                return ctor.signatures;
             }
             break;
         case "member":
package/dist/lib/converter/comments/discovery.js
CHANGED
@@ -18,6 +18,7 @@ const variablePropertyKinds = [
     typescript_1.default.SyntaxKind.PropertySignature,
     typescript_1.default.SyntaxKind.BinaryExpression,
     typescript_1.default.SyntaxKind.PropertyAssignment,
+    typescript_1.default.SyntaxKind.ShorthandPropertyAssignment,
     // class X { constructor(/** Comment */ readonly z: string) }
     typescript_1.default.SyntaxKind.Parameter,
     // Variable values
@@ -50,6 +51,9 @@ const wantedKinds = {
         typescript_1.default.SyntaxKind.BindingElement,
         typescript_1.default.SyntaxKind.ExportAssignment,
         typescript_1.default.SyntaxKind.PropertyAccessExpression,
+        typescript_1.default.SyntaxKind.PropertyDeclaration,
+        typescript_1.default.SyntaxKind.PropertyAssignment,
+        typescript_1.default.SyntaxKind.ShorthandPropertyAssignment,
     ],
     [models_1.ReflectionKind.Enum]: [
         typescript_1.default.SyntaxKind.EnumDeclaration,
@@ -68,6 +72,9 @@ const wantedKinds = {
         typescript_1.default.SyntaxKind.VariableDeclaration,
         typescript_1.default.SyntaxKind.ExportAssignment,
         typescript_1.default.SyntaxKind.PropertyAccessExpression,
+        typescript_1.default.SyntaxKind.PropertyDeclaration,
+        typescript_1.default.SyntaxKind.PropertyAssignment,
+        typescript_1.default.SyntaxKind.ShorthandPropertyAssignment,
     ],
     [models_1.ReflectionKind.Class]: [
         typescript_1.default.SyntaxKind.ClassDeclaration,
@@ -264,8 +271,7 @@ function isTopmostModuleDeclaration(node) {
  * ```
  */
 function getRootModuleDeclaration(node) {
-    while (node.parent &&
-        node.parent.kind === typescript_1.default.SyntaxKind.ModuleDeclaration) {
+    while (node.parent.kind === typescript_1.default.SyntaxKind.ModuleDeclaration) {
         const parent = node.parent;
         if (node.name.pos === parent.name.end + 1) {
             node = parent;
@@ -277,6 +283,8 @@ function getRootModuleDeclaration(node) {
     return node;
 }
 function declarationToCommentNode(node) {
+    // ts.SourceFile is a counterexample
+    // eslint-disable-next-line @typescript-eslint/no-unnecessary-condition
     if (!node.parent)
         return node;
     // const abc = 123
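With `ShorthandPropertyAssignment` added to `variablePropertyKinds` (and `PropertyDeclaration`/`PropertyAssignment` added to the wanted-kind lists), comments placed on object-literal members are now discovered for property reflections. A hypothetical source snippet, not from the diff, that these lists now match:

    const name = "typedoc";

    export const pkg = {
        /** Now discoverable: comment on a plain property assignment. */
        version: "0.26.0-beta.2",
        /** Now discoverable: comment on a shorthand property assignment. */
        name,
    };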
package/dist/lib/converter/comments/index.d.ts
CHANGED
@@ -2,6 +2,7 @@ import ts from "typescript";
 import { Comment, ReflectionKind } from "../../models";
 import { type Logger } from "../../utils";
 import type { CommentStyle, JsDocCompatibility } from "../../utils/options/declaration";
+import type { FileRegistry } from "../../models/FileRegistry";
 export interface CommentParserConfig {
     blockTags: Set<string>;
     inlineTags: Set<string>;
@@ -9,8 +10,8 @@ export interface CommentParserConfig {
     jsDocCompatibility: JsDocCompatibility;
 }
 export declare function clearCommentCache(): void;
-export declare function getComment(symbol: ts.Symbol, kind: ReflectionKind, config: CommentParserConfig, logger: Logger, commentStyle: CommentStyle, checker: ts.TypeChecker | undefined): Comment | undefined;
-export declare function getNodeComment(node: ts.Node, kind: ReflectionKind, config: CommentParserConfig, logger: Logger, commentStyle: CommentStyle, checker: ts.TypeChecker | undefined): Comment | undefined;
-export declare function getFileComment(file: ts.SourceFile, config: CommentParserConfig, logger: Logger, commentStyle: CommentStyle, checker: ts.TypeChecker | undefined): Comment | undefined;
-export declare function getSignatureComment(declaration: ts.SignatureDeclaration | ts.JSDocSignature, config: CommentParserConfig, logger: Logger, commentStyle: CommentStyle, checker: ts.TypeChecker | undefined): Comment | undefined;
-export declare function getJsDocComment(declaration: ts.JSDocPropertyLikeTag | ts.JSDocCallbackTag | ts.JSDocTypedefTag | ts.JSDocTemplateTag | ts.JSDocEnumTag, config: CommentParserConfig, logger: Logger, checker: ts.TypeChecker | undefined): Comment | undefined;
+export declare function getComment(symbol: ts.Symbol, kind: ReflectionKind, config: CommentParserConfig, logger: Logger, commentStyle: CommentStyle, checker: ts.TypeChecker | undefined, files: FileRegistry): Comment | undefined;
+export declare function getNodeComment(node: ts.Node, kind: ReflectionKind, config: CommentParserConfig, logger: Logger, commentStyle: CommentStyle, checker: ts.TypeChecker | undefined, files: FileRegistry): Comment | undefined;
+export declare function getFileComment(file: ts.SourceFile, config: CommentParserConfig, logger: Logger, commentStyle: CommentStyle, checker: ts.TypeChecker | undefined, files: FileRegistry): Comment | undefined;
+export declare function getSignatureComment(declaration: ts.SignatureDeclaration | ts.JSDocSignature, config: CommentParserConfig, logger: Logger, commentStyle: CommentStyle, checker: ts.TypeChecker | undefined, files: FileRegistry): Comment | undefined;
+export declare function getJsDocComment(declaration: ts.JSDocPropertyLikeTag | ts.JSDocCallbackTag | ts.JSDocTypedefTag | ts.JSDocTemplateTag | ts.JSDocEnumTag, config: CommentParserConfig, logger: Logger, checker: ts.TypeChecker | undefined, files: FileRegistry): Comment | undefined;
package/dist/lib/converter/comments/index.js
CHANGED
@@ -32,21 +32,21 @@ function clearCommentCache() {
     commentCache = new WeakMap();
     commentDiscoveryId = 0;
 }
-function getCommentWithCache(discovered, config, logger, checker) {
+function getCommentWithCache(discovered, config, logger, checker, files) {
     if (!discovered)
         return;
     const { file, ranges, jsDoc } = discovered;
     const cache = commentCache.get(file) || new Map();
-    if (cache
+    if (cache.has(ranges[0].pos)) {
         return cache.get(ranges[0].pos).clone();
     }
     let comment;
     switch (ranges[0].kind) {
         case typescript_1.default.SyntaxKind.MultiLineCommentTrivia:
-            comment = (0, parser_1.parseComment)((0, blockLexer_1.lexBlockComment)(file.text, ranges[0].pos, ranges[0].end, jsDoc, checker), config, file, logger);
+            comment = (0, parser_1.parseComment)((0, blockLexer_1.lexBlockComment)(file.text, ranges[0].pos, ranges[0].end, jsDoc, checker), config, file, logger, files);
             break;
         case typescript_1.default.SyntaxKind.SingleLineCommentTrivia:
-            comment = (0, parser_1.parseComment)((0, lineLexer_1.lexLineComments)(file.text, ranges), config, file, logger);
+            comment = (0, parser_1.parseComment)((0, lineLexer_1.lexLineComments)(file.text, ranges), config, file, logger, files);
             break;
         default:
             (0, utils_1.assertNever)(ranges[0].kind);
@@ -56,8 +56,8 @@ function getCommentWithCache(discovered, config, logger, checker) {
     commentCache.set(file, cache);
     return comment.clone();
 }
-function getCommentImpl(commentSource, config, logger, moduleComment, checker) {
-    const comment = getCommentWithCache(commentSource, config, logger, checker);
+function getCommentImpl(commentSource, config, logger, moduleComment, checker, files) {
+    const comment = getCommentWithCache(commentSource, config, logger, checker, files);
     if (moduleComment && comment) {
         // Module comment, make sure it is tagged with @packageDocumentation or @module.
         // If it isn't then the comment applies to the first statement in the file, so throw it away.
@@ -75,15 +75,15 @@ function getCommentImpl(commentSource, config, logger, moduleComment, checker) {
     }
     return comment;
 }
-function getComment(symbol, kind, config, logger, commentStyle, checker) {
+function getComment(symbol, kind, config, logger, commentStyle, checker, files) {
     const declarations = symbol.declarations || [];
     if (declarations.length &&
         declarations.every((d) => jsDocCommentKinds.includes(d.kind))) {
-        return getJsDocComment(declarations[0], config, logger, checker);
+        return getJsDocComment(declarations[0], config, logger, checker, files);
     }
     const sf = declarations.find(typescript_1.default.isSourceFile);
     if (sf) {
-        return getFileComment(sf, config, logger, commentStyle, checker);
+        return getFileComment(sf, config, logger, commentStyle, checker, files);
     }
     const isModule = declarations.some((decl) => {
         if (typescript_1.default.isModuleDeclaration(decl) && typescript_1.default.isStringLiteral(decl.name)) {
@@ -91,18 +91,18 @@ function getComment(symbol, kind, config, logger, commentStyle, checker) {
         }
         return false;
     });
-    const comment = getCommentImpl((0, discovery_1.discoverComment)(symbol, kind, logger, commentStyle), config, logger, isModule, checker);
+    const comment = getCommentImpl((0, discovery_1.discoverComment)(symbol, kind, logger, commentStyle), config, logger, isModule, checker, files);
     if (!comment && kind === models_1.ReflectionKind.Property) {
-        return getConstructorParamPropertyComment(symbol, config, logger, commentStyle, checker);
+        return getConstructorParamPropertyComment(symbol, config, logger, commentStyle, checker, files);
     }
     return comment;
 }
-function getNodeComment(node, kind, config, logger, commentStyle, checker) {
-    return getCommentImpl((0, discovery_1.discoverNodeComment)(node, commentStyle), config, logger, kind === models_1.ReflectionKind.Module, checker);
+function getNodeComment(node, kind, config, logger, commentStyle, checker, files) {
+    return getCommentImpl((0, discovery_1.discoverNodeComment)(node, commentStyle), config, logger, kind === models_1.ReflectionKind.Module, checker, files);
 }
-function getFileComment(file, config, logger, commentStyle, checker) {
+function getFileComment(file, config, logger, commentStyle, checker, files) {
     for (const commentSource of (0, discovery_1.discoverFileComments)(file, commentStyle)) {
-        const comment = getCommentWithCache(commentSource, config, logger, checker);
+        const comment = getCommentWithCache(commentSource, config, logger, checker, files);
         if (comment?.getTag("@license") || comment?.getTag("@import")) {
             continue;
         }
@@ -113,12 +113,12 @@ function getFileComment(file, config, logger, commentStyle, checker) {
         return;
     }
 }
-function getConstructorParamPropertyComment(symbol, config, logger, commentStyle, checker) {
+function getConstructorParamPropertyComment(symbol, config, logger, commentStyle, checker, files) {
     const decl = symbol.declarations?.find(typescript_1.default.isParameter);
     if (!decl)
         return;
     const ctor = decl.parent;
-    const comment = getSignatureComment(ctor, config, logger, commentStyle, checker);
+    const comment = getSignatureComment(ctor, config, logger, commentStyle, checker, files);
     const paramTag = comment?.getIdentifiedTag(symbol.name, "@param");
     if (paramTag) {
         const result = new models_1.Comment(paramTag.content);
@@ -126,10 +126,10 @@ function getConstructorParamPropertyComment(symbol, config, logger, commentStyle
         return result;
     }
 }
-function getSignatureComment(declaration, config, logger, commentStyle, checker) {
-    return getCommentImpl((0, discovery_1.discoverSignatureComment)(declaration, commentStyle), config, logger, false, checker);
+function getSignatureComment(declaration, config, logger, commentStyle, checker, files) {
+    return getCommentImpl((0, discovery_1.discoverSignatureComment)(declaration, commentStyle), config, logger, false, checker, files);
 }
-function getJsDocComment(declaration, config, logger, checker) {
+function getJsDocComment(declaration, config, logger, checker, files) {
     const file = declaration.getSourceFile();
     // First, get the whole comment. We know we'll need all of it.
     let parent = declaration.parent;
@@ -147,7 +147,7 @@ function getJsDocComment(declaration, config, logger, checker) {
             },
         ],
         jsDoc: parent,
-    }, config, logger, checker);
+    }, config, logger, checker, files);
     // And pull out the tag we actually care about.
     if (typescript_1.default.isJSDocEnumTag(declaration)) {
         const result = new models_1.Comment(comment.getTag("@enum")?.content);
package/dist/lib/converter/comments/parser.d.ts
CHANGED
@@ -3,7 +3,8 @@ import { Comment, type CommentDisplayPart } from "../../models";
 import { type Logger } from "../../utils";
 import type { MinimalSourceFile } from "../../utils/minimalSourceFile";
 import { type Token } from "./lexer";
-export declare function parseComment(tokens: Generator<Token, undefined, undefined>, config: CommentParserConfig, file: MinimalSourceFile, logger: Logger): Comment;
+import { FileRegistry } from "../../models/FileRegistry";
+export declare function parseComment(tokens: Generator<Token, undefined, undefined>, config: CommentParserConfig, file: MinimalSourceFile, logger: Logger, files: FileRegistry): Comment;
 /**
  * Intended for parsing markdown documents. This only parses code blocks and
  * inline tags outside of code blocks, everything else is text.
@@ -11,7 +12,7 @@ export declare function parseCommentString(tokens: Generator<Token, undefined, undefin
  * If you change this, also look at blockContent, as it likely needs similar
  * modifications to ensure parsing is consistent.
  */
-export declare function parseCommentString(tokens: Generator<Token, undefined, undefined>, config: CommentParserConfig, file: MinimalSourceFile, logger: Logger): {
+export declare function parseCommentString(tokens: Generator<Token, undefined, undefined>, config: CommentParserConfig, file: MinimalSourceFile, logger: Logger, files: FileRegistry): {
     content: CommentDisplayPart[];
     frontmatter: Record<string, unknown>;
 };
package/dist/lib/converter/comments/parser.js
CHANGED
@@ -32,6 +32,8 @@ const utils_1 = require("../../utils");
 const paths_1 = require("../../utils/paths");
 const lexer_1 = require("./lexer");
 const tagName_1 = require("./tagName");
+const FileRegistry_1 = require("../../models/FileRegistry");
+const textParser_1 = require("./textParser");
 function makeLookaheadGenerator(gen) {
     let trackHistory = false;
     const history = [];
@@ -64,14 +66,14 @@ function makeLookaheadGenerator(gen) {
         },
     };
 }
-function parseComment(tokens, config, file, logger) {
+function parseComment(tokens, config, file, logger, files) {
     const lexer = makeLookaheadGenerator(tokens);
     const tok = lexer.done() || lexer.peek();
     const comment = new models_1.Comment();
     comment.sourcePath = file.fileName;
-    comment.summary = blockContent(comment, lexer, config, logger.i18n, warningImpl);
+    comment.summary = blockContent(comment, lexer, config, logger.i18n, warningImpl, files);
     while (!lexer.done()) {
-        comment.blockTags.push(blockTag(comment, lexer, config, logger.i18n, warningImpl));
+        comment.blockTags.push(blockTag(comment, lexer, config, logger.i18n, warningImpl, files));
     }
     const tok2 = tok;
     postProcessComment(comment, logger.i18n, () => `${(0, paths_1.nicePath)(file.fileName)}:${file.getLineAndCharacterOfPosition(tok2.pos).line + 1}`, (message) => logger.warn(message));
@@ -87,7 +89,7 @@ function parseComment(tokens, config, file, logger) {
  * If you change this, also look at blockContent, as it likely needs similar
  * modifications to ensure parsing is consistent.
  */
-function parseCommentString(tokens, config, file, logger) {
+function parseCommentString(tokens, config, file, logger, files) {
     const suppressWarningsConfig = {
         ...config,
         jsDocCompatibility: {
@@ -99,6 +101,7 @@ function parseCommentString(tokens, config, file, logger) {
     };
     const content = [];
     const lexer = makeLookaheadGenerator(tokens);
+    let atNewLine = false;
     while (!lexer.done()) {
         let consume = true;
         const next = lexer.peek();
@@ -111,7 +114,7 @@ function parseCommentString(tokens, config, file, logger) {
             case lexer_1.TokenSyntaxKind.Text:
             case lexer_1.TokenSyntaxKind.Tag:
             case lexer_1.TokenSyntaxKind.CloseBrace:
-                content.push({ kind: "text", text: next.text });
+                (0, textParser_1.textContent)(file.fileName, next, logger.i18n, (msg, token) => logger.warn(msg, token.pos, file), content, files, atNewLine);
                 break;
             case lexer_1.TokenSyntaxKind.Code:
                 content.push({ kind: "code", text: next.text });
@@ -123,6 +126,7 @@ function parseCommentString(tokens, config, file, logger) {
             default:
                 (0, utils_1.assertNever)(next.kind);
         }
+        atNewLine = next.kind === lexer_1.TokenSyntaxKind.NewLine;
        if (consume) {
            lexer.take();
        }
@@ -223,7 +227,7 @@ function postProcessComment(comment, i18n, getPosition, warning) {
     }
 }
 const aliasedTags = new Map([["@return", "@returns"]]);
-function blockTag(comment, lexer, config, i18n, warning) {
+function blockTag(comment, lexer, config, i18n, warning, files) {
     const blockTag = lexer.take();
     (0, assert_1.ok)(blockTag.kind === lexer_1.TokenSyntaxKind.Tag, "blockTag called not at the start of a block tag."); // blockContent is broken if this fails.
     if (!config.blockTags.has(blockTag.text)) {
@@ -232,14 +236,14 @@ function blockTag(comment, lexer, config, i18n, warning) {
     const tagName = aliasedTags.get(blockTag.text) || blockTag.text;
     let content;
     if (tagName === "@example") {
-        return exampleBlock(comment, lexer, config, i18n, warning);
+        return exampleBlock(comment, lexer, config, i18n, warning, files);
     }
     else if (["@default", "@defaultValue"].includes(tagName) &&
         config.jsDocCompatibility.defaultTag) {
-        content = defaultBlockContent(comment, lexer, config, i18n, warning);
+        content = defaultBlockContent(comment, lexer, config, i18n, warning, files);
     }
     else {
-        content = blockContent(comment, lexer, config, i18n, warning);
+        content = blockContent(comment, lexer, config, i18n, warning, files);
     }
     return new models_1.CommentTag(tagName, content);
 }
@@ -247,13 +251,14 @@ function blockTag(comment, lexer, config, i18n, warning) {
 * The `@default` tag gets a special case because otherwise we will produce many warnings
 * about unescaped/mismatched/missing braces in legacy JSDoc comments
 */
-function defaultBlockContent(comment, lexer, config, i18n, warning) {
+function defaultBlockContent(comment, lexer, config, i18n, warning, files) {
     lexer.mark();
-    const content = blockContent(comment, lexer, config, i18n, () => { });
+    const tempRegistry = new FileRegistry_1.FileRegistry();
+    const content = blockContent(comment, lexer, config, i18n, () => { }, tempRegistry);
     const end = lexer.done() || lexer.peek();
     lexer.release();
     if (content.some((part) => part.kind === "code")) {
-        return blockContent(comment, lexer, config, i18n, warning);
+        return blockContent(comment, lexer, config, i18n, warning, files);
     }
     const tokens = [];
     while ((lexer.done() || lexer.peek()) !== end) {
@@ -276,9 +281,10 @@ function defaultBlockContent(comment, lexer, config, i18n, warning) {
 *
 * In TSDoc, we also want to treat the first line of the block as the example name.
 */
-function exampleBlock(comment, lexer, config, i18n, warning) {
+function exampleBlock(comment, lexer, config, i18n, warning, files) {
     lexer.mark();
-    const content = blockContent(comment, lexer, config, i18n, () => { });
+    const tempRegistry = new FileRegistry_1.FileRegistry();
+    const content = blockContent(comment, lexer, config, i18n, () => { }, tempRegistry);
     const end = lexer.done() || lexer.peek();
     lexer.release();
     if (!config.jsDocCompatibility.exampleTag ||
@@ -319,7 +325,7 @@ function exampleBlock(comment, lexer, config, i18n, warning) {
                 (0, utils_1.assertNever)(next.kind);
         }
     }
-    const content = blockContent(comment, lexer, config, i18n, warning);
+    const content = blockContent(comment, lexer, config, i18n, warning, files);
     const tag = new models_1.CommentTag("@example", content);
     if (exampleName.trim()) {
         tag.name = exampleName.trim();
@@ -358,7 +364,7 @@ function exampleBlock(comment, lexer, config, i18n, warning) {
 * If you change this, also look at parseCommentString as it
 * likely needs similar modifications to ensure parsing is consistent.
 */
-function blockContent(comment, lexer, config, i18n, warning) {
+function blockContent(comment, lexer, config, i18n, warning, files) {
     const content = [];
     let atNewLine = true;
     loop: while (!lexer.done()) {
@@ -366,9 +372,12 @@ function blockContent(comment, lexer, config, i18n, warning) {
         let consume = true;
         switch (next.kind) {
             case lexer_1.TokenSyntaxKind.NewLine:
-            case lexer_1.TokenSyntaxKind.Text:
                 content.push({ kind: "text", text: next.text });
                 break;
+            case lexer_1.TokenSyntaxKind.Text:
+                (0, textParser_1.textContent)(comment.sourcePath, next, i18n, warning,
+                /*out*/ content, files, atNewLine);
+                break;
             case lexer_1.TokenSyntaxKind.Code:
                 content.push({ kind: "code", text: next.text });
                 break;
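The practical effect of routing Text tokens through `textParser_1.textContent` (both in parseCommentString and in blockContent) is that relative markdown links inside comments become structured display parts instead of staying raw text. A rough sketch of the resulting shape, assuming the link resolves through the FileRegistry (the concrete `target` value is an assumption; it is whatever `files.register` returns, or `undefined` when the path cannot be registered):

    // Comment text token: "See [the setup guide](./docs/setup.md) for details."
    // Approximate display parts produced by textContent:
    const parts = [
        { kind: "text", text: "See [the setup guide](" },
        { kind: "relative-link", text: "./docs/setup.md", target: 1 }, // target comes from files.register
        { kind: "text", text: ") for details." },
    ];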
package/dist/lib/converter/comments/textParser.d.ts
ADDED
@@ -0,0 +1,19 @@
+/**
+ * Parser to handle plain text markdown.
+ *
+ * Responsible for recognizing relative paths within the text and turning
+ * them into references.
+ * @module
+ */
+import type { TranslationProxy, TranslatedString } from "../../internationalization";
+import type { CommentDisplayPart } from "../../models";
+import type { FileRegistry } from "../../models/FileRegistry";
+import { type Token } from "./lexer";
+/**
+ * Look for relative links within a piece of text and add them to the {@link FileRegistry}
+ * so that they can be correctly resolved during rendering.
+ *
+ * TODO: We also handle `<a>` and `<img>` tags with relative targets here.
+ *
+ */
+export declare function textContent(sourcePath: string, token: Token, i18n: TranslationProxy, warning: (msg: TranslatedString, token: Token) => void, outContent: CommentDisplayPart[], files: FileRegistry, atNewLine: boolean): void;
package/dist/lib/converter/comments/textParser.js
ADDED
@@ -0,0 +1,154 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.textContent = textContent;
+const lexer_1 = require("./lexer");
+const markdown_it_1 = __importDefault(require("markdown-it"));
+const MdHelpers = new markdown_it_1.default().helpers;
+/**
+ * Look for relative links within a piece of text and add them to the {@link FileRegistry}
+ * so that they can be correctly resolved during rendering.
+ *
+ * TODO: We also handle `<a>` and `<img>` tags with relative targets here.
+ *
+ */
+function textContent(sourcePath, token, i18n, warning, outContent, files, atNewLine) {
+    let lastPartEnd = 0;
+    const data = {
+        sourcePath,
+        token,
+        pos: 0,
+        i18n,
+        warning,
+        files: files,
+        atNewLine,
+    };
+    function addRef(ref) {
+        outContent.push({
+            kind: "text",
+            text: token.text.slice(lastPartEnd, ref.pos),
+        });
+        outContent.push({
+            kind: "relative-link",
+            text: token.text.slice(ref.pos, ref.end),
+            target: ref.target,
+        });
+        lastPartEnd = ref.end;
+        data.pos = ref.end;
+        if (!ref.target) {
+            warning(i18n.relative_path_0_does_not_exist(token.text.slice(ref.pos, ref.end)), {
+                kind: lexer_1.TokenSyntaxKind.Text,
+                pos: ref.pos,
+                text: token.text.slice(ref.pos, ref.end),
+            });
+        }
+    }
+    while (data.pos < token.text.length) {
+        const link = checkMarkdownLink(data);
+        if (link) {
+            addRef(link);
+            continue;
+        }
+        const reference = checkReference(data);
+        if (reference) {
+            addRef(reference);
+            continue;
+        }
+        ++data.pos;
+    }
+    if (lastPartEnd !== token.text.length) {
+        outContent.push({ kind: "text", text: token.text.slice(lastPartEnd) });
+    }
+}
+/**
+ * Links are inline text with the form `[ text ]( url title )`.
+ *
+ * Images are just links with a leading `!` and lack of support for `[ref]` referring to a path
+ * defined elsewhere, we don't care about that distinction here as we'll only replace the path
+ * piece of the image.
+ *
+ * Reference: https://github.com/markdown-it/markdown-it/blob/14.1.0/lib/rules_inline/link.mjs
+ * Reference: https://github.com/markdown-it/markdown-it/blob/14.1.0/lib/rules_inline/image.mjs
+ *
+ */
+function checkMarkdownLink(data) {
+    const { token, sourcePath, files } = data;
+    if (token.text[data.pos] === "[") {
+        const labelEnd = findLabelEnd(token.text, data.pos + 1);
+        if (labelEnd !== -1 &&
+            token.text[labelEnd] === "]" &&
+            token.text[labelEnd + 1] === "(") {
+            const link = MdHelpers.parseLinkDestination(token.text, labelEnd + 2, token.text.length);
+            if (link.ok) {
+                // Only make a relative-link display part if it's actually a relative link.
+                // Discard protocol:// links, unix style absolute paths, and windows style absolute paths.
+                if (isRelativeLink(link.str)) {
+                    return {
+                        pos: labelEnd + 2,
+                        end: link.pos,
+                        target: files.register(sourcePath, link.str),
+                    };
+                }
+                // This was a link, skip ahead to ensure we don't happen to parse
+                // something else as a link within the link.
+                data.pos = link.pos - 1;
+            }
+        }
+    }
+}
+/**
+ * Reference definitions are blocks with the form `[label]: link title`
+ * Reference: https://github.com/markdown-it/markdown-it/blob/14.1.0/lib/rules_block/reference.mjs
+ *
+ * Note: This may include false positives where TypeDoc recognizes a reference block that markdown
+ * does not if users start lines with something that looks like a reference block without fully
+ * separating it from an above paragraph. For a first cut, this is good enough.
+ */
+function checkReference(data) {
+    const { atNewLine, pos, token, files, sourcePath } = data;
+    if (atNewLine) {
+        let lookahead = pos;
+        while (/[ \t]/.test(token.text[lookahead])) {
+            ++lookahead;
+        }
+        if (token.text[lookahead] === "[") {
+            while (lookahead < token.text.length &&
+                /[^\n\]]/.test(token.text[lookahead])) {
+                ++lookahead;
+            }
+            if (token.text.startsWith("]:", lookahead)) {
+                lookahead += 2;
+                while (/[ \t]/.test(token.text[lookahead])) {
+                    ++lookahead;
+                }
+                const link = MdHelpers.parseLinkDestination(token.text, lookahead, token.text.length);
+                if (link.ok) {
+                    if (isRelativeLink(link.str)) {
+                        return {
+                            pos: lookahead,
+                            end: link.pos,
+                            target: files.register(sourcePath, link.str),
+                        };
+                    }
+                    data.pos = link.pos - 1;
+                }
+            }
+        }
+    }
+}
+function isRelativeLink(link) {
+    return !/^[a-z]+:\/\/|^\/|^[a-z]:\\/i.test(link);
+}
+function findLabelEnd(text, pos) {
+    while (pos < text.length) {
+        switch (text[pos]) {
+            case "\n":
+            case "]":
+                return pos;
+        }
+        ++pos;
+    }
+    return -1;
+}