typedoc 0.23.2 → 0.23.5

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (59)
  1. package/dist/index.d.ts +2 -2
  2. package/dist/index.js +3 -1
  3. package/dist/lib/converter/comments/blockLexer.js +5 -1
  4. package/dist/lib/converter/comments/declarationReference.d.ts +1 -1
  5. package/dist/lib/converter/comments/declarationReference.js +4 -1
  6. package/dist/lib/converter/comments/declarationReferenceResolver.js +12 -0
  7. package/dist/lib/converter/comments/discovery.js +30 -13
  8. package/dist/lib/converter/comments/index.js +2 -5
  9. package/dist/lib/converter/comments/lexer.d.ts +1 -0
  10. package/dist/lib/converter/comments/lineLexer.js +7 -3
  11. package/dist/lib/converter/comments/parser.d.ts +3 -1
  12. package/dist/lib/converter/comments/parser.js +91 -51
  13. package/dist/lib/converter/comments/rawLexer.js +5 -1
  14. package/dist/lib/converter/plugins/ImplementsPlugin.js +8 -0
  15. package/dist/lib/converter/plugins/InheritDocPlugin.js +7 -0
  16. package/dist/lib/converter/plugins/PackagePlugin.js +2 -3
  17. package/dist/lib/converter/plugins/SourcePlugin.d.ts +1 -0
  18. package/dist/lib/converter/plugins/SourcePlugin.js +4 -1
  19. package/dist/lib/converter/symbols.js +1 -7
  20. package/dist/lib/models/types.d.ts +1 -2
  21. package/dist/lib/models/types.js +7 -16
  22. package/dist/lib/output/events.d.ts +43 -0
  23. package/dist/lib/output/events.js +38 -1
  24. package/dist/lib/output/index.d.ts +1 -1
  25. package/dist/lib/output/index.js +2 -1
  26. package/dist/lib/output/plugins/JavascriptIndexPlugin.d.ts +2 -0
  27. package/dist/lib/output/plugins/JavascriptIndexPlugin.js +50 -18
  28. package/dist/lib/output/renderer.d.ts +6 -0
  29. package/dist/lib/output/renderer.js +6 -0
  30. package/dist/lib/output/themes/default/DefaultThemeRenderContext.d.ts +1 -0
  31. package/dist/lib/output/themes/default/DefaultThemeRenderContext.js +2 -0
  32. package/dist/lib/output/themes/default/layouts/default.js +1 -1
  33. package/dist/lib/output/themes/default/partials/anchor-icon.d.ts +2 -1
  34. package/dist/lib/output/themes/default/partials/anchor-icon.js +1 -2
  35. package/dist/lib/output/themes/default/partials/icon.d.ts +1 -3
  36. package/dist/lib/output/themes/default/partials/index.js +3 -4
  37. package/dist/lib/output/themes/default/partials/member.js +1 -1
  38. package/dist/lib/output/themes/default/partials/navigation.js +8 -9
  39. package/dist/lib/output/themes/default/partials/toolbar.js +2 -3
  40. package/dist/lib/serialization/components.d.ts +6 -1
  41. package/dist/lib/serialization/schema.d.ts +2 -1
  42. package/dist/lib/utils/entry-point.js +4 -2
  43. package/dist/lib/utils/index.d.ts +2 -0
  44. package/dist/lib/utils/index.js +4 -1
  45. package/dist/lib/utils/loggers.d.ts +7 -3
  46. package/dist/lib/utils/loggers.js +29 -37
  47. package/dist/lib/utils/minimalSourceFile.d.ts +7 -0
  48. package/dist/lib/utils/minimalSourceFile.js +36 -0
  49. package/dist/lib/utils/options/declaration.d.ts +3 -0
  50. package/dist/lib/utils/options/sources/typedoc.js +16 -0
  51. package/dist/lib/utils/package-manifest.d.ts +2 -1
  52. package/dist/lib/utils/package-manifest.js +6 -2
  53. package/dist/lib/utils/tsutils.d.ts +2 -0
  54. package/dist/lib/utils/tsutils.js +17 -0
  55. package/dist/lib/validation/documentation.d.ts +1 -1
  56. package/dist/lib/validation/documentation.js +1 -7
  57. package/dist/lib/validation/exports.js +4 -4
  58. package/package.json +3 -2
  59. package/static/main.js +2 -2
package/dist/index.d.ts CHANGED
@@ -4,9 +4,9 @@ export { resetReflectionID } from "./lib/models/reflections/abstract";
4
4
  export { normalizePath } from "./lib/utils/fs";
5
5
  export * from "./lib/models";
6
6
  export { Converter, Context, type CommentParserConfig } from "./lib/converter";
7
- export { Renderer, DefaultTheme, DefaultThemeRenderContext, UrlMapping, Theme, PageEvent, RendererEvent, MarkdownEvent, } from "./lib/output";
7
+ export { Renderer, DefaultTheme, DefaultThemeRenderContext, UrlMapping, Theme, PageEvent, RendererEvent, MarkdownEvent, IndexEvent, } from "./lib/output";
8
8
  export type { RenderTemplate, RendererHooks } from "./lib/output";
9
- export { ArgumentsReader, BindOption, CommentStyle, JSX, LogLevel, Logger, Options, ParameterHint, ParameterType, TSConfigReader, TypeDocReader, EntryPointStrategy, EventHooks, } from "./lib/utils";
9
+ export { ArgumentsReader, BindOption, CommentStyle, JSX, LogLevel, Logger, Options, ParameterHint, ParameterType, TSConfigReader, TypeDocReader, EntryPointStrategy, EventHooks, MinimalSourceFile, } from "./lib/utils";
10
10
  export type { OptionsReader, TypeDocOptions, TypeDocOptionMap, ValidationOptions, TypeDocOptionValues, KeyToDeclaration, DeclarationOption, DeclarationOptionBase, StringDeclarationOption, NumberDeclarationOption, BooleanDeclarationOption, ArrayDeclarationOption, MixedDeclarationOption, MapDeclarationOption, FlagsDeclarationOption, DeclarationOptionToOptionType, SortStrategy, ParameterTypeToOptionTypeMap, DocumentationEntryPoint, ManuallyValidatedOption, } from "./lib/utils";
11
11
  export type { EventMap, EventCallback } from "./lib/utils/events";
12
12
  export { JSONOutput, Serializer, type SerializerComponent, SerializeEvent, } from "./lib/serialization";
package/dist/index.js CHANGED
@@ -14,7 +14,7 @@ var __exportStar = (this && this.__exportStar) || function(m, exports) {
14
14
  for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);
15
15
  };
16
16
  Object.defineProperty(exports, "__esModule", { value: true });
17
- exports.TypeScript = exports.SerializeEvent = exports.Serializer = exports.JSONOutput = exports.EventHooks = exports.EntryPointStrategy = exports.TypeDocReader = exports.TSConfigReader = exports.ParameterType = exports.ParameterHint = exports.Options = exports.Logger = exports.LogLevel = exports.JSX = exports.CommentStyle = exports.BindOption = exports.ArgumentsReader = exports.MarkdownEvent = exports.RendererEvent = exports.PageEvent = exports.Theme = exports.UrlMapping = exports.DefaultThemeRenderContext = exports.DefaultTheme = exports.Renderer = exports.Context = exports.Converter = exports.normalizePath = exports.resetReflectionID = exports.Event = exports.EventDispatcher = exports.Application = void 0;
17
+ exports.TypeScript = exports.SerializeEvent = exports.Serializer = exports.JSONOutput = exports.MinimalSourceFile = exports.EventHooks = exports.EntryPointStrategy = exports.TypeDocReader = exports.TSConfigReader = exports.ParameterType = exports.ParameterHint = exports.Options = exports.Logger = exports.LogLevel = exports.JSX = exports.CommentStyle = exports.BindOption = exports.ArgumentsReader = exports.IndexEvent = exports.MarkdownEvent = exports.RendererEvent = exports.PageEvent = exports.Theme = exports.UrlMapping = exports.DefaultThemeRenderContext = exports.DefaultTheme = exports.Renderer = exports.Context = exports.Converter = exports.normalizePath = exports.resetReflectionID = exports.Event = exports.EventDispatcher = exports.Application = void 0;
18
18
  var application_1 = require("./lib/application");
19
19
  Object.defineProperty(exports, "Application", { enumerable: true, get: function () { return application_1.Application; } });
20
20
  var events_1 = require("./lib/utils/events");
@@ -37,6 +37,7 @@ Object.defineProperty(exports, "Theme", { enumerable: true, get: function () { r
37
37
  Object.defineProperty(exports, "PageEvent", { enumerable: true, get: function () { return output_1.PageEvent; } });
38
38
  Object.defineProperty(exports, "RendererEvent", { enumerable: true, get: function () { return output_1.RendererEvent; } });
39
39
  Object.defineProperty(exports, "MarkdownEvent", { enumerable: true, get: function () { return output_1.MarkdownEvent; } });
40
+ Object.defineProperty(exports, "IndexEvent", { enumerable: true, get: function () { return output_1.IndexEvent; } });
40
41
  var utils_1 = require("./lib/utils");
41
42
  Object.defineProperty(exports, "ArgumentsReader", { enumerable: true, get: function () { return utils_1.ArgumentsReader; } });
42
43
  Object.defineProperty(exports, "BindOption", { enumerable: true, get: function () { return utils_1.BindOption; } });
@@ -51,6 +52,7 @@ Object.defineProperty(exports, "TSConfigReader", { enumerable: true, get: functi
51
52
  Object.defineProperty(exports, "TypeDocReader", { enumerable: true, get: function () { return utils_1.TypeDocReader; } });
52
53
  Object.defineProperty(exports, "EntryPointStrategy", { enumerable: true, get: function () { return utils_1.EntryPointStrategy; } });
53
54
  Object.defineProperty(exports, "EventHooks", { enumerable: true, get: function () { return utils_1.EventHooks; } });
55
+ Object.defineProperty(exports, "MinimalSourceFile", { enumerable: true, get: function () { return utils_1.MinimalSourceFile; } });
54
56
  var serialization_1 = require("./lib/serialization");
55
57
  Object.defineProperty(exports, "JSONOutput", { enumerable: true, get: function () { return serialization_1.JSONOutput; } });
56
58
  Object.defineProperty(exports, "Serializer", { enumerable: true, get: function () { return serialization_1.Serializer; } });
@@ -102,6 +102,7 @@ function* lexBlockComment2(file, pos, end) {
102
102
  yield {
103
103
  kind: lexer_1.TokenSyntaxKind.Code,
104
104
  text: codeText.join(""),
105
+ pos,
105
106
  };
106
107
  pos = lookahead;
107
108
  break;
@@ -146,6 +147,7 @@ function* lexBlockComment2(file, pos, end) {
146
147
  yield {
147
148
  kind: lexer_1.TokenSyntaxKind.Code,
148
149
  text: codeText.join(""),
150
+ pos,
149
151
  };
150
152
  pos = lookahead;
151
153
  }
@@ -199,12 +201,13 @@ function* lexBlockComment2(file, pos, end) {
199
201
  if (textParts.some((part) => /\S/.test(part))) {
200
202
  braceStartsType = false;
201
203
  }
202
- pos = lookahead;
203
204
  // This piece of text had line continuations or escaped text
204
205
  yield {
205
206
  kind: lexer_1.TokenSyntaxKind.Text,
206
207
  text: textParts.join(""),
208
+ pos,
207
209
  };
210
+ pos = lookahead;
208
211
  break;
209
212
  }
210
213
  }
@@ -215,6 +218,7 @@ function* lexBlockComment2(file, pos, end) {
215
218
  return {
216
219
  kind,
217
220
  text: file.substring(start, pos),
221
+ pos: start,
218
222
  };
219
223
  }
220
224
  function skipIndent(pos) {
@@ -5,7 +5,7 @@
5
5
  *
6
6
  * @module
7
7
  */
8
- export declare const MeaningKeywords: readonly ["class", "interface", "type", "enum", "namespace", "function", "var", "constructor", "member", "event", "call", "new", "index", "complex"];
8
+ export declare const MeaningKeywords: readonly ["class", "interface", "type", "enum", "namespace", "function", "var", "constructor", "member", "event", "call", "new", "index", "complex", "getter", "setter"];
9
9
  export declare type MeaningKeyword = typeof MeaningKeywords[number];
10
10
  export interface DeclarationReference {
11
11
  resolutionStart: "global" | "local";
@@ -22,7 +22,10 @@ exports.MeaningKeywords = [
22
22
  "call",
23
23
  "new",
24
24
  "index",
25
- "complex", // Any complex type
25
+ "complex",
26
+ // TypeDoc specific
27
+ "getter",
28
+ "setter",
26
29
  ];
27
30
  // <TAB> <VT> <FF> <SP> <NBSP> <ZWNBSP> <USP>
28
31
  const WhiteSpace = /[\t\u2B7F\u240C \u00A0\uFEFF\p{White_Space}]/u;
@@ -137,6 +137,18 @@ function resolveKeyword(refl, kw) {
137
137
  if (refl.kindOf(models_1.ReflectionKind.SomeType))
138
138
  return [refl];
139
139
  break;
140
+ case "getter":
141
+ if (refl.getSignature) {
142
+ return [refl.getSignature];
143
+ }
144
+ break;
145
+ case "setter":
146
+ if (refl.setSignature) {
147
+ return [refl.setSignature];
148
+ }
149
+ break;
150
+ default:
151
+ (0, utils_1.assertNever)(kw);
140
152
  }
141
153
  }
142
154
  function resolveSymbolReferencePart(refl, path) {
@@ -18,6 +18,7 @@ const wantedKinds = {
18
18
  ts.SyntaxKind.SourceFile,
19
19
  ts.SyntaxKind.BindingElement,
20
20
  ts.SyntaxKind.ExportSpecifier,
21
+ ts.SyntaxKind.NamespaceExport,
21
22
  ],
22
23
  [models_1.ReflectionKind.Enum]: [
23
24
  ts.SyntaxKind.EnumDeclaration,
@@ -33,6 +34,7 @@ const wantedKinds = {
33
34
  ts.SyntaxKind.VariableDeclaration,
34
35
  ts.SyntaxKind.BindingElement,
35
36
  ts.SyntaxKind.ExportAssignment,
37
+ ts.SyntaxKind.PropertyAccessExpression,
36
38
  ],
37
39
  [models_1.ReflectionKind.Function]: [
38
40
  ts.SyntaxKind.FunctionDeclaration,
@@ -68,10 +70,7 @@ const wantedKinds = {
68
70
  [models_1.ReflectionKind.Parameter]: [ts.SyntaxKind.Parameter],
69
71
  [models_1.ReflectionKind.TypeLiteral]: [ts.SyntaxKind.TypeLiteral],
70
72
  [models_1.ReflectionKind.TypeParameter]: [ts.SyntaxKind.TypeParameter],
71
- [models_1.ReflectionKind.Accessor]: [
72
- ts.SyntaxKind.GetAccessor,
73
- ts.SyntaxKind.SetAccessor,
74
- ],
73
+ [models_1.ReflectionKind.Accessor]: [],
75
74
  [models_1.ReflectionKind.GetSignature]: [ts.SyntaxKind.GetAccessor],
76
75
  [models_1.ReflectionKind.SetSignature]: [ts.SyntaxKind.SetAccessor],
77
76
  [models_1.ReflectionKind.ObjectLiteral]: [ts.SyntaxKind.ObjectLiteralExpression],
@@ -138,6 +137,7 @@ function discoverSignatureComment(declaration, commentStyle) {
138
137
  }
139
138
  const text = node.getSourceFile().text;
140
139
  const comments = collectCommentRanges(ts.getLeadingCommentRanges(text, node.pos));
140
+ comments.reverse();
141
141
  const comment = comments.find((ranges) => permittedRange(text, ranges, commentStyle));
142
142
  if (comment) {
143
143
  return [node.getSourceFile(), comment];
@@ -184,10 +184,33 @@ function getRootModuleDeclaration(node) {
184
184
  return node;
185
185
  }
186
186
  function declarationToCommentNode(node) {
187
- if (node.parent?.kind === ts.SyntaxKind.VariableDeclarationList) {
187
+ if (!node.parent)
188
+ return node;
189
+ // const abc = 123
190
+ // ^^^
191
+ if (node.parent.kind === ts.SyntaxKind.VariableDeclarationList) {
188
192
  return node.parent.parent;
189
193
  }
190
- if (node.kind === ts.SyntaxKind.ModuleDeclaration) {
194
+ // const a = () => {}
195
+ // ^^^^^^^^
196
+ if (node.parent.kind === ts.SyntaxKind.VariableDeclaration) {
197
+ return node.parent.parent.parent;
198
+ }
199
+ // class X { y = () => {} }
200
+ // ^^^^^^^^
201
+ // function Z() {}
202
+ // Z.method = () => {}
203
+ // ^^^^^^^^
204
+ // export default () => {}
205
+ // ^^^^^^^^
206
+ if ([
207
+ ts.SyntaxKind.PropertyDeclaration,
208
+ ts.SyntaxKind.BinaryExpression,
209
+ ts.SyntaxKind.ExportAssignment,
210
+ ].includes(node.parent.kind)) {
211
+ return node.parent;
212
+ }
213
+ if (ts.isModuleDeclaration(node)) {
191
214
  if (!isTopmostModuleDeclaration(node)) {
192
215
  return;
193
216
  }
@@ -198,13 +221,7 @@ function declarationToCommentNode(node) {
198
221
  if (node.kind === ts.SyntaxKind.ExportSpecifier) {
199
222
  return node.parent.parent;
200
223
  }
201
- if ([
202
- ts.SyntaxKind.NamespaceExport,
203
- ts.SyntaxKind.FunctionExpression,
204
- ts.SyntaxKind.FunctionType,
205
- ts.SyntaxKind.FunctionType,
206
- ts.SyntaxKind.ArrowFunction,
207
- ].includes(node.kind)) {
224
+ if ([ts.SyntaxKind.NamespaceExport, ts.SyntaxKind.FunctionType].includes(node.kind)) {
208
225
  return node.parent;
209
226
  }
210
227
  return node;
@@ -4,7 +4,6 @@ exports.getJsDocComment = exports.getSignatureComment = exports.getComment = voi
4
4
  const ts = require("typescript");
5
5
  const models_1 = require("../../models");
6
6
  const utils_1 = require("../../utils");
7
- const paths_1 = require("../../utils/paths");
8
7
  const blockLexer_1 = require("./blockLexer");
9
8
  const discovery_1 = require("./discovery");
10
9
  const lineLexer_1 = require("./lineLexer");
@@ -18,15 +17,13 @@ function getCommentWithCache(discovered, config, logger) {
18
17
  if (cache?.has(ranges[0].pos)) {
19
18
  return cache.get(ranges[0].pos).clone();
20
19
  }
21
- const line = ts.getLineAndCharacterOfPosition(file, ranges[0].pos).line + 1;
22
- const warning = (warning) => logger.warn(`${warning} in comment at ${(0, paths_1.nicePath)(file.fileName)}:${line}.`);
23
20
  let comment;
24
21
  switch (ranges[0].kind) {
25
22
  case ts.SyntaxKind.MultiLineCommentTrivia:
26
- comment = (0, parser_1.parseComment)((0, blockLexer_1.lexBlockComment)(file.text, ranges[0].pos, ranges[0].end), config, warning);
23
+ comment = (0, parser_1.parseComment)((0, blockLexer_1.lexBlockComment)(file.text, ranges[0].pos, ranges[0].end), config, file, logger);
27
24
  break;
28
25
  case ts.SyntaxKind.SingleLineCommentTrivia:
29
- comment = (0, parser_1.parseComment)((0, lineLexer_1.lexLineComments)(file.text, ranges), config, warning);
26
+ comment = (0, parser_1.parseComment)((0, lineLexer_1.lexLineComments)(file.text, ranges), config, file, logger);
30
27
  break;
31
28
  default:
32
29
  (0, utils_1.assertNever)(ranges[0].kind);
@@ -10,4 +10,5 @@ export declare enum TokenSyntaxKind {
10
10
  export interface Token {
11
11
  kind: TokenSyntaxKind;
12
12
  text: string;
13
+ pos: number;
13
14
  }
@@ -5,7 +5,7 @@ const lexer_1 = require("./lexer");
5
5
  function* lexLineComments(file, ranges) {
6
6
  // Wrapper around our real lex function to collapse adjacent text tokens.
7
7
  let textToken;
8
- for (const token of lexBlockComment2(file, ranges[0].pos, ranges[ranges.length - 1].end)) {
8
+ for (const token of lexLineComments2(file, ranges[0].pos, ranges[ranges.length - 1].end)) {
9
9
  if (token.kind === lexer_1.TokenSyntaxKind.Text) {
10
10
  if (textToken) {
11
11
  textToken.text += token.text;
@@ -28,7 +28,7 @@ function* lexLineComments(file, ranges) {
28
28
  return;
29
29
  }
30
30
  exports.lexLineComments = lexLineComments;
31
- function* lexBlockComment2(file, pos, end) {
31
+ function* lexLineComments2(file, pos, end) {
32
32
  // Trailing whitespace
33
33
  while (pos < end && /\s/.test(file[end - 1])) {
34
34
  end--;
@@ -84,6 +84,7 @@ function* lexBlockComment2(file, pos, end) {
84
84
  yield {
85
85
  kind: lexer_1.TokenSyntaxKind.Code,
86
86
  text: codeText.join(""),
87
+ pos,
87
88
  };
88
89
  pos = lookahead;
89
90
  break;
@@ -115,6 +116,7 @@ function* lexBlockComment2(file, pos, end) {
115
116
  yield {
116
117
  kind: lexer_1.TokenSyntaxKind.Code,
117
118
  text: codeText.join(""),
119
+ pos,
118
120
  };
119
121
  pos = lookahead;
120
122
  }
@@ -168,12 +170,13 @@ function* lexBlockComment2(file, pos, end) {
168
170
  if (textParts.some((part) => /\S/.test(part))) {
169
171
  braceStartsType = false;
170
172
  }
171
- pos = lookahead;
172
173
  // This piece of text had line continuations or escaped text
173
174
  yield {
174
175
  kind: lexer_1.TokenSyntaxKind.Text,
175
176
  text: textParts.join(""),
177
+ pos,
176
178
  };
179
+ pos = lookahead;
177
180
  break;
178
181
  }
179
182
  }
@@ -184,6 +187,7 @@ function* lexBlockComment2(file, pos, end) {
184
187
  return {
185
188
  kind,
186
189
  text: file.substring(start, pos),
190
+ pos: start,
187
191
  };
188
192
  }
189
193
  function skipLeadingLineTrivia(pos) {
@@ -1,4 +1,6 @@
1
1
  import type { CommentParserConfig } from ".";
2
2
  import { Comment } from "../../models";
3
+ import { Logger } from "../../utils";
4
+ import type { MinimalSourceFile } from "../../utils/minimalSourceFile";
3
5
  import { Token } from "./lexer";
4
- export declare function parseComment(tokens: Generator<Token, undefined, undefined>, config: CommentParserConfig, warning: (message: string) => void): Comment;
6
+ export declare function parseComment(tokens: Generator<Token, undefined, undefined>, config: CommentParserConfig, file: MinimalSourceFile, logger: Logger): Comment;
@@ -4,34 +4,56 @@ exports.parseComment = void 0;
4
4
  const assert_1 = require("assert");
5
5
  const models_1 = require("../../models");
6
6
  const utils_1 = require("../../utils");
7
+ const paths_1 = require("../../utils/paths");
7
8
  const lexer_1 = require("./lexer");
8
9
  function makeLookaheadGenerator(gen) {
9
- let nextItem = gen.next();
10
+ let trackHistory = false;
11
+ const history = [];
12
+ const next = [gen.next()];
10
13
  return {
11
14
  done() {
12
- return !!nextItem.done;
15
+ return !!next[0].done;
13
16
  },
14
17
  peek() {
15
- (0, assert_1.ok)(!nextItem.done);
16
- return nextItem.value;
18
+ (0, assert_1.ok)(!next[0].done);
19
+ return next[0].value;
17
20
  },
18
21
  take() {
19
- const thisItem = nextItem;
22
+ const thisItem = next.shift();
23
+ if (trackHistory) {
24
+ history.push(thisItem);
25
+ }
20
26
  (0, assert_1.ok)(!thisItem.done);
21
- nextItem = gen.next();
27
+ next.push(gen.next());
22
28
  return thisItem.value;
23
29
  },
30
+ mark() {
31
+ (0, assert_1.ok)(!trackHistory, "Can only mark one location for backtracking at a time");
32
+ trackHistory = true;
33
+ },
34
+ release() {
35
+ trackHistory = false;
36
+ next.unshift(...history);
37
+ history.length = 0;
38
+ },
24
39
  };
25
40
  }
26
- function parseComment(tokens, config, warning) {
41
+ function parseComment(tokens, config, file, logger) {
27
42
  const lexer = makeLookaheadGenerator(tokens);
43
+ const tok = lexer.done() || lexer.peek();
28
44
  const comment = new models_1.Comment();
29
- comment.summary = blockContent(comment, lexer, config, warning);
45
+ comment.summary = blockContent(comment, lexer, config, warningImpl);
30
46
  while (!lexer.done()) {
31
- comment.blockTags.push(blockTag(comment, lexer, config, warning));
47
+ comment.blockTags.push(blockTag(comment, lexer, config, warningImpl));
32
48
  }
33
- postProcessComment(comment, warning);
49
+ postProcessComment(comment, (message) => {
50
+ (0, assert_1.ok)(typeof tok === "object");
51
+ logger.warn(`${message} in comment at ${(0, paths_1.nicePath)(file.fileName)}:${file.getLineAndCharacterOfPosition(tok.pos).line + 1}`);
52
+ });
34
53
  return comment;
54
+ function warningImpl(message, token) {
55
+ logger.warn(message, token.pos, file);
56
+ }
35
57
  }
36
58
  exports.parseComment = parseComment;
37
59
  const HAS_USER_IDENTIFIER = [
@@ -77,37 +99,6 @@ function postProcessComment(comment, warning) {
77
99
  if (tag.content.some((part) => part.kind === "inline-tag" && part.tag === "@inheritDoc")) {
78
100
  warning("An inline @inheritDoc tag should not appear within a block tag as it will not be processed");
79
101
  }
80
- if (tag.tag === "@example" &&
81
- !tag.content.some((part) => part.kind === "code")) {
82
- const caption = tag.content[0].text.match(/^\s*<caption>(.*?)<\/caption>\s*(\n|$)/);
83
- if (caption) {
84
- const code = models_1.Comment.combineDisplayParts([
85
- {
86
- kind: "text",
87
- text: tag.content[0].text.slice(caption[0].length),
88
- },
89
- ...tag.content.slice(1),
90
- ]);
91
- tag.content = [
92
- {
93
- kind: "text",
94
- text: caption[1] + "\n",
95
- },
96
- {
97
- kind: "code",
98
- text: makeCodeBlock(code),
99
- },
100
- ];
101
- }
102
- else {
103
- tag.content = [
104
- {
105
- kind: "code",
106
- text: makeCodeBlock(models_1.Comment.combineDisplayParts(tag.content)),
107
- },
108
- ];
109
- }
110
- }
111
102
  }
112
103
  const remarks = comment.blockTags.filter((tag) => tag.tag === "@remarks");
113
104
  if (remarks.length > 1) {
@@ -133,9 +124,58 @@ function postProcessComment(comment, warning) {
133
124
  const aliasedTags = new Map([["@return", "@returns"]]);
134
125
  function blockTag(comment, lexer, config, warning) {
135
126
  const blockTag = lexer.take();
136
- (0, assert_1.ok)(blockTag.kind === lexer_1.TokenSyntaxKind.Tag); // blockContent is broken if this fails.
127
+ (0, assert_1.ok)(blockTag.kind === lexer_1.TokenSyntaxKind.Tag, "blockTag called not at the start of a block tag."); // blockContent is broken if this fails.
137
128
  const tagName = aliasedTags.get(blockTag.text) || blockTag.text;
138
- return new models_1.CommentTag(tagName, blockContent(comment, lexer, config, warning));
129
+ let content;
130
+ if (tagName === "@example") {
131
+ content = exampleBlockContent(comment, lexer, config, warning);
132
+ }
133
+ else {
134
+ content = blockContent(comment, lexer, config, warning);
135
+ }
136
+ return new models_1.CommentTag(tagName, content);
137
+ }
138
+ /**
139
+ * The `@example` tag gets a special case because otherwise we will produce many warnings
140
+ * about unescaped/mismatched/missing braces
141
+ */
142
+ function exampleBlockContent(comment, lexer, config, warning) {
143
+ lexer.mark();
144
+ const content = blockContent(comment, lexer, config, () => { });
145
+ const end = lexer.done() || lexer.peek();
146
+ lexer.release();
147
+ if (content.some((part) => part.kind === "code")) {
148
+ return blockContent(comment, lexer, config, warning);
149
+ }
150
+ const tokens = [];
151
+ while ((lexer.done() || lexer.peek()) !== end) {
152
+ tokens.push(lexer.take());
153
+ }
154
+ const blockText = tokens
155
+ .map((tok) => tok.text)
156
+ .join("")
157
+ .trim();
158
+ const caption = blockText.match(/^\s*<caption>(.*?)<\/caption>\s*(\n|$)/);
159
+ if (caption) {
160
+ return [
161
+ {
162
+ kind: "text",
163
+ text: caption[1] + "\n",
164
+ },
165
+ {
166
+ kind: "code",
167
+ text: makeCodeBlock(blockText.slice(caption[0].length)),
168
+ },
169
+ ];
170
+ }
171
+ else {
172
+ return [
173
+ {
174
+ kind: "code",
175
+ text: makeCodeBlock(blockText),
176
+ },
177
+ ];
178
+ }
139
179
  }
140
180
  function blockContent(comment, lexer, config, warning) {
141
181
  const content = [];
@@ -153,7 +193,7 @@ function blockContent(comment, lexer, config, warning) {
153
193
  break;
154
194
  case lexer_1.TokenSyntaxKind.Tag:
155
195
  if (next.text === "@inheritdoc") {
156
- warning("The @inheritDoc tag should be properly capitalized");
196
+ warning("The @inheritDoc tag should be properly capitalized", next);
157
197
  next.text = "@inheritDoc";
158
198
  }
159
199
  if (config.modifierTags.has(next.text)) {
@@ -163,7 +203,7 @@ function blockContent(comment, lexer, config, warning) {
163
203
  else if (!atNewLine && !config.blockTags.has(next.text)) {
164
204
  // Treat unknown tag as a modifier, but warn about it.
165
205
  comment.modifierTags.add(next.text);
166
- warning(`Treating unrecognized tag "${next.text}" as a modifier tag`);
206
+ warning(`Treating unrecognized tag "${next.text}" as a modifier tag`, next);
167
207
  break;
168
208
  }
169
209
  else {
@@ -176,7 +216,7 @@ function blockContent(comment, lexer, config, warning) {
176
216
  break;
177
217
  case lexer_1.TokenSyntaxKind.CloseBrace:
178
218
  // Unmatched closing brace, generate a warning, and treat it as text.
179
- warning(`Unmatched closing brace`);
219
+ warning(`Unmatched closing brace`, next);
180
220
  content.push({ kind: "text", text: next.text });
181
221
  break;
182
222
  case lexer_1.TokenSyntaxKind.OpenBrace:
@@ -224,7 +264,7 @@ function inlineTag(lexer, block, config, warning) {
224
264
  // then produce a warning and treat what we've consumed as plain text.
225
265
  if (lexer.done() ||
226
266
  ![lexer_1.TokenSyntaxKind.Text, lexer_1.TokenSyntaxKind.Tag].includes(lexer.peek().kind)) {
227
- warning("Encountered an unescaped open brace without an inline tag");
267
+ warning("Encountered an unescaped open brace without an inline tag", openBrace);
228
268
  block.push({ kind: "text", text: openBrace.text });
229
269
  return;
230
270
  }
@@ -233,7 +273,7 @@ function inlineTag(lexer, block, config, warning) {
233
273
  (tagName.kind === lexer_1.TokenSyntaxKind.Text &&
234
274
  (!/^\s+$/.test(tagName.text) ||
235
275
  lexer.peek().kind != lexer_1.TokenSyntaxKind.Tag))) {
236
- warning("Encountered an unescaped open brace without an inline tag");
276
+ warning("Encountered an unescaped open brace without an inline tag", openBrace);
237
277
  block.push({ kind: "text", text: openBrace.text + tagName.text });
238
278
  return;
239
279
  }
@@ -241,7 +281,7 @@ function inlineTag(lexer, block, config, warning) {
241
281
  tagName = lexer.take();
242
282
  }
243
283
  if (!config.inlineTags.has(tagName.text)) {
244
- warning(`Encountered an unknown inline tag "${tagName.text}"`);
284
+ warning(`Encountered an unknown inline tag "${tagName.text}"`, tagName);
245
285
  }
246
286
  const content = [];
247
287
  // At this point, we know we have an inline tag. Treat everything following as plain text,
@@ -249,12 +289,12 @@ function inlineTag(lexer, block, config, warning) {
249
289
  while (!lexer.done() && lexer.peek().kind !== lexer_1.TokenSyntaxKind.CloseBrace) {
250
290
  const token = lexer.take();
251
291
  if (token.kind === lexer_1.TokenSyntaxKind.OpenBrace) {
252
- warning("Encountered an open brace within an inline tag, this is likely a mistake");
292
+ warning("Encountered an open brace within an inline tag, this is likely a mistake", token);
253
293
  }
254
294
  content.push(token.kind === lexer_1.TokenSyntaxKind.NewLine ? " " : token.text);
255
295
  }
256
296
  if (lexer.done()) {
257
- warning("Inline tag is not closed");
297
+ warning("Inline tag is not closed", openBrace);
258
298
  }
259
299
  else {
260
300
  lexer.take(); // Close brace
@@ -89,6 +89,7 @@ function* lexCommentString2(file) {
89
89
  yield {
90
90
  kind: lexer_1.TokenSyntaxKind.Code,
91
91
  text: codeText.join(""),
92
+ pos,
92
93
  };
93
94
  pos = lookahead;
94
95
  break;
@@ -119,6 +120,7 @@ function* lexCommentString2(file) {
119
120
  yield {
120
121
  kind: lexer_1.TokenSyntaxKind.Code,
121
122
  text: codeText.join(""),
123
+ pos,
122
124
  };
123
125
  pos = lookahead;
124
126
  }
@@ -172,12 +174,13 @@ function* lexCommentString2(file) {
172
174
  if (textParts.some((part) => /\S/.test(part))) {
173
175
  braceStartsType = false;
174
176
  }
175
- pos = lookahead;
176
177
  // This piece of text had line continuations or escaped text
177
178
  yield {
178
179
  kind: lexer_1.TokenSyntaxKind.Text,
179
180
  text: textParts.join(""),
181
+ pos,
180
182
  };
183
+ pos = lookahead;
181
184
  break;
182
185
  }
183
186
  }
@@ -188,6 +191,7 @@ function* lexCommentString2(file) {
188
191
  return {
189
192
  kind,
190
193
  text: file.substring(start, pos),
194
+ pos: start,
191
195
  };
192
196
  }
193
197
  function lookaheadExactlyNTicks(pos, n) {
@@ -292,6 +292,14 @@ function handleInheritedComments(child, parent) {
292
292
  copyComment(cs, ps);
293
293
  }
294
294
  }
295
+ else if (parent.kindOf(index_1.ReflectionKind.Property) &&
296
+ parent.type instanceof types_1.ReflectionType &&
297
+ parent.type.declaration.signatures &&
298
+ child.signatures) {
299
+ for (const [cs, ps] of (0, array_1.zip)(child.signatures, parent.type.declaration.signatures)) {
300
+ copyComment(cs, ps);
301
+ }
302
+ }
295
303
  }
296
304
  /**
297
305
  * Copy the comment of the source reflection to the target reflection with a JSDoc style copy
@@ -70,6 +70,13 @@ let InheritDocPlugin = class InheritDocPlugin extends components_1.ConverterComp
70
70
  sourceRefl = sourceRefl.getAllSignatures()[index];
71
71
  }
72
72
  }
73
+ if (sourceRefl instanceof models_1.DeclarationReflection &&
74
+ sourceRefl.kindOf(models_1.ReflectionKind.Accessor)) {
75
+ // Accessors, like functions, never have comments on their actual root reflection.
76
+ // If the user didn't specify whether to inherit from the getter or setter, then implicitly
77
+ // try to inherit from the getter, #1968.
78
+ sourceRefl = sourceRefl.getSignature || sourceRefl.setSignature;
79
+ }
73
80
  if (!sourceRefl) {
74
81
  this.application.logger.warn(`Failed to find "${source}" to inherit the comment from in the comment for ${reflection.getFullName()}`);
75
82
  continue;
@@ -16,6 +16,7 @@ const fs_1 = require("../../utils/fs");
16
16
  const paths_1 = require("../../utils/paths");
17
17
  const rawLexer_1 = require("../comments/rawLexer");
18
18
  const parser_1 = require("../comments/parser");
19
+ const minimalSourceFile_1 = require("../../utils/minimalSourceFile");
19
20
  /**
20
21
  * A handler that tries to find the package.json and readme.md files of the
21
22
  * current project.
@@ -75,9 +76,7 @@ let PackagePlugin = class PackagePlugin extends components_1.ConverterComponent
75
76
  const project = context.project;
76
77
  if (this.readmeFile) {
77
78
  const readme = (0, utils_1.readFile)(this.readmeFile);
78
- const comment = (0, parser_1.parseComment)((0, rawLexer_1.lexCommentString)(readme), context.converter.config, (msg) => {
79
- this.application.logger.warn(`${msg} in ${this.readmeFile}`);
80
- });
79
+ const comment = (0, parser_1.parseComment)((0, rawLexer_1.lexCommentString)(readme), context.converter.config, new minimalSourceFile_1.MinimalSourceFile(readme, this.readmeFile), context.logger);
81
80
  if (comment.blockTags.length || comment.modifierTags.size) {
82
81
  const ignored = [
83
82
  ...comment.blockTags.map((tag) => tag.tag),
@@ -6,6 +6,7 @@ export declare class SourcePlugin extends ConverterComponent {
6
6
  readonly disableSources: boolean;
7
7
  readonly gitRevision: string;
8
8
  readonly gitRemote: string;
9
+ readonly basePath: string;
9
10
  /**
10
11
  * All file names to find the base path from.
11
12
  */
@@ -85,7 +85,7 @@ let SourcePlugin = class SourcePlugin extends components_1.ConverterComponent {
85
85
  onBeginResolve(context) {
86
86
  if (this.disableSources)
87
87
  return;
88
- const basePath = (0, fs_1.getCommonDirectory)([...this.fileNames]);
88
+ const basePath = this.basePath || (0, fs_1.getCommonDirectory)([...this.fileNames]);
89
89
  for (const refl of Object.values(context.project.reflections)) {
90
90
  for (const source of refl.sources || []) {
91
91
  if (repository_1.gitIsInstalled) {
@@ -139,6 +139,9 @@ __decorate([
139
139
  __decorate([
140
140
  (0, utils_1.BindOption)("gitRemote")
141
141
  ], SourcePlugin.prototype, "gitRemote", void 0);
142
+ __decorate([
143
+ (0, utils_1.BindOption)("basePath")
144
+ ], SourcePlugin.prototype, "basePath", void 0);
142
145
  SourcePlugin = __decorate([
143
146
  (0, components_1.Component)({ name: "source" })
144
147
  ], SourcePlugin);