@rightcapital/phpdoc-parser 0.5.3 → 0.6.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (52)
  1. package/dist/index.d.ts +10 -1
  2. package/dist/index.js +22 -4
  3. package/dist/phpdoc-parser/ast/comment.d.ts +7 -0
  4. package/dist/phpdoc-parser/ast/comment.js +14 -0
  5. package/dist/phpdoc-parser/ast/const-expr/const-expr-node.d.ts +0 -2
  6. package/dist/phpdoc-parser/ast/const-expr/const-expr-node.js +0 -3
  7. package/dist/phpdoc-parser/ast/const-expr/const-expr-string-node.d.ts +5 -1
  8. package/dist/phpdoc-parser/ast/const-expr/const-expr-string-node.js +33 -2
  9. package/dist/phpdoc-parser/ast/php-doc/param-closure-this-tag-value-node.d.ts +10 -0
  10. package/dist/phpdoc-parser/ast/php-doc/param-closure-this-tag-value-node.js +19 -0
  11. package/dist/phpdoc-parser/ast/php-doc/param-immediately-invoked-callable-tag-value-node.d.ts +8 -0
  12. package/dist/phpdoc-parser/ast/php-doc/param-immediately-invoked-callable-tag-value-node.js +18 -0
  13. package/dist/phpdoc-parser/ast/php-doc/param-later-invoked-callable-tag-value-node.d.ts +8 -0
  14. package/dist/phpdoc-parser/ast/php-doc/param-later-invoked-callable-tag-value-node.js +18 -0
  15. package/dist/phpdoc-parser/ast/php-doc/php-doc-node.d.ts +2 -0
  16. package/dist/phpdoc-parser/ast/php-doc/php-doc-node.js +6 -0
  17. package/dist/phpdoc-parser/ast/php-doc/pure-unless-callable-is-impure-tag-value-node.d.ts +8 -0
  18. package/dist/phpdoc-parser/ast/php-doc/pure-unless-callable-is-impure-tag-value-node.js +18 -0
  19. package/dist/phpdoc-parser/ast/php-doc/require-extends-tag-value-node.d.ts +9 -0
  20. package/dist/phpdoc-parser/ast/php-doc/require-extends-tag-value-node.js +18 -0
  21. package/dist/phpdoc-parser/ast/php-doc/require-implements-tag-value-node.d.ts +9 -0
  22. package/dist/phpdoc-parser/ast/php-doc/require-implements-tag-value-node.js +18 -0
  23. package/dist/phpdoc-parser/ast/php-doc/sealed-tag-value-node.d.ts +9 -0
  24. package/dist/phpdoc-parser/ast/php-doc/sealed-tag-value-node.js +18 -0
  25. package/dist/phpdoc-parser/ast/php-doc/template-tag-value-node.d.ts +2 -1
  26. package/dist/phpdoc-parser/ast/php-doc/template-tag-value-node.js +4 -2
  27. package/dist/phpdoc-parser/ast/type/array-shape-item-node.d.ts +3 -2
  28. package/dist/phpdoc-parser/ast/type/array-shape-node.d.ts +8 -2
  29. package/dist/phpdoc-parser/ast/type/array-shape-node.js +13 -3
  30. package/dist/phpdoc-parser/ast/type/array-shape-unsealed-type-node.d.ts +8 -0
  31. package/dist/phpdoc-parser/ast/type/array-shape-unsealed-type-node.js +21 -0
  32. package/dist/phpdoc-parser/ast/type/callable-type-node.d.ts +3 -1
  33. package/dist/phpdoc-parser/ast/type/callable-type-node.js +4 -2
  34. package/dist/phpdoc-parser/ast/types.d.ts +2 -1
  35. package/dist/phpdoc-parser/ast/types.js +1 -0
  36. package/dist/phpdoc-parser/lexer/lexer.d.ts +2 -0
  37. package/dist/phpdoc-parser/lexer/lexer.js +3 -0
  38. package/dist/phpdoc-parser/parser/const-expr-parser.d.ts +2 -4
  39. package/dist/phpdoc-parser/parser/const-expr-parser.js +6 -20
  40. package/dist/phpdoc-parser/parser/php-doc-parser.d.ts +11 -9
  41. package/dist/phpdoc-parser/parser/php-doc-parser.js +112 -66
  42. package/dist/phpdoc-parser/parser/string-unescaper.js +2 -2
  43. package/dist/phpdoc-parser/parser/token-iterator.d.ts +4 -0
  44. package/dist/phpdoc-parser/parser/token-iterator.js +31 -3
  45. package/dist/phpdoc-parser/parser/type-parser.d.ts +8 -2
  46. package/dist/phpdoc-parser/parser/type-parser.js +205 -76
  47. package/dist/phpdoc-parser/parser-config.d.ts +10 -0
  48. package/dist/phpdoc-parser/parser-config.js +12 -0
  49. package/dist/phpdoc-parser/transpiler/php-doc-to-typescript-type-transpiler.js +9 -2
  50. package/package.json +8 -5
  51. package/dist/phpdoc-parser/ast/const-expr/quote-aware-const-expr-string-node.d.ts +0 -10
  52. package/dist/phpdoc-parser/ast/const-expr/quote-aware-const-expr-string-node.js +0 -47
@@ -1,4 +1,5 @@
1
1
  import type { BaseNode } from '../ast/base-node';
2
+ import { TemplateTagValueNode } from '../ast/php-doc/template-tag-value-node';
2
3
  import { GenericTypeNode } from '../ast/type/generic-type-node';
3
4
  import { IdentifierTypeNode } from '../ast/type/identifier-type-node';
4
5
  import type { TypeNode } from '../ast/type/type-node';
@@ -6,15 +7,16 @@ import type { ConstExprParser } from './const-expr-parser';
6
7
  import type { TokenIterator } from './token-iterator';
7
8
  export declare class TypeParser {
8
9
  private constExprParser;
9
- private quoteAwareConstExprString;
10
10
  private useLinesAttributes;
11
11
  private useIndexAttributes;
12
- constructor(constExprParser?: ConstExprParser | null, quoteAwareConstExprString?: boolean, usedAttributes?: {
12
+ constructor(constExprParser?: ConstExprParser | null, usedAttributes?: {
13
13
  lines?: boolean;
14
14
  indexes?: boolean;
15
15
  });
16
16
  parse(tokens: TokenIterator): TypeNode;
17
+ private enrichTypeOnUnionOrIntersection;
17
18
  enrichWithAttributes<T extends BaseNode>(tokens: TokenIterator, type: T, startLine: number, startIndex: number): T;
19
+ parseTemplateTagValue(tokens: TokenIterator, parseDescription?: (tokens: TokenIterator) => string): TemplateTagValueNode;
18
20
  private subParse;
19
21
  private parseAtomic;
20
22
  private parseUnion;
@@ -28,6 +30,8 @@ export declare class TypeParser {
28
30
  parseGeneric(tokens: TokenIterator, baseType: IdentifierTypeNode): GenericTypeNode;
29
31
  private parseGenericTypeArgument;
30
32
  private parseCallable;
33
+ private parseCallableTemplates;
34
+ private parseCallableTemplateArgument;
31
35
  private parseCallableParameter;
32
36
  private parseCallableReturnType;
33
37
  private tryParseCallable;
@@ -35,6 +39,8 @@ export declare class TypeParser {
35
39
  private parseArrayShape;
36
40
  private parseArrayShapeItem;
37
41
  private parseArrayShapeKey;
42
+ private parseArrayShapeUnsealedType;
43
+ private parseListShapeUnsealedType;
38
44
  private parseObjectShape;
39
45
  private parseObjectShapeItem;
40
46
  private parseObjectShapeKey;
@@ -4,9 +4,11 @@ exports.TypeParser = void 0;
4
4
  const const_expr_array_node_1 = require("../ast/const-expr/const-expr-array-node");
5
5
  const const_expr_integer_node_1 = require("../ast/const-expr/const-expr-integer-node");
6
6
  const const_expr_string_node_1 = require("../ast/const-expr/const-expr-string-node");
7
- const quote_aware_const_expr_string_node_1 = require("../ast/const-expr/quote-aware-const-expr-string-node");
7
+ const const_fetch_node_1 = require("../ast/const-expr/const-fetch-node");
8
+ const template_tag_value_node_1 = require("../ast/php-doc/template-tag-value-node");
8
9
  const array_shape_item_node_1 = require("../ast/type/array-shape-item-node");
9
10
  const array_shape_node_1 = require("../ast/type/array-shape-node");
11
+ const array_shape_unsealed_type_node_1 = require("../ast/type/array-shape-unsealed-type-node");
10
12
  const array_type_node_1 = require("../ast/type/array-type-node");
11
13
  const callable_type_node_1 = require("../ast/type/callable-type-node");
12
14
  const callable_type_parameter_node_1 = require("../ast/type/callable-type-parameter-node");
@@ -27,42 +29,85 @@ const lexer_1 = require("../lexer/lexer");
27
29
  const parser_exception_1 = require("./parser-exception");
28
30
  const string_unescaper_1 = require("./string-unescaper");
29
31
  class TypeParser {
30
- constructor(constExprParser = null, quoteAwareConstExprString = false, usedAttributes = {}) {
32
+ constructor(constExprParser = null, usedAttributes = {}) {
31
33
  var _a, _b;
32
34
  this.constExprParser = constExprParser;
33
- this.quoteAwareConstExprString = quoteAwareConstExprString;
34
35
  this.useLinesAttributes = (_a = usedAttributes.lines) !== null && _a !== void 0 ? _a : false;
35
36
  this.useIndexAttributes = (_b = usedAttributes.indexes) !== null && _b !== void 0 ? _b : false;
36
37
  }
37
38
  parse(tokens) {
39
+ var _a;
38
40
  const startLine = tokens.currentTokenLine();
39
41
  const startIndex = tokens.currentTokenIndex();
40
42
  if (tokens.isCurrentTokenType(lexer_1.Lexer.TOKEN_NULLABLE)) {
41
43
  return this.parseNullable(tokens);
42
44
  }
43
45
  let type = this.parseAtomic(tokens);
44
- if (tokens.isCurrentTokenType(lexer_1.Lexer.TOKEN_UNION)) {
45
- type = this.parseUnion(tokens, type);
46
+ tokens.pushSavePoint();
47
+ tokens.skipNewLineTokensAndConsumeComments();
48
+ let enrichedType = null;
49
+ try {
50
+ enrichedType = this.enrichTypeOnUnionOrIntersection(tokens, type);
46
51
  }
47
- else if (tokens.isCurrentTokenType(lexer_1.Lexer.TOKEN_INTERSECTION)) {
48
- type = this.parseIntersection(tokens, type);
52
+ catch (_b) {
53
+ enrichedType = null;
49
54
  }
50
- else if (tokens.isCurrentTokenType(lexer_1.Lexer.TOKEN_OPEN_SQUARE_BRACKET)) {
51
- type = this.tryParseArrayOrOffsetAccess(tokens, type);
55
+ if (enrichedType !== null) {
56
+ type = enrichedType;
57
+ tokens.dropSavePoint();
58
+ }
59
+ else {
60
+ tokens.rollback();
61
+ type = (_a = this.enrichTypeOnUnionOrIntersection(tokens, type)) !== null && _a !== void 0 ? _a : type;
52
62
  }
53
63
  return this.enrichWithAttributes(tokens, type, startLine, startIndex);
54
64
  }
65
+ enrichTypeOnUnionOrIntersection(tokens, type) {
66
+ if (tokens.isCurrentTokenType(lexer_1.Lexer.TOKEN_UNION)) {
67
+ return this.parseUnion(tokens, type);
68
+ }
69
+ if (tokens.isCurrentTokenType(lexer_1.Lexer.TOKEN_INTERSECTION)) {
70
+ return this.parseIntersection(tokens, type);
71
+ }
72
+ return null;
73
+ }
55
74
  enrichWithAttributes(tokens, type, startLine, startIndex) {
56
75
  if (this.useLinesAttributes) {
57
76
  type.setAttribute(types_1.Attribute.START_LINE, startLine);
58
77
  type.setAttribute(types_1.Attribute.END_LINE, tokens.currentTokenLine());
59
78
  }
79
+ tokens.flushComments();
60
80
  if (this.useIndexAttributes) {
61
81
  type.setAttribute(types_1.Attribute.START_INDEX, startIndex);
62
82
  type.setAttribute(types_1.Attribute.END_INDEX, tokens.endIndexOfLastRelevantToken());
63
83
  }
64
84
  return type;
65
85
  }
86
+ parseTemplateTagValue(tokens, parseDescription) {
87
+ const name = tokens.currentTokenValue();
88
+ tokens.consumeTokenType(lexer_1.Lexer.TOKEN_IDENTIFIER);
89
+ let upperBound = null;
90
+ let lowerBound = null;
91
+ if (tokens.tryConsumeTokenValue('of') ||
92
+ tokens.tryConsumeTokenValue('as')) {
93
+ upperBound = this.parse(tokens);
94
+ }
95
+ if (tokens.tryConsumeTokenValue('super')) {
96
+ lowerBound = this.parse(tokens);
97
+ }
98
+ let defaultValue = null;
99
+ if (tokens.tryConsumeTokenValue('=')) {
100
+ defaultValue = this.parse(tokens);
101
+ }
102
+ let description = '';
103
+ if (parseDescription) {
104
+ description = parseDescription(tokens);
105
+ }
106
+ if (name === '') {
107
+ throw new Error('Template tag name cannot be empty.');
108
+ }
109
+ return new template_tag_value_node_1.TemplateTagValueNode(name, upperBound, description, defaultValue, lowerBound);
110
+ }
66
111
  subParse(tokens) {
67
112
  const startLine = tokens.currentTokenLine();
68
113
  const startIndex = tokens.currentTokenIndex();
@@ -76,7 +121,7 @@ class TypeParser {
76
121
  type = this.parseConditional(tokens, type);
77
122
  }
78
123
  else {
79
- tokens.tryConsumeTokenType(lexer_1.Lexer.TOKEN_PHPDOC_EOL);
124
+ tokens.skipNewLineTokensAndConsumeComments();
80
125
  if (tokens.isCurrentTokenType(lexer_1.Lexer.TOKEN_UNION)) {
81
126
  type = this.subParseUnion(tokens, type);
82
127
  }
@@ -92,9 +137,9 @@ class TypeParser {
92
137
  const startIndex = tokens.currentTokenIndex();
93
138
  let type;
94
139
  if (tokens.tryConsumeTokenType(lexer_1.Lexer.TOKEN_OPEN_PARENTHESES)) {
95
- tokens.tryConsumeTokenType(lexer_1.Lexer.TOKEN_PHPDOC_EOL);
140
+ tokens.skipNewLineTokensAndConsumeComments();
96
141
  type = this.subParse(tokens);
97
- tokens.tryConsumeTokenType(lexer_1.Lexer.TOKEN_PHPDOC_EOL);
142
+ tokens.skipNewLineTokensAndConsumeComments();
98
143
  tokens.consumeTokenType(lexer_1.Lexer.TOKEN_CLOSE_PARENTHESES);
99
144
  if (tokens.isCurrentTokenType(lexer_1.Lexer.TOKEN_OPEN_SQUARE_BRACKET)) {
100
145
  type = this.tryParseArrayOrOffsetAccess(tokens, type);
@@ -114,13 +159,36 @@ class TypeParser {
114
159
  const identifierTypeNode = new identifier_type_node_1.IdentifierTypeNode(currentTokenValue);
115
160
  type = identifierTypeNode;
116
161
  if (!tokens.isCurrentTokenType(lexer_1.Lexer.TOKEN_DOUBLE_COLON)) {
162
+ tokens.dropSavePoint();
117
163
  if (tokens.isCurrentTokenType(lexer_1.Lexer.TOKEN_OPEN_ANGLE_BRACKET)) {
118
- type = this.parseGeneric(tokens, identifierTypeNode);
164
+ tokens.pushSavePoint();
165
+ const isHtml = this.isHtml(tokens);
166
+ tokens.rollback();
167
+ if (isHtml) {
168
+ return this.enrichWithAttributes(tokens, type, startLine, startIndex);
169
+ }
170
+ const origType = type;
171
+ type = this.tryParseCallable(tokens, identifierTypeNode, true);
172
+ if (type === origType) {
173
+ type = this.parseGeneric(tokens, identifierTypeNode);
174
+ if (tokens.isCurrentTokenType(lexer_1.Lexer.TOKEN_OPEN_SQUARE_BRACKET)) {
175
+ type = this.tryParseArrayOrOffsetAccess(tokens, type);
176
+ }
177
+ }
178
+ }
179
+ else if (tokens.isCurrentTokenType(lexer_1.Lexer.TOKEN_OPEN_PARENTHESES)) {
180
+ type = this.tryParseCallable(tokens, identifierTypeNode, false);
119
181
  }
120
182
  else if (tokens.isCurrentTokenType(lexer_1.Lexer.TOKEN_OPEN_SQUARE_BRACKET)) {
121
183
  type = this.tryParseArrayOrOffsetAccess(tokens, type);
122
184
  }
123
- else if (['array', 'list', 'object'].includes(identifierTypeNode.name) &&
185
+ else if ([
186
+ 'array',
187
+ 'list',
188
+ 'non-empty-array',
189
+ 'non-empty-list',
190
+ 'object',
191
+ ].includes(identifierTypeNode.name) &&
124
192
  tokens.isCurrentTokenType(lexer_1.Lexer.TOKEN_OPEN_CURLY_BRACKET) &&
125
193
  !tokens.isPrecededByHorizontalWhitespace()) {
126
194
  if (identifierTypeNode.name === 'object') {
@@ -130,7 +198,7 @@ class TypeParser {
130
198
  type = this.parseArrayShape(tokens, type, identifierTypeNode.name);
131
199
  }
132
200
  if (tokens.isCurrentTokenType(lexer_1.Lexer.TOKEN_OPEN_SQUARE_BRACKET)) {
133
- type = this.tryParseArrayOrOffsetAccess(tokens, type);
201
+ type = this.tryParseArrayOrOffsetAccess(tokens, this.enrichWithAttributes(tokens, type, startLine, startIndex));
134
202
  }
135
203
  }
136
204
  return this.enrichWithAttributes(tokens, type, startLine, startIndex);
@@ -145,7 +213,7 @@ class TypeParser {
145
213
  throw exception;
146
214
  }
147
215
  try {
148
- const constExpr = this.constExprParser.parse(tokens, true);
216
+ const constExpr = this.constExprParser.parse(tokens);
149
217
  if (constExpr instanceof const_expr_array_node_1.ConstExprArrayNode) {
150
218
  throw exception;
151
219
  }
@@ -159,6 +227,7 @@ class TypeParser {
159
227
  parseUnion(tokens, type) {
160
228
  const types = [type];
161
229
  while (tokens.tryConsumeTokenType(lexer_1.Lexer.TOKEN_UNION)) {
230
+ tokens.skipNewLineTokensAndConsumeComments();
162
231
  types.push(this.parseAtomic(tokens));
163
232
  }
164
233
  return new union_type_node_1.UnionTypeNode(types);
@@ -166,15 +235,16 @@ class TypeParser {
166
235
  subParseUnion(tokens, type) {
167
236
  const types = [type];
168
237
  while (tokens.tryConsumeTokenType(lexer_1.Lexer.TOKEN_UNION)) {
169
- tokens.tryConsumeTokenType(lexer_1.Lexer.TOKEN_PHPDOC_EOL);
238
+ tokens.skipNewLineTokensAndConsumeComments();
170
239
  types.push(this.parseAtomic(tokens));
171
- tokens.tryConsumeTokenType(lexer_1.Lexer.TOKEN_PHPDOC_EOL);
240
+ tokens.skipNewLineTokensAndConsumeComments();
172
241
  }
173
242
  return new union_type_node_1.UnionTypeNode(types);
174
243
  }
175
244
  parseIntersection(tokens, type) {
176
245
  const types = [type];
177
246
  while (tokens.tryConsumeTokenType(lexer_1.Lexer.TOKEN_INTERSECTION)) {
247
+ tokens.skipNewLineTokensAndConsumeComments();
178
248
  types.push(this.parseAtomic(tokens));
179
249
  }
180
250
  return new intersection_type_node_1.IntersectionTypeNode(types);
@@ -182,9 +252,9 @@ class TypeParser {
182
252
  subParseIntersection(tokens, type) {
183
253
  const types = [type];
184
254
  while (tokens.tryConsumeTokenType(lexer_1.Lexer.TOKEN_INTERSECTION)) {
185
- tokens.tryConsumeTokenType(lexer_1.Lexer.TOKEN_PHPDOC_EOL);
255
+ tokens.skipNewLineTokensAndConsumeComments();
186
256
  types.push(this.parseAtomic(tokens));
187
- tokens.tryConsumeTokenType(lexer_1.Lexer.TOKEN_PHPDOC_EOL);
257
+ tokens.skipNewLineTokensAndConsumeComments();
188
258
  }
189
259
  return new intersection_type_node_1.IntersectionTypeNode(types);
190
260
  }
@@ -196,13 +266,13 @@ class TypeParser {
196
266
  tokens.consumeTokenType(lexer_1.Lexer.TOKEN_IDENTIFIER);
197
267
  }
198
268
  const targetType = this.parse(tokens);
199
- tokens.tryConsumeTokenType(lexer_1.Lexer.TOKEN_PHPDOC_EOL);
269
+ tokens.skipNewLineTokensAndConsumeComments();
200
270
  tokens.consumeTokenType(lexer_1.Lexer.TOKEN_NULLABLE);
201
- tokens.tryConsumeTokenType(lexer_1.Lexer.TOKEN_PHPDOC_EOL);
271
+ tokens.skipNewLineTokensAndConsumeComments();
202
272
  const ifType = this.parse(tokens);
203
- tokens.tryConsumeTokenType(lexer_1.Lexer.TOKEN_PHPDOC_EOL);
273
+ tokens.skipNewLineTokensAndConsumeComments();
204
274
  tokens.consumeTokenType(lexer_1.Lexer.TOKEN_COLON);
205
- tokens.tryConsumeTokenType(lexer_1.Lexer.TOKEN_PHPDOC_EOL);
275
+ tokens.skipNewLineTokensAndConsumeComments();
206
276
  const elseType = this.subParse(tokens);
207
277
  return new conditional_type_node_1.ConditionalTypeNode(subjectType, targetType, ifType, elseType, negated);
208
278
  }
@@ -215,13 +285,13 @@ class TypeParser {
215
285
  tokens.consumeTokenType(lexer_1.Lexer.TOKEN_IDENTIFIER);
216
286
  }
217
287
  const targetType = this.parse(tokens);
218
- tokens.tryConsumeTokenType(lexer_1.Lexer.TOKEN_PHPDOC_EOL);
288
+ tokens.skipNewLineTokensAndConsumeComments();
219
289
  tokens.consumeTokenType(lexer_1.Lexer.TOKEN_NULLABLE);
220
- tokens.tryConsumeTokenType(lexer_1.Lexer.TOKEN_PHPDOC_EOL);
290
+ tokens.skipNewLineTokensAndConsumeComments();
221
291
  const ifType = this.parse(tokens);
222
- tokens.tryConsumeTokenType(lexer_1.Lexer.TOKEN_PHPDOC_EOL);
292
+ tokens.skipNewLineTokensAndConsumeComments();
223
293
  tokens.consumeTokenType(lexer_1.Lexer.TOKEN_COLON);
224
- tokens.tryConsumeTokenType(lexer_1.Lexer.TOKEN_PHPDOC_EOL);
294
+ tokens.skipNewLineTokensAndConsumeComments();
225
295
  const elseType = this.subParse(tokens);
226
296
  return new conditional_type_for_parameter_node_1.ConditionalTypeForParameterNode(parameterName, targetType, ifType, elseType, negated);
227
297
  }
@@ -251,24 +321,24 @@ class TypeParser {
251
321
  }
252
322
  parseGeneric(tokens, baseType) {
253
323
  tokens.consumeTokenType(lexer_1.Lexer.TOKEN_OPEN_ANGLE_BRACKET);
254
- tokens.tryConsumeTokenType(lexer_1.Lexer.TOKEN_PHPDOC_EOL);
324
+ tokens.skipNewLineTokensAndConsumeComments();
255
325
  const genericTypes = [];
256
326
  const variances = [];
257
327
  const [genericType, variance] = this.parseGenericTypeArgument(tokens);
258
328
  genericTypes.push(genericType);
259
329
  variances.push(variance);
260
- tokens.tryConsumeTokenType(lexer_1.Lexer.TOKEN_PHPDOC_EOL);
330
+ tokens.skipNewLineTokensAndConsumeComments();
261
331
  while (tokens.tryConsumeTokenType(lexer_1.Lexer.TOKEN_COMMA)) {
262
- tokens.tryConsumeTokenType(lexer_1.Lexer.TOKEN_PHPDOC_EOL);
332
+ tokens.skipNewLineTokensAndConsumeComments();
263
333
  if (tokens.tryConsumeTokenType(lexer_1.Lexer.TOKEN_CLOSE_ANGLE_BRACKET)) {
264
334
  break;
265
335
  }
266
336
  const [genericTypeToAddInWhileLoop, varianceToAddInWhileLoop] = this.parseGenericTypeArgument(tokens);
267
337
  genericTypes.push(genericTypeToAddInWhileLoop);
268
338
  variances.push(varianceToAddInWhileLoop);
269
- tokens.tryConsumeTokenType(lexer_1.Lexer.TOKEN_PHPDOC_EOL);
339
+ tokens.skipNewLineTokensAndConsumeComments();
270
340
  }
271
- tokens.tryConsumeTokenType(lexer_1.Lexer.TOKEN_PHPDOC_EOL);
341
+ tokens.skipNewLineTokensAndConsumeComments();
272
342
  tokens.consumeTokenType(lexer_1.Lexer.TOKEN_CLOSE_ANGLE_BRACKET);
273
343
  const type = new generic_type_node_1.GenericTypeNode(baseType, genericTypes, variances);
274
344
  const startLine = baseType.getAttribute(types_1.Attribute.START_LINE);
@@ -300,26 +370,51 @@ class TypeParser {
300
370
  const type = this.parse(tokens);
301
371
  return [type, variance];
302
372
  }
303
- parseCallable(tokens, identifier) {
373
+ parseCallable(tokens, identifier, hasTemplate) {
374
+ const templates = hasTemplate ? this.parseCallableTemplates(tokens) : [];
304
375
  tokens.consumeTokenType(lexer_1.Lexer.TOKEN_OPEN_PARENTHESES);
305
- tokens.tryConsumeTokenType(lexer_1.Lexer.TOKEN_PHPDOC_EOL);
376
+ tokens.skipNewLineTokensAndConsumeComments();
306
377
  const parameters = [];
307
378
  if (!tokens.isCurrentTokenType(lexer_1.Lexer.TOKEN_CLOSE_PARENTHESES)) {
308
379
  parameters.push(this.parseCallableParameter(tokens));
309
- tokens.tryConsumeTokenType(lexer_1.Lexer.TOKEN_PHPDOC_EOL);
380
+ tokens.skipNewLineTokensAndConsumeComments();
310
381
  while (tokens.tryConsumeTokenType(lexer_1.Lexer.TOKEN_COMMA)) {
311
- tokens.tryConsumeTokenType(lexer_1.Lexer.TOKEN_PHPDOC_EOL);
382
+ tokens.skipNewLineTokensAndConsumeComments();
312
383
  if (tokens.isCurrentTokenType(lexer_1.Lexer.TOKEN_CLOSE_PARENTHESES)) {
313
384
  break;
314
385
  }
315
386
  parameters.push(this.parseCallableParameter(tokens));
316
- tokens.tryConsumeTokenType(lexer_1.Lexer.TOKEN_PHPDOC_EOL);
387
+ tokens.skipNewLineTokensAndConsumeComments();
317
388
  }
318
389
  }
319
390
  tokens.consumeTokenType(lexer_1.Lexer.TOKEN_CLOSE_PARENTHESES);
320
391
  tokens.consumeTokenType(lexer_1.Lexer.TOKEN_COLON);
321
- const returnType = this.parseCallableReturnType(tokens);
322
- return new callable_type_node_1.CallableTypeNode(identifier, parameters, returnType);
392
+ const startLine = tokens.currentTokenLine();
393
+ const startIndex = tokens.currentTokenIndex();
394
+ const returnType = this.enrichWithAttributes(tokens, this.parseCallableReturnType(tokens), startLine, startIndex);
395
+ return new callable_type_node_1.CallableTypeNode(identifier, parameters, returnType, templates);
396
+ }
397
+ parseCallableTemplates(tokens) {
398
+ tokens.consumeTokenType(lexer_1.Lexer.TOKEN_OPEN_ANGLE_BRACKET);
399
+ const templates = [];
400
+ let isFirst = true;
401
+ while (isFirst || tokens.tryConsumeTokenType(lexer_1.Lexer.TOKEN_COMMA)) {
402
+ tokens.skipNewLineTokensAndConsumeComments();
403
+ if (!isFirst &&
404
+ tokens.isCurrentTokenType(lexer_1.Lexer.TOKEN_CLOSE_ANGLE_BRACKET)) {
405
+ break;
406
+ }
407
+ isFirst = false;
408
+ templates.push(this.parseCallableTemplateArgument(tokens));
409
+ tokens.skipNewLineTokensAndConsumeComments();
410
+ }
411
+ tokens.consumeTokenType(lexer_1.Lexer.TOKEN_CLOSE_ANGLE_BRACKET);
412
+ return templates;
413
+ }
414
+ parseCallableTemplateArgument(tokens) {
415
+ const startLine = tokens.currentTokenLine();
416
+ const startIndex = tokens.currentTokenIndex();
417
+ return this.enrichWithAttributes(tokens, this.parseTemplateTagValue(tokens), startLine, startIndex);
323
418
  }
324
419
  parseCallableParameter(tokens) {
325
420
  const startLine = tokens.currentTokenLine();
@@ -383,7 +478,7 @@ class TypeParser {
383
478
  throw exception;
384
479
  }
385
480
  try {
386
- const constExpr = this.constExprParser.parse(tokens, true);
481
+ const constExpr = this.constExprParser.parse(tokens);
387
482
  if (constExpr instanceof const_expr_array_node_1.ConstExprArrayNode) {
388
483
  throw exception;
389
484
  }
@@ -402,10 +497,10 @@ class TypeParser {
402
497
  }
403
498
  }
404
499
  }
405
- tryParseCallable(tokens, identifier) {
500
+ tryParseCallable(tokens, identifier, hasTemplate) {
406
501
  try {
407
502
  tokens.pushSavePoint();
408
- const type = this.parseCallable(tokens, identifier);
503
+ const type = this.parseCallable(tokens, identifier, hasTemplate);
409
504
  tokens.dropSavePoint();
410
505
  return type;
411
506
  }
@@ -451,22 +546,41 @@ class TypeParser {
451
546
  tokens.consumeTokenType(lexer_1.Lexer.TOKEN_OPEN_CURLY_BRACKET);
452
547
  const items = [];
453
548
  let sealed = true;
549
+ let unsealedType = null;
550
+ let done = false;
454
551
  do {
455
- tokens.tryConsumeTokenType(lexer_1.Lexer.TOKEN_PHPDOC_EOL);
552
+ tokens.skipNewLineTokensAndConsumeComments();
456
553
  if (tokens.tryConsumeTokenType(lexer_1.Lexer.TOKEN_CLOSE_CURLY_BRACKET)) {
457
554
  return new array_shape_node_1.ArrayShapeNode(items, true, kind);
458
555
  }
459
556
  if (tokens.tryConsumeTokenType(lexer_1.Lexer.TOKEN_VARIADIC)) {
460
557
  sealed = false;
558
+ tokens.skipNewLineTokensAndConsumeComments();
559
+ if (tokens.isCurrentTokenType(lexer_1.Lexer.TOKEN_OPEN_ANGLE_BRACKET)) {
560
+ if (kind === array_shape_node_1.ArrayShapeNodeKind.ARRAY ||
561
+ kind === array_shape_node_1.ArrayShapeNodeKind.NON_EMPTY_ARRAY) {
562
+ unsealedType = this.parseArrayShapeUnsealedType(tokens);
563
+ }
564
+ else {
565
+ unsealedType = this.parseListShapeUnsealedType(tokens);
566
+ }
567
+ tokens.skipNewLineTokensAndConsumeComments();
568
+ }
461
569
  tokens.tryConsumeTokenType(lexer_1.Lexer.TOKEN_COMMA);
462
570
  break;
463
571
  }
464
572
  items.push(this.parseArrayShapeItem(tokens));
465
- tokens.tryConsumeTokenType(lexer_1.Lexer.TOKEN_PHPDOC_EOL);
466
- } while (tokens.tryConsumeTokenType(lexer_1.Lexer.TOKEN_COMMA));
467
- tokens.tryConsumeTokenType(lexer_1.Lexer.TOKEN_PHPDOC_EOL);
573
+ tokens.skipNewLineTokensAndConsumeComments();
574
+ if (!tokens.tryConsumeTokenType(lexer_1.Lexer.TOKEN_COMMA)) {
575
+ done = true;
576
+ }
577
+ } while (!done);
578
+ tokens.skipNewLineTokensAndConsumeComments();
468
579
  tokens.consumeTokenType(lexer_1.Lexer.TOKEN_CLOSE_CURLY_BRACKET);
469
- return new array_shape_node_1.ArrayShapeNode(items, sealed, kind);
580
+ if (sealed) {
581
+ return new array_shape_node_1.ArrayShapeNode(items, true, kind);
582
+ }
583
+ return new array_shape_node_1.ArrayShapeNode(items, false, kind, unsealedType);
470
584
  }
471
585
  parseArrayShapeItem(tokens) {
472
586
  const startLine = tokens.currentTokenLine();
@@ -498,41 +612,66 @@ class TypeParser {
498
612
  tokens.next();
499
613
  }
500
614
  else if (tokens.isCurrentTokenType(lexer_1.Lexer.TOKEN_SINGLE_QUOTED_STRING)) {
501
- if (this.quoteAwareConstExprString) {
502
- key = new quote_aware_const_expr_string_node_1.QuoteAwareConstExprStringNode(string_unescaper_1.StringUnescaper.unescapeString(tokens.currentTokenValue()), quote_aware_const_expr_string_node_1.QuoteAwareConstExprStringNode.SINGLE_QUOTED);
503
- }
504
- else {
505
- key = new const_expr_string_node_1.ConstExprStringNode(tokens.currentTokenValue().replace(/(^'|'$)/g, ''));
506
- }
615
+ key = new const_expr_string_node_1.ConstExprStringNode(string_unescaper_1.StringUnescaper.unescapeString(tokens.currentTokenValue()), const_expr_string_node_1.ConstExprStringNode.SINGLE_QUOTED);
507
616
  tokens.next();
508
617
  }
509
618
  else if (tokens.isCurrentTokenType(lexer_1.Lexer.TOKEN_DOUBLE_QUOTED_STRING)) {
510
- if (this.quoteAwareConstExprString) {
511
- key = new quote_aware_const_expr_string_node_1.QuoteAwareConstExprStringNode(string_unescaper_1.StringUnescaper.unescapeString(tokens.currentTokenValue()), quote_aware_const_expr_string_node_1.QuoteAwareConstExprStringNode.DOUBLE_QUOTED);
512
- }
513
- else {
514
- key = new const_expr_string_node_1.ConstExprStringNode(tokens.currentTokenValue().replace(/(^"|"$)/g, ''));
515
- }
619
+ key = new const_expr_string_node_1.ConstExprStringNode(string_unescaper_1.StringUnescaper.unescapeString(tokens.currentTokenValue()), const_expr_string_node_1.ConstExprStringNode.DOUBLE_QUOTED);
516
620
  tokens.next();
517
621
  }
518
622
  else {
519
- key = new identifier_type_node_1.IdentifierTypeNode(tokens.currentTokenValue());
623
+ const identifier = tokens.currentTokenValue();
520
624
  tokens.consumeTokenType(lexer_1.Lexer.TOKEN_IDENTIFIER);
625
+ if (tokens.tryConsumeTokenType(lexer_1.Lexer.TOKEN_DOUBLE_COLON)) {
626
+ const classConstantName = tokens.currentTokenValue();
627
+ tokens.consumeTokenType(lexer_1.Lexer.TOKEN_IDENTIFIER);
628
+ key = new const_fetch_node_1.ConstFetchNode(identifier, classConstantName);
629
+ }
630
+ else {
631
+ key = new identifier_type_node_1.IdentifierTypeNode(identifier);
632
+ }
521
633
  }
522
634
  return this.enrichWithAttributes(tokens, key, startLine, startIndex);
523
635
  }
636
+ parseArrayShapeUnsealedType(tokens) {
637
+ const startLine = tokens.currentTokenLine();
638
+ const startIndex = tokens.currentTokenIndex();
639
+ tokens.consumeTokenType(lexer_1.Lexer.TOKEN_OPEN_ANGLE_BRACKET);
640
+ tokens.skipNewLineTokensAndConsumeComments();
641
+ let valueType = this.parse(tokens);
642
+ tokens.skipNewLineTokensAndConsumeComments();
643
+ let keyType = null;
644
+ if (tokens.tryConsumeTokenType(lexer_1.Lexer.TOKEN_COMMA)) {
645
+ tokens.skipNewLineTokensAndConsumeComments();
646
+ keyType = valueType;
647
+ valueType = this.parse(tokens);
648
+ tokens.skipNewLineTokensAndConsumeComments();
649
+ }
650
+ tokens.consumeTokenType(lexer_1.Lexer.TOKEN_CLOSE_ANGLE_BRACKET);
651
+ return this.enrichWithAttributes(tokens, new array_shape_unsealed_type_node_1.ArrayShapeUnsealedTypeNode(valueType, keyType), startLine, startIndex);
652
+ }
653
+ parseListShapeUnsealedType(tokens) {
654
+ const startLine = tokens.currentTokenLine();
655
+ const startIndex = tokens.currentTokenIndex();
656
+ tokens.consumeTokenType(lexer_1.Lexer.TOKEN_OPEN_ANGLE_BRACKET);
657
+ tokens.skipNewLineTokensAndConsumeComments();
658
+ const valueType = this.parse(tokens);
659
+ tokens.skipNewLineTokensAndConsumeComments();
660
+ tokens.consumeTokenType(lexer_1.Lexer.TOKEN_CLOSE_ANGLE_BRACKET);
661
+ return this.enrichWithAttributes(tokens, new array_shape_unsealed_type_node_1.ArrayShapeUnsealedTypeNode(valueType, null), startLine, startIndex);
662
+ }
524
663
  parseObjectShape(tokens) {
525
664
  tokens.consumeTokenType(lexer_1.Lexer.TOKEN_OPEN_CURLY_BRACKET);
526
665
  const items = [];
527
666
  do {
528
- tokens.tryConsumeTokenType(lexer_1.Lexer.TOKEN_PHPDOC_EOL);
667
+ tokens.skipNewLineTokensAndConsumeComments();
529
668
  if (tokens.tryConsumeTokenType(lexer_1.Lexer.TOKEN_CLOSE_CURLY_BRACKET)) {
530
669
  return new object_shape_node_1.ObjectShapeNode(items);
531
670
  }
532
671
  items.push(this.parseObjectShapeItem(tokens));
533
- tokens.tryConsumeTokenType(lexer_1.Lexer.TOKEN_PHPDOC_EOL);
672
+ tokens.skipNewLineTokensAndConsumeComments();
534
673
  } while (tokens.tryConsumeTokenType(lexer_1.Lexer.TOKEN_COMMA));
535
- tokens.tryConsumeTokenType(lexer_1.Lexer.TOKEN_PHPDOC_EOL);
674
+ tokens.skipNewLineTokensAndConsumeComments();
536
675
  tokens.consumeTokenType(lexer_1.Lexer.TOKEN_CLOSE_CURLY_BRACKET);
537
676
  return new object_shape_node_1.ObjectShapeNode(items);
538
677
  }
@@ -550,21 +689,11 @@ class TypeParser {
550
689
  const startIndex = tokens.currentTokenIndex();
551
690
  let key;
552
691
  if (tokens.isCurrentTokenType(lexer_1.Lexer.TOKEN_SINGLE_QUOTED_STRING)) {
553
- if (this.quoteAwareConstExprString) {
554
- key = new quote_aware_const_expr_string_node_1.QuoteAwareConstExprStringNode(string_unescaper_1.StringUnescaper.unescapeString(tokens.currentTokenValue()), quote_aware_const_expr_string_node_1.QuoteAwareConstExprStringNode.SINGLE_QUOTED);
555
- }
556
- else {
557
- key = new const_expr_string_node_1.ConstExprStringNode(tokens.currentTokenValue().replace(/(^'|'$)/g, ''));
558
- }
692
+ key = new const_expr_string_node_1.ConstExprStringNode(string_unescaper_1.StringUnescaper.unescapeString(tokens.currentTokenValue()), const_expr_string_node_1.ConstExprStringNode.SINGLE_QUOTED);
559
693
  tokens.next();
560
694
  }
561
695
  else if (tokens.isCurrentTokenType(lexer_1.Lexer.TOKEN_DOUBLE_QUOTED_STRING)) {
562
- if (this.quoteAwareConstExprString) {
563
- key = new quote_aware_const_expr_string_node_1.QuoteAwareConstExprStringNode(string_unescaper_1.StringUnescaper.unescapeString(tokens.currentTokenValue()), quote_aware_const_expr_string_node_1.QuoteAwareConstExprStringNode.DOUBLE_QUOTED);
564
- }
565
- else {
566
- key = new const_expr_string_node_1.ConstExprStringNode(tokens.currentTokenValue().replace(/(^"|"$)/g, ''));
567
- }
696
+ key = new const_expr_string_node_1.ConstExprStringNode(string_unescaper_1.StringUnescaper.unescapeString(tokens.currentTokenValue()), const_expr_string_node_1.ConstExprStringNode.DOUBLE_QUOTED);
568
697
  tokens.next();
569
698
  }
570
699
  else {
@@ -0,0 +1,10 @@
1
+ export declare class ParserConfig {
2
+ useLinesAttributes: boolean;
3
+ useIndexAttributes: boolean;
4
+ useCommentsAttributes: boolean;
5
+ constructor(usedAttributes?: {
6
+ lines?: boolean;
7
+ indexes?: boolean;
8
+ comments?: boolean;
9
+ });
10
+ }
@@ -0,0 +1,12 @@
1
+ "use strict";
2
+ Object.defineProperty(exports, "__esModule", { value: true });
3
+ exports.ParserConfig = void 0;
4
+ class ParserConfig {
5
+ constructor(usedAttributes = {}) {
6
+ var _a, _b, _c;
7
+ this.useLinesAttributes = (_a = usedAttributes.lines) !== null && _a !== void 0 ? _a : false;
8
+ this.useIndexAttributes = (_b = usedAttributes.indexes) !== null && _b !== void 0 ? _b : false;
9
+ this.useCommentsAttributes = (_c = usedAttributes.comments) !== null && _c !== void 0 ? _c : false;
10
+ }
11
+ }
12
+ exports.ParserConfig = ParserConfig;
@@ -2,6 +2,7 @@
2
2
  Object.defineProperty(exports, "__esModule", { value: true });
3
3
  exports.PhpDocTypeNodeToTypescriptTypeNodeTranspiler = void 0;
4
4
  const typescript_1 = require("typescript");
5
+ const const_expr_string_node_1 = require("../ast/const-expr/const-expr-string-node");
5
6
  const array_shape_node_1 = require("../ast/type/array-shape-node");
6
7
  const array_type_node_1 = require("../ast/type/array-type-node");
7
8
  const object_shape_node_1 = require("../ast/type/object-shape-node");
@@ -19,14 +20,20 @@ class PhpDocTypeNodeToTypescriptTypeNodeTranspiler {
19
20
  }
20
21
  if (sourceTypeNode instanceof array_shape_node_1.ArrayShapeNode) {
21
22
  return typescript_1.factory.createTypeLiteralNode(sourceTypeNode.items.map((item) => {
22
- return typescript_1.factory.createPropertySignature(undefined, item.keyName.toString(), item.optional
23
+ const keyName = item.keyName instanceof const_expr_string_node_1.ConstExprStringNode
24
+ ? item.keyName.value
25
+ : item.keyName.toString();
26
+ return typescript_1.factory.createPropertySignature(undefined, keyName, item.optional
23
27
  ? typescript_1.factory.createToken(typescript_1.SyntaxKind.QuestionToken)
24
28
  : undefined, this.transpile(item.valueType));
25
29
  }));
26
30
  }
27
31
  if (sourceTypeNode instanceof object_shape_node_1.ObjectShapeNode) {
28
32
  return typescript_1.factory.createTypeLiteralNode(sourceTypeNode.items.map((item) => {
29
- return typescript_1.factory.createPropertySignature(undefined, item.keyName.toString(), item.optional
33
+ const keyName = item.keyName instanceof const_expr_string_node_1.ConstExprStringNode
34
+ ? item.keyName.value
35
+ : item.keyName.toString();
36
+ return typescript_1.factory.createPropertySignature(undefined, keyName, item.optional
30
37
  ? typescript_1.factory.createToken(typescript_1.SyntaxKind.QuestionToken)
31
38
  : undefined, this.transpile(item.valueType));
32
39
  }));
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@rightcapital/phpdoc-parser",
3
- "version": "0.5.3",
3
+ "version": "0.6.0",
4
4
  "description": "TypeScript version of PHPDoc parser with support for intersection types and generics",
5
5
  "keywords": [
6
6
  "PHP",
@@ -36,9 +36,7 @@
36
36
  }
37
37
  },
38
38
  "dependencies": {
39
- "@types/node": "22.19.1",
40
- "lodash": "4.17.21",
41
- "typescript": "5.9.3"
39
+ "lodash": "4.17.23"
42
40
  },
43
41
  "devDependencies": {
44
42
  "@babel/core": "7.26.9",
@@ -50,15 +48,20 @@
50
48
  "@rightcapital/eslint-config": "42.1.0",
51
49
  "@rightcapital/prettier-config": "7.2.1",
52
50
  "@types/lodash": "4.17.16",
51
+ "@types/node": "22.19.11",
53
52
  "beachball": "2.51.0",
54
53
  "commitizen": "4.3.1",
55
54
  "eslint": "9.13.0",
56
55
  "husky": "9.1.7",
57
56
  "inquirer": "12.4.2",
58
57
  "prettier": "3.5.3",
58
+ "typescript": "5.9.3",
59
59
  "vitest": "3.0.7"
60
60
  },
61
- "packageManager": "pnpm@10.21.0",
61
+ "peerDependencies": {
62
+ "typescript": ">=5.8.0"
63
+ },
64
+ "packageManager": "pnpm@10.30.0",
62
65
  "engines": {
63
66
  "node": ">=16.x",
64
67
  "pnpm": ">=8.x"