@rightcapital/phpdoc-parser 0.3.3-feature-add-basic-ci.7.0 → 0.3.3-feature-add-basic-ci.8.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.d.ts +73 -0
- package/dist/index.js +0 -1
- package/dist/phpdoc-parser/ast/abstract-node-visitor.d.ts +8 -0
- package/dist/phpdoc-parser/ast/abstract-node-visitor.js +18 -0
- package/dist/phpdoc-parser/ast/base-node.d.ts +10 -0
- package/dist/phpdoc-parser/ast/base-node.js +27 -0
- package/dist/phpdoc-parser/ast/const-expr/const-expr-array-item-node.d.ts +8 -0
- package/dist/phpdoc-parser/ast/const-expr/const-expr-array-item-node.js +21 -0
- package/dist/phpdoc-parser/ast/const-expr/const-expr-array-node.d.ts +8 -0
- package/dist/phpdoc-parser/ast/const-expr/const-expr-array-node.js +17 -0
- package/dist/phpdoc-parser/ast/const-expr/const-expr-false-node.d.ts +5 -0
- package/dist/phpdoc-parser/ast/const-expr/const-expr-false-node.js +13 -0
- package/dist/phpdoc-parser/ast/const-expr/const-expr-float-node.d.ts +7 -0
- package/dist/phpdoc-parser/ast/const-expr/const-expr-float-node.js +17 -0
- package/dist/phpdoc-parser/ast/const-expr/const-expr-integer-node.d.ts +7 -0
- package/dist/phpdoc-parser/ast/const-expr/const-expr-integer-node.js +17 -0
- package/dist/phpdoc-parser/ast/const-expr/const-expr-node.d.ts +25 -0
- package/dist/phpdoc-parser/ast/const-expr/const-expr-node.js +43 -0
- package/dist/phpdoc-parser/ast/const-expr/const-expr-null-node.d.ts +5 -0
- package/dist/phpdoc-parser/ast/const-expr/const-expr-null-node.js +13 -0
- package/dist/phpdoc-parser/ast/const-expr/const-expr-string-node.d.ts +7 -0
- package/dist/phpdoc-parser/ast/const-expr/const-expr-string-node.js +17 -0
- package/dist/phpdoc-parser/ast/const-expr/const-expr-true-node.d.ts +5 -0
- package/dist/phpdoc-parser/ast/const-expr/const-expr-true-node.js +13 -0
- package/dist/phpdoc-parser/ast/const-expr/const-fetch-node.d.ts +8 -0
- package/dist/phpdoc-parser/ast/const-expr/const-fetch-node.js +21 -0
- package/dist/phpdoc-parser/ast/const-expr/quote-aware-const-expr-string-node.d.ts +10 -0
- package/dist/phpdoc-parser/ast/const-expr/quote-aware-const-expr-string-node.js +47 -0
- package/dist/phpdoc-parser/ast/node-traverser.d.ts +11 -0
- package/dist/phpdoc-parser/ast/node-traverser.js +216 -0
- package/dist/phpdoc-parser/ast/node-visitor/cloning-visitor.d.ts +5 -0
- package/dist/phpdoc-parser/ast/node-visitor/cloning-visitor.js +14 -0
- package/dist/phpdoc-parser/ast/node-visitor.d.ts +11 -0
- package/dist/phpdoc-parser/ast/node-visitor.js +2 -0
- package/dist/phpdoc-parser/ast/node.d.ts +6 -0
- package/dist/phpdoc-parser/ast/node.js +2 -0
- package/dist/phpdoc-parser/ast/php-doc/assert-tag-method-value-node.d.ts +13 -0
- package/dist/phpdoc-parser/ast/php-doc/assert-tag-method-value-node.js +24 -0
- package/dist/phpdoc-parser/ast/php-doc/assert-tag-property-value-node.d.ts +13 -0
- package/dist/phpdoc-parser/ast/php-doc/assert-tag-property-value-node.js +24 -0
- package/dist/phpdoc-parser/ast/php-doc/assert-tag-value-node.d.ts +12 -0
- package/dist/phpdoc-parser/ast/php-doc/assert-tag-value-node.js +23 -0
- package/dist/phpdoc-parser/ast/php-doc/deprecated-tag-value-node.d.ts +7 -0
- package/dist/phpdoc-parser/ast/php-doc/deprecated-tag-value-node.js +17 -0
- package/dist/phpdoc-parser/ast/php-doc/extends-tag-value-node.d.ts +9 -0
- package/dist/phpdoc-parser/ast/php-doc/extends-tag-value-node.js +18 -0
- package/dist/phpdoc-parser/ast/php-doc/generic-tag-value-node.d.ts +7 -0
- package/dist/phpdoc-parser/ast/php-doc/generic-tag-value-node.js +17 -0
- package/dist/phpdoc-parser/ast/php-doc/implements-tag-value-node.d.ts +9 -0
- package/dist/phpdoc-parser/ast/php-doc/implements-tag-value-node.js +18 -0
- package/dist/phpdoc-parser/ast/php-doc/invalid-tag-value-node.d.ts +10 -0
- package/dist/phpdoc-parser/ast/php-doc/invalid-tag-value-node.js +33 -0
- package/dist/phpdoc-parser/ast/php-doc/method-tag-value-node.d.ts +15 -0
- package/dist/phpdoc-parser/ast/php-doc/method-tag-value-node.js +27 -0
- package/dist/phpdoc-parser/ast/php-doc/method-tag-value-parameter-node.d.ts +13 -0
- package/dist/phpdoc-parser/ast/php-doc/method-tag-value-parameter-node.js +27 -0
- package/dist/phpdoc-parser/ast/php-doc/mixin-tag-value-node.d.ts +9 -0
- package/dist/phpdoc-parser/ast/php-doc/mixin-tag-value-node.js +18 -0
- package/dist/phpdoc-parser/ast/php-doc/param-out-tag-value-node.d.ts +10 -0
- package/dist/phpdoc-parser/ast/php-doc/param-out-tag-value-node.js +19 -0
- package/dist/phpdoc-parser/ast/php-doc/param-tag-value-node.d.ts +12 -0
- package/dist/phpdoc-parser/ast/php-doc/param-tag-value-node.js +28 -0
- package/dist/phpdoc-parser/ast/php-doc/php-doc-child-node.d.ts +4 -0
- package/dist/phpdoc-parser/ast/php-doc/php-doc-child-node.js +10 -0
- package/dist/phpdoc-parser/ast/php-doc/php-doc-node.d.ts +32 -0
- package/dist/phpdoc-parser/ast/php-doc/php-doc-node.js +97 -0
- package/dist/phpdoc-parser/ast/php-doc/php-doc-tag-node.d.ts +56 -0
- package/dist/phpdoc-parser/ast/php-doc/php-doc-tag-node.js +90 -0
- package/dist/phpdoc-parser/ast/php-doc/php-doc-tag-value-node.d.ts +4 -0
- package/dist/phpdoc-parser/ast/php-doc/php-doc-tag-value-node.js +10 -0
- package/dist/phpdoc-parser/ast/php-doc/php-doc-text-node.d.ts +7 -0
- package/dist/phpdoc-parser/ast/php-doc/php-doc-text-node.js +17 -0
- package/dist/phpdoc-parser/ast/php-doc/property-tag-value-node.d.ts +10 -0
- package/dist/phpdoc-parser/ast/php-doc/property-tag-value-node.js +19 -0
- package/dist/phpdoc-parser/ast/php-doc/return-tag-value-node.d.ts +9 -0
- package/dist/phpdoc-parser/ast/php-doc/return-tag-value-node.js +18 -0
- package/dist/phpdoc-parser/ast/php-doc/self-out-tag-value-node.d.ts +9 -0
- package/dist/phpdoc-parser/ast/php-doc/self-out-tag-value-node.js +18 -0
- package/dist/phpdoc-parser/ast/php-doc/template-tag-value-node.d.ts +11 -0
- package/dist/phpdoc-parser/ast/php-doc/template-tag-value-node.js +24 -0
- package/dist/phpdoc-parser/ast/php-doc/throws-tag-value-node.d.ts +9 -0
- package/dist/phpdoc-parser/ast/php-doc/throws-tag-value-node.js +18 -0
- package/dist/phpdoc-parser/ast/php-doc/type-alias-import-tag-value-node.d.ts +10 -0
- package/dist/phpdoc-parser/ast/php-doc/type-alias-import-tag-value-node.js +22 -0
- package/dist/phpdoc-parser/ast/php-doc/type-alias-tag-value-node.d.ts +9 -0
- package/dist/phpdoc-parser/ast/php-doc/type-alias-tag-value-node.js +18 -0
- package/dist/phpdoc-parser/ast/php-doc/typeless-param-tag-value-node.d.ts +10 -0
- package/dist/phpdoc-parser/ast/php-doc/typeless-param-tag-value-node.js +22 -0
- package/dist/phpdoc-parser/ast/php-doc/uses-tag-value-node.d.ts +9 -0
- package/dist/phpdoc-parser/ast/php-doc/uses-tag-value-node.js +18 -0
- package/dist/phpdoc-parser/ast/php-doc/var-tag-value-node.d.ts +10 -0
- package/dist/phpdoc-parser/ast/php-doc/var-tag-value-node.js +19 -0
- package/dist/phpdoc-parser/ast/type/array-shape-item-node.d.ts +12 -0
- package/dist/phpdoc-parser/ast/type/array-shape-item-node.js +22 -0
- package/dist/phpdoc-parser/ast/type/array-shape-node.d.ts +14 -0
- package/dist/phpdoc-parser/ast/type/array-shape-node.js +31 -0
- package/dist/phpdoc-parser/ast/type/array-type-node.d.ts +7 -0
- package/dist/phpdoc-parser/ast/type/array-type-node.js +25 -0
- package/dist/phpdoc-parser/ast/type/callable-type-node.d.ts +11 -0
- package/dist/phpdoc-parser/ast/type/callable-type-node.js +24 -0
- package/dist/phpdoc-parser/ast/type/callable-type-parameter-node.d.ts +11 -0
- package/dist/phpdoc-parser/ast/type/callable-type-parameter-node.js +26 -0
- package/dist/phpdoc-parser/ast/type/conditional-type-for-parameter-node.d.ts +11 -0
- package/dist/phpdoc-parser/ast/type/conditional-type-for-parameter-node.js +21 -0
- package/dist/phpdoc-parser/ast/type/conditional-type-node.d.ts +11 -0
- package/dist/phpdoc-parser/ast/type/conditional-type-node.js +21 -0
- package/dist/phpdoc-parser/ast/type/const-type-node.d.ts +8 -0
- package/dist/phpdoc-parser/ast/type/const-type-node.js +17 -0
- package/dist/phpdoc-parser/ast/type/generic-type-node.d.ts +15 -0
- package/dist/phpdoc-parser/ast/type/generic-type-node.js +37 -0
- package/dist/phpdoc-parser/ast/type/identifier-type-node.d.ts +7 -0
- package/dist/phpdoc-parser/ast/type/identifier-type-node.js +17 -0
- package/dist/phpdoc-parser/ast/type/intersection-type-node.d.ts +7 -0
- package/dist/phpdoc-parser/ast/type/intersection-type-node.js +25 -0
- package/dist/phpdoc-parser/ast/type/invalid-type-node.d.ts +10 -0
- package/dist/phpdoc-parser/ast/type/invalid-type-node.js +29 -0
- package/dist/phpdoc-parser/ast/type/nullable-type-node.d.ts +7 -0
- package/dist/phpdoc-parser/ast/type/nullable-type-node.js +17 -0
- package/dist/phpdoc-parser/ast/type/object-shape-item-node.d.ts +11 -0
- package/dist/phpdoc-parser/ast/type/object-shape-item-node.js +22 -0
- package/dist/phpdoc-parser/ast/type/object-shape-node.d.ts +8 -0
- package/dist/phpdoc-parser/ast/type/object-shape-node.js +17 -0
- package/dist/phpdoc-parser/ast/type/offset-access-type-node.d.ts +8 -0
- package/dist/phpdoc-parser/ast/type/offset-access-type-node.js +26 -0
- package/dist/phpdoc-parser/ast/type/this-type-node.d.ts +5 -0
- package/dist/phpdoc-parser/ast/type/this-type-node.js +13 -0
- package/dist/phpdoc-parser/ast/type/type-node.d.ts +40 -0
- package/dist/phpdoc-parser/ast/type/type-node.js +64 -0
- package/dist/phpdoc-parser/ast/type/union-type-node.d.ts +7 -0
- package/dist/phpdoc-parser/ast/type/union-type-node.js +25 -0
- package/dist/phpdoc-parser/ast/types.d.ts +13 -0
- package/dist/phpdoc-parser/ast/types.js +18 -0
- package/dist/phpdoc-parser/lexer/lexer.d.ts +46 -0
- package/dist/phpdoc-parser/lexer/lexer.js +143 -0
- package/dist/phpdoc-parser/parser/const-expr-parser.d.ts +16 -0
- package/dist/phpdoc-parser/parser/const-expr-parser.js +146 -0
- package/dist/phpdoc-parser/parser/node-collecting-visitor.d.ts +7 -0
- package/dist/phpdoc-parser/parser/node-collecting-visitor.js +15 -0
- package/dist/phpdoc-parser/parser/parser-exception.d.ts +16 -0
- package/dist/phpdoc-parser/parser/parser-exception.js +44 -0
- package/dist/phpdoc-parser/parser/php-doc-parser.d.ts +47 -0
- package/dist/phpdoc-parser/parser/php-doc-parser.js +558 -0
- package/dist/phpdoc-parser/parser/string-unescaper.d.ts +6 -0
- package/dist/phpdoc-parser/parser/string-unescaper.js +41 -0
- package/dist/phpdoc-parser/parser/token-iterator.d.ts +40 -0
- package/dist/phpdoc-parser/parser/token-iterator.js +215 -0
- package/dist/phpdoc-parser/parser/type-parser.d.ts +41 -0
- package/dist/phpdoc-parser/parser/type-parser.js +576 -0
- package/dist/phpdoc-parser/printer/diff-elem.d.ts +12 -0
- package/dist/phpdoc-parser/printer/diff-elem.js +19 -0
- package/dist/phpdoc-parser/printer/differ.d.ts +10 -0
- package/dist/phpdoc-parser/printer/differ.js +111 -0
- package/dist/phpdoc-parser/printer/printer.d.ts +20 -0
- package/dist/phpdoc-parser/printer/printer.js +612 -0
- package/package.json +2 -2
|
@@ -0,0 +1,576 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
exports.TypeParser = void 0;
|
|
4
|
+
const parser_exception_1 = require("./parser-exception");
|
|
5
|
+
const string_unescaper_1 = require("./string-unescaper");
|
|
6
|
+
const const_expr_array_node_1 = require("../ast/const-expr/const-expr-array-node");
|
|
7
|
+
const const_expr_integer_node_1 = require("../ast/const-expr/const-expr-integer-node");
|
|
8
|
+
const const_expr_string_node_1 = require("../ast/const-expr/const-expr-string-node");
|
|
9
|
+
const quote_aware_const_expr_string_node_1 = require("../ast/const-expr/quote-aware-const-expr-string-node");
|
|
10
|
+
const array_shape_item_node_1 = require("../ast/type/array-shape-item-node");
|
|
11
|
+
const array_shape_node_1 = require("../ast/type/array-shape-node");
|
|
12
|
+
const array_type_node_1 = require("../ast/type/array-type-node");
|
|
13
|
+
const callable_type_node_1 = require("../ast/type/callable-type-node");
|
|
14
|
+
const callable_type_parameter_node_1 = require("../ast/type/callable-type-parameter-node");
|
|
15
|
+
const conditional_type_for_parameter_node_1 = require("../ast/type/conditional-type-for-parameter-node");
|
|
16
|
+
const conditional_type_node_1 = require("../ast/type/conditional-type-node");
|
|
17
|
+
const const_type_node_1 = require("../ast/type/const-type-node");
|
|
18
|
+
const generic_type_node_1 = require("../ast/type/generic-type-node");
|
|
19
|
+
const identifier_type_node_1 = require("../ast/type/identifier-type-node");
|
|
20
|
+
const intersection_type_node_1 = require("../ast/type/intersection-type-node");
|
|
21
|
+
const nullable_type_node_1 = require("../ast/type/nullable-type-node");
|
|
22
|
+
const object_shape_item_node_1 = require("../ast/type/object-shape-item-node");
|
|
23
|
+
const object_shape_node_1 = require("../ast/type/object-shape-node");
|
|
24
|
+
const offset_access_type_node_1 = require("../ast/type/offset-access-type-node");
|
|
25
|
+
const this_type_node_1 = require("../ast/type/this-type-node");
|
|
26
|
+
const union_type_node_1 = require("../ast/type/union-type-node");
|
|
27
|
+
const types_1 = require("../ast/types");
|
|
28
|
+
const lexer_1 = require("../lexer/lexer");
|
|
29
|
+
class TypeParser {
|
|
30
|
+
constructor(constExprParser = null, quoteAwareConstExprString = false, usedAttributes = {}) {
|
|
31
|
+
var _a, _b;
|
|
32
|
+
this.constExprParser = constExprParser;
|
|
33
|
+
this.quoteAwareConstExprString = quoteAwareConstExprString;
|
|
34
|
+
this.useLinesAttributes = (_a = usedAttributes.lines) !== null && _a !== void 0 ? _a : false;
|
|
35
|
+
this.useIndexAttributes = (_b = usedAttributes.indexes) !== null && _b !== void 0 ? _b : false;
|
|
36
|
+
}
|
|
37
|
+
parse(tokens) {
|
|
38
|
+
const startLine = tokens.currentTokenLine();
|
|
39
|
+
const startIndex = tokens.currentTokenIndex();
|
|
40
|
+
if (tokens.isCurrentTokenType(lexer_1.Lexer.TOKEN_NULLABLE)) {
|
|
41
|
+
return this.parseNullable(tokens);
|
|
42
|
+
}
|
|
43
|
+
let type = this.parseAtomic(tokens);
|
|
44
|
+
if (tokens.isCurrentTokenType(lexer_1.Lexer.TOKEN_UNION)) {
|
|
45
|
+
type = this.parseUnion(tokens, type);
|
|
46
|
+
}
|
|
47
|
+
else if (tokens.isCurrentTokenType(lexer_1.Lexer.TOKEN_INTERSECTION)) {
|
|
48
|
+
type = this.parseIntersection(tokens, type);
|
|
49
|
+
}
|
|
50
|
+
else if (tokens.isCurrentTokenType(lexer_1.Lexer.TOKEN_OPEN_SQUARE_BRACKET)) {
|
|
51
|
+
type = this.tryParseArrayOrOffsetAccess(tokens, type);
|
|
52
|
+
}
|
|
53
|
+
return this.enrichWithAttributes(tokens, type, startLine, startIndex);
|
|
54
|
+
}
|
|
55
|
+
enrichWithAttributes(tokens, type, startLine, startIndex) {
|
|
56
|
+
if (this.useLinesAttributes) {
|
|
57
|
+
type.setAttribute(types_1.Attribute.START_LINE, startLine);
|
|
58
|
+
type.setAttribute(types_1.Attribute.END_LINE, tokens.currentTokenLine());
|
|
59
|
+
}
|
|
60
|
+
if (this.useIndexAttributes) {
|
|
61
|
+
type.setAttribute(types_1.Attribute.START_INDEX, startIndex);
|
|
62
|
+
type.setAttribute(types_1.Attribute.END_INDEX, tokens.endIndexOfLastRelevantToken());
|
|
63
|
+
}
|
|
64
|
+
return type;
|
|
65
|
+
}
|
|
66
|
+
/**
 * Parses a type in a nested context (inside parentheses, or as the
 * else-branch of a conditional). Unlike parse(), it recognizes a
 * conditional type ("T is U ? A : B") after the atomic type, and it
 * tolerates a PHPDoc end-of-line before '|' / '&' so multi-line
 * unions/intersections keep working inside parentheses.
 *
 * @param tokens TokenIterator positioned at the start of a type.
 * @returns a TypeNode, enriched with position attributes when enabled.
 */
subParse(tokens) {
    const startLine = tokens.currentTokenLine();
    const startIndex = tokens.currentTokenIndex();
    let type;
    if (tokens.isCurrentTokenType(lexer_1.Lexer.TOKEN_NULLABLE)) {
        type = this.parseNullable(tokens);
    }
    else {
        type = this.parseAtomic(tokens);
        // 'is' after the subject type introduces a conditional type.
        if (tokens.isCurrentTokenValue('is')) {
            type = this.parseConditional(tokens, type);
        }
        else {
            // Allow the union/intersection operator to sit on the next line.
            tokens.tryConsumeTokenType(lexer_1.Lexer.TOKEN_PHPDOC_EOL);
            if (tokens.isCurrentTokenType(lexer_1.Lexer.TOKEN_UNION)) {
                type = this.subParseUnion(tokens, type);
            }
            else if (tokens.isCurrentTokenType(lexer_1.Lexer.TOKEN_INTERSECTION)) {
                type = this.subParseIntersection(tokens, type);
            }
        }
    }
    return this.enrichWithAttributes(tokens, type, startLine, startIndex);
}
|
|
90
|
+
/**
 * Parses a single atomic type: a parenthesized sub-type, $this, an
 * identifier (possibly extended by generics <...>, array suffix [] or an
 * array/object shape {...}), or — as a last resort — a constant expression
 * via the optional constExprParser.
 *
 * Uses a save point around the identifier attempt: on '::' after the
 * identifier it rolls back so the whole "Foo::BAR" can be re-parsed as a
 * constant fetch by the const-expr fallback.
 *
 * @param tokens TokenIterator positioned at the start of an atomic type.
 * @returns a TypeNode enriched with position attributes when enabled.
 * @throws ParserException when nothing matches (or the fallback is disabled).
 */
parseAtomic(tokens) {
    const startLine = tokens.currentTokenLine();
    const startIndex = tokens.currentTokenIndex();
    let type;
    // '(' T ')' — parenthesized type, with optional EOLs inside.
    if (tokens.tryConsumeTokenType(lexer_1.Lexer.TOKEN_OPEN_PARENTHESES)) {
        tokens.tryConsumeTokenType(lexer_1.Lexer.TOKEN_PHPDOC_EOL);
        type = this.subParse(tokens);
        tokens.tryConsumeTokenType(lexer_1.Lexer.TOKEN_PHPDOC_EOL);
        tokens.consumeTokenType(lexer_1.Lexer.TOKEN_CLOSE_PARENTHESES);
        if (tokens.isCurrentTokenType(lexer_1.Lexer.TOKEN_OPEN_SQUARE_BRACKET)) {
            type = this.tryParseArrayOrOffsetAccess(tokens, type);
        }
        return this.enrichWithAttributes(tokens, type, startLine, startIndex);
    }
    // '$this', optionally followed by [] / offset access.
    if (tokens.tryConsumeTokenType(lexer_1.Lexer.TOKEN_THIS_VARIABLE)) {
        type = new this_type_node_1.ThisTypeNode();
        if (tokens.isCurrentTokenType(lexer_1.Lexer.TOKEN_OPEN_SQUARE_BRACKET)) {
            type = this.tryParseArrayOrOffsetAccess(tokens, this.enrichWithAttributes(tokens, type, startLine, startIndex));
        }
        return this.enrichWithAttributes(tokens, type, startLine, startIndex);
    }
    // Identifier attempt, guarded by a save point so we can back out on '::'.
    const currentTokenValue = tokens.currentTokenValue();
    tokens.pushSavePoint();
    if (tokens.tryConsumeTokenType(lexer_1.Lexer.TOKEN_IDENTIFIER)) {
        const identifierTypeNode = new identifier_type_node_1.IdentifierTypeNode(currentTokenValue);
        type = identifierTypeNode;
        if (!tokens.isCurrentTokenType(lexer_1.Lexer.TOKEN_DOUBLE_COLON)) {
            if (tokens.isCurrentTokenType(lexer_1.Lexer.TOKEN_OPEN_ANGLE_BRACKET)) {
                type = this.parseGeneric(tokens, identifierTypeNode);
            }
            else if (tokens.isCurrentTokenType(lexer_1.Lexer.TOKEN_OPEN_SQUARE_BRACKET)) {
                type = this.tryParseArrayOrOffsetAccess(tokens, type);
            }
            // array{...} / list{...} / object{...} shapes — only when '{'
            // immediately follows the keyword (no horizontal whitespace).
            else if (['array', 'list', 'object'].includes(identifierTypeNode.name) &&
                tokens.isCurrentTokenType(lexer_1.Lexer.TOKEN_OPEN_CURLY_BRACKET) &&
                !tokens.isPrecededByHorizontalWhitespace()) {
                if (identifierTypeNode.name === 'object') {
                    type = this.parseObjectShape(tokens);
                }
                else {
                    type = this.parseArrayShape(tokens, type, identifierTypeNode.name);
                }
                if (tokens.isCurrentTokenType(lexer_1.Lexer.TOKEN_OPEN_SQUARE_BRACKET)) {
                    type = this.tryParseArrayOrOffsetAccess(tokens, type);
                }
            }
            return this.enrichWithAttributes(tokens, type, startLine, startIndex);
        }
        // '::' follows — undo the identifier consumption so the const-expr
        // fallback below can parse the full constant fetch.
        tokens.rollback();
    }
    else {
        tokens.dropSavePoint();
    }
    // Fallback: constant expression (e.g. 123, 'str', Foo::BAR).
    const exception = new parser_exception_1.ParserException(tokens.currentTokenValue(), tokens.currentTokenType(), tokens.currentTokenOffset(), lexer_1.Lexer.TOKEN_IDENTIFIER, null, tokens.currentTokenLine());
    if (this.constExprParser === null) {
        throw exception;
    }
    try {
        const constExpr = this.constExprParser.parse(tokens, true);
        // Array literals are not valid const TYPES — reject them.
        if (constExpr instanceof const_expr_array_node_1.ConstExprArrayNode) {
            throw exception;
        }
        return this.enrichWithAttributes(tokens, new const_type_node_1.ConstTypeNode(constExpr), startLine, startIndex);
    }
    catch (error) {
        // Surface the positional ParserException, not the inner failure.
        throw exception;
    }
}
|
|
158
|
+
parseUnion(tokens, type) {
|
|
159
|
+
const types = [type];
|
|
160
|
+
while (tokens.tryConsumeTokenType(lexer_1.Lexer.TOKEN_UNION)) {
|
|
161
|
+
types.push(this.parseAtomic(tokens));
|
|
162
|
+
}
|
|
163
|
+
return new union_type_node_1.UnionTypeNode(types);
|
|
164
|
+
}
|
|
165
|
+
subParseUnion(tokens, type) {
|
|
166
|
+
const types = [type];
|
|
167
|
+
while (tokens.tryConsumeTokenType(lexer_1.Lexer.TOKEN_UNION)) {
|
|
168
|
+
tokens.tryConsumeTokenType(lexer_1.Lexer.TOKEN_PHPDOC_EOL);
|
|
169
|
+
types.push(this.parseAtomic(tokens));
|
|
170
|
+
tokens.tryConsumeTokenType(lexer_1.Lexer.TOKEN_PHPDOC_EOL);
|
|
171
|
+
}
|
|
172
|
+
return new union_type_node_1.UnionTypeNode(types);
|
|
173
|
+
}
|
|
174
|
+
parseIntersection(tokens, type) {
|
|
175
|
+
const types = [type];
|
|
176
|
+
while (tokens.tryConsumeTokenType(lexer_1.Lexer.TOKEN_INTERSECTION)) {
|
|
177
|
+
types.push(this.parseAtomic(tokens));
|
|
178
|
+
}
|
|
179
|
+
return new intersection_type_node_1.IntersectionTypeNode(types);
|
|
180
|
+
}
|
|
181
|
+
subParseIntersection(tokens, type) {
|
|
182
|
+
const types = [type];
|
|
183
|
+
while (tokens.tryConsumeTokenType(lexer_1.Lexer.TOKEN_INTERSECTION)) {
|
|
184
|
+
tokens.tryConsumeTokenType(lexer_1.Lexer.TOKEN_PHPDOC_EOL);
|
|
185
|
+
types.push(this.parseAtomic(tokens));
|
|
186
|
+
tokens.tryConsumeTokenType(lexer_1.Lexer.TOKEN_PHPDOC_EOL);
|
|
187
|
+
}
|
|
188
|
+
return new intersection_type_node_1.IntersectionTypeNode(types);
|
|
189
|
+
}
|
|
190
|
+
parseConditional(tokens, subjectType) {
|
|
191
|
+
tokens.consumeTokenType(lexer_1.Lexer.TOKEN_IDENTIFIER);
|
|
192
|
+
let negated = false;
|
|
193
|
+
if (tokens.isCurrentTokenValue('not')) {
|
|
194
|
+
negated = true;
|
|
195
|
+
tokens.consumeTokenType(lexer_1.Lexer.TOKEN_IDENTIFIER);
|
|
196
|
+
}
|
|
197
|
+
const targetType = this.parse(tokens);
|
|
198
|
+
tokens.tryConsumeTokenType(lexer_1.Lexer.TOKEN_PHPDOC_EOL);
|
|
199
|
+
tokens.consumeTokenType(lexer_1.Lexer.TOKEN_NULLABLE);
|
|
200
|
+
tokens.tryConsumeTokenType(lexer_1.Lexer.TOKEN_PHPDOC_EOL);
|
|
201
|
+
const ifType = this.parse(tokens);
|
|
202
|
+
tokens.tryConsumeTokenType(lexer_1.Lexer.TOKEN_PHPDOC_EOL);
|
|
203
|
+
tokens.consumeTokenType(lexer_1.Lexer.TOKEN_COLON);
|
|
204
|
+
tokens.tryConsumeTokenType(lexer_1.Lexer.TOKEN_PHPDOC_EOL);
|
|
205
|
+
const elseType = this.subParse(tokens);
|
|
206
|
+
return new conditional_type_node_1.ConditionalTypeNode(subjectType, targetType, ifType, elseType, negated);
|
|
207
|
+
}
|
|
208
|
+
parseConditionalForParameter(tokens, parameterName) {
|
|
209
|
+
tokens.consumeTokenType(lexer_1.Lexer.TOKEN_VARIABLE);
|
|
210
|
+
tokens.consumeTokenValue(lexer_1.Lexer.TOKEN_IDENTIFIER, 'is');
|
|
211
|
+
let negated = false;
|
|
212
|
+
if (tokens.isCurrentTokenValue('not')) {
|
|
213
|
+
negated = true;
|
|
214
|
+
tokens.consumeTokenType(lexer_1.Lexer.TOKEN_IDENTIFIER);
|
|
215
|
+
}
|
|
216
|
+
const targetType = this.parse(tokens);
|
|
217
|
+
tokens.tryConsumeTokenType(lexer_1.Lexer.TOKEN_PHPDOC_EOL);
|
|
218
|
+
tokens.consumeTokenType(lexer_1.Lexer.TOKEN_NULLABLE);
|
|
219
|
+
tokens.tryConsumeTokenType(lexer_1.Lexer.TOKEN_PHPDOC_EOL);
|
|
220
|
+
const ifType = this.parse(tokens);
|
|
221
|
+
tokens.tryConsumeTokenType(lexer_1.Lexer.TOKEN_PHPDOC_EOL);
|
|
222
|
+
tokens.consumeTokenType(lexer_1.Lexer.TOKEN_COLON);
|
|
223
|
+
tokens.tryConsumeTokenType(lexer_1.Lexer.TOKEN_PHPDOC_EOL);
|
|
224
|
+
const elseType = this.subParse(tokens);
|
|
225
|
+
return new conditional_type_for_parameter_node_1.ConditionalTypeForParameterNode(parameterName, targetType, ifType, elseType, negated);
|
|
226
|
+
}
|
|
227
|
+
parseNullable(tokens) {
|
|
228
|
+
tokens.consumeTokenType(lexer_1.Lexer.TOKEN_NULLABLE);
|
|
229
|
+
const type = this.parseAtomic(tokens);
|
|
230
|
+
return new nullable_type_node_1.NullableTypeNode(type);
|
|
231
|
+
}
|
|
232
|
+
isHtml(tokens) {
|
|
233
|
+
tokens.consumeTokenType(lexer_1.Lexer.TOKEN_OPEN_ANGLE_BRACKET);
|
|
234
|
+
if (!tokens.isCurrentTokenType(lexer_1.Lexer.TOKEN_IDENTIFIER)) {
|
|
235
|
+
return false;
|
|
236
|
+
}
|
|
237
|
+
const htmlTagName = tokens.currentTokenValue();
|
|
238
|
+
tokens.next();
|
|
239
|
+
if (!tokens.tryConsumeTokenType(lexer_1.Lexer.TOKEN_CLOSE_ANGLE_BRACKET)) {
|
|
240
|
+
return false;
|
|
241
|
+
}
|
|
242
|
+
while (!tokens.isCurrentTokenType(lexer_1.Lexer.TOKEN_END)) {
|
|
243
|
+
if (tokens.tryConsumeTokenType(lexer_1.Lexer.TOKEN_OPEN_ANGLE_BRACKET) &&
|
|
244
|
+
tokens.currentTokenValue().includes(`/${htmlTagName}>`)) {
|
|
245
|
+
return true;
|
|
246
|
+
}
|
|
247
|
+
tokens.next();
|
|
248
|
+
}
|
|
249
|
+
return false;
|
|
250
|
+
}
|
|
251
|
+
parseGeneric(tokens, baseType) {
|
|
252
|
+
tokens.consumeTokenType(lexer_1.Lexer.TOKEN_OPEN_ANGLE_BRACKET);
|
|
253
|
+
tokens.tryConsumeTokenType(lexer_1.Lexer.TOKEN_PHPDOC_EOL);
|
|
254
|
+
const genericTypes = [];
|
|
255
|
+
const variances = [];
|
|
256
|
+
const [genericType, variance] = this.parseGenericTypeArgument(tokens);
|
|
257
|
+
genericTypes.push(genericType);
|
|
258
|
+
variances.push(variance);
|
|
259
|
+
tokens.tryConsumeTokenType(lexer_1.Lexer.TOKEN_PHPDOC_EOL);
|
|
260
|
+
while (tokens.tryConsumeTokenType(lexer_1.Lexer.TOKEN_COMMA)) {
|
|
261
|
+
tokens.tryConsumeTokenType(lexer_1.Lexer.TOKEN_PHPDOC_EOL);
|
|
262
|
+
if (tokens.tryConsumeTokenType(lexer_1.Lexer.TOKEN_CLOSE_ANGLE_BRACKET)) {
|
|
263
|
+
break;
|
|
264
|
+
}
|
|
265
|
+
const [genericTypeToAddInWhileLoop, varianceToAddInWhileLoop] = this.parseGenericTypeArgument(tokens);
|
|
266
|
+
genericTypes.push(genericTypeToAddInWhileLoop);
|
|
267
|
+
variances.push(varianceToAddInWhileLoop);
|
|
268
|
+
tokens.tryConsumeTokenType(lexer_1.Lexer.TOKEN_PHPDOC_EOL);
|
|
269
|
+
}
|
|
270
|
+
tokens.tryConsumeTokenType(lexer_1.Lexer.TOKEN_PHPDOC_EOL);
|
|
271
|
+
tokens.consumeTokenType(lexer_1.Lexer.TOKEN_CLOSE_ANGLE_BRACKET);
|
|
272
|
+
const type = new generic_type_node_1.GenericTypeNode(baseType, genericTypes, variances);
|
|
273
|
+
const startLine = baseType.getAttribute(types_1.Attribute.START_LINE);
|
|
274
|
+
const startIndex = baseType.getAttribute(types_1.Attribute.START_INDEX);
|
|
275
|
+
if (startLine !== null && startIndex !== null) {
|
|
276
|
+
return this.enrichWithAttributes(tokens, type, baseType.getAttribute(types_1.Attribute.START_LINE), baseType.getAttribute(types_1.Attribute.START_INDEX));
|
|
277
|
+
}
|
|
278
|
+
return type;
|
|
279
|
+
}
|
|
280
|
+
parseGenericTypeArgument(tokens) {
|
|
281
|
+
const startLine = tokens.currentTokenLine();
|
|
282
|
+
const startIndex = tokens.currentTokenIndex();
|
|
283
|
+
if (tokens.tryConsumeTokenType(lexer_1.Lexer.TOKEN_WILDCARD)) {
|
|
284
|
+
return [
|
|
285
|
+
this.enrichWithAttributes(tokens, new identifier_type_node_1.IdentifierTypeNode('mixed'), startLine, startIndex),
|
|
286
|
+
generic_type_node_1.GenericTypeNode.VARIANCE_BIVARIANT,
|
|
287
|
+
];
|
|
288
|
+
}
|
|
289
|
+
let variance;
|
|
290
|
+
if (tokens.tryConsumeTokenValue('contravariant')) {
|
|
291
|
+
variance = generic_type_node_1.GenericTypeNode.VARIANCE_CONTRAVARIANT;
|
|
292
|
+
}
|
|
293
|
+
else if (tokens.tryConsumeTokenValue('covariant')) {
|
|
294
|
+
variance = generic_type_node_1.GenericTypeNode.VARIANCE_COVARIANT;
|
|
295
|
+
}
|
|
296
|
+
else {
|
|
297
|
+
variance = generic_type_node_1.GenericTypeNode.VARIANCE_INVARIANT;
|
|
298
|
+
}
|
|
299
|
+
const type = this.parse(tokens);
|
|
300
|
+
return [type, variance];
|
|
301
|
+
}
|
|
302
|
+
/**
 * Parses a callable signature "identifier(param, ...): ReturnType".
 * 'tokens' is positioned on the '('; 'identifier' is the already-parsed
 * callable keyword (e.g. callable, Closure). Supports a trailing comma in
 * the parameter list and PHPDoc end-of-lines between parameters.
 *
 * @returns a CallableTypeNode.
 */
parseCallable(tokens, identifier) {
    tokens.consumeTokenType(lexer_1.Lexer.TOKEN_OPEN_PARENTHESES);
    tokens.tryConsumeTokenType(lexer_1.Lexer.TOKEN_PHPDOC_EOL);
    const parameters = [];
    if (!tokens.isCurrentTokenType(lexer_1.Lexer.TOKEN_CLOSE_PARENTHESES)) {
        parameters.push(this.parseCallableParameter(tokens));
        tokens.tryConsumeTokenType(lexer_1.Lexer.TOKEN_PHPDOC_EOL);
        while (tokens.tryConsumeTokenType(lexer_1.Lexer.TOKEN_COMMA)) {
            tokens.tryConsumeTokenType(lexer_1.Lexer.TOKEN_PHPDOC_EOL);
            // Trailing comma: peek (don't consume) — ')' is consumed below.
            if (tokens.isCurrentTokenType(lexer_1.Lexer.TOKEN_CLOSE_PARENTHESES)) {
                break;
            }
            parameters.push(this.parseCallableParameter(tokens));
            tokens.tryConsumeTokenType(lexer_1.Lexer.TOKEN_PHPDOC_EOL);
        }
    }
    tokens.consumeTokenType(lexer_1.Lexer.TOKEN_CLOSE_PARENTHESES);
    tokens.consumeTokenType(lexer_1.Lexer.TOKEN_COLON);
    const returnType = this.parseCallableReturnType(tokens);
    return new callable_type_node_1.CallableTypeNode(identifier, parameters, returnType);
}
|
|
323
|
+
parseCallableParameter(tokens) {
|
|
324
|
+
const startLine = tokens.currentTokenLine();
|
|
325
|
+
const startIndex = tokens.currentTokenIndex();
|
|
326
|
+
const type = this.parse(tokens);
|
|
327
|
+
const isReference = tokens.tryConsumeTokenType(lexer_1.Lexer.TOKEN_REFERENCE);
|
|
328
|
+
const isVariadic = tokens.tryConsumeTokenType(lexer_1.Lexer.TOKEN_VARIADIC);
|
|
329
|
+
let parameterName = '';
|
|
330
|
+
if (tokens.isCurrentTokenType(lexer_1.Lexer.TOKEN_VARIABLE)) {
|
|
331
|
+
parameterName = tokens.currentTokenValue();
|
|
332
|
+
tokens.consumeTokenType(lexer_1.Lexer.TOKEN_VARIABLE);
|
|
333
|
+
}
|
|
334
|
+
const isOptional = tokens.tryConsumeTokenType(lexer_1.Lexer.TOKEN_EQUAL);
|
|
335
|
+
return this.enrichWithAttributes(tokens, new callable_type_parameter_node_1.CallableTypeParameterNode(type, isReference, isVariadic, parameterName, isOptional), startLine, startIndex);
|
|
336
|
+
}
|
|
337
|
+
parseCallableReturnType(tokens) {
|
|
338
|
+
const startLine = tokens.currentTokenLine();
|
|
339
|
+
const startIndex = tokens.currentTokenIndex();
|
|
340
|
+
let type;
|
|
341
|
+
if (tokens.isCurrentTokenType(lexer_1.Lexer.TOKEN_NULLABLE)) {
|
|
342
|
+
return this.parseNullable(tokens);
|
|
343
|
+
}
|
|
344
|
+
if (tokens.tryConsumeTokenType(lexer_1.Lexer.TOKEN_OPEN_PARENTHESES)) {
|
|
345
|
+
type = this.parse(tokens);
|
|
346
|
+
tokens.consumeTokenType(lexer_1.Lexer.TOKEN_CLOSE_PARENTHESES);
|
|
347
|
+
if (tokens.isCurrentTokenType(lexer_1.Lexer.TOKEN_OPEN_SQUARE_BRACKET)) {
|
|
348
|
+
type = this.tryParseArrayOrOffsetAccess(tokens, type);
|
|
349
|
+
}
|
|
350
|
+
return type;
|
|
351
|
+
}
|
|
352
|
+
if (tokens.tryConsumeTokenType(lexer_1.Lexer.TOKEN_THIS_VARIABLE)) {
|
|
353
|
+
type = new this_type_node_1.ThisTypeNode();
|
|
354
|
+
if (tokens.isCurrentTokenType(lexer_1.Lexer.TOKEN_OPEN_SQUARE_BRACKET)) {
|
|
355
|
+
type = this.tryParseArrayOrOffsetAccess(tokens, this.enrichWithAttributes(tokens, type, startLine, startIndex));
|
|
356
|
+
}
|
|
357
|
+
return type;
|
|
358
|
+
}
|
|
359
|
+
const currentTokenValue = tokens.currentTokenValue();
|
|
360
|
+
tokens.pushSavePoint();
|
|
361
|
+
if (tokens.tryConsumeTokenType(lexer_1.Lexer.TOKEN_IDENTIFIER)) {
|
|
362
|
+
type = new identifier_type_node_1.IdentifierTypeNode(currentTokenValue);
|
|
363
|
+
if (!tokens.isCurrentTokenType(lexer_1.Lexer.TOKEN_DOUBLE_COLON)) {
|
|
364
|
+
if (tokens.isCurrentTokenType(lexer_1.Lexer.TOKEN_OPEN_ANGLE_BRACKET)) {
|
|
365
|
+
type = this.parseGeneric(tokens, this.enrichWithAttributes(tokens, type, startLine, startIndex));
|
|
366
|
+
}
|
|
367
|
+
else if (tokens.isCurrentTokenType(lexer_1.Lexer.TOKEN_OPEN_SQUARE_BRACKET)) {
|
|
368
|
+
type = this.tryParseArrayOrOffsetAccess(tokens, this.enrichWithAttributes(tokens, type, startLine, startIndex));
|
|
369
|
+
}
|
|
370
|
+
}
|
|
371
|
+
else {
|
|
372
|
+
tokens.rollback();
|
|
373
|
+
}
|
|
374
|
+
return type;
|
|
375
|
+
}
|
|
376
|
+
tokens.dropSavePoint();
|
|
377
|
+
const exception = new parser_exception_1.ParserException(tokens.currentTokenValue(), tokens.currentTokenType(), tokens.currentTokenOffset(), lexer_1.Lexer.TOKEN_IDENTIFIER, null, tokens.currentTokenLine());
|
|
378
|
+
if (this.constExprParser === null) {
|
|
379
|
+
throw exception;
|
|
380
|
+
}
|
|
381
|
+
if (this.constExprParser === null) {
|
|
382
|
+
throw exception;
|
|
383
|
+
}
|
|
384
|
+
try {
|
|
385
|
+
const constExpr = this.constExprParser.parse(tokens, true);
|
|
386
|
+
if (constExpr instanceof const_expr_array_node_1.ConstExprArrayNode) {
|
|
387
|
+
throw exception;
|
|
388
|
+
}
|
|
389
|
+
type = new const_type_node_1.ConstTypeNode(constExpr);
|
|
390
|
+
if (tokens.isCurrentTokenType(lexer_1.Lexer.TOKEN_OPEN_SQUARE_BRACKET)) {
|
|
391
|
+
type = this.tryParseArrayOrOffsetAccess(tokens, this.enrichWithAttributes(tokens, type, startLine, startIndex));
|
|
392
|
+
}
|
|
393
|
+
return type;
|
|
394
|
+
}
|
|
395
|
+
catch (e) {
|
|
396
|
+
if (e instanceof Error) {
|
|
397
|
+
throw exception;
|
|
398
|
+
}
|
|
399
|
+
else {
|
|
400
|
+
throw e;
|
|
401
|
+
}
|
|
402
|
+
}
|
|
403
|
+
}
|
|
404
|
+
tryParseCallable(tokens, identifier) {
|
|
405
|
+
try {
|
|
406
|
+
tokens.pushSavePoint();
|
|
407
|
+
const type = this.parseCallable(tokens, identifier);
|
|
408
|
+
tokens.dropSavePoint();
|
|
409
|
+
return type;
|
|
410
|
+
}
|
|
411
|
+
catch (e) {
|
|
412
|
+
if (e instanceof parser_exception_1.ParserException) {
|
|
413
|
+
tokens.rollback();
|
|
414
|
+
return identifier;
|
|
415
|
+
}
|
|
416
|
+
throw e;
|
|
417
|
+
}
|
|
418
|
+
}
|
|
419
|
+
tryParseArrayOrOffsetAccess(tokens, type) {
|
|
420
|
+
try {
|
|
421
|
+
while (tokens.isCurrentTokenType(lexer_1.Lexer.TOKEN_OPEN_SQUARE_BRACKET)) {
|
|
422
|
+
tokens.pushSavePoint();
|
|
423
|
+
const canBeOffsetAccessType = !tokens.isPrecededByHorizontalWhitespace();
|
|
424
|
+
tokens.consumeTokenType(lexer_1.Lexer.TOKEN_OPEN_SQUARE_BRACKET);
|
|
425
|
+
if (canBeOffsetAccessType &&
|
|
426
|
+
!tokens.isCurrentTokenType(lexer_1.Lexer.TOKEN_CLOSE_SQUARE_BRACKET)) {
|
|
427
|
+
const offset = this.parse(tokens);
|
|
428
|
+
tokens.consumeTokenType(lexer_1.Lexer.TOKEN_CLOSE_SQUARE_BRACKET);
|
|
429
|
+
tokens.dropSavePoint();
|
|
430
|
+
type = new offset_access_type_node_1.OffsetAccessTypeNode(type, offset);
|
|
431
|
+
}
|
|
432
|
+
else {
|
|
433
|
+
tokens.consumeTokenType(lexer_1.Lexer.TOKEN_CLOSE_SQUARE_BRACKET);
|
|
434
|
+
tokens.dropSavePoint();
|
|
435
|
+
type = new array_type_node_1.ArrayTypeNode(type);
|
|
436
|
+
}
|
|
437
|
+
}
|
|
438
|
+
}
|
|
439
|
+
catch (e) {
|
|
440
|
+
if (e instanceof parser_exception_1.ParserException) {
|
|
441
|
+
tokens.rollback();
|
|
442
|
+
}
|
|
443
|
+
else {
|
|
444
|
+
throw e;
|
|
445
|
+
}
|
|
446
|
+
}
|
|
447
|
+
return type;
|
|
448
|
+
}
|
|
449
|
+
parseArrayShape(tokens, type, kind) {
|
|
450
|
+
tokens.consumeTokenType(lexer_1.Lexer.TOKEN_OPEN_CURLY_BRACKET);
|
|
451
|
+
const items = [];
|
|
452
|
+
let sealed = true;
|
|
453
|
+
do {
|
|
454
|
+
tokens.tryConsumeTokenType(lexer_1.Lexer.TOKEN_PHPDOC_EOL);
|
|
455
|
+
if (tokens.tryConsumeTokenType(lexer_1.Lexer.TOKEN_CLOSE_CURLY_BRACKET)) {
|
|
456
|
+
return new array_shape_node_1.ArrayShapeNode(items, true, kind);
|
|
457
|
+
}
|
|
458
|
+
if (tokens.tryConsumeTokenType(lexer_1.Lexer.TOKEN_VARIADIC)) {
|
|
459
|
+
sealed = false;
|
|
460
|
+
tokens.tryConsumeTokenType(lexer_1.Lexer.TOKEN_COMMA);
|
|
461
|
+
break;
|
|
462
|
+
}
|
|
463
|
+
items.push(this.parseArrayShapeItem(tokens));
|
|
464
|
+
tokens.tryConsumeTokenType(lexer_1.Lexer.TOKEN_PHPDOC_EOL);
|
|
465
|
+
} while (tokens.tryConsumeTokenType(lexer_1.Lexer.TOKEN_COMMA));
|
|
466
|
+
tokens.tryConsumeTokenType(lexer_1.Lexer.TOKEN_PHPDOC_EOL);
|
|
467
|
+
tokens.consumeTokenType(lexer_1.Lexer.TOKEN_CLOSE_CURLY_BRACKET);
|
|
468
|
+
return new array_shape_node_1.ArrayShapeNode(items, sealed, kind);
|
|
469
|
+
}
|
|
470
|
+
parseArrayShapeItem(tokens) {
|
|
471
|
+
const startLine = tokens.currentTokenLine();
|
|
472
|
+
const startIndex = tokens.currentTokenIndex();
|
|
473
|
+
try {
|
|
474
|
+
tokens.pushSavePoint();
|
|
475
|
+
const key = this.parseArrayShapeKey(tokens);
|
|
476
|
+
const optional = tokens.tryConsumeTokenType(lexer_1.Lexer.TOKEN_NULLABLE);
|
|
477
|
+
tokens.consumeTokenType(lexer_1.Lexer.TOKEN_COLON);
|
|
478
|
+
const value = this.parse(tokens);
|
|
479
|
+
tokens.dropSavePoint();
|
|
480
|
+
return this.enrichWithAttributes(tokens, new array_shape_item_node_1.ArrayShapeItemNode(key, optional, value), startLine, startIndex);
|
|
481
|
+
}
|
|
482
|
+
catch (e) {
|
|
483
|
+
if (e instanceof parser_exception_1.ParserException) {
|
|
484
|
+
tokens.rollback();
|
|
485
|
+
const value = this.parse(tokens);
|
|
486
|
+
return this.enrichWithAttributes(tokens, new array_shape_item_node_1.ArrayShapeItemNode(null, false, value), startLine, startIndex);
|
|
487
|
+
}
|
|
488
|
+
throw e;
|
|
489
|
+
}
|
|
490
|
+
}
|
|
491
|
+
parseArrayShapeKey(tokens) {
|
|
492
|
+
const startLine = tokens.currentTokenLine();
|
|
493
|
+
const startIndex = tokens.currentTokenIndex();
|
|
494
|
+
if (tokens.isCurrentTokenType(lexer_1.Lexer.TOKEN_INTEGER)) {
|
|
495
|
+
const key = new const_expr_integer_node_1.ConstExprIntegerNode(tokens.currentTokenValue().replaceAll('_', ''));
|
|
496
|
+
tokens.next();
|
|
497
|
+
return this.enrichWithAttributes(tokens, key, startLine, startIndex);
|
|
498
|
+
}
|
|
499
|
+
let key;
|
|
500
|
+
if (tokens.isCurrentTokenType(lexer_1.Lexer.TOKEN_SINGLE_QUOTED_STRING)) {
|
|
501
|
+
if (this.quoteAwareConstExprString) {
|
|
502
|
+
key = new quote_aware_const_expr_string_node_1.QuoteAwareConstExprStringNode(string_unescaper_1.StringUnescaper.unescapeString(tokens.currentTokenValue()), quote_aware_const_expr_string_node_1.QuoteAwareConstExprStringNode.SINGLE_QUOTED);
|
|
503
|
+
}
|
|
504
|
+
else {
|
|
505
|
+
key = new const_expr_string_node_1.ConstExprStringNode(tokens.currentTokenValue().replace(/(^'|'$)/g, ''));
|
|
506
|
+
}
|
|
507
|
+
tokens.next();
|
|
508
|
+
}
|
|
509
|
+
if (tokens.isCurrentTokenType(lexer_1.Lexer.TOKEN_DOUBLE_QUOTED_STRING)) {
|
|
510
|
+
if (this.quoteAwareConstExprString) {
|
|
511
|
+
key = new quote_aware_const_expr_string_node_1.QuoteAwareConstExprStringNode(string_unescaper_1.StringUnescaper.unescapeString(tokens.currentTokenValue()), quote_aware_const_expr_string_node_1.QuoteAwareConstExprStringNode.DOUBLE_QUOTED);
|
|
512
|
+
}
|
|
513
|
+
else {
|
|
514
|
+
key = new const_expr_string_node_1.ConstExprStringNode(tokens.currentTokenValue().replace(/(^"|"$)/g, ''));
|
|
515
|
+
}
|
|
516
|
+
}
|
|
517
|
+
else {
|
|
518
|
+
key = new identifier_type_node_1.IdentifierTypeNode(tokens.currentTokenValue());
|
|
519
|
+
tokens.consumeTokenType(lexer_1.Lexer.TOKEN_IDENTIFIER);
|
|
520
|
+
}
|
|
521
|
+
return this.enrichWithAttributes(tokens, key, startLine, startIndex);
|
|
522
|
+
}
|
|
523
|
+
parseObjectShape(tokens) {
|
|
524
|
+
tokens.consumeTokenType(lexer_1.Lexer.TOKEN_OPEN_CURLY_BRACKET);
|
|
525
|
+
const items = [];
|
|
526
|
+
do {
|
|
527
|
+
tokens.tryConsumeTokenType(lexer_1.Lexer.TOKEN_PHPDOC_EOL);
|
|
528
|
+
if (tokens.tryConsumeTokenType(lexer_1.Lexer.TOKEN_CLOSE_CURLY_BRACKET)) {
|
|
529
|
+
return new object_shape_node_1.ObjectShapeNode(items);
|
|
530
|
+
}
|
|
531
|
+
items.push(this.parseObjectShapeItem(tokens));
|
|
532
|
+
tokens.tryConsumeTokenType(lexer_1.Lexer.TOKEN_PHPDOC_EOL);
|
|
533
|
+
} while (tokens.tryConsumeTokenType(lexer_1.Lexer.TOKEN_COMMA));
|
|
534
|
+
tokens.tryConsumeTokenType(lexer_1.Lexer.TOKEN_PHPDOC_EOL);
|
|
535
|
+
tokens.consumeTokenType(lexer_1.Lexer.TOKEN_CLOSE_CURLY_BRACKET);
|
|
536
|
+
return new object_shape_node_1.ObjectShapeNode(items);
|
|
537
|
+
}
|
|
538
|
+
parseObjectShapeItem(tokens) {
|
|
539
|
+
const startLine = tokens.currentTokenLine();
|
|
540
|
+
const startIndex = tokens.currentTokenIndex();
|
|
541
|
+
const key = this.parseObjectShapeKey(tokens);
|
|
542
|
+
const optional = tokens.tryConsumeTokenType(lexer_1.Lexer.TOKEN_NULLABLE);
|
|
543
|
+
tokens.consumeTokenType(lexer_1.Lexer.TOKEN_COLON);
|
|
544
|
+
const value = this.parse(tokens);
|
|
545
|
+
return this.enrichWithAttributes(tokens, new object_shape_item_node_1.ObjectShapeItemNode(key, optional, value), startLine, startIndex);
|
|
546
|
+
}
|
|
547
|
+
parseObjectShapeKey(tokens) {
|
|
548
|
+
const startLine = tokens.currentTokenLine();
|
|
549
|
+
const startIndex = tokens.currentTokenIndex();
|
|
550
|
+
let key;
|
|
551
|
+
if (tokens.isCurrentTokenType(lexer_1.Lexer.TOKEN_SINGLE_QUOTED_STRING)) {
|
|
552
|
+
if (this.quoteAwareConstExprString) {
|
|
553
|
+
key = new quote_aware_const_expr_string_node_1.QuoteAwareConstExprStringNode(string_unescaper_1.StringUnescaper.unescapeString(tokens.currentTokenValue()), quote_aware_const_expr_string_node_1.QuoteAwareConstExprStringNode.SINGLE_QUOTED);
|
|
554
|
+
}
|
|
555
|
+
else {
|
|
556
|
+
key = new const_expr_string_node_1.ConstExprStringNode(tokens.currentTokenValue().replace(/(^"|"$)/g, ''));
|
|
557
|
+
}
|
|
558
|
+
tokens.next();
|
|
559
|
+
}
|
|
560
|
+
if (tokens.isCurrentTokenType(lexer_1.Lexer.TOKEN_DOUBLE_QUOTED_STRING)) {
|
|
561
|
+
if (this.quoteAwareConstExprString) {
|
|
562
|
+
key = new quote_aware_const_expr_string_node_1.QuoteAwareConstExprStringNode(string_unescaper_1.StringUnescaper.unescapeString(tokens.currentTokenValue()), quote_aware_const_expr_string_node_1.QuoteAwareConstExprStringNode.DOUBLE_QUOTED);
|
|
563
|
+
}
|
|
564
|
+
else {
|
|
565
|
+
key = new const_expr_string_node_1.ConstExprStringNode(tokens.currentTokenValue().replace(/(^"|"$)/g, ''));
|
|
566
|
+
}
|
|
567
|
+
tokens.next();
|
|
568
|
+
}
|
|
569
|
+
else {
|
|
570
|
+
key = new identifier_type_node_1.IdentifierTypeNode(tokens.currentTokenValue());
|
|
571
|
+
tokens.consumeTokenType(lexer_1.Lexer.TOKEN_IDENTIFIER);
|
|
572
|
+
}
|
|
573
|
+
return this.enrichWithAttributes(tokens, key, startLine, startIndex);
|
|
574
|
+
}
|
|
575
|
+
}
|
|
576
|
+
exports.TypeParser = TypeParser;
|
|
@@ -0,0 +1,12 @@
|
|
|
1
|
+
/**
 * One element of a computed diff: a pair of old/new values together with
 * the kind of change that relates them.
 */
export declare class DiffElem<EleType> {
    /** Kind of change (KEEP / REMOVE / ADD / REPLACE). */
    type: DiffElemType;
    /** Element taken from the old sequence. */
    old: EleType;
    /** Element taken from the new sequence. */
    new: EleType;
    constructor(type: DiffElemType, oldEle: EleType, newEle: EleType);
}
|
|
7
|
+
/** Kind of change recorded by a DiffElem. */
export declare enum DiffElemType {
    KEEP = 1,
    REMOVE = 2,
    ADD = 3,
    REPLACE = 4
}
|
|
@@ -0,0 +1,19 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
exports.DiffElemType = exports.DiffElem = void 0;
|
|
4
|
+
/**
 * One element of a computed diff: a pair of old/new values together with
 * the kind of change that relates them (see DiffElemType).
 */
class DiffElem {
    /**
     * @param type   kind of change (DiffElemType value)
     * @param oldEle element taken from the old sequence
     * @param newEle element taken from the new sequence
     */
    constructor(type, oldEle, newEle) {
        // BUG FIX: the emitted code assigned `this.type = type;` twice;
        // the duplicate assignment is removed.
        this.type = type;
        this.old = oldEle;
        this.new = newEle;
    }
}
|
|
12
|
+
exports.DiffElem = DiffElem;
|
|
13
|
+
// Runtime object for the DiffElemType enum (standard TypeScript enum emit):
// maps each name to its numeric value and each number back to its name,
// and wires the object onto `exports` on first evaluation.
var DiffElemType;
(function (DiffElemType) {
    DiffElemType[DiffElemType["KEEP"] = 1] = "KEEP";
    DiffElemType[DiffElemType["REMOVE"] = 2] = "REMOVE";
    DiffElemType[DiffElemType["ADD"] = 3] = "ADD";
    DiffElemType[DiffElemType["REPLACE"] = 4] = "REPLACE";
})(DiffElemType || (exports.DiffElemType = DiffElemType = {}));
|
|
@@ -0,0 +1,10 @@
|
|
|
1
|
+
import { DiffElem } from './diff-elem';
|
|
2
|
+
/**
 * Computes the difference between two sequences using a caller-supplied
 * equality predicate.
 */
export declare class Differ<T> {
    /** Predicate used to decide whether two elements are equal. */
    private isEqual;
    constructor(isEqual: (a: T, b: T) => boolean);
    /** Diffs `old` against `newElements`, returning change elements in order. */
    diff(old: T[], newElements: T[]): DiffElem<T>[];
    /**
     * Like diff(), but — judging by the helper names — coalesces adjacent
     * remove/add pairs into REPLACE elements. NOTE(review): confirm against
     * the implementation in differ.js; it is not visible here.
     */
    diffWithReplacements(old: T[], newElements: T[]): DiffElem<T>[];
    private calculateTrace;
    private extractDiff;
    private coalesceReplacements;
}
|