@ktjs/ts-plugin 0.1.4 → 0.1.6

This diff shows the published contents of the package versions as they appear in their public registry. It is provided for informational purposes only and reflects the changes between the two released versions.
@@ -0,0 +1,6 @@
+ import type tsModule from 'typescript/lib/tsserverlibrary';
+ import type { JsxOpeningLikeElement } from './types';
+ export declare function getScopeName(opening: JsxOpeningLikeElement, attrName: string, fallback: string, ts: typeof tsModule): string;
+ export declare function getJsxAttribute(opening: tsModule.JsxOpeningElement | tsModule.JsxSelfClosingElement, attrName: string, ts: typeof tsModule): tsModule.JsxAttribute | undefined;
+ export declare function getAttributeText(attr: tsModule.JsxAttribute | undefined, ts: typeof tsModule, allowIdentifier?: boolean): string | undefined;
+ export declare function getAttributeExpression(attr: tsModule.JsxAttribute | undefined, ts: typeof tsModule): tsModule.Expression | undefined;
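
These declarations are the attribute-reading surface the rest of the plugin builds on. As a reading aid only, a hedged sketch of how a caller might use them; the 'k-for'/'k-item' attribute names and the 'item' fallback are illustrative assumptions, not taken from this diff.

    import type tsModule from 'typescript/lib/tsserverlibrary';
    import { getJsxAttribute, getAttributeText, getScopeName } from './jsx-attributes';

    // Hypothetical caller: read the loop attributes off a JSX opening element.
    function describeLoop(opening: tsModule.JsxOpeningElement, ts: typeof tsModule) {
        // 'k-for' and 'k-item' are assumed attribute names, used for illustration only.
        const forAttr = getJsxAttribute(opening, 'k-for', ts);
        const forText = getAttributeText(forAttr, ts);                // unwraps "..." or {'...'}
        const itemName = getScopeName(opening, 'k-item', 'item', ts); // falls back to 'item'
        return { forText, itemName };
    }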
@@ -0,0 +1,59 @@
+ "use strict";
+ Object.defineProperty(exports, "__esModule", { value: true });
+ exports.getScopeName = getScopeName;
+ exports.getJsxAttribute = getJsxAttribute;
+ exports.getAttributeText = getAttributeText;
+ exports.getAttributeExpression = getAttributeExpression;
+ const identifiers_1 = require("./identifiers");
+ function getScopeName(opening, attrName, fallback, ts) {
+     const attr = getJsxAttribute(opening, attrName, ts);
+     const raw = getAttributeText(attr, ts, true);
+     if (raw && (0, identifiers_1.isValidIdentifier)(raw)) {
+         return raw;
+     }
+     return fallback;
+ }
+ function getJsxAttribute(opening, attrName, ts) {
+     const attrs = opening.attributes.properties;
+     for (let i = 0; i < attrs.length; i++) {
+         const attr = attrs[i];
+         if (!ts.isJsxAttribute(attr)) {
+             continue;
+         }
+         if (getAttributeName(attr.name) === attrName) {
+             return attr;
+         }
+     }
+     return undefined;
+ }
+ function getAttributeText(attr, ts, allowIdentifier = false) {
+     if (!attr || !attr.initializer) {
+         return undefined;
+     }
+     if (ts.isStringLiteral(attr.initializer)) {
+         return attr.initializer.text;
+     }
+     if (!ts.isJsxExpression(attr.initializer) || !attr.initializer.expression) {
+         return undefined;
+     }
+     const expr = attr.initializer.expression;
+     if (ts.isStringLiteralLike(expr)) {
+         return expr.text;
+     }
+     if (allowIdentifier && ts.isIdentifier(expr)) {
+         return expr.text;
+     }
+     return undefined;
+ }
+ function getAttributeExpression(attr, ts) {
+     if (!attr?.initializer || !ts.isJsxExpression(attr.initializer) || !attr.initializer.expression) {
+         return undefined;
+     }
+     return attr.initializer.expression;
+ }
+ function getAttributeName(name) {
+     if ('text' in name) {
+         return String(name.text);
+     }
+     return name.getText();
+ }
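
A minimal, self-contained check of the behaviour above (illustrative only; the 'k-for' attribute name is an assumption):

    import * as ts from 'typescript/lib/tsserverlibrary';
    import { getJsxAttribute, getAttributeText } from './jsx-attributes';

    // Parse a tiny TSX snippet and read the attribute the way the plugin does.
    const sf = ts.createSourceFile('x.tsx', '<div k-for="item in items" />;', ts.ScriptTarget.Latest, true, ts.ScriptKind.TSX);
    const el = (sf.statements[0] as ts.ExpressionStatement).expression as ts.JsxSelfClosingElement;
    const attr = getJsxAttribute(el, 'k-for', ts);
    // A string-literal initializer yields its text; {expr} initializers are unwrapped,
    // and a bare identifier is returned only when allowIdentifier is true.
    console.log(getAttributeText(attr, ts)); // 'item in items'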
@@ -0,0 +1,4 @@
+ import type tsModule from 'typescript/lib/tsserverlibrary';
+ import type { ResolvedConfig } from './types';
+ export declare function addKForSemanticClassifications(base: tsModule.Classifications, sourceFile: tsModule.SourceFile, span: tsModule.TextSpan, format: tsModule.SemanticClassificationFormat | undefined, ts: typeof tsModule, config: ResolvedConfig): tsModule.Classifications;
+ export declare function addKForSyntacticClassifications(base: tsModule.Classifications, sourceFile: tsModule.SourceFile, span: tsModule.TextSpan, ts: typeof tsModule, config: ResolvedConfig): tsModule.Classifications;
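
A hedged sketch of how these entry points are presumably wired into the language service; the actual plugin entry point is not part of this diff, and the './classifications' module path is a guess.

    import type tsModule from 'typescript/lib/tsserverlibrary';
    import { addKForSemanticClassifications } from './classifications'; // module path assumed
    import type { ResolvedConfig } from './types';

    // Assumed wiring: decorate the standard classification request and overlay k-for tokens.
    function wrapSemanticClassifications(ls: tsModule.LanguageService, ts: typeof tsModule, config: ResolvedConfig) {
        const original = ls.getEncodedSemanticClassifications.bind(ls);
        ls.getEncodedSemanticClassifications = (fileName, span, format) => {
            const base = original(fileName, span, format);
            const sourceFile = ls.getProgram()?.getSourceFile(fileName);
            return sourceFile
                ? addKForSemanticClassifications(base, sourceFile, span, format, ts, config)
                : base;
        };
    }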
@@ -0,0 +1,407 @@
+ "use strict";
+ Object.defineProperty(exports, "__esModule", { value: true });
+ exports.addKForSemanticClassifications = addKForSemanticClassifications;
+ exports.addKForSyntacticClassifications = addKForSyntacticClassifications;
+ const jsx_attributes_1 = require("./jsx-attributes");
+ const kfor_parser_1 = require("./kfor-parser");
+ const identifiers_1 = require("./identifiers");
+ const TOKEN_TYPE_VARIABLE = 7;
+ const TOKEN_MODIFIER_READONLY = 1 << 3;
+ const TOKEN_MODIFIER_LOCAL = 1 << 5;
+ const TOKEN_ENCODING_TYPE_OFFSET = 8;
+ const IDENTIFIER_PATTERN = /[A-Za-z_$][A-Za-z0-9_$]*/g;
+ const KEYWORD_DELIMITER_PATTERN = /\s+(in|of)\s+/;
+ function addKForSemanticClassifications(base, sourceFile, span, format, ts, config) {
+     const tokens = collectHighlightTokens(sourceFile, ts, config, span);
+     if (tokens.length === 0) {
+         return base;
+     }
+     const semanticSpans = buildSemanticSpans(tokens, format, ts);
+     if (semanticSpans.length === 0) {
+         return base;
+     }
+     return mergeClassifications(base, semanticSpans, false);
+ }
+ function addKForSyntacticClassifications(base, sourceFile, span, ts, config) {
+     const tokens = collectHighlightTokens(sourceFile, ts, config, span);
+     if (tokens.length === 0) {
+         return base;
+     }
+     const keywordSpans = buildSyntacticKeywordSpans(tokens, ts);
+     if (keywordSpans.length === 0) {
+         return base;
+     }
+     return mergeClassifications(base, keywordSpans, true);
+ }
+ function collectHighlightTokens(sourceFile, ts, config, span) {
+     const tokens = [];
+     const spanStart = span.start;
+     const spanEnd = span.start + span.length;
+     const visit = (node) => {
+         let opening;
+         if (ts.isJsxElement(node)) {
+             opening = node.openingElement;
+         }
+         else if (ts.isJsxSelfClosingElement(node)) {
+             opening = node;
+         }
+         if (opening) {
+             const attr = (0, jsx_attributes_1.getJsxAttribute)(opening, config.forAttr, ts);
+             if (attr) {
+                 const parsed = parseKForAttributeTokens(attr, sourceFile, ts, config.allowOfKeyword);
+                 for (let i = 0; i < parsed.length; i++) {
+                     const token = parsed[i];
+                     const tokenStart = token.start;
+                     const tokenEnd = token.start + token.length;
+                     if (tokenEnd <= spanStart || tokenStart >= spanEnd) {
+                         continue;
+                     }
+                     const clippedStart = Math.max(tokenStart, spanStart);
+                     const clippedEnd = Math.min(tokenEnd, spanEnd);
+                     if (clippedEnd > clippedStart) {
+                         tokens.push({
+                             start: clippedStart,
+                             length: clippedEnd - clippedStart,
+                             kind: token.kind,
+                         });
+                     }
+                 }
+             }
+         }
+         ts.forEachChild(node, visit);
+     };
+     visit(sourceFile);
+     return uniqueTokens(tokens);
+ }
+ function parseKForAttributeTokens(attr, sourceFile, ts, allowOfKeyword) {
+     const content = getAttributeRawContent(attr, sourceFile, ts);
+     if (!content) {
+         return [];
+     }
+     const raw = content.text;
+     const rawOffset = content.start;
+     const value = raw.trim();
+     if (!value) {
+         return [];
+     }
+     const parsed = (0, kfor_parser_1.parseKForExpression)(value, allowOfKeyword);
+     if (!parsed) {
+         return [];
+     }
+     const trimStart = raw.length - raw.trimStart().length;
+     const delimiterMatch = KEYWORD_DELIMITER_PATTERN.exec(value);
+     if (!delimiterMatch) {
+         return [];
+     }
+     const keyword = delimiterMatch[1];
+     const keywordOffsetInDelimiter = delimiterMatch[0].indexOf(keyword);
+     const leftSegment = value.slice(0, delimiterMatch.index);
+     const rightSegment = value.slice(delimiterMatch.index + delimiterMatch[0].length);
+     const leftLeading = leftSegment.length - leftSegment.trimStart().length;
+     const leftTrimmed = leftSegment.trim();
+     let aliasText = leftTrimmed;
+     let aliasTextStartInRaw = trimStart + leftLeading;
+     if (leftTrimmed.startsWith('(') && leftTrimmed.endsWith(')')) {
+         aliasText = leftTrimmed.slice(1, -1);
+         aliasTextStartInRaw += 1;
+     }
+     const rightLeading = rightSegment.length - rightSegment.trimStart().length;
+     const sourceText = rightSegment.trim();
+     const sourceTextStartInRaw = trimStart + delimiterMatch.index + delimiterMatch[0].length + rightLeading;
+     const keywordStartInRaw = trimStart + delimiterMatch.index + keywordOffsetInDelimiter;
+     const tokens = [];
+     const aliasTokens = collectAliasTokens(aliasText, aliasTextStartInRaw, parsed.aliases);
+     for (let i = 0; i < aliasTokens.length; i++) {
+         tokens.push({
+             start: rawOffset + aliasTokens[i].start,
+             length: aliasTokens[i].length,
+             kind: 'alias',
+         });
+     }
+     tokens.push({
+         start: rawOffset + keywordStartInRaw,
+         length: keyword.length,
+         kind: 'keyword',
+     });
+     const sourceTokens = collectSourceTokens(sourceText, sourceTextStartInRaw, ts);
+     for (let i = 0; i < sourceTokens.length; i++) {
+         tokens.push({
+             start: rawOffset + sourceTokens[i].start,
+             length: sourceTokens[i].length,
+             kind: 'source',
+         });
+     }
+     return tokens;
+ }
+ function collectAliasTokens(aliasText, baseStart, aliases) {
+     const result = [];
+     const allowed = new Set(aliases);
+     let match;
+     IDENTIFIER_PATTERN.lastIndex = 0;
+     while ((match = IDENTIFIER_PATTERN.exec(aliasText))) {
+         const name = match[0];
+         if (!allowed.has(name) || !(0, identifiers_1.isValidIdentifier)(name)) {
+             continue;
+         }
+         result.push({
+             start: baseStart + match.index,
+             length: name.length,
+             kind: 'alias',
+         });
+     }
+     return result;
+ }
+ function collectSourceTokens(sourceText, baseStart, ts) {
+     const tokens = collectSourceTokensWithAst(sourceText, baseStart, ts);
+     if (tokens.length > 0) {
+         return tokens;
+     }
+     const fallback = [];
+     let match;
+     IDENTIFIER_PATTERN.lastIndex = 0;
+     while ((match = IDENTIFIER_PATTERN.exec(sourceText))) {
+         const name = match[0];
+         if (!(0, identifiers_1.isValidIdentifier)(name)) {
+             continue;
+         }
+         fallback.push({
+             start: baseStart + match.index,
+             length: name.length,
+             kind: 'source',
+         });
+     }
+     return fallback;
+ }
+ function collectSourceTokensWithAst(sourceText, baseStart, ts) {
+     const snippet = `(${sourceText});`;
+     const tempSourceFile = ts.createSourceFile('__k_for_highlight.ts', snippet, ts.ScriptTarget.Latest, true, ts.ScriptKind.TS);
+     if (tempSourceFile.statements.length === 0) {
+         return [];
+     }
+     const statement = tempSourceFile.statements[0];
+     if (!ts.isExpressionStatement(statement)) {
+         return [];
+     }
+     const result = [];
+     const visit = (node) => {
+         if (ts.isIdentifier(node) && (0, identifiers_1.isValidIdentifier)(node.text)) {
+             const start = node.getStart(tempSourceFile) - 1;
+             const end = start + node.getWidth(tempSourceFile);
+             if (start >= 0 && end <= sourceText.length) {
+                 result.push({
+                     start: baseStart + start,
+                     length: end - start,
+                     kind: 'source',
+                 });
+             }
+         }
+         ts.forEachChild(node, visit);
+     };
+     visit(statement.expression);
+     return result;
+ }
+ function getAttributeRawContent(attr, sourceFile, ts) {
+     const initializer = attr.initializer;
+     if (!initializer) {
+         return undefined;
+     }
+     if (ts.isStringLiteral(initializer)) {
+         const quotedText = initializer.getText(sourceFile);
+         if (quotedText.length < 2) {
+             return undefined;
+         }
+         return {
+             start: initializer.getStart(sourceFile) + 1,
+             text: quotedText.slice(1, -1),
+         };
+     }
+     const expression = (0, jsx_attributes_1.getAttributeExpression)(attr, ts);
+     if (!expression || !ts.isStringLiteralLike(expression)) {
+         return undefined;
+     }
+     const quotedText = expression.getText(sourceFile);
+     if (quotedText.length < 2) {
+         return undefined;
+     }
+     return {
+         start: expression.getStart(sourceFile) + 1,
+         text: quotedText.slice(1, -1),
+     };
+ }
+ function buildSemanticSpans(tokens, format, ts) {
+     const spans = [];
+     if (format === ts.SemanticClassificationFormat.TwentyTwenty) {
+         for (let i = 0; i < tokens.length; i++) {
+             const token = tokens[i];
+             if (token.kind === 'keyword') {
+                 continue;
+             }
+             spans.push({
+                 start: token.start,
+                 length: token.length,
+                 classification: token.kind === 'alias'
+                     ? encodeSemantic2020(TOKEN_TYPE_VARIABLE, TOKEN_MODIFIER_READONLY | TOKEN_MODIFIER_LOCAL)
+                     : encodeSemantic2020(TOKEN_TYPE_VARIABLE, 0),
+             });
+         }
+         return spans;
+     }
+     for (let i = 0; i < tokens.length; i++) {
+         const token = tokens[i];
+         spans.push({
+             start: token.start,
+             length: token.length,
+             classification: token.kind === 'keyword' ? ts.ClassificationType.keyword : ts.ClassificationType.identifier,
+         });
+     }
+     return spans;
+ }
+ function buildSyntacticKeywordSpans(tokens, ts) {
+     const spans = [];
+     for (let i = 0; i < tokens.length; i++) {
+         const token = tokens[i];
+         if (token.kind !== 'keyword') {
+             continue;
+         }
+         spans.push({
+             start: token.start,
+             length: token.length,
+             classification: ts.ClassificationType.keyword,
+         });
+     }
+     return spans;
+ }
+ function encodeSemantic2020(typeIndex, modifiers) {
+     return ((typeIndex + 1) << TOKEN_ENCODING_TYPE_OFFSET) + modifiers;
+ }
+ function mergeClassifications(base, overlays, replaceOverlaps) {
+     if (overlays.length === 0) {
+         return base;
+     }
+     const baseSpans = decodeSpans(base.spans);
+     const extraSpans = normalizeSpans(overlays);
+     if (extraSpans.length === 0) {
+         return base;
+     }
+     const merged = replaceOverlaps
+         ? [...removeOverlayOverlaps(baseSpans, extraSpans), ...extraSpans]
+         : [...baseSpans, ...extraSpans];
+     const normalized = normalizeSpans(merged);
+     return {
+         spans: encodeSpans(normalized),
+         endOfLineState: base.endOfLineState,
+     };
+ }
+ function removeOverlayOverlaps(baseSpans, overlays) {
+     const result = [];
+     for (let i = 0; i < baseSpans.length; i++) {
+         let segments = [baseSpans[i]];
+         for (let j = 0; j < overlays.length; j++) {
+             const overlay = overlays[j];
+             const nextSegments = [];
+             for (let k = 0; k < segments.length; k++) {
+                 const segment = segments[k];
+                 const segmentStart = segment.start;
+                 const segmentEnd = segment.start + segment.length;
+                 const overlayStart = overlay.start;
+                 const overlayEnd = overlay.start + overlay.length;
+                 if (overlayEnd <= segmentStart || overlayStart >= segmentEnd) {
+                     nextSegments.push(segment);
+                     continue;
+                 }
+                 if (segmentStart < overlayStart) {
+                     nextSegments.push({
+                         start: segmentStart,
+                         length: overlayStart - segmentStart,
+                         classification: segment.classification,
+                     });
+                 }
+                 if (overlayEnd < segmentEnd) {
+                     nextSegments.push({
+                         start: overlayEnd,
+                         length: segmentEnd - overlayEnd,
+                         classification: segment.classification,
+                     });
+                 }
+             }
+             segments = nextSegments;
+             if (segments.length === 0) {
+                 break;
+             }
+         }
+         for (let j = 0; j < segments.length; j++) {
+             result.push(segments[j]);
+         }
+     }
+     return result;
+ }
+ function decodeSpans(spans) {
+     const decoded = [];
+     for (let i = 0; i + 2 < spans.length; i += 3) {
+         const start = spans[i];
+         const length = spans[i + 1];
+         const classification = spans[i + 2];
+         if (length <= 0) {
+             continue;
+         }
+         decoded.push({ start, length, classification });
+     }
+     return decoded;
+ }
+ function encodeSpans(spans) {
+     const encoded = [];
+     for (let i = 0; i < spans.length; i++) {
+         const span = spans[i];
+         encoded.push(span.start, span.length, span.classification);
+     }
+     return encoded;
+ }
+ function normalizeSpans(spans) {
+     if (spans.length === 0) {
+         return [];
+     }
+     const sorted = spans
+         .filter((span) => span.length > 0)
+         .slice()
+         .sort((left, right) => {
+         if (left.start !== right.start) {
+             return left.start - right.start;
+         }
+         if (left.length !== right.length) {
+             return left.length - right.length;
+         }
+         return left.classification - right.classification;
+     });
+     const normalized = [];
+     for (let i = 0; i < sorted.length; i++) {
+         const span = sorted[i];
+         const previous = normalized[normalized.length - 1];
+         if (previous &&
+             previous.start === span.start &&
+             previous.length === span.length &&
+             previous.classification === span.classification) {
+             continue;
+         }
+         normalized.push(span);
+     }
+     return normalized;
+ }
+ function uniqueTokens(tokens) {
+     if (tokens.length === 0) {
+         return [];
+     }
+     const map = new Map();
+     for (let i = 0; i < tokens.length; i++) {
+         const token = tokens[i];
+         const key = `${token.start}:${token.length}:${token.kind}`;
+         if (!map.has(key)) {
+             map.set(key, token);
+         }
+     }
+     return Array.from(map.values()).sort((left, right) => {
+         if (left.start !== right.start) {
+             return left.start - right.start;
+         }
+         return left.length - right.length;
+     });
+ }
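
The TwentyTwenty branch in buildSemanticSpans packs each classification into a single number: the token type index is shifted left by TOKEN_ENCODING_TYPE_OFFSET and the modifier bitset is added, which appears to mirror TypeScript's 2020 semantic classifier encoding. A worked example using the constants declared in this file:

    // (typeIndex + 1) << 8, plus the modifier bitset in the low byte.
    const classification = ((7 /* TOKEN_TYPE_VARIABLE */ + 1) << 8) + ((1 << 3) /* readonly */ | (1 << 5) /* local */);
    // = (8 << 8) + 40 = 2088
    const typeIndex = (classification >> 8) - 1;        // 7  -> variable
    const modifiers = classification & ((1 << 8) - 1);  // 40 -> readonly | local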
@@ -0,0 +1,2 @@
+ import type { ParsedKForExpression } from './types';
+ export declare function parseKForExpression(raw: string, allowOfKeyword: boolean): ParsedKForExpression | null;
@@ -0,0 +1,36 @@
+ "use strict";
+ Object.defineProperty(exports, "__esModule", { value: true });
+ exports.parseKForExpression = parseKForExpression;
+ const constants_1 = require("./constants");
+ const identifiers_1 = require("./identifiers");
+ function parseKForExpression(raw, allowOfKeyword) {
+     const value = raw.trim();
+     if (!value) {
+         return null;
+     }
+     const tupleMatch = constants_1.KFOR_TUPLE_PATTERN.exec(value);
+     if (tupleMatch) {
+         const keyword = tupleMatch[4];
+         const source = tupleMatch[5]?.trim();
+         if ((!allowOfKeyword && keyword === 'of') || !source) {
+             return null;
+         }
+         return {
+             aliases: (0, identifiers_1.uniqueIdentifiers)([tupleMatch[1], tupleMatch[2], tupleMatch[3]].filter(Boolean)),
+             source,
+         };
+     }
+     const singleMatch = constants_1.KFOR_SINGLE_PATTERN.exec(value);
+     if (singleMatch) {
+         const keyword = singleMatch[2];
+         const source = singleMatch[3]?.trim();
+         if ((!allowOfKeyword && keyword === 'of') || !source) {
+             return null;
+         }
+         return {
+             aliases: (0, identifiers_1.uniqueIdentifiers)([singleMatch[1]]),
+             source,
+         };
+     }
+     return null;
+ }
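
For orientation, roughly what this parser accepts and returns. KFOR_TUPLE_PATTERN and KFOR_SINGLE_PATTERN live in './constants' and are not part of this diff, so the exact accepted syntax below is an assumption; the 'of' rejection and the aliases/source shape follow directly from the code above.

    import { parseKForExpression } from './kfor-parser';

    parseKForExpression('item in items', false);
    // presumably -> { aliases: ['item'], source: 'items' }
    parseKForExpression('(item, index) in state.items', false);
    // presumably -> { aliases: ['item', 'index'], source: 'state.items' }
    parseKForExpression('item of items', false);
    // -> null, because 'of' is rejected unless allowOfKeyword is true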
@@ -0,0 +1,5 @@
+ import type tsModule from 'typescript/lib/tsserverlibrary';
+ import type { FileAnalysis, KForBinding, KForScope, ResolvedConfig } from './types';
+ export declare function getFileAnalysis(fileName: string, languageService: tsModule.LanguageService, ts: typeof tsModule, config: ResolvedConfig): FileAnalysis | undefined;
+ export declare function isSuppressed(position: number, diagnosticName: string, scopes: KForScope[]): boolean;
+ export declare function collectBindingsAtPosition(position: number, scopes: KForScope[]): Map<string, KForBinding>;
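
A hedged sketch of how a diagnostics filter might use these helpers. The plugin's actual diagnostics wrapper and this module's file name are not shown in this diff, and the way the offending identifier is recovered from the diagnostic span is an assumption.

    import type tsModule from 'typescript/lib/tsserverlibrary';
    import type { ResolvedConfig } from './types';
    import { getFileAnalysis, isSuppressed } from './scope-analysis'; // module path assumed

    function filterKForDiagnostics(fileName: string, diags: tsModule.Diagnostic[], ls: tsModule.LanguageService, ts: typeof tsModule, config: ResolvedConfig) {
        const analysis = getFileAnalysis(fileName, ls, ts, config);
        if (!analysis) {
            return diags;
        }
        return diags.filter((d) => {
            if (d.start === undefined || d.length === undefined) {
                return true;
            }
            // Assumed heuristic: the diagnostic's own text range is the unknown identifier.
            const name = analysis.sourceFile.text.slice(d.start, d.start + d.length);
            return !isSuppressed(d.start, name, analysis.scopes);
        });
    }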
@@ -0,0 +1,151 @@
+ "use strict";
+ Object.defineProperty(exports, "__esModule", { value: true });
+ exports.getFileAnalysis = getFileAnalysis;
+ exports.isSuppressed = isSuppressed;
+ exports.collectBindingsAtPosition = collectBindingsAtPosition;
+ const jsx_attributes_1 = require("./jsx-attributes");
+ const kfor_parser_1 = require("./kfor-parser");
+ const identifiers_1 = require("./identifiers");
+ const type_resolution_1 = require("./type-resolution");
+ function getFileAnalysis(fileName, languageService, ts, config) {
+     const program = languageService.getProgram();
+     if (!program) {
+         return undefined;
+     }
+     const sourceFile = program.getSourceFile(fileName);
+     if (!sourceFile) {
+         return undefined;
+     }
+     const checker = program.getTypeChecker();
+     const scopes = collectKForScopes(sourceFile, checker, ts, config);
+     if (scopes.length === 0) {
+         return undefined;
+     }
+     return { sourceFile, checker, scopes };
+ }
+ function isSuppressed(position, diagnosticName, scopes) {
+     for (let i = scopes.length - 1; i >= 0; i--) {
+         const scope = scopes[i];
+         if (position < scope.start || position >= scope.end) {
+             continue;
+         }
+         for (let j = 0; j < scope.bindings.length; j++) {
+             if (scope.bindings[j].name === diagnosticName) {
+                 return true;
+             }
+         }
+     }
+     return false;
+ }
+ function collectBindingsAtPosition(position, scopes) {
+     const bindings = new Map();
+     for (let i = scopes.length - 1; i >= 0; i--) {
+         const scope = scopes[i];
+         if (position < scope.start || position >= scope.end) {
+             continue;
+         }
+         for (let j = 0; j < scope.bindings.length; j++) {
+             const binding = scope.bindings[j];
+             if (!bindings.has(binding.name)) {
+                 bindings.set(binding.name, binding);
+             }
+         }
+     }
+     return bindings;
+ }
+ function collectKForScopes(sourceFile, checker, ts, config) {
+     const scopes = [];
+     const visit = (node) => {
+         if (ts.isJsxElement(node)) {
+             const forAttr = (0, jsx_attributes_1.getJsxAttribute)(node.openingElement, config.forAttr, ts);
+             if (forAttr) {
+                 const bindings = resolveScopeBindings(node.openingElement, forAttr, checker, config, ts);
+                 const start = node.openingElement.end;
+                 const end = node.closingElement.getStart(sourceFile);
+                 if (start < end && bindings.length > 0) {
+                     scopes.push({ start, end, bindings });
+                 }
+             }
+         }
+         ts.forEachChild(node, visit);
+     };
+     visit(sourceFile);
+     return scopes;
+ }
+ function resolveScopeBindings(opening, forAttr, checker, config, ts) {
+     const forExpression = (0, jsx_attributes_1.getAttributeText)(forAttr, ts);
+     if (forExpression !== undefined) {
+         const parsed = (0, kfor_parser_1.parseKForExpression)(forExpression, config.allowOfKeyword);
+         if (parsed) {
+             const sourceTypes = (0, type_resolution_1.resolveExpressionTypesFromText)(parsed.source, {
+                 checker,
+                 ts,
+                 scopeNode: opening,
+             });
+             return createBindings(parsed.aliases, sourceTypes, checker, opening, ts);
+         }
+     }
+     const itemName = (0, jsx_attributes_1.getScopeName)(opening, config.itemAttr, config.itemName, ts);
+     const indexName = (0, jsx_attributes_1.getScopeName)(opening, config.indexAttr, config.indexName, ts);
+     const aliases = (0, identifiers_1.uniqueIdentifiers)([itemName, indexName]);
+     const sourceTypes = getLegacyForSourceTypes(forAttr, checker, ts);
+     return createBindings(aliases, sourceTypes, checker, opening, ts);
+ }
+ function createBindings(names, sourceTypes, checker, scopeNode, ts) {
+     if (names.length === 0) {
+         return [];
+     }
+     const inferred = inferBindingTypes(sourceTypes, names.length, checker, scopeNode, ts);
+     const bindings = [];
+     for (let i = 0; i < names.length; i++) {
+         bindings.push({
+             name: names[i],
+             types: inferred[i] || [],
+         });
+     }
+     return bindings;
+ }
+ function inferBindingTypes(sourceTypes, bindingCount, checker, scopeNode, ts) {
+     const slots = Array.from({ length: bindingCount }, () => []);
+     const candidates = expandUnionTypes(sourceTypes, ts);
+     for (let i = 0; i < candidates.length; i++) {
+         const sourceType = checker.getApparentType(candidates[i]);
+         const elementType = checker.getIndexTypeOfType(sourceType, ts.IndexKind.Number);
+         const stringValueType = elementType ? undefined : checker.getIndexTypeOfType(sourceType, ts.IndexKind.String);
+         const valueTypes = elementType ? [elementType] : stringValueType ? [stringValueType] : [];
+         if (valueTypes.length === 0) {
+             continue;
+         }
+         slots[0].push(...valueTypes);
+         if (bindingCount > 1) {
+             slots[1].push(elementType ? checker.getNumberType() : checker.getStringType());
+         }
+         if (bindingCount > 2) {
+             slots[2].push(checker.getNumberType());
+         }
+     }
+     for (let i = 0; i < slots.length; i++) {
+         slots[i] = (0, type_resolution_1.uniqueTypes)(slots[i], checker, scopeNode, ts);
+     }
+     return slots;
+ }
+ function expandUnionTypes(types, ts) {
+     const result = [];
+     for (let i = 0; i < types.length; i++) {
+         const type = types[i];
+         if (type.flags & ts.TypeFlags.Union) {
+             const union = type;
+             result.push(...union.types);
+             continue;
+         }
+         result.push(type);
+     }
+     return result;
+ }
+ function getLegacyForSourceTypes(forAttr, checker, ts) {
+     const expression = (0, jsx_attributes_1.getAttributeExpression)(forAttr, ts);
+     if (!expression || ts.isStringLiteralLike(expression)) {
+         return [];
+     }
+     return [checker.getTypeAtLocation(expression)];
+ }
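
A reading aid for inferBindingTypes above (not additional package code): given the apparent type of the loop source, the alias slots are filled roughly as follows, with each member of a union source expanded and contributing on its own.

    // source type (illustrative)   slot 0 (item)   slot 1 (index/key)   slot 2 (index)
    // User[]                       User            number               number
    // Record<string, T>            T               string               number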
@@ -0,0 +1,5 @@
+ import type tsModule from 'typescript/lib/tsserverlibrary';
+ import type { TypeResolutionContext } from './types';
+ export declare function resolveExpressionTypesFromText(raw: string, context: TypeResolutionContext): tsModule.Type[];
+ export declare function formatTypeList(types: tsModule.Type[], checker: tsModule.TypeChecker, scopeNode: tsModule.Node, ts: typeof tsModule): string;
+ export declare function uniqueTypes(types: tsModule.Type[], checker: tsModule.TypeChecker, scopeNode: tsModule.Node, ts: typeof tsModule): tsModule.Type[];
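
Only the declarations for the type-resolution module appear in this section; its implementation is outside these hunks. Based purely on the signatures above and on how the analysis code calls it, a hedged usage sketch:

    import type tsModule from 'typescript/lib/tsserverlibrary';
    import { resolveExpressionTypesFromText, formatTypeList } from './type-resolution';

    // checker, scopeNode and ts come from the surrounding language-service context.
    function describeSourceTypes(raw: string, checker: tsModule.TypeChecker, scopeNode: tsModule.Node, ts: typeof tsModule): string {
        const types = resolveExpressionTypesFromText(raw, { checker, ts, scopeNode });
        return formatTypeList(types, checker, scopeNode, ts);
    }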