@ktjs/ts-plugin 0.1.5 → 0.1.6
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +1 -0
- package/dist/index.js +23 -0
- package/dist/kfor-highlighting.d.ts +4 -0
- package/dist/kfor-highlighting.js +407 -0
- package/package.json +1 -1
package/README.md
CHANGED
@@ -8,6 +8,7 @@ TypeScript language service plugin for KT.js `k-for` scope variables in TSX.
 - Suppresses TS2304 (`Cannot find name ...`) for aliases declared by `k-for`.
 - Infers alias types from iterable/indexed sources (for example `k-for="item in users"` makes `item` resolve to `users[number]`).
 - Provides hover type info and member completions for inferred aliases.
+- Adds `k-for` inline semantic highlighting in string expressions (for example alias/keyword/source in `k-for="item in list"`).
 - Supports Vue-like syntax:
 - `k-for="item in list"`
 - `k-for="(item, i) in list"`
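For orientation, a minimal TSX sketch of the attribute the plugin targets; the component and data names are hypothetical, and the `k-for` attribute is assumed to be permitted by the project's JSX typings:

// Hypothetical usage: `item` and `i` are the aliases, `in` is the keyword,
// and `users` is the source expression highlighted inside the string.
type User = { name: string };
const users: User[] = [{ name: 'Ada' }, { name: 'Grace' }];

export function UserList() {
    return (
        <ul>
            {/* item resolves to users[number], i.e. User */}
            <li k-for="(item, i) in users" />
        </ul>
    );
}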
package/dist/index.js
CHANGED
@@ -4,6 +4,7 @@ const completion_1 = require("./completion");
 const constants_1 = require("./constants");
 const config_1 = require("./config");
 const identifiers_1 = require("./identifiers");
+const kfor_highlighting_1 = require("./kfor-highlighting");
 const scope_analysis_1 = require("./scope-analysis");
 const type_resolution_1 = require("./type-resolution");
 function init(modules) {
@@ -38,6 +39,28 @@ function init(modules) {
             return !(0, scope_analysis_1.isSuppressed)(diagnostic.start, name, analysis.scopes);
         });
     };
+    proxy.getEncodedSemanticClassifications = (fileName, span, format) => {
+        const classifications = languageService.getEncodedSemanticClassifications(fileName, span, format);
+        if (!(0, config_1.isJsxLikeFile)(fileName)) {
+            return classifications;
+        }
+        const sourceFile = languageService.getProgram()?.getSourceFile(fileName);
+        if (!sourceFile) {
+            return classifications;
+        }
+        return (0, kfor_highlighting_1.addKForSemanticClassifications)(classifications, sourceFile, span, format, ts, config);
+    };
+    proxy.getEncodedSyntacticClassifications = (fileName, span) => {
+        const classifications = languageService.getEncodedSyntacticClassifications(fileName, span);
+        if (!(0, config_1.isJsxLikeFile)(fileName)) {
+            return classifications;
+        }
+        const sourceFile = languageService.getProgram()?.getSourceFile(fileName);
+        if (!sourceFile) {
+            return classifications;
+        }
+        return (0, kfor_highlighting_1.addKForSyntacticClassifications)(classifications, sourceFile, span, ts, config);
+    };
     proxy.getQuickInfoAtPosition = (fileName, position) => {
         const quickInfo = languageService.getQuickInfoAtPosition(fileName, position);
         if (!(0, config_1.isJsxLikeFile)(fileName)) {
package/dist/kfor-highlighting.d.ts
ADDED
@@ -0,0 +1,4 @@
+import type tsModule from 'typescript/lib/tsserverlibrary';
+import type { ResolvedConfig } from './types';
+export declare function addKForSemanticClassifications(base: tsModule.Classifications, sourceFile: tsModule.SourceFile, span: tsModule.TextSpan, format: tsModule.SemanticClassificationFormat | undefined, ts: typeof tsModule, config: ResolvedConfig): tsModule.Classifications;
+export declare function addKForSyntacticClassifications(base: tsModule.Classifications, sourceFile: tsModule.SourceFile, span: tsModule.TextSpan, ts: typeof tsModule, config: ResolvedConfig): tsModule.Classifications;
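The `ResolvedConfig` type is imported from `./types`, which is not part of this diff. Judging only from how `kfor-highlighting.js` below reads it, it carries at least the attribute name and the keyword setting; a hypothetical minimal shape, for orientation only:

// Hypothetical sketch; the real ResolvedConfig in ./types may define more fields.
interface ResolvedConfigSketch {
    forAttr: string;         // name of the JSX attribute to scan, e.g. "k-for"
    allowOfKeyword: boolean; // whether `of` is accepted in addition to `in`
}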
package/dist/kfor-highlighting.js
ADDED
@@ -0,0 +1,407 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.addKForSemanticClassifications = addKForSemanticClassifications;
+exports.addKForSyntacticClassifications = addKForSyntacticClassifications;
+const jsx_attributes_1 = require("./jsx-attributes");
+const kfor_parser_1 = require("./kfor-parser");
+const identifiers_1 = require("./identifiers");
+const TOKEN_TYPE_VARIABLE = 7;
+const TOKEN_MODIFIER_READONLY = 1 << 3;
+const TOKEN_MODIFIER_LOCAL = 1 << 5;
+const TOKEN_ENCODING_TYPE_OFFSET = 8;
+const IDENTIFIER_PATTERN = /[A-Za-z_$][A-Za-z0-9_$]*/g;
+const KEYWORD_DELIMITER_PATTERN = /\s+(in|of)\s+/;
+function addKForSemanticClassifications(base, sourceFile, span, format, ts, config) {
+    const tokens = collectHighlightTokens(sourceFile, ts, config, span);
+    if (tokens.length === 0) {
+        return base;
+    }
+    const semanticSpans = buildSemanticSpans(tokens, format, ts);
+    if (semanticSpans.length === 0) {
+        return base;
+    }
+    return mergeClassifications(base, semanticSpans, false);
+}
+function addKForSyntacticClassifications(base, sourceFile, span, ts, config) {
+    const tokens = collectHighlightTokens(sourceFile, ts, config, span);
+    if (tokens.length === 0) {
+        return base;
+    }
+    const keywordSpans = buildSyntacticKeywordSpans(tokens, ts);
+    if (keywordSpans.length === 0) {
+        return base;
+    }
+    return mergeClassifications(base, keywordSpans, true);
+}
+function collectHighlightTokens(sourceFile, ts, config, span) {
+    const tokens = [];
+    const spanStart = span.start;
+    const spanEnd = span.start + span.length;
+    const visit = (node) => {
+        let opening;
+        if (ts.isJsxElement(node)) {
+            opening = node.openingElement;
+        }
+        else if (ts.isJsxSelfClosingElement(node)) {
+            opening = node;
+        }
+        if (opening) {
+            const attr = (0, jsx_attributes_1.getJsxAttribute)(opening, config.forAttr, ts);
+            if (attr) {
+                const parsed = parseKForAttributeTokens(attr, sourceFile, ts, config.allowOfKeyword);
+                for (let i = 0; i < parsed.length; i++) {
+                    const token = parsed[i];
+                    const tokenStart = token.start;
+                    const tokenEnd = token.start + token.length;
+                    if (tokenEnd <= spanStart || tokenStart >= spanEnd) {
+                        continue;
+                    }
+                    const clippedStart = Math.max(tokenStart, spanStart);
+                    const clippedEnd = Math.min(tokenEnd, spanEnd);
+                    if (clippedEnd > clippedStart) {
+                        tokens.push({
+                            start: clippedStart,
+                            length: clippedEnd - clippedStart,
+                            kind: token.kind,
+                        });
+                    }
+                }
+            }
+        }
+        ts.forEachChild(node, visit);
+    };
+    visit(sourceFile);
+    return uniqueTokens(tokens);
+}
+function parseKForAttributeTokens(attr, sourceFile, ts, allowOfKeyword) {
+    const content = getAttributeRawContent(attr, sourceFile, ts);
+    if (!content) {
+        return [];
+    }
+    const raw = content.text;
+    const rawOffset = content.start;
+    const value = raw.trim();
+    if (!value) {
+        return [];
+    }
+    const parsed = (0, kfor_parser_1.parseKForExpression)(value, allowOfKeyword);
+    if (!parsed) {
+        return [];
+    }
+    const trimStart = raw.length - raw.trimStart().length;
+    const delimiterMatch = KEYWORD_DELIMITER_PATTERN.exec(value);
+    if (!delimiterMatch) {
+        return [];
+    }
+    const keyword = delimiterMatch[1];
+    const keywordOffsetInDelimiter = delimiterMatch[0].indexOf(keyword);
+    const leftSegment = value.slice(0, delimiterMatch.index);
+    const rightSegment = value.slice(delimiterMatch.index + delimiterMatch[0].length);
+    const leftLeading = leftSegment.length - leftSegment.trimStart().length;
+    const leftTrimmed = leftSegment.trim();
+    let aliasText = leftTrimmed;
+    let aliasTextStartInRaw = trimStart + leftLeading;
+    if (leftTrimmed.startsWith('(') && leftTrimmed.endsWith(')')) {
+        aliasText = leftTrimmed.slice(1, -1);
+        aliasTextStartInRaw += 1;
+    }
+    const rightLeading = rightSegment.length - rightSegment.trimStart().length;
+    const sourceText = rightSegment.trim();
+    const sourceTextStartInRaw = trimStart + delimiterMatch.index + delimiterMatch[0].length + rightLeading;
+    const keywordStartInRaw = trimStart + delimiterMatch.index + keywordOffsetInDelimiter;
+    const tokens = [];
+    const aliasTokens = collectAliasTokens(aliasText, aliasTextStartInRaw, parsed.aliases);
+    for (let i = 0; i < aliasTokens.length; i++) {
+        tokens.push({
+            start: rawOffset + aliasTokens[i].start,
+            length: aliasTokens[i].length,
+            kind: 'alias',
+        });
+    }
+    tokens.push({
+        start: rawOffset + keywordStartInRaw,
+        length: keyword.length,
+        kind: 'keyword',
+    });
+    const sourceTokens = collectSourceTokens(sourceText, sourceTextStartInRaw, ts);
+    for (let i = 0; i < sourceTokens.length; i++) {
+        tokens.push({
+            start: rawOffset + sourceTokens[i].start,
+            length: sourceTokens[i].length,
+            kind: 'source',
+        });
+    }
+    return tokens;
+}
+function collectAliasTokens(aliasText, baseStart, aliases) {
+    const result = [];
+    const allowed = new Set(aliases);
+    let match;
+    IDENTIFIER_PATTERN.lastIndex = 0;
+    while ((match = IDENTIFIER_PATTERN.exec(aliasText))) {
+        const name = match[0];
+        if (!allowed.has(name) || !(0, identifiers_1.isValidIdentifier)(name)) {
+            continue;
+        }
+        result.push({
+            start: baseStart + match.index,
+            length: name.length,
+            kind: 'alias',
+        });
+    }
+    return result;
+}
+function collectSourceTokens(sourceText, baseStart, ts) {
+    const tokens = collectSourceTokensWithAst(sourceText, baseStart, ts);
+    if (tokens.length > 0) {
+        return tokens;
+    }
+    const fallback = [];
+    let match;
+    IDENTIFIER_PATTERN.lastIndex = 0;
+    while ((match = IDENTIFIER_PATTERN.exec(sourceText))) {
+        const name = match[0];
+        if (!(0, identifiers_1.isValidIdentifier)(name)) {
+            continue;
+        }
+        fallback.push({
+            start: baseStart + match.index,
+            length: name.length,
+            kind: 'source',
+        });
+    }
+    return fallback;
+}
+function collectSourceTokensWithAst(sourceText, baseStart, ts) {
+    const snippet = `(${sourceText});`;
+    const tempSourceFile = ts.createSourceFile('__k_for_highlight.ts', snippet, ts.ScriptTarget.Latest, true, ts.ScriptKind.TS);
+    if (tempSourceFile.statements.length === 0) {
+        return [];
+    }
+    const statement = tempSourceFile.statements[0];
+    if (!ts.isExpressionStatement(statement)) {
+        return [];
+    }
+    const result = [];
+    const visit = (node) => {
+        if (ts.isIdentifier(node) && (0, identifiers_1.isValidIdentifier)(node.text)) {
+            const start = node.getStart(tempSourceFile) - 1;
+            const end = start + node.getWidth(tempSourceFile);
+            if (start >= 0 && end <= sourceText.length) {
+                result.push({
+                    start: baseStart + start,
+                    length: end - start,
+                    kind: 'source',
+                });
+            }
+        }
+        ts.forEachChild(node, visit);
+    };
+    visit(statement.expression);
+    return result;
+}
+function getAttributeRawContent(attr, sourceFile, ts) {
+    const initializer = attr.initializer;
+    if (!initializer) {
+        return undefined;
+    }
+    if (ts.isStringLiteral(initializer)) {
+        const quotedText = initializer.getText(sourceFile);
+        if (quotedText.length < 2) {
+            return undefined;
+        }
+        return {
+            start: initializer.getStart(sourceFile) + 1,
+            text: quotedText.slice(1, -1),
+        };
+    }
+    const expression = (0, jsx_attributes_1.getAttributeExpression)(attr, ts);
+    if (!expression || !ts.isStringLiteralLike(expression)) {
+        return undefined;
+    }
+    const quotedText = expression.getText(sourceFile);
+    if (quotedText.length < 2) {
+        return undefined;
+    }
+    return {
+        start: expression.getStart(sourceFile) + 1,
+        text: quotedText.slice(1, -1),
+    };
+}
+function buildSemanticSpans(tokens, format, ts) {
+    const spans = [];
+    if (format === ts.SemanticClassificationFormat.TwentyTwenty) {
+        for (let i = 0; i < tokens.length; i++) {
+            const token = tokens[i];
+            if (token.kind === 'keyword') {
+                continue;
+            }
+            spans.push({
+                start: token.start,
+                length: token.length,
+                classification: token.kind === 'alias'
+                    ? encodeSemantic2020(TOKEN_TYPE_VARIABLE, TOKEN_MODIFIER_READONLY | TOKEN_MODIFIER_LOCAL)
+                    : encodeSemantic2020(TOKEN_TYPE_VARIABLE, 0),
+            });
+        }
+        return spans;
+    }
+    for (let i = 0; i < tokens.length; i++) {
+        const token = tokens[i];
+        spans.push({
+            start: token.start,
+            length: token.length,
+            classification: token.kind === 'keyword' ? ts.ClassificationType.keyword : ts.ClassificationType.identifier,
+        });
+    }
+    return spans;
+}
+function buildSyntacticKeywordSpans(tokens, ts) {
+    const spans = [];
+    for (let i = 0; i < tokens.length; i++) {
+        const token = tokens[i];
+        if (token.kind !== 'keyword') {
+            continue;
+        }
+        spans.push({
+            start: token.start,
+            length: token.length,
+            classification: ts.ClassificationType.keyword,
+        });
+    }
+    return spans;
+}
+function encodeSemantic2020(typeIndex, modifiers) {
+    return ((typeIndex + 1) << TOKEN_ENCODING_TYPE_OFFSET) + modifiers;
+}
+function mergeClassifications(base, overlays, replaceOverlaps) {
+    if (overlays.length === 0) {
+        return base;
+    }
+    const baseSpans = decodeSpans(base.spans);
+    const extraSpans = normalizeSpans(overlays);
+    if (extraSpans.length === 0) {
+        return base;
+    }
+    const merged = replaceOverlaps
+        ? [...removeOverlayOverlaps(baseSpans, extraSpans), ...extraSpans]
+        : [...baseSpans, ...extraSpans];
+    const normalized = normalizeSpans(merged);
+    return {
+        spans: encodeSpans(normalized),
+        endOfLineState: base.endOfLineState,
+    };
+}
+function removeOverlayOverlaps(baseSpans, overlays) {
+    const result = [];
+    for (let i = 0; i < baseSpans.length; i++) {
+        let segments = [baseSpans[i]];
+        for (let j = 0; j < overlays.length; j++) {
+            const overlay = overlays[j];
+            const nextSegments = [];
+            for (let k = 0; k < segments.length; k++) {
+                const segment = segments[k];
+                const segmentStart = segment.start;
+                const segmentEnd = segment.start + segment.length;
+                const overlayStart = overlay.start;
+                const overlayEnd = overlay.start + overlay.length;
+                if (overlayEnd <= segmentStart || overlayStart >= segmentEnd) {
+                    nextSegments.push(segment);
+                    continue;
+                }
+                if (segmentStart < overlayStart) {
+                    nextSegments.push({
+                        start: segmentStart,
+                        length: overlayStart - segmentStart,
+                        classification: segment.classification,
+                    });
+                }
+                if (overlayEnd < segmentEnd) {
+                    nextSegments.push({
+                        start: overlayEnd,
+                        length: segmentEnd - overlayEnd,
+                        classification: segment.classification,
+                    });
+                }
+            }
+            segments = nextSegments;
+            if (segments.length === 0) {
+                break;
+            }
+        }
+        for (let j = 0; j < segments.length; j++) {
+            result.push(segments[j]);
+        }
+    }
+    return result;
+}
+function decodeSpans(spans) {
+    const decoded = [];
+    for (let i = 0; i + 2 < spans.length; i += 3) {
+        const start = spans[i];
+        const length = spans[i + 1];
+        const classification = spans[i + 2];
+        if (length <= 0) {
+            continue;
+        }
+        decoded.push({ start, length, classification });
+    }
+    return decoded;
+}
+function encodeSpans(spans) {
+    const encoded = [];
+    for (let i = 0; i < spans.length; i++) {
+        const span = spans[i];
+        encoded.push(span.start, span.length, span.classification);
+    }
+    return encoded;
+}
+function normalizeSpans(spans) {
+    if (spans.length === 0) {
+        return [];
+    }
+    const sorted = spans
+        .filter((span) => span.length > 0)
+        .slice()
+        .sort((left, right) => {
+        if (left.start !== right.start) {
+            return left.start - right.start;
+        }
+        if (left.length !== right.length) {
+            return left.length - right.length;
+        }
+        return left.classification - right.classification;
+    });
+    const normalized = [];
+    for (let i = 0; i < sorted.length; i++) {
+        const span = sorted[i];
+        const previous = normalized[normalized.length - 1];
+        if (previous &&
+            previous.start === span.start &&
+            previous.length === span.length &&
+            previous.classification === span.classification) {
+            continue;
+        }
+        normalized.push(span);
+    }
+    return normalized;
+}
+function uniqueTokens(tokens) {
+    if (tokens.length === 0) {
+        return [];
+    }
+    const map = new Map();
+    for (let i = 0; i < tokens.length; i++) {
+        const token = tokens[i];
+        const key = `${token.start}:${token.length}:${token.kind}`;
+        if (!map.has(key)) {
+            map.set(key, token);
+        }
+    }
+    return Array.from(map.values()).sort((left, right) => {
+        if (left.start !== right.start) {
+            return left.start - right.start;
+        }
+        return left.length - right.length;
+    });
+}
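For reference, a small self-contained TypeScript sketch (not part of the package) of the two encodings this file works with: the 2020-format semantic token value computed by `encodeSemantic2020`, and the flat `[start, length, classification]` triplets that `decodeSpans`/`encodeSpans` exchange with the language service. The constants are copied from the file above; everything else is illustrative.

// Constants mirrored from kfor-highlighting.js.
const TOKEN_TYPE_VARIABLE = 7;
const TOKEN_MODIFIER_READONLY = 1 << 3; // 8
const TOKEN_MODIFIER_LOCAL = 1 << 5; // 32
const TOKEN_ENCODING_TYPE_OFFSET = 8;

function encodeSemantic2020(typeIndex: number, modifiers: number): number {
    return ((typeIndex + 1) << TOKEN_ENCODING_TYPE_OFFSET) + modifiers;
}

// An alias token is classified as a readonly local variable:
// (7 + 1) << 8 = 2048, plus modifiers 8 | 32 = 40, giving 2088.
const aliasClassification = encodeSemantic2020(
    TOKEN_TYPE_VARIABLE,
    TOKEN_MODIFIER_READONLY | TOKEN_MODIFIER_LOCAL,
); // 2088

// Classifications.spans is a flat number array of consecutive triplets;
// a single alias token at offset 42 with length 4 would be encoded as:
const spans: number[] = [42, 4, aliasClassification];
console.log(spans); // [42, 4, 2088]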