@elliots/typical 0.1.8 → 0.1.10
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +253 -7
- package/dist/src/cli.js +33 -6
- package/dist/src/cli.js.map +1 -1
- package/dist/src/config.d.ts +56 -0
- package/dist/src/config.js +93 -1
- package/dist/src/config.js.map +1 -1
- package/dist/src/esm-loader.d.ts +1 -0
- package/dist/src/esm-loader.js +13 -3
- package/dist/src/esm-loader.js.map +1 -1
- package/dist/src/index.d.ts +4 -2
- package/dist/src/index.js +2 -1
- package/dist/src/index.js.map +1 -1
- package/dist/src/source-map.d.ts +78 -0
- package/dist/src/source-map.js +133 -0
- package/dist/src/source-map.js.map +1 -0
- package/dist/src/transformer.d.ts +71 -4
- package/dist/src/transformer.js +773 -330
- package/dist/src/transformer.js.map +1 -1
- package/dist/src/tsc-plugin.js +2 -2
- package/dist/src/tsc-plugin.js.map +1 -1
- package/package.json +8 -2
- package/src/cli.ts +44 -10
- package/src/config.ts +145 -3
- package/src/esm-loader.ts +15 -3
- package/src/index.ts +10 -2
- package/src/source-map.ts +202 -0
- package/src/transformer.ts +1047 -419
- package/src/tsc-plugin.ts +2 -2
package/src/transformer.ts
CHANGED
|
@@ -1,26 +1,303 @@
|
|
|
1
1
|
import ts from "typescript";
|
|
2
2
|
import fs from "fs";
|
|
3
3
|
import path from "path";
|
|
4
|
-
import { loadConfig, TypicalConfig,
|
|
4
|
+
import { loadConfig, validateConfig, TypicalConfig, getCompiledIgnorePatterns, CompiledIgnorePatterns } from "./config.js";
|
|
5
5
|
import { shouldTransformFile } from "./file-filter.js";
|
|
6
6
|
import { hoistRegexConstructors } from "./regex-hoister.js";
|
|
7
|
+
import {
|
|
8
|
+
TransformResult,
|
|
9
|
+
composeSourceMaps,
|
|
10
|
+
} from "./source-map.js";
|
|
11
|
+
import type { EncodedSourceMap } from '@ampproject/remapping';
|
|
7
12
|
|
|
8
13
|
import { transform as typiaTransform } from "typia/lib/transform.js";
|
|
9
14
|
import { setupTsProgram } from "./setup.js";
|
|
10
15
|
|
|
16
|
+
// Re-export TransformResult for consumers
|
|
17
|
+
export type { TransformResult } from "./source-map.js";
|
|
18
|
+
|
|
11
19
|
// Flags for typeToTypeNode to prefer type aliases over import() syntax.
// NoTruncation keeps long printed types intact; UseAliasDefinedOutsideCurrentScope
// lets the node builder reference aliases declared in other scopes instead of
// synthesizing import("...").Type references.
const TYPE_NODE_FLAGS = ts.NodeBuilderFlags.NoTruncation | ts.NodeBuilderFlags.UseAliasDefinedOutsideCurrentScope;
|
|
13
21
|
|
|
22
|
+
// Source map markers:
|
|
23
|
+
// - @T:line:col - Type annotation marker (maps generated code to source type annotation)
|
|
24
|
+
// - @L:line - Line marker (identity mapping - maps output line to source line)
|
|
25
|
+
//
|
|
26
|
+
// Lines with @T markers map to the specified type annotation position
|
|
27
|
+
// Lines with @L markers establish identity mapping (output line N maps to source line N)
|
|
28
|
+
// Lines without markers inherit from the most recent marker above
|
|
29
|
+
//
|
|
30
|
+
// Match single-line comment markers: //@T:line:col or //@L:line
|
|
31
|
+
const TYPE_MARKER_REGEX = /\/\/@T:(\d+):(\d+)/g;
|
|
32
|
+
const LINE_MARKER_REGEX = /\/\/@L:(\d+)/g;
|
|
33
|
+
// Strip all markers
|
|
34
|
+
const ALL_MARKERS_REGEX = /\/\/@[TL]:\d+(?::\d+)?\n?/g;
|
|
35
|
+
|
|
36
|
+
/**
|
|
37
|
+
* Add a type annotation marker comment to a node.
|
|
38
|
+
* The marker encodes the original line:column position of the type annotation
|
|
39
|
+
* so validation errors can be traced back to the source.
|
|
40
|
+
*
|
|
41
|
+
* Uses a single-line comment (//) which forces a newline after it,
|
|
42
|
+
* ensuring each marked statement is on its own line for accurate source maps.
|
|
43
|
+
*/
|
|
44
|
+
function addSourceMapMarker<T extends ts.Node>(
|
|
45
|
+
node: T,
|
|
46
|
+
sourceFile: ts.SourceFile,
|
|
47
|
+
originalNode: ts.Node
|
|
48
|
+
): T {
|
|
49
|
+
const pos = originalNode.getStart(sourceFile);
|
|
50
|
+
const { line, character } = sourceFile.getLineAndCharacterOfPosition(pos);
|
|
51
|
+
// Use 1-based line numbers for source maps
|
|
52
|
+
// Single-line comment forces a newline after it
|
|
53
|
+
const marker = `@T:${line + 1}:${character}`;
|
|
54
|
+
if (process.env.DEBUG) {
|
|
55
|
+
console.log(`TYPICAL: Adding source map marker //${marker}`);
|
|
56
|
+
}
|
|
57
|
+
const result = ts.addSyntheticLeadingComment(
|
|
58
|
+
node,
|
|
59
|
+
ts.SyntaxKind.SingleLineCommentTrivia,
|
|
60
|
+
marker,
|
|
61
|
+
true // trailing newline
|
|
62
|
+
);
|
|
63
|
+
if (process.env.DEBUG) {
|
|
64
|
+
const comments = ts.getSyntheticLeadingComments(result);
|
|
65
|
+
console.log(`TYPICAL: Synthetic comments after addSourceMapMarker:`, comments?.length);
|
|
66
|
+
}
|
|
67
|
+
return result;
|
|
68
|
+
}
|
|
69
|
+
|
|
70
|
+
/**
|
|
71
|
+
* Add a line marker comment to a node for identity mapping.
|
|
72
|
+
* The marker encodes the original line number so the output line maps to itself.
|
|
73
|
+
*
|
|
74
|
+
* Uses a single-line comment (//) which forces a newline after it.
|
|
75
|
+
*/
|
|
76
|
+
function addLineMarker<T extends ts.Node>(
|
|
77
|
+
node: T,
|
|
78
|
+
sourceFile: ts.SourceFile,
|
|
79
|
+
originalNode: ts.Node
|
|
80
|
+
): T {
|
|
81
|
+
const pos = originalNode.getStart(sourceFile);
|
|
82
|
+
const { line } = sourceFile.getLineAndCharacterOfPosition(pos);
|
|
83
|
+
// Use 1-based line numbers for source maps
|
|
84
|
+
const marker = `@L:${line + 1}`;
|
|
85
|
+
if (process.env.DEBUG) {
|
|
86
|
+
console.log(`TYPICAL: Adding line marker //${marker}`);
|
|
87
|
+
}
|
|
88
|
+
return ts.addSyntheticLeadingComment(
|
|
89
|
+
node,
|
|
90
|
+
ts.SyntaxKind.SingleLineCommentTrivia,
|
|
91
|
+
marker,
|
|
92
|
+
true // trailing newline
|
|
93
|
+
);
|
|
94
|
+
}
|
|
95
|
+
|
|
96
|
+
/**
 * Parse source map markers from code and build a source map.
 * Markers are single-line comments on their own line:
 * - //@T:line:col - Type annotation marker (maps to specific source position)
 * - //@L:line - Line marker (identity mapping to source line, col 0)
 *
 * The marker applies to the NEXT line (the actual code statement).
 * Lines without markers inherit from the most recent marker above.
 * Returns the code with markers stripped and the generated source map.
 *
 * @param code            Generated code still containing //@T / //@L marker lines.
 * @param fileName        Used for both `file` and `sources[0]` in the map.
 * @param originalSource  Full original source text (embedded when includeContent).
 * @param includeContent  Whether to set sourcesContent on the resulting map.
 */
function parseMarkersAndBuildSourceMap(
  code: string,
  fileName: string,
  originalSource: string,
  includeContent: boolean
): { code: string; map: EncodedSourceMap } {
  const lines = code.split('\n');

  // Current mapping position (inherited by unmarked lines)
  let currentOrigLine = 1;
  let currentOrigCol = 0;
  let pendingMarker: { line: number; col: number } | null = null;

  const mappings: Array<{ generatedLine: number; generatedCol: number; originalLine: number; originalCol: number }> = [];
  // 0-indexed output line counter (after stripping markers); incremented
  // before each push, so `generatedLine` values are effectively 1-based.
  let outputLineNum = 0;

  for (let lineIdx = 0; lineIdx < lines.length; lineIdx++) {
    const line = lines[lineIdx];

    // Check for type annotation marker (@T:line:col).
    // lastIndex must be reset: the regex is /g, so exec() is stateful.
    TYPE_MARKER_REGEX.lastIndex = 0;
    const typeMatch = TYPE_MARKER_REGEX.exec(line);

    if (typeMatch) {
      // This is a @T marker line - store the position for the next line.
      // NOTE(review): a code line with a *trailing* //@T comment would also
      // match and be skipped from the output count — assumes markers are
      // always emitted on their own line; confirm against addSourceMapMarker.
      pendingMarker = {
        line: parseInt(typeMatch[1], 10),
        col: parseInt(typeMatch[2], 10),
      };
      // Don't output this line or create a mapping for it
      continue;
    }

    // Check for line marker (@L:line)
    LINE_MARKER_REGEX.lastIndex = 0;
    const lineMatch = LINE_MARKER_REGEX.exec(line);

    if (lineMatch) {
      // This is a @L marker line - identity mapping (col 0)
      pendingMarker = {
        line: parseInt(lineMatch[1], 10),
        col: 0,
      };
      // Don't output this line or create a mapping for it
      continue;
    }

    // This is a code line - apply pending marker if any
    if (pendingMarker) {
      currentOrigLine = pendingMarker.line;
      currentOrigCol = pendingMarker.col;
      pendingMarker = null;
    }

    outputLineNum++;

    // Create mapping for this line (unmarked lines reuse the last marker's
    // position, so a multi-line statement maps entirely to its annotation).
    mappings.push({
      generatedLine: outputLineNum,
      generatedCol: 0,
      originalLine: currentOrigLine,
      originalCol: currentOrigCol,
    });
  }

  // Strip all markers from the code
  const cleanCode = code.replace(ALL_MARKERS_REGEX, '');

  // Build VLQ-encoded source map
  const map = buildSourceMapFromMappings(mappings, fileName, originalSource, includeContent);

  return { code: cleanCode, map };
}
|
|
179
|
+
|
|
180
|
+
/**
|
|
181
|
+
* Build a source map from a list of position mappings.
|
|
182
|
+
*/
|
|
183
|
+
function buildSourceMapFromMappings(
|
|
184
|
+
mappings: Array<{ generatedLine: number; generatedCol: number; originalLine: number; originalCol: number }>,
|
|
185
|
+
fileName: string,
|
|
186
|
+
originalSource: string,
|
|
187
|
+
includeContent: boolean
|
|
188
|
+
): EncodedSourceMap {
|
|
189
|
+
// Group mappings by generated line
|
|
190
|
+
const lineMap = new Map<number, Array<{ generatedCol: number; originalLine: number; originalCol: number }>>();
|
|
191
|
+
for (const m of mappings) {
|
|
192
|
+
if (!lineMap.has(m.generatedLine)) {
|
|
193
|
+
lineMap.set(m.generatedLine, []);
|
|
194
|
+
}
|
|
195
|
+
lineMap.get(m.generatedLine)!.push({
|
|
196
|
+
generatedCol: m.generatedCol,
|
|
197
|
+
originalLine: m.originalLine,
|
|
198
|
+
originalCol: m.originalCol,
|
|
199
|
+
});
|
|
200
|
+
}
|
|
201
|
+
|
|
202
|
+
// Build VLQ-encoded mappings string
|
|
203
|
+
const maxLine = Math.max(...mappings.map(m => m.generatedLine), 0);
|
|
204
|
+
const mappingLines: string[] = [];
|
|
205
|
+
|
|
206
|
+
let prevGenCol = 0;
|
|
207
|
+
let prevOrigLine = 0;
|
|
208
|
+
let prevOrigCol = 0;
|
|
209
|
+
|
|
210
|
+
for (let line = 1; line <= maxLine; line++) {
|
|
211
|
+
const lineMappings = lineMap.get(line);
|
|
212
|
+
if (!lineMappings || lineMappings.length === 0) {
|
|
213
|
+
mappingLines.push('');
|
|
214
|
+
continue;
|
|
215
|
+
}
|
|
216
|
+
|
|
217
|
+
// Sort by generated column
|
|
218
|
+
lineMappings.sort((a, b) => a.generatedCol - b.generatedCol);
|
|
219
|
+
|
|
220
|
+
const segments: string[] = [];
|
|
221
|
+
prevGenCol = 0; // Reset for each line
|
|
222
|
+
|
|
223
|
+
for (const m of lineMappings) {
|
|
224
|
+
// VLQ encode: [genCol, sourceIdx=0, origLine, origCol]
|
|
225
|
+
const segment = vlqEncode([
|
|
226
|
+
m.generatedCol - prevGenCol,
|
|
227
|
+
0, // source index (we only have one source)
|
|
228
|
+
(m.originalLine - 1) - prevOrigLine, // 0-based, relative
|
|
229
|
+
m.originalCol - prevOrigCol,
|
|
230
|
+
]);
|
|
231
|
+
segments.push(segment);
|
|
232
|
+
|
|
233
|
+
prevGenCol = m.generatedCol;
|
|
234
|
+
prevOrigLine = m.originalLine - 1;
|
|
235
|
+
prevOrigCol = m.originalCol;
|
|
236
|
+
}
|
|
237
|
+
|
|
238
|
+
mappingLines.push(segments.join(','));
|
|
239
|
+
}
|
|
240
|
+
|
|
241
|
+
const map: EncodedSourceMap = {
|
|
242
|
+
version: 3,
|
|
243
|
+
file: fileName,
|
|
244
|
+
sources: [fileName],
|
|
245
|
+
names: [],
|
|
246
|
+
mappings: mappingLines.join(';'),
|
|
247
|
+
};
|
|
248
|
+
|
|
249
|
+
if (includeContent) {
|
|
250
|
+
map.sourcesContent = [originalSource];
|
|
251
|
+
}
|
|
252
|
+
|
|
253
|
+
return map;
|
|
254
|
+
}
|
|
255
|
+
|
|
256
|
+
// VLQ encoding for source maps
|
|
257
|
+
const VLQ_BASE64 = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/';
|
|
258
|
+
const VLQ_BASE = 32;
|
|
259
|
+
const VLQ_CONTINUATION_BIT = 32;
|
|
260
|
+
|
|
261
|
+
function vlqEncode(values: number[]): string {
|
|
262
|
+
return values.map(vlqEncodeInteger).join('');
|
|
263
|
+
}
|
|
264
|
+
|
|
265
|
+
function vlqEncodeInteger(value: number): string {
|
|
266
|
+
let result = '';
|
|
267
|
+
let vlq = value < 0 ? ((-value) << 1) | 1 : value << 1;
|
|
268
|
+
|
|
269
|
+
do {
|
|
270
|
+
let digit = vlq & 31;
|
|
271
|
+
vlq >>>= 5;
|
|
272
|
+
if (vlq > 0) {
|
|
273
|
+
digit |= VLQ_CONTINUATION_BIT;
|
|
274
|
+
}
|
|
275
|
+
result += VLQ_BASE64[digit];
|
|
276
|
+
} while (vlq > 0);
|
|
277
|
+
|
|
278
|
+
return result;
|
|
279
|
+
}
|
|
280
|
+
|
|
14
281
|
/**
 * Bundle of objects a transformation visitor needs, threaded through calls
 * instead of relying on module-level state.
 */
export interface TransformContext {
  // The TypeScript module object in use.
  ts: typeof ts;
  // Node factory for building synthetic AST nodes.
  factory: ts.NodeFactory;
  // The active ts.TransformationContext for this transform run.
  context: ts.TransformationContext;
  // The source file currently being transformed.
  sourceFile: ts.SourceFile;
}

/**
 * Internal state for a single file transformation.
 * Passed between visitor functions to track mutable state.
 */
interface FileTransformState {
  // NOTE(review): presumably flipped to true when generated code references
  // typia, so an import can be added once per file — confirm against visitors.
  needsTypiaImport: boolean;
}
|
|
19
295
|
|
|
20
296
|
export class TypicalTransformer {
|
|
21
297
|
public config: TypicalConfig;
|
|
22
298
|
private program: ts.Program;
|
|
23
299
|
private ts: typeof ts;
|
|
300
|
+
private compiledPatterns: CompiledIgnorePatterns | null = null;
|
|
24
301
|
private typeValidators = new Map<
|
|
25
302
|
string,
|
|
26
303
|
{ name: string; typeNode: ts.TypeNode }
|
|
@@ -44,6 +321,144 @@ export class TypicalTransformer {
|
|
|
44
321
|
this.program = program ?? setupTsProgram(this.ts);
|
|
45
322
|
}
|
|
46
323
|
|
|
324
|
+
  /**
   * Create a new TypeScript program with transformed source code.
   * This is needed so typia can resolve types from our generated typia.createAssert<T>() calls.
   *
   * @param fileName        File whose contents are replaced in the new program.
   * @param transformedCode Transformed TypeScript source to parse and bind.
   * @param languageVersion Script target used when parsing the replacement file.
   * @returns The new program plus the re-parsed, bound source file for fileName.
   * @throws Error if the new program does not contain fileName.
   */
  private createTypiaProgram(
    fileName: string,
    transformedCode: string,
    languageVersion: ts.ScriptTarget = this.ts.ScriptTarget.ES2020
  ): { newProgram: ts.Program; boundSourceFile: ts.SourceFile } {
    // Create a new source file from the transformed code
    const newSourceFile = this.ts.createSourceFile(
      fileName,
      transformedCode,
      languageVersion,
      true // setParentNodes
    );

    // Build map of all source files, replacing the transformed one
    const compilerOptions = this.program.getCompilerOptions();
    const originalSourceFiles = new Map<string, ts.SourceFile>();
    for (const sf of this.program.getSourceFiles()) {
      originalSourceFiles.set(sf.fileName, sf);
    }
    originalSourceFiles.set(fileName, newSourceFile);

    // Create custom compiler host that serves our transformed file
    const customHost: ts.CompilerHost = {
      // Serve cached/replaced files first; fall back to reading from disk.
      getSourceFile: (hostFileName, langVersion) => {
        if (originalSourceFiles.has(hostFileName)) {
          return originalSourceFiles.get(hostFileName);
        }
        return this.ts.createSourceFile(
          hostFileName,
          this.ts.sys.readFile(hostFileName) || "",
          langVersion,
          true
        );
      },
      getDefaultLibFileName: (opts) => this.ts.getDefaultLibFilePath(opts),
      // Output is discarded — this program exists only for type resolution.
      writeFile: () => {},
      getCurrentDirectory: () => this.ts.sys.getCurrentDirectory(),
      getCanonicalFileName: (fn) =>
        this.ts.sys.useCaseSensitiveFileNames ? fn : fn.toLowerCase(),
      useCaseSensitiveFileNames: () => this.ts.sys.useCaseSensitiveFileNames,
      // fileExists must also report the in-memory files or module resolution
      // would miss the replaced source.
      fileExists: (fn) => originalSourceFiles.has(fn) || this.ts.sys.fileExists(fn),
      getNewLine: () => this.ts.sys.newLine,
      readFile: (fn) => this.ts.sys.readFile(fn),
    };

    // Create new program, passing oldProgram to reuse dependency context
    const newProgram = this.ts.createProgram(
      Array.from(originalSourceFiles.keys()),
      compilerOptions,
      customHost,
      this.program
    );

    // Get the bound source file from the new program (has proper symbol tables)
    const boundSourceFile = newProgram.getSourceFile(fileName);
    if (!boundSourceFile) {
      throw new Error(`Failed to get bound source file: ${fileName}`);
    }

    return { newProgram, boundSourceFile };
  }
|
|
389
|
+
|
|
390
|
+
/**
|
|
391
|
+
* Write intermediate file for debugging purposes.
|
|
392
|
+
* Creates a .typical.ts file showing the code after typical's transformations
|
|
393
|
+
* but before typia processes it.
|
|
394
|
+
*/
|
|
395
|
+
private writeIntermediateFile(fileName: string, code: string): void {
|
|
396
|
+
if (!this.config.debug?.writeIntermediateFiles) {
|
|
397
|
+
return;
|
|
398
|
+
}
|
|
399
|
+
|
|
400
|
+
const compilerOptions = this.program.getCompilerOptions();
|
|
401
|
+
const outDir = compilerOptions.outDir || ".";
|
|
402
|
+
const rootDir = compilerOptions.rootDir || ".";
|
|
403
|
+
|
|
404
|
+
const relativePath = path.relative(rootDir, fileName);
|
|
405
|
+
const intermediateFileName = relativePath.replace(/\.(tsx?)$/, ".typical.$1");
|
|
406
|
+
const intermediateFilePath = path.join(outDir, intermediateFileName);
|
|
407
|
+
|
|
408
|
+
const dir = path.dirname(intermediateFilePath);
|
|
409
|
+
if (!fs.existsSync(dir)) {
|
|
410
|
+
fs.mkdirSync(dir, { recursive: true });
|
|
411
|
+
}
|
|
412
|
+
|
|
413
|
+
fs.writeFileSync(intermediateFilePath, code);
|
|
414
|
+
console.log(`TYPICAL: Wrote intermediate file: ${intermediateFilePath}`);
|
|
415
|
+
}
|
|
416
|
+
|
|
417
|
+
  /**
   * Format typia diagnostic errors into readable error messages.
   *
   * Diagnostics that carry a file position are rendered as
   * `file:line:col`, a (truncated) snippet of the offending source, and the
   * issues extracted from typia's verbose message text; diagnostics without
   * position information are formatted from the message text alone.
   */
  private formatTypiaErrors(errors: ts.Diagnostic[]): string[] {
    return errors.map(d => {
      // messageText may be a string or a DiagnosticMessageChain; only the
      // chain's head message is used here.
      const fullMessage = typeof d.messageText === 'string' ? d.messageText : d.messageText.messageText;

      if (d.file && d.start !== undefined && d.length !== undefined) {
        const { line, character } = d.file.getLineAndCharacterOfPosition(d.start);
        // Extract the actual source code that caused the error
        const sourceSnippet = d.file.text.substring(d.start, d.start + d.length);
        // Truncate long snippets
        const snippet = sourceSnippet.length > 100
          ? sourceSnippet.substring(0, 100) + '...'
          : sourceSnippet;

        // Format the error message - extract type issues from typia's verbose output
        const formattedIssues = this.formatTypiaError(fullMessage);

        // 1-based line/column for human-readable output.
        return `${d.file.fileName}:${line + 1}:${character + 1}\n` +
          ` Code: ${snippet}\n` +
          formattedIssues;
      }
      return this.formatTypiaError(fullMessage);
    });
  }
|
|
443
|
+
|
|
444
|
+
/**
|
|
445
|
+
* Check for untransformed typia calls and throw an error if found.
|
|
446
|
+
* This is a fallback in case typia silently fails without reporting a diagnostic.
|
|
447
|
+
*/
|
|
448
|
+
private checkUntransformedTypiaCalls(code: string, fileName: string): void {
|
|
449
|
+
const untransformedCalls = this.findUntransformedTypiaCalls(code);
|
|
450
|
+
if (untransformedCalls.length > 0) {
|
|
451
|
+
const failedTypes = untransformedCalls.map(c => c.type).filter((v, i, a) => a.indexOf(v) === i);
|
|
452
|
+
throw new Error(
|
|
453
|
+
`TYPICAL: Failed to transform the following types (typia cannot process them):\n` +
|
|
454
|
+
failedTypes.map(t => ` - ${t}`).join('\n') +
|
|
455
|
+
`\n\nTo skip validation for these types, add to ignoreTypes in typical.json:\n` +
|
|
456
|
+
` "ignoreTypes": [${failedTypes.map(t => `"${t}"`).join(', ')}]` +
|
|
457
|
+
`\n\nFile: ${fileName}`
|
|
458
|
+
);
|
|
459
|
+
}
|
|
460
|
+
}
|
|
461
|
+
|
|
47
462
|
public createSourceFile(fileName: string, content: string): ts.SourceFile {
|
|
48
463
|
return this.ts.createSourceFile(
|
|
49
464
|
fileName,
|
|
@@ -53,11 +468,19 @@ export class TypicalTransformer {
|
|
|
53
468
|
);
|
|
54
469
|
}
|
|
55
470
|
|
|
471
|
+
/**
|
|
472
|
+
* Transform options for controlling source map generation.
|
|
473
|
+
*/
|
|
56
474
|
public transform(
|
|
57
475
|
sourceFile: ts.SourceFile | string,
|
|
58
476
|
mode: "basic" | "typia" | "js",
|
|
59
|
-
|
|
60
|
-
|
|
477
|
+
options: {
|
|
478
|
+
sourceMap?: boolean;
|
|
479
|
+
skippedTypes?: Set<string>;
|
|
480
|
+
} = {}
|
|
481
|
+
): TransformResult {
|
|
482
|
+
const { sourceMap = false, skippedTypes = new Set() } = options;
|
|
483
|
+
|
|
61
484
|
if (typeof sourceFile === "string") {
|
|
62
485
|
const file = this.program.getSourceFile(sourceFile);
|
|
63
486
|
if (!file) {
|
|
@@ -66,128 +489,164 @@ export class TypicalTransformer {
|
|
|
66
489
|
sourceFile = file;
|
|
67
490
|
}
|
|
68
491
|
|
|
492
|
+
const fileName = sourceFile.fileName;
|
|
493
|
+
const originalSource = sourceFile.getFullText();
|
|
69
494
|
const printer = this.ts.createPrinter();
|
|
495
|
+
const includeContent = this.config.sourceMap?.includeContent ?? true;
|
|
70
496
|
|
|
71
|
-
// Phase 1: typical's own transformations
|
|
497
|
+
// Phase 1: typical's own transformations (adds source map markers as comments)
|
|
72
498
|
const typicalTransformer = this.getTypicalOnlyTransformer(skippedTypes);
|
|
73
499
|
const phase1Result = this.ts.transform(sourceFile, [typicalTransformer]);
|
|
74
500
|
let transformedCode = printer.printFile(phase1Result.transformed[0]);
|
|
501
|
+
if (process.env.DEBUG) {
|
|
502
|
+
console.log("TYPICAL: After phase1 print (first 500):", transformedCode.substring(0, 500));
|
|
503
|
+
console.log("TYPICAL: Contains //@T:", transformedCode.includes("//@T:"));
|
|
504
|
+
}
|
|
75
505
|
phase1Result.dispose();
|
|
76
506
|
|
|
77
507
|
if (mode === "basic") {
|
|
78
|
-
|
|
508
|
+
// For basic mode, parse markers and build source map, then strip markers
|
|
509
|
+
if (sourceMap) {
|
|
510
|
+
const { code, map } = parseMarkersAndBuildSourceMap(
|
|
511
|
+
transformedCode,
|
|
512
|
+
fileName,
|
|
513
|
+
originalSource,
|
|
514
|
+
includeContent
|
|
515
|
+
);
|
|
516
|
+
return { code, map };
|
|
517
|
+
}
|
|
518
|
+
// No source map requested - just strip markers
|
|
519
|
+
return {
|
|
520
|
+
code: transformedCode.replace(ALL_MARKERS_REGEX, ''),
|
|
521
|
+
map: null,
|
|
522
|
+
};
|
|
79
523
|
}
|
|
80
524
|
|
|
81
525
|
// Phase 2: if code has typia calls, run typia transformer in its own context
|
|
526
|
+
// The markers survive through typia since they're comments
|
|
82
527
|
if (transformedCode.includes("typia.")) {
|
|
83
528
|
const result = this.applyTypiaTransform(sourceFile.fileName, transformedCode, printer);
|
|
84
|
-
if (typeof result === 'object' && result.retry) {
|
|
529
|
+
if (typeof result === 'object' && 'retry' in result && result.retry) {
|
|
85
530
|
// Typia failed on a type - add to skipped and retry the whole transform
|
|
86
531
|
skippedTypes.add(result.failedType);
|
|
87
532
|
// Clear validator caches since we're retrying
|
|
88
533
|
this.typeValidators.clear();
|
|
89
534
|
this.typeStringifiers.clear();
|
|
90
535
|
this.typeParsers.clear();
|
|
91
|
-
return this.transform(sourceFile, mode, skippedTypes);
|
|
536
|
+
return this.transform(sourceFile, mode, { sourceMap, skippedTypes });
|
|
92
537
|
}
|
|
93
|
-
transformedCode = result as string;
|
|
538
|
+
transformedCode = (result as { code: string }).code;
|
|
94
539
|
}
|
|
95
540
|
|
|
96
541
|
if (mode === "typia") {
|
|
97
|
-
|
|
542
|
+
// For typia mode, parse markers and build source map, then strip markers
|
|
543
|
+
if (sourceMap) {
|
|
544
|
+
const { code, map } = parseMarkersAndBuildSourceMap(
|
|
545
|
+
transformedCode,
|
|
546
|
+
fileName,
|
|
547
|
+
originalSource,
|
|
548
|
+
includeContent
|
|
549
|
+
);
|
|
550
|
+
return { code, map };
|
|
551
|
+
}
|
|
552
|
+
// No source map requested - just strip markers
|
|
553
|
+
return {
|
|
554
|
+
code: transformedCode.replace(ALL_MARKERS_REGEX, ''),
|
|
555
|
+
map: null,
|
|
556
|
+
};
|
|
98
557
|
}
|
|
99
558
|
|
|
100
|
-
// Mode "js" -
|
|
559
|
+
// Mode "js" - first parse markers to build our source map, then transpile
|
|
560
|
+
let typicalMap: EncodedSourceMap | null = null;
|
|
561
|
+
if (sourceMap) {
|
|
562
|
+
const parsed = parseMarkersAndBuildSourceMap(
|
|
563
|
+
transformedCode,
|
|
564
|
+
fileName,
|
|
565
|
+
originalSource,
|
|
566
|
+
includeContent
|
|
567
|
+
);
|
|
568
|
+
transformedCode = parsed.code;
|
|
569
|
+
typicalMap = parsed.map;
|
|
570
|
+
} else {
|
|
571
|
+
// Strip markers even without source map
|
|
572
|
+
transformedCode = transformedCode.replace(ALL_MARKERS_REGEX, '');
|
|
573
|
+
}
|
|
574
|
+
|
|
575
|
+
// Transpile to JavaScript with source map support
|
|
576
|
+
const compilerOptions = {
|
|
577
|
+
...this.program.getCompilerOptions(),
|
|
578
|
+
sourceMap: sourceMap,
|
|
579
|
+
inlineSourceMap: false,
|
|
580
|
+
inlineSources: false,
|
|
581
|
+
};
|
|
582
|
+
|
|
101
583
|
const compileResult = ts.transpileModule(transformedCode, {
|
|
102
|
-
compilerOptions
|
|
584
|
+
compilerOptions,
|
|
585
|
+
fileName,
|
|
103
586
|
});
|
|
104
587
|
|
|
105
|
-
|
|
588
|
+
// Compose the two source maps: typical -> original AND js -> typical
|
|
589
|
+
if (sourceMap && typicalMap) {
|
|
590
|
+
let jsMap: EncodedSourceMap | null = null;
|
|
591
|
+
if (compileResult.sourceMapText) {
|
|
592
|
+
try {
|
|
593
|
+
jsMap = JSON.parse(compileResult.sourceMapText) as EncodedSourceMap;
|
|
594
|
+
jsMap.sources = [fileName];
|
|
595
|
+
} catch {
|
|
596
|
+
// Failed to parse, continue without
|
|
597
|
+
}
|
|
598
|
+
}
|
|
599
|
+
|
|
600
|
+
// Compose maps: jsMap traces JS->TS, typicalMap traces TS->original
|
|
601
|
+
// Result traces JS->original
|
|
602
|
+
const composedMap = composeSourceMaps([typicalMap, jsMap], fileName);
|
|
603
|
+
if (composedMap && includeContent) {
|
|
604
|
+
composedMap.sourcesContent = [originalSource];
|
|
605
|
+
}
|
|
606
|
+
|
|
607
|
+
return {
|
|
608
|
+
code: compileResult.outputText,
|
|
609
|
+
map: composedMap,
|
|
610
|
+
};
|
|
611
|
+
}
|
|
612
|
+
|
|
613
|
+
return {
|
|
614
|
+
code: compileResult.outputText,
|
|
615
|
+
map: null,
|
|
616
|
+
};
|
|
617
|
+
}
|
|
618
|
+
|
|
619
|
+
/**
|
|
620
|
+
* Legacy transform method that returns just the code string.
|
|
621
|
+
* @deprecated Use transform() with options.sourceMap instead
|
|
622
|
+
*/
|
|
623
|
+
public transformCode(
|
|
624
|
+
sourceFile: ts.SourceFile | string,
|
|
625
|
+
mode: "basic" | "typia" | "js",
|
|
626
|
+
skippedTypes: Set<string> = new Set()
|
|
627
|
+
): string {
|
|
628
|
+
return this.transform(sourceFile, mode, { skippedTypes }).code;
|
|
106
629
|
}
|
|
107
630
|
|
|
108
631
|
/**
|
|
109
632
|
* Apply typia transformation in a separate ts.transform() context.
|
|
110
633
|
* This avoids mixing program contexts and eliminates the need for import recreation.
|
|
111
|
-
* Returns either the transformed code
|
|
634
|
+
* Returns either the transformed code, or a retry signal with the failed type.
|
|
635
|
+
* Source map markers in the code are preserved through the typia transformation.
|
|
112
636
|
*/
|
|
113
|
-
private applyTypiaTransform(
|
|
114
|
-
|
|
115
|
-
|
|
116
|
-
|
|
117
|
-
|
|
118
|
-
|
|
119
|
-
|
|
120
|
-
const relativePath = path.relative(rootDir, fileName);
|
|
121
|
-
const intermediateFileName = relativePath.replace(/\.(tsx?)$/, ".typical.$1");
|
|
122
|
-
const intermediateFilePath = path.join(outDir, intermediateFileName);
|
|
123
|
-
|
|
124
|
-
const dir = path.dirname(intermediateFilePath);
|
|
125
|
-
if (!fs.existsSync(dir)) {
|
|
126
|
-
fs.mkdirSync(dir, { recursive: true });
|
|
127
|
-
}
|
|
128
|
-
|
|
129
|
-
fs.writeFileSync(intermediateFilePath, code);
|
|
130
|
-
console.log(`TYPICAL: Wrote intermediate file: ${intermediateFilePath}`);
|
|
131
|
-
}
|
|
637
|
+
private applyTypiaTransform(
|
|
638
|
+
fileName: string,
|
|
639
|
+
code: string,
|
|
640
|
+
printer: ts.Printer
|
|
641
|
+
): { code: string } | { retry: true; failedType: string } {
|
|
642
|
+
this.writeIntermediateFile(fileName, code);
|
|
132
643
|
|
|
133
644
|
if (process.env.DEBUG) {
|
|
134
645
|
console.log("TYPICAL: Before typia transform (first 500 chars):", code.substring(0, 500));
|
|
135
646
|
}
|
|
136
647
|
|
|
137
|
-
// Create a new source file
|
|
138
|
-
const
|
|
139
|
-
fileName,
|
|
140
|
-
code,
|
|
141
|
-
this.ts.ScriptTarget.ES2020,
|
|
142
|
-
true
|
|
143
|
-
);
|
|
144
|
-
|
|
145
|
-
// Create a new program with the transformed source file so typia can resolve types.
|
|
146
|
-
// Pass oldProgram to reuse parsed/bound data from unchanged dependency files.
|
|
147
|
-
const compilerOptions = this.program.getCompilerOptions();
|
|
148
|
-
const originalSourceFiles = new Map<string, ts.SourceFile>();
|
|
149
|
-
for (const sf of this.program.getSourceFiles()) {
|
|
150
|
-
originalSourceFiles.set(sf.fileName, sf);
|
|
151
|
-
}
|
|
152
|
-
// Replace the original source file with our transformed one
|
|
153
|
-
originalSourceFiles.set(fileName, newSourceFile);
|
|
154
|
-
|
|
155
|
-
const customHost: ts.CompilerHost = {
|
|
156
|
-
getSourceFile: (hostFileName, languageVersion) => {
|
|
157
|
-
if (originalSourceFiles.has(hostFileName)) {
|
|
158
|
-
return originalSourceFiles.get(hostFileName);
|
|
159
|
-
}
|
|
160
|
-
return this.ts.createSourceFile(
|
|
161
|
-
hostFileName,
|
|
162
|
-
this.ts.sys.readFile(hostFileName) || "",
|
|
163
|
-
languageVersion,
|
|
164
|
-
true
|
|
165
|
-
);
|
|
166
|
-
},
|
|
167
|
-
getDefaultLibFileName: (opts) => this.ts.getDefaultLibFilePath(opts),
|
|
168
|
-
writeFile: () => {},
|
|
169
|
-
getCurrentDirectory: () => this.ts.sys.getCurrentDirectory(),
|
|
170
|
-
getCanonicalFileName: (fn) =>
|
|
171
|
-
this.ts.sys.useCaseSensitiveFileNames ? fn : fn.toLowerCase(),
|
|
172
|
-
useCaseSensitiveFileNames: () => this.ts.sys.useCaseSensitiveFileNames,
|
|
173
|
-
getNewLine: () => this.ts.sys.newLine,
|
|
174
|
-
fileExists: (fn) => originalSourceFiles.has(fn) || this.ts.sys.fileExists(fn),
|
|
175
|
-
readFile: (fn) => this.ts.sys.readFile(fn),
|
|
176
|
-
};
|
|
177
|
-
|
|
178
|
-
// Create new program, passing oldProgram to reuse dependency context
|
|
179
|
-
const newProgram = this.ts.createProgram(
|
|
180
|
-
Array.from(originalSourceFiles.keys()),
|
|
181
|
-
compilerOptions,
|
|
182
|
-
customHost,
|
|
183
|
-
this.program // Reuse old program's structure for unchanged files
|
|
184
|
-
);
|
|
185
|
-
|
|
186
|
-
// Get the bound source file from the new program (has proper symbol tables)
|
|
187
|
-
const boundSourceFile = newProgram.getSourceFile(fileName);
|
|
188
|
-
if (!boundSourceFile) {
|
|
189
|
-
throw new Error(`Failed to get bound source file: ${fileName}`);
|
|
190
|
-
}
|
|
648
|
+
// Create a new program with the transformed source file so typia can resolve types
|
|
649
|
+
const { newProgram, boundSourceFile } = this.createTypiaProgram(fileName, code);
|
|
191
650
|
|
|
192
651
|
// Collect typia diagnostics to detect transformation failures
|
|
193
652
|
const diagnostics: ts.Diagnostic[] = [];
|
|
@@ -242,27 +701,7 @@ export class TypicalTransformer {
|
|
|
242
701
|
}
|
|
243
702
|
|
|
244
703
|
// No retryable errors, throw the original error
|
|
245
|
-
const errorMessages =
|
|
246
|
-
const fullMessage = typeof d.messageText === 'string' ? d.messageText : d.messageText.messageText;
|
|
247
|
-
|
|
248
|
-
if (d.file && d.start !== undefined && d.length !== undefined) {
|
|
249
|
-
const { line, character } = d.file.getLineAndCharacterOfPosition(d.start);
|
|
250
|
-
// Extract the actual source code that caused the error
|
|
251
|
-
const sourceSnippet = d.file.text.substring(d.start, d.start + d.length);
|
|
252
|
-
// Truncate long snippets
|
|
253
|
-
const snippet = sourceSnippet.length > 100
|
|
254
|
-
? sourceSnippet.substring(0, 100) + '...'
|
|
255
|
-
: sourceSnippet;
|
|
256
|
-
|
|
257
|
-
// Format the error message - extract type issues from typia's verbose output
|
|
258
|
-
const formattedIssues = this.formatTypiaError(fullMessage);
|
|
259
|
-
|
|
260
|
-
return `${d.file.fileName}:${line + 1}:${character + 1}\n` +
|
|
261
|
-
` Code: ${snippet}\n` +
|
|
262
|
-
formattedIssues;
|
|
263
|
-
}
|
|
264
|
-
return this.formatTypiaError(fullMessage);
|
|
265
|
-
});
|
|
704
|
+
const errorMessages = this.formatTypiaErrors(errors);
|
|
266
705
|
throw new Error(
|
|
267
706
|
`TYPICAL: Typia transformation failed:\n\n${errorMessages.join('\n\n')}`
|
|
268
707
|
);
|
|
@@ -280,21 +719,12 @@ export class TypicalTransformer {
|
|
|
280
719
|
|
|
281
720
|
const finalCode = printer.printFile(typiaTransformed);
|
|
282
721
|
|
|
283
|
-
//
|
|
284
|
-
|
|
285
|
-
const untransformedCalls = this.findUntransformedTypiaCalls(finalCode);
|
|
286
|
-
if (untransformedCalls.length > 0) {
|
|
287
|
-
const failedTypes = untransformedCalls.map(c => c.type).filter((v, i, a) => a.indexOf(v) === i);
|
|
288
|
-
throw new Error(
|
|
289
|
-
`TYPICAL: Failed to transform the following types (typia cannot process them):\n` +
|
|
290
|
-
failedTypes.map(t => ` - ${t}`).join('\n') +
|
|
291
|
-
`\n\nTo skip validation for these types, add to ignoreTypes in typical.json:\n` +
|
|
292
|
-
` "ignoreTypes": [${failedTypes.map(t => `"${t}"`).join(', ')}]` +
|
|
293
|
-
`\n\nFile: ${fileName}`
|
|
294
|
-
);
|
|
295
|
-
}
|
|
722
|
+
// Check for untransformed typia calls as a fallback
|
|
723
|
+
this.checkUntransformedTypiaCalls(finalCode, fileName);
|
|
296
724
|
|
|
297
|
-
|
|
725
|
+
// Source map markers (@T:line:col) are preserved through typia transformation
|
|
726
|
+
// and will be parsed later in the transform() method
|
|
727
|
+
return { code: finalCode };
|
|
298
728
|
}
|
|
299
729
|
|
|
300
730
|
/**
|
|
@@ -305,11 +735,6 @@ export class TypicalTransformer {
|
|
|
305
735
|
return (context: ts.TransformationContext) => {
|
|
306
736
|
const factory = context.factory;
|
|
307
737
|
const typeChecker = this.program.getTypeChecker();
|
|
308
|
-
const transformContext: TransformContext = {
|
|
309
|
-
ts: this.ts,
|
|
310
|
-
factory,
|
|
311
|
-
context,
|
|
312
|
-
};
|
|
313
738
|
|
|
314
739
|
return (sourceFile: ts.SourceFile) => {
|
|
315
740
|
// Check if this file should be transformed based on include/exclude patterns
|
|
@@ -321,6 +746,13 @@ export class TypicalTransformer {
|
|
|
321
746
|
console.log("TYPICAL: processing ", sourceFile.fileName);
|
|
322
747
|
}
|
|
323
748
|
|
|
749
|
+
const transformContext: TransformContext = {
|
|
750
|
+
ts: this.ts,
|
|
751
|
+
factory,
|
|
752
|
+
context,
|
|
753
|
+
sourceFile,
|
|
754
|
+
};
|
|
755
|
+
|
|
324
756
|
return this.transformSourceFile(sourceFile, transformContext, typeChecker, skippedTypes);
|
|
325
757
|
};
|
|
326
758
|
};
|
|
@@ -337,11 +769,6 @@ export class TypicalTransformer {
|
|
|
337
769
|
return (context: ts.TransformationContext) => {
|
|
338
770
|
const factory = context.factory;
|
|
339
771
|
const typeChecker = this.program.getTypeChecker();
|
|
340
|
-
const transformContext: TransformContext = {
|
|
341
|
-
ts: this.ts,
|
|
342
|
-
factory,
|
|
343
|
-
context,
|
|
344
|
-
};
|
|
345
772
|
|
|
346
773
|
return (sourceFile: ts.SourceFile) => {
|
|
347
774
|
// Check if this file should be transformed based on include/exclude patterns
|
|
@@ -353,6 +780,13 @@ export class TypicalTransformer {
|
|
|
353
780
|
console.log("TYPICAL: processing ", sourceFile.fileName);
|
|
354
781
|
}
|
|
355
782
|
|
|
783
|
+
const transformContext: TransformContext = {
|
|
784
|
+
ts: this.ts,
|
|
785
|
+
factory,
|
|
786
|
+
context,
|
|
787
|
+
sourceFile,
|
|
788
|
+
};
|
|
789
|
+
|
|
356
790
|
// Apply typical's transformations
|
|
357
791
|
let transformedSourceFile = this.transformSourceFile(
|
|
358
792
|
sourceFile,
|
|
@@ -372,81 +806,19 @@ export class TypicalTransformer {
|
|
|
372
806
|
return transformedSourceFile;
|
|
373
807
|
}
|
|
374
808
|
|
|
375
|
-
|
|
376
|
-
if (this.config.debug?.writeIntermediateFiles) {
|
|
377
|
-
const compilerOptions = this.program.getCompilerOptions();
|
|
378
|
-
const outDir = compilerOptions.outDir || ".";
|
|
379
|
-
const rootDir = compilerOptions.rootDir || ".";
|
|
380
|
-
const relativePath = path.relative(rootDir, sourceFile.fileName);
|
|
381
|
-
const intermediateFileName = relativePath.replace(/\.(tsx?)$/, ".typical.$1");
|
|
382
|
-
const intermediateFilePath = path.join(outDir, intermediateFileName);
|
|
383
|
-
const dir = path.dirname(intermediateFilePath);
|
|
384
|
-
if (!fs.existsSync(dir)) {
|
|
385
|
-
fs.mkdirSync(dir, { recursive: true });
|
|
386
|
-
}
|
|
387
|
-
fs.writeFileSync(intermediateFilePath, transformedCode);
|
|
388
|
-
console.log(`TYPICAL: Wrote intermediate file: ${intermediateFilePath}`);
|
|
389
|
-
}
|
|
809
|
+
this.writeIntermediateFile(sourceFile.fileName, transformedCode);
|
|
390
810
|
|
|
391
811
|
if (process.env.DEBUG) {
|
|
392
812
|
console.log("TYPICAL: Before typia transform (first 500 chars):", transformedCode.substring(0, 500));
|
|
393
813
|
}
|
|
394
814
|
|
|
395
|
-
// Create a new source file
|
|
396
|
-
const
|
|
815
|
+
// Create a new program with the transformed source file so typia can resolve types
|
|
816
|
+
const { newProgram, boundSourceFile } = this.createTypiaProgram(
|
|
397
817
|
sourceFile.fileName,
|
|
398
818
|
transformedCode,
|
|
399
|
-
sourceFile.languageVersion
|
|
400
|
-
true
|
|
401
|
-
);
|
|
402
|
-
|
|
403
|
-
// Create a new program with the transformed source file so typia can resolve types.
|
|
404
|
-
// Pass oldProgram to reuse parsed/bound data from unchanged dependency files.
|
|
405
|
-
const compilerOptions = this.program.getCompilerOptions();
|
|
406
|
-
const originalSourceFiles = new Map<string, ts.SourceFile>();
|
|
407
|
-
for (const sf of this.program.getSourceFiles()) {
|
|
408
|
-
originalSourceFiles.set(sf.fileName, sf);
|
|
409
|
-
}
|
|
410
|
-
// Replace the original source file with our transformed one
|
|
411
|
-
originalSourceFiles.set(sourceFile.fileName, newSourceFile);
|
|
412
|
-
|
|
413
|
-
const customHost: ts.CompilerHost = {
|
|
414
|
-
getSourceFile: (hostFileName, languageVersion) => {
|
|
415
|
-
if (originalSourceFiles.has(hostFileName)) {
|
|
416
|
-
return originalSourceFiles.get(hostFileName);
|
|
417
|
-
}
|
|
418
|
-
return this.ts.createSourceFile(
|
|
419
|
-
hostFileName,
|
|
420
|
-
this.ts.sys.readFile(hostFileName) || "",
|
|
421
|
-
languageVersion,
|
|
422
|
-
true
|
|
423
|
-
);
|
|
424
|
-
},
|
|
425
|
-
getDefaultLibFileName: (opts) => this.ts.getDefaultLibFilePath(opts),
|
|
426
|
-
writeFile: () => {},
|
|
427
|
-
getCurrentDirectory: () => this.ts.sys.getCurrentDirectory(),
|
|
428
|
-
getCanonicalFileName: (fn) =>
|
|
429
|
-
this.ts.sys.useCaseSensitiveFileNames ? fn : fn.toLowerCase(),
|
|
430
|
-
useCaseSensitiveFileNames: () => this.ts.sys.useCaseSensitiveFileNames,
|
|
431
|
-
getNewLine: () => this.ts.sys.newLine,
|
|
432
|
-
fileExists: (fn) => originalSourceFiles.has(fn) || this.ts.sys.fileExists(fn),
|
|
433
|
-
readFile: (fn) => this.ts.sys.readFile(fn),
|
|
434
|
-
};
|
|
435
|
-
|
|
436
|
-
// Create new program, passing oldProgram to reuse dependency context
|
|
437
|
-
const newProgram = this.ts.createProgram(
|
|
438
|
-
Array.from(originalSourceFiles.keys()),
|
|
439
|
-
compilerOptions,
|
|
440
|
-
customHost,
|
|
441
|
-
this.program // Reuse old program's structure for unchanged files
|
|
819
|
+
sourceFile.languageVersion
|
|
442
820
|
);
|
|
443
821
|
|
|
444
|
-
// Get the bound source file from the new program (has proper symbol tables)
|
|
445
|
-
const boundSourceFile = newProgram.getSourceFile(sourceFile.fileName);
|
|
446
|
-
if (!boundSourceFile) {
|
|
447
|
-
throw new Error(`Failed to get bound source file: ${sourceFile.fileName}`);
|
|
448
|
-
}
|
|
449
|
-
|
|
450
822
|
// Collect typia diagnostics to detect transformation failures
|
|
451
823
|
const diagnostics: ts.Diagnostic[] = [];
|
|
452
824
|
|
|
@@ -477,27 +849,7 @@ export class TypicalTransformer {
|
|
|
477
849
|
// Check for typia errors via diagnostics
|
|
478
850
|
const errors = diagnostics.filter(d => d.category === this.ts.DiagnosticCategory.Error);
|
|
479
851
|
if (errors.length > 0) {
|
|
480
|
-
const errorMessages =
|
|
481
|
-
const fullMessage = typeof d.messageText === 'string' ? d.messageText : d.messageText.messageText;
|
|
482
|
-
|
|
483
|
-
if (d.file && d.start !== undefined && d.length !== undefined) {
|
|
484
|
-
const { line, character } = d.file.getLineAndCharacterOfPosition(d.start);
|
|
485
|
-
// Extract the actual source code that caused the error
|
|
486
|
-
const sourceSnippet = d.file.text.substring(d.start, d.start + d.length);
|
|
487
|
-
// Truncate long snippets
|
|
488
|
-
const snippet = sourceSnippet.length > 100
|
|
489
|
-
? sourceSnippet.substring(0, 100) + '...'
|
|
490
|
-
: sourceSnippet;
|
|
491
|
-
|
|
492
|
-
// Format the error message - extract type issues from typia's verbose output
|
|
493
|
-
const formattedIssues = this.formatTypiaError(fullMessage);
|
|
494
|
-
|
|
495
|
-
return `${d.file.fileName}:${line + 1}:${character + 1}\n` +
|
|
496
|
-
` Code: ${snippet}\n` +
|
|
497
|
-
formattedIssues;
|
|
498
|
-
}
|
|
499
|
-
return this.formatTypiaError(fullMessage);
|
|
500
|
-
});
|
|
852
|
+
const errorMessages = this.formatTypiaErrors(errors);
|
|
501
853
|
throw new Error(
|
|
502
854
|
`TYPICAL: Typia transformation failed:\n\n${errorMessages.join('\n\n')}`
|
|
503
855
|
);
|
|
@@ -512,19 +864,9 @@ export class TypicalTransformer {
|
|
|
512
864
|
);
|
|
513
865
|
}
|
|
514
866
|
|
|
515
|
-
//
|
|
867
|
+
// Check for untransformed typia calls as a fallback
|
|
516
868
|
const finalCode = printer.printFile(transformedSourceFile);
|
|
517
|
-
|
|
518
|
-
if (untransformedCalls.length > 0) {
|
|
519
|
-
const failedTypes = untransformedCalls.map(c => c.type).filter((v, i, a) => a.indexOf(v) === i);
|
|
520
|
-
throw new Error(
|
|
521
|
-
`TYPICAL: Failed to transform the following types (typia cannot process them):\n` +
|
|
522
|
-
failedTypes.map(t => ` - ${t}`).join('\n') +
|
|
523
|
-
`\n\nTo skip validation for these types, add to ignoreTypes in typical.json:\n` +
|
|
524
|
-
` "ignoreTypes": [${failedTypes.map(t => `"${t}"`).join(', ')}]` +
|
|
525
|
-
`\n\nFile: ${sourceFile.fileName}`
|
|
526
|
-
);
|
|
527
|
-
}
|
|
869
|
+
this.checkUntransformedTypiaCalls(finalCode, sourceFile.fileName);
|
|
528
870
|
|
|
529
871
|
return transformedSourceFile;
|
|
530
872
|
};
|
|
@@ -532,48 +874,183 @@ export class TypicalTransformer {
|
|
|
532
874
|
}
|
|
533
875
|
|
|
534
876
|
/**
|
|
535
|
-
* Transform
|
|
877
|
+
* Transform JSON.stringify or JSON.parse calls to use typia's validated versions.
|
|
878
|
+
* Returns the transformed node if applicable, or undefined to indicate no transformation.
|
|
536
879
|
*/
|
|
537
|
-
private
|
|
538
|
-
|
|
880
|
+
private transformJSONCall(
|
|
881
|
+
node: ts.CallExpression,
|
|
539
882
|
ctx: TransformContext,
|
|
540
883
|
typeChecker: ts.TypeChecker,
|
|
541
|
-
|
|
542
|
-
): ts.
|
|
543
|
-
const { ts } = ctx;
|
|
884
|
+
shouldSkipType: (typeText: string) => boolean
|
|
885
|
+
): ts.Node | undefined {
|
|
886
|
+
const { ts, factory } = ctx;
|
|
887
|
+
const propertyAccess = node.expression as ts.PropertyAccessExpression;
|
|
888
|
+
|
|
889
|
+
if (propertyAccess.name.text === "stringify") {
|
|
890
|
+
// For stringify, we need to infer the type from the argument
|
|
891
|
+
// First check if the argument type is 'any' - if so, skip transformation
|
|
892
|
+
if (node.arguments.length > 0) {
|
|
893
|
+
const arg = node.arguments[0];
|
|
894
|
+
const argType = typeChecker.getTypeAtLocation(arg);
|
|
895
|
+
if (this.isAnyOrUnknownTypeFlags(argType)) {
|
|
896
|
+
return undefined; // Don't transform JSON.stringify for any/unknown types
|
|
897
|
+
}
|
|
898
|
+
}
|
|
899
|
+
|
|
900
|
+
if (this.config.reusableValidators) {
|
|
901
|
+
// Infer type from argument
|
|
902
|
+
const arg = node.arguments[0];
|
|
903
|
+
const { typeText, typeNode } = this.inferStringifyType(arg, typeChecker, ctx);
|
|
904
|
+
|
|
905
|
+
const stringifierName = this.getOrCreateStringifier(typeText, typeNode);
|
|
906
|
+
return factory.createCallExpression(
|
|
907
|
+
factory.createIdentifier(stringifierName),
|
|
908
|
+
undefined,
|
|
909
|
+
node.arguments
|
|
910
|
+
);
|
|
911
|
+
} else {
|
|
912
|
+
// Use inline typia.json.stringify
|
|
913
|
+
return factory.updateCallExpression(
|
|
914
|
+
node,
|
|
915
|
+
factory.createPropertyAccessExpression(
|
|
916
|
+
factory.createPropertyAccessExpression(
|
|
917
|
+
factory.createIdentifier("typia"),
|
|
918
|
+
"json"
|
|
919
|
+
),
|
|
920
|
+
"stringify"
|
|
921
|
+
),
|
|
922
|
+
node.typeArguments,
|
|
923
|
+
node.arguments
|
|
924
|
+
);
|
|
925
|
+
}
|
|
926
|
+
} else if (propertyAccess.name.text === "parse") {
|
|
927
|
+
// For JSON.parse, we need to infer the expected type from context
|
|
928
|
+
// Check if this is part of a variable declaration or type assertion
|
|
929
|
+
let targetType: ts.TypeNode | undefined;
|
|
930
|
+
|
|
931
|
+
// Look for type annotations in parent nodes
|
|
932
|
+
let parent = node.parent;
|
|
933
|
+
while (parent) {
|
|
934
|
+
if (ts.isVariableDeclaration(parent) && parent.type) {
|
|
935
|
+
targetType = parent.type;
|
|
936
|
+
break;
|
|
937
|
+
} else if (ts.isAsExpression(parent)) {
|
|
938
|
+
targetType = parent.type;
|
|
939
|
+
break;
|
|
940
|
+
} else if (ts.isReturnStatement(parent)) {
|
|
941
|
+
// Look for function return type
|
|
942
|
+
let funcParent = parent.parent;
|
|
943
|
+
while (funcParent) {
|
|
944
|
+
if (
|
|
945
|
+
(ts.isFunctionDeclaration(funcParent) ||
|
|
946
|
+
ts.isArrowFunction(funcParent) ||
|
|
947
|
+
ts.isMethodDeclaration(funcParent)) &&
|
|
948
|
+
funcParent.type
|
|
949
|
+
) {
|
|
950
|
+
targetType = funcParent.type;
|
|
951
|
+
break;
|
|
952
|
+
}
|
|
953
|
+
funcParent = funcParent.parent;
|
|
954
|
+
}
|
|
955
|
+
break;
|
|
956
|
+
} else if (ts.isArrowFunction(parent) && parent.type) {
|
|
957
|
+
// Arrow function with expression body (not block)
|
|
958
|
+
// e.g., (s: string): User => JSON.parse(s)
|
|
959
|
+
targetType = parent.type;
|
|
960
|
+
break;
|
|
961
|
+
}
|
|
962
|
+
parent = parent.parent;
|
|
963
|
+
}
|
|
544
964
|
|
|
545
|
-
|
|
546
|
-
|
|
547
|
-
|
|
548
|
-
|
|
549
|
-
|
|
550
|
-
|
|
551
|
-
|
|
552
|
-
|
|
553
|
-
|
|
965
|
+
if (targetType && this.isAnyOrUnknownType(targetType)) {
|
|
966
|
+
// Don't transform JSON.parse for any/unknown types
|
|
967
|
+
return undefined;
|
|
968
|
+
}
|
|
969
|
+
|
|
970
|
+
// If we can't determine the target type and there's no explicit type argument,
|
|
971
|
+
// don't transform - we can't validate against an unknown type
|
|
972
|
+
if (!targetType && !node.typeArguments) {
|
|
973
|
+
return undefined;
|
|
974
|
+
}
|
|
975
|
+
|
|
976
|
+
if (this.config.reusableValidators && targetType) {
|
|
977
|
+
// Use reusable parser - use typeNode text to preserve local aliases
|
|
978
|
+
const typeText = this.getTypeKey(targetType, typeChecker);
|
|
979
|
+
|
|
980
|
+
// Skip types that failed in typia (retry mechanism)
|
|
981
|
+
if (shouldSkipType(typeText)) {
|
|
554
982
|
if (process.env.DEBUG) {
|
|
555
|
-
console.log(`TYPICAL:
|
|
983
|
+
console.log(`TYPICAL: Skipping previously failed type for JSON.parse: ${typeText}`);
|
|
556
984
|
}
|
|
557
|
-
return
|
|
985
|
+
return undefined;
|
|
558
986
|
}
|
|
987
|
+
|
|
988
|
+
const parserName = this.getOrCreateParser(typeText, targetType);
|
|
989
|
+
|
|
990
|
+
return factory.createCallExpression(
|
|
991
|
+
factory.createIdentifier(parserName),
|
|
992
|
+
undefined,
|
|
993
|
+
node.arguments
|
|
994
|
+
);
|
|
995
|
+
} else {
|
|
996
|
+
// Use inline typia.json.assertParse
|
|
997
|
+
const typeArguments = targetType
|
|
998
|
+
? [targetType]
|
|
999
|
+
: node.typeArguments;
|
|
1000
|
+
|
|
1001
|
+
return factory.updateCallExpression(
|
|
1002
|
+
node,
|
|
1003
|
+
factory.createPropertyAccessExpression(
|
|
1004
|
+
factory.createPropertyAccessExpression(
|
|
1005
|
+
factory.createIdentifier("typia"),
|
|
1006
|
+
"json"
|
|
1007
|
+
),
|
|
1008
|
+
"assertParse"
|
|
1009
|
+
),
|
|
1010
|
+
typeArguments,
|
|
1011
|
+
node.arguments
|
|
1012
|
+
);
|
|
559
1013
|
}
|
|
560
|
-
|
|
561
|
-
};
|
|
1014
|
+
}
|
|
562
1015
|
|
|
563
|
-
|
|
564
|
-
|
|
565
|
-
|
|
566
|
-
|
|
567
|
-
|
|
1016
|
+
return undefined;
|
|
1017
|
+
}
|
|
1018
|
+
|
|
1019
|
+
/**
|
|
1020
|
+
* Check if a type should be skipped (failed in typia on previous attempt).
|
|
1021
|
+
*/
|
|
1022
|
+
private shouldSkipType(typeText: string, skippedTypes: Set<string>): boolean {
|
|
1023
|
+
if (skippedTypes.size === 0) return false;
|
|
1024
|
+
// Normalize: remove all whitespace and semicolons for comparison
|
|
1025
|
+
const normalize = (s: string) => s.replace(/[\s;]+/g, '').toLowerCase();
|
|
1026
|
+
const normalized = normalize(typeText);
|
|
1027
|
+
for (const skipped of skippedTypes) {
|
|
1028
|
+
const skippedNormalized = normalize(skipped);
|
|
1029
|
+
if (normalized === skippedNormalized || normalized.includes(skippedNormalized) || skippedNormalized.includes(normalized)) {
|
|
1030
|
+
if (process.env.DEBUG) {
|
|
1031
|
+
console.log(`TYPICAL: Matched skipped type: "${typeText.substring(0, 50)}..." matches "${skipped.substring(0, 50)}..."`);
|
|
1032
|
+
}
|
|
1033
|
+
return true;
|
|
568
1034
|
}
|
|
569
1035
|
}
|
|
1036
|
+
return false;
|
|
1037
|
+
}
|
|
570
1038
|
|
|
571
|
-
|
|
572
|
-
|
|
573
|
-
|
|
574
|
-
|
|
1039
|
+
/**
|
|
1040
|
+
* Create an AST visitor function for transforming a source file.
|
|
1041
|
+
* The visitor handles JSON calls, type casts, and function declarations.
|
|
1042
|
+
*/
|
|
1043
|
+
private createVisitor(
|
|
1044
|
+
ctx: TransformContext,
|
|
1045
|
+
typeChecker: ts.TypeChecker,
|
|
1046
|
+
skippedTypes: Set<string>,
|
|
1047
|
+
state: FileTransformState
|
|
1048
|
+
): (node: ts.Node) => ts.Node {
|
|
1049
|
+
const { ts } = ctx;
|
|
1050
|
+
const shouldSkipType = (typeText: string) => this.shouldSkipType(typeText, skippedTypes);
|
|
575
1051
|
|
|
576
|
-
|
|
1052
|
+
// Forward declaration for mutual recursion
|
|
1053
|
+
let transformFunction: (func: ts.FunctionDeclaration | ts.ArrowFunction | ts.MethodDeclaration) => ts.Node;
|
|
577
1054
|
|
|
578
1055
|
const visit = (node: ts.Node): ts.Node => {
|
|
579
1056
|
// Transform JSON calls first (before they get wrapped in functions)
|
|
@@ -586,127 +1063,12 @@ export class TypicalTransformer {
|
|
|
586
1063
|
ts.isIdentifier(propertyAccess.expression) &&
|
|
587
1064
|
propertyAccess.expression.text === "JSON"
|
|
588
1065
|
) {
|
|
589
|
-
|
|
590
|
-
|
|
591
|
-
|
|
592
|
-
|
|
593
|
-
// First check if the argument type is 'any' - if so, skip transformation
|
|
594
|
-
if (node.arguments.length > 0) {
|
|
595
|
-
const arg = node.arguments[0];
|
|
596
|
-
const argType = typeChecker.getTypeAtLocation(arg);
|
|
597
|
-
if (this.isAnyOrUnknownTypeFlags(argType)) {
|
|
598
|
-
return node; // Don't transform JSON.stringify for any/unknown types
|
|
599
|
-
}
|
|
600
|
-
}
|
|
601
|
-
|
|
602
|
-
if (this.config.reusableValidators) {
|
|
603
|
-
// Infer type from argument
|
|
604
|
-
const arg = node.arguments[0];
|
|
605
|
-
const { typeText, typeNode } = this.inferStringifyType(arg, typeChecker, ctx);
|
|
606
|
-
|
|
607
|
-
const stringifierName = this.getOrCreateStringifier(typeText, typeNode);
|
|
608
|
-
return ctx.factory.createCallExpression(
|
|
609
|
-
ctx.factory.createIdentifier(stringifierName),
|
|
610
|
-
undefined,
|
|
611
|
-
node.arguments
|
|
612
|
-
);
|
|
613
|
-
} else {
|
|
614
|
-
// Use inline typia.json.stringify
|
|
615
|
-
return ctx.factory.updateCallExpression(
|
|
616
|
-
node,
|
|
617
|
-
ctx.factory.createPropertyAccessExpression(
|
|
618
|
-
ctx.factory.createPropertyAccessExpression(
|
|
619
|
-
ctx.factory.createIdentifier("typia"),
|
|
620
|
-
"json"
|
|
621
|
-
),
|
|
622
|
-
"stringify"
|
|
623
|
-
),
|
|
624
|
-
node.typeArguments,
|
|
625
|
-
node.arguments
|
|
626
|
-
);
|
|
627
|
-
}
|
|
628
|
-
} else if (propertyAccess.name.text === "parse") {
|
|
629
|
-
// For JSON.parse, we need to infer the expected type from context
|
|
630
|
-
// Check if this is part of a variable declaration or type assertion
|
|
631
|
-
let targetType: ts.TypeNode | undefined;
|
|
632
|
-
|
|
633
|
-
// Look for type annotations in parent nodes
|
|
634
|
-
let parent = node.parent;
|
|
635
|
-
while (parent) {
|
|
636
|
-
if (ts.isVariableDeclaration(parent) && parent.type) {
|
|
637
|
-
targetType = parent.type;
|
|
638
|
-
break;
|
|
639
|
-
} else if (ts.isAsExpression(parent)) {
|
|
640
|
-
targetType = parent.type;
|
|
641
|
-
break;
|
|
642
|
-
} else if (ts.isReturnStatement(parent)) {
|
|
643
|
-
// Look for function return type
|
|
644
|
-
let funcParent = parent.parent;
|
|
645
|
-
while (funcParent) {
|
|
646
|
-
if (
|
|
647
|
-
(ts.isFunctionDeclaration(funcParent) ||
|
|
648
|
-
ts.isArrowFunction(funcParent) ||
|
|
649
|
-
ts.isMethodDeclaration(funcParent)) &&
|
|
650
|
-
funcParent.type
|
|
651
|
-
) {
|
|
652
|
-
targetType = funcParent.type;
|
|
653
|
-
break;
|
|
654
|
-
}
|
|
655
|
-
funcParent = funcParent.parent;
|
|
656
|
-
}
|
|
657
|
-
break;
|
|
658
|
-
} else if (ts.isArrowFunction(parent) && parent.type) {
|
|
659
|
-
// Arrow function with expression body (not block)
|
|
660
|
-
// e.g., (s: string): User => JSON.parse(s)
|
|
661
|
-
targetType = parent.type;
|
|
662
|
-
break;
|
|
663
|
-
}
|
|
664
|
-
parent = parent.parent;
|
|
665
|
-
}
|
|
666
|
-
|
|
667
|
-
if (targetType && this.isAnyOrUnknownType(targetType)) {
|
|
668
|
-
// Don't transform JSON.parse for any/unknown types
|
|
669
|
-
return node;
|
|
670
|
-
}
|
|
671
|
-
|
|
672
|
-
// If we can't determine the target type and there's no explicit type argument,
|
|
673
|
-
// don't transform - we can't validate against an unknown type
|
|
674
|
-
if (!targetType && !node.typeArguments) {
|
|
675
|
-
return node;
|
|
676
|
-
}
|
|
677
|
-
|
|
678
|
-
if (this.config.reusableValidators && targetType) {
|
|
679
|
-
// Use reusable parser - use typeNode text to preserve local aliases
|
|
680
|
-
const typeText = this.getTypeKey(targetType, typeChecker);
|
|
681
|
-
const parserName = this.getOrCreateParser(typeText, targetType);
|
|
682
|
-
|
|
683
|
-
const newCall = ctx.factory.createCallExpression(
|
|
684
|
-
ctx.factory.createIdentifier(parserName),
|
|
685
|
-
undefined,
|
|
686
|
-
node.arguments
|
|
687
|
-
);
|
|
688
|
-
|
|
689
|
-
return newCall;
|
|
690
|
-
} else {
|
|
691
|
-
// Use inline typia.json.assertParse
|
|
692
|
-
const typeArguments = targetType
|
|
693
|
-
? [targetType]
|
|
694
|
-
: node.typeArguments;
|
|
695
|
-
|
|
696
|
-
return ctx.factory.updateCallExpression(
|
|
697
|
-
node,
|
|
698
|
-
ctx.factory.createPropertyAccessExpression(
|
|
699
|
-
ctx.factory.createPropertyAccessExpression(
|
|
700
|
-
ctx.factory.createIdentifier("typia"),
|
|
701
|
-
"json"
|
|
702
|
-
),
|
|
703
|
-
"assertParse"
|
|
704
|
-
),
|
|
705
|
-
typeArguments,
|
|
706
|
-
node.arguments
|
|
707
|
-
);
|
|
708
|
-
}
|
|
1066
|
+
const transformed = this.transformJSONCall(node, ctx, typeChecker, shouldSkipType);
|
|
1067
|
+
if (transformed) {
|
|
1068
|
+
state.needsTypiaImport = true;
|
|
1069
|
+
return transformed;
|
|
709
1070
|
}
|
|
1071
|
+
return node;
|
|
710
1072
|
}
|
|
711
1073
|
}
|
|
712
1074
|
|
|
@@ -745,7 +1107,7 @@ export class TypicalTransformer {
|
|
|
745
1107
|
return ctx.ts.visitEachChild(node, visit, ctx.context);
|
|
746
1108
|
}
|
|
747
1109
|
|
|
748
|
-
needsTypiaImport = true;
|
|
1110
|
+
state.needsTypiaImport = true;
|
|
749
1111
|
|
|
750
1112
|
// Visit the expression first to transform any nested casts
|
|
751
1113
|
const visitedExpression = ctx.ts.visitNode(node.expression, visit) as ts.Expression;
|
|
@@ -776,26 +1138,26 @@ export class TypicalTransformer {
|
|
|
776
1138
|
|
|
777
1139
|
// Transform function declarations
|
|
778
1140
|
if (ts.isFunctionDeclaration(node)) {
|
|
779
|
-
needsTypiaImport = true;
|
|
1141
|
+
state.needsTypiaImport = true;
|
|
780
1142
|
return transformFunction(node);
|
|
781
1143
|
}
|
|
782
1144
|
|
|
783
1145
|
// Transform arrow functions
|
|
784
1146
|
if (ts.isArrowFunction(node)) {
|
|
785
|
-
needsTypiaImport = true;
|
|
1147
|
+
state.needsTypiaImport = true;
|
|
786
1148
|
return transformFunction(node);
|
|
787
1149
|
}
|
|
788
1150
|
|
|
789
1151
|
// Transform method declarations
|
|
790
1152
|
if (ts.isMethodDeclaration(node)) {
|
|
791
|
-
needsTypiaImport = true;
|
|
1153
|
+
state.needsTypiaImport = true;
|
|
792
1154
|
return transformFunction(node);
|
|
793
1155
|
}
|
|
794
1156
|
|
|
795
1157
|
return ctx.ts.visitEachChild(node, visit, ctx.context);
|
|
796
1158
|
};
|
|
797
1159
|
|
|
798
|
-
|
|
1160
|
+
transformFunction = (
|
|
799
1161
|
func: ts.FunctionDeclaration | ts.ArrowFunction | ts.MethodDeclaration
|
|
800
1162
|
): ts.Node => {
|
|
801
1163
|
const body = func.body;
|
|
@@ -859,7 +1221,7 @@ export class TypicalTransformer {
|
|
|
859
1221
|
: null;
|
|
860
1222
|
|
|
861
1223
|
if (validatorName) {
|
|
862
|
-
needsTypiaImport = true;
|
|
1224
|
+
state.needsTypiaImport = true;
|
|
863
1225
|
visitedBody = ctx.factory.createCallExpression(
|
|
864
1226
|
ctx.factory.createPropertyAccessExpression(
|
|
865
1227
|
visitedBody,
|
|
@@ -957,9 +1319,12 @@ export class TypicalTransformer {
|
|
|
957
1319
|
undefined,
|
|
958
1320
|
[paramIdentifier]
|
|
959
1321
|
);
|
|
960
|
-
|
|
1322
|
+
let assertCall: ts.Statement =
|
|
961
1323
|
ctx.factory.createExpressionStatement(validatorCall);
|
|
962
1324
|
|
|
1325
|
+
// Add source map marker pointing to the parameter's type annotation
|
|
1326
|
+
assertCall = addSourceMapMarker(assertCall, ctx.sourceFile, param.type!);
|
|
1327
|
+
|
|
963
1328
|
validationStatements.push(assertCall);
|
|
964
1329
|
} else {
|
|
965
1330
|
// Use inline typia.assert calls
|
|
@@ -974,9 +1339,12 @@ export class TypicalTransformer {
|
|
|
974
1339
|
[param.type],
|
|
975
1340
|
[paramIdentifier]
|
|
976
1341
|
);
|
|
977
|
-
|
|
1342
|
+
let assertCall: ts.Statement =
|
|
978
1343
|
ctx.factory.createExpressionStatement(callExpression);
|
|
979
1344
|
|
|
1345
|
+
// Add source map marker pointing to the parameter's type annotation
|
|
1346
|
+
assertCall = addSourceMapMarker(assertCall, ctx.sourceFile, param.type!);
|
|
1347
|
+
|
|
980
1348
|
validationStatements.push(assertCall);
|
|
981
1349
|
}
|
|
982
1350
|
}
|
|
@@ -1169,7 +1537,12 @@ export class TypicalTransformer {
|
|
|
1169
1537
|
[validatorExpr]
|
|
1170
1538
|
);
|
|
1171
1539
|
|
|
1172
|
-
|
|
1540
|
+
let updatedReturn = ctx.factory.updateReturnStatement(node, thenCall);
|
|
1541
|
+
// Add source map marker pointing to the return type annotation
|
|
1542
|
+
if (returnType && returnType.pos >= 0) {
|
|
1543
|
+
updatedReturn = addSourceMapMarker(updatedReturn, ctx.sourceFile, returnType);
|
|
1544
|
+
}
|
|
1545
|
+
return updatedReturn;
|
|
1173
1546
|
}
|
|
1174
1547
|
|
|
1175
1548
|
// For async functions, we need to await the expression before validating
|
|
@@ -1201,7 +1574,12 @@ export class TypicalTransformer {
|
|
|
1201
1574
|
[expressionToValidate]
|
|
1202
1575
|
);
|
|
1203
1576
|
|
|
1204
|
-
|
|
1577
|
+
let updatedReturn = ctx.factory.updateReturnStatement(node, validatorCall);
|
|
1578
|
+
// Add source map marker pointing to the return type annotation
|
|
1579
|
+
if (returnType && returnType.pos >= 0) {
|
|
1580
|
+
updatedReturn = addSourceMapMarker(updatedReturn, ctx.sourceFile, returnType);
|
|
1581
|
+
}
|
|
1582
|
+
return updatedReturn;
|
|
1205
1583
|
} else {
|
|
1206
1584
|
// Use inline typia.assert calls
|
|
1207
1585
|
const typiaIdentifier = ctx.factory.createIdentifier("typia");
|
|
@@ -1216,7 +1594,12 @@ export class TypicalTransformer {
|
|
|
1216
1594
|
[expressionToValidate]
|
|
1217
1595
|
);
|
|
1218
1596
|
|
|
1219
|
-
|
|
1597
|
+
let updatedReturn = ctx.factory.updateReturnStatement(node, callExpression);
|
|
1598
|
+
// Add source map marker pointing to the return type annotation
|
|
1599
|
+
if (returnType && returnType.pos >= 0) {
|
|
1600
|
+
updatedReturn = addSourceMapMarker(updatedReturn, ctx.sourceFile, returnType);
|
|
1601
|
+
}
|
|
1602
|
+
return updatedReturn;
|
|
1220
1603
|
}
|
|
1221
1604
|
}
|
|
1222
1605
|
return ctx.ts.visitEachChild(node, returnTransformer, ctx.context);
|
|
@@ -1273,13 +1656,43 @@ export class TypicalTransformer {
|
|
|
1273
1656
|
return func;
|
|
1274
1657
|
};
|
|
1275
1658
|
|
|
1659
|
+
return visit;
|
|
1660
|
+
}
|
|
1661
|
+
|
|
1662
|
+
/**
|
|
1663
|
+
* Transform a single source file with TypeScript AST
|
|
1664
|
+
*/
|
|
1665
|
+
private transformSourceFile(
|
|
1666
|
+
sourceFile: ts.SourceFile,
|
|
1667
|
+
ctx: TransformContext,
|
|
1668
|
+
typeChecker: ts.TypeChecker,
|
|
1669
|
+
skippedTypes: Set<string> = new Set()
|
|
1670
|
+
): ts.SourceFile {
|
|
1671
|
+
if (!sourceFile.fileName.includes('transformer.test.ts')) {
|
|
1672
|
+
// Check if this file has already been transformed by us
|
|
1673
|
+
const sourceText = sourceFile.getFullText();
|
|
1674
|
+
if (sourceText.includes('__typical_' + 'assert_') || sourceText.includes('__typical_' + 'stringify_') || sourceText.includes('__typical_' + 'parse_')) {
|
|
1675
|
+
throw new Error(`File ${sourceFile.fileName} has already been transformed by Typical! Double transformation detected.`);
|
|
1676
|
+
}
|
|
1677
|
+
}
|
|
1678
|
+
|
|
1679
|
+
// Reset caches for each file
|
|
1680
|
+
this.typeValidators.clear();
|
|
1681
|
+
this.typeStringifiers.clear();
|
|
1682
|
+
this.typeParsers.clear();
|
|
1683
|
+
|
|
1684
|
+
// Create state object to track mutable state across visitor calls
|
|
1685
|
+
const state: FileTransformState = { needsTypiaImport: false };
|
|
1686
|
+
|
|
1687
|
+
// Create visitor and transform the source file
|
|
1688
|
+
const visit = this.createVisitor(ctx, typeChecker, skippedTypes, state);
|
|
1276
1689
|
let transformedSourceFile = ctx.ts.visitNode(
|
|
1277
1690
|
sourceFile,
|
|
1278
1691
|
visit
|
|
1279
1692
|
) as ts.SourceFile;
|
|
1280
1693
|
|
|
1281
1694
|
// Add typia import and validator statements if needed
|
|
1282
|
-
if (needsTypiaImport) {
|
|
1695
|
+
if (state.needsTypiaImport) {
|
|
1283
1696
|
transformedSourceFile = this.addTypiaImport(transformedSourceFile, ctx);
|
|
1284
1697
|
|
|
1285
1698
|
// Add validator statements after imports (only if using reusable validators)
|
|
@@ -1308,13 +1721,213 @@ export class TypicalTransformer {
|
|
|
1308
1721
|
}
|
|
1309
1722
|
}
|
|
1310
1723
|
|
|
1724
|
+
// Add line markers to original statements for source map identity mappings.
|
|
1725
|
+
// This ensures original source lines map to themselves rather than inheriting
|
|
1726
|
+
// from previous @T markers.
|
|
1727
|
+
transformedSourceFile = this.addLineMarkersToStatements(transformedSourceFile, ctx, sourceFile);
|
|
1728
|
+
|
|
1311
1729
|
return transformedSourceFile;
|
|
1312
1730
|
}
|
|
1313
1731
|
|
|
1732
|
+
  /**
   * Add @L line markers to nodes that have original source positions.
   *
   * This preserves identity mappings for original code, so lines from the
   * source file map back to themselves rather than inheriting the mapping of
   * a preceding @T marker emitted for generated code.
   *
   * Markers must be attached to every node the printer will emit on its own
   * line — including nested members of interfaces and classes — which is why
   * this walks into declaration members and statement bodies instead of only
   * touching top-level statements.
   *
   * @param transformedFile The source file produced by the main visitor pass.
   * @param ctx Transform context supplying the `ts` namespace and node factory.
   * @param originalSourceFile The pre-transform file; passed through to
   *   `addLineMarker` to resolve original line numbers.
   * @returns A new SourceFile with marker comments attached.
   */
  private addLineMarkersToStatements(
    transformedFile: ts.SourceFile,
    ctx: TransformContext,
    originalSourceFile: ts.SourceFile
  ): ts.SourceFile {
    const { ts, factory } = ctx;

    // A node is already marked if it carries a synthetic @T: or @L: leading
    // comment; we never stack a second marker on top of an existing one.
    const hasMarker = (node: ts.Node): boolean => {
      const existingComments = ts.getSyntheticLeadingComments(node);
      return existingComments?.some(c =>
        c.text.startsWith('@T:') || c.text.startsWith('@L:')
      ) ?? false;
    };

    // Only nodes still pointing at real text positions get markers;
    // synthesized nodes have pos === -1 and are excluded by this check.
    const hasOriginalPosition = (node: ts.Node): boolean => {
      return node.pos >= 0 && node.end > node.pos;
    };

    // Recursively process a node and its children to add line markers.
    // NOTE(review): `addLineMarker` is a helper defined elsewhere in this
    // module — presumably it attaches the @L synthetic comment; its exact
    // contract is not visible here.
    const addMarkersToNode = <T extends ts.Node>(node: T): T => {
      // Handle interface declarations - mark each member, then the interface.
      if (ts.isInterfaceDeclaration(node)) {
        const markedMembers = node.members.map(member => {
          if (!hasMarker(member) && hasOriginalPosition(member)) {
            return addLineMarker(member, originalSourceFile, member);
          }
          return member;
        });
        const updatedNode = factory.updateInterfaceDeclaration(
          node,
          node.modifiers,
          node.name,
          node.typeParameters,
          node.heritageClauses,
          markedMembers
        );
        // Also mark the interface itself (position taken from the original
        // node, since updatedNode may be a fresh synthesized copy).
        if (!hasMarker(updatedNode) && hasOriginalPosition(node)) {
          return addLineMarker(updatedNode, originalSourceFile, node) as unknown as T;
        }
        return updatedNode as unknown as T;
      }

      // Handle type alias declarations: a single line, so one marker suffices.
      if (ts.isTypeAliasDeclaration(node)) {
        if (!hasMarker(node) && hasOriginalPosition(node)) {
          return addLineMarker(node, originalSourceFile, node);
        }
        return node;
      }

      // Handle class declarations - mark members, recurse into method bodies.
      if (ts.isClassDeclaration(node)) {
        const markedMembers = node.members.map(member => {
          // Recursively process method bodies so statements inside methods
          // get their own identity mappings.
          let processedMember = member;
          if (ts.isMethodDeclaration(member) && member.body) {
            const markedBody = addMarkersToBlock(member.body);
            if (markedBody !== member.body) {
              processedMember = factory.updateMethodDeclaration(
                member,
                member.modifiers,
                member.asteriskToken,
                member.name,
                member.questionToken,
                member.typeParameters,
                member.parameters,
                member.type,
                markedBody
              );
            }
          }
          // Position check uses the ORIGINAL member, not the rebuilt one.
          if (!hasMarker(processedMember) && hasOriginalPosition(member)) {
            return addLineMarker(processedMember, originalSourceFile, member);
          }
          return processedMember;
        });
        const updatedNode = factory.updateClassDeclaration(
          node,
          node.modifiers,
          node.name,
          node.typeParameters,
          node.heritageClauses,
          markedMembers
        );
        if (!hasMarker(updatedNode) && hasOriginalPosition(node)) {
          return addLineMarker(updatedNode, originalSourceFile, node) as unknown as T;
        }
        return updatedNode as unknown as T;
      }

      // Handle function declarations - mark the body statements first, then
      // the declaration itself.
      if (ts.isFunctionDeclaration(node) && node.body) {
        const markedBody = addMarkersToBlock(node.body);
        const updatedNode = factory.updateFunctionDeclaration(
          node,
          node.modifiers,
          node.asteriskToken,
          node.name,
          node.typeParameters,
          node.parameters,
          node.type,
          markedBody
        );
        if (!hasMarker(updatedNode) && hasOriginalPosition(node)) {
          return addLineMarker(updatedNode, originalSourceFile, node) as unknown as T;
        }
        return updatedNode as unknown as T;
      }

      // Handle variable statements
      if (ts.isVariableStatement(node)) {
        if (!hasMarker(node) && hasOriginalPosition(node)) {
          return addLineMarker(node, originalSourceFile, node);
        }
        return node;
      }

      // Handle expression statements
      if (ts.isExpressionStatement(node)) {
        if (!hasMarker(node) && hasOriginalPosition(node)) {
          return addLineMarker(node, originalSourceFile, node);
        }
        return node;
      }

      // Handle return statements
      if (ts.isReturnStatement(node)) {
        if (!hasMarker(node) && hasOriginalPosition(node)) {
          return addLineMarker(node, originalSourceFile, node);
        }
        return node;
      }

      // Handle if statements: recurse into block-shaped branches only;
      // single-statement branches fall through to the default marker below
      // when visited as part of some other construct.
      if (ts.isIfStatement(node)) {
        let thenStmt = node.thenStatement;
        let elseStmt = node.elseStatement;

        if (ts.isBlock(thenStmt)) {
          thenStmt = addMarkersToBlock(thenStmt);
        }
        if (elseStmt && ts.isBlock(elseStmt)) {
          elseStmt = addMarkersToBlock(elseStmt);
        }

        const updatedNode = factory.updateIfStatement(node, node.expression, thenStmt, elseStmt);
        if (!hasMarker(updatedNode) && hasOriginalPosition(node)) {
          return addLineMarker(updatedNode, originalSourceFile, node) as unknown as T;
        }
        return updatedNode as unknown as T;
      }

      // Default: just mark the node if it has original position
      if (!hasMarker(node) && hasOriginalPosition(node)) {
        return addLineMarker(node, originalSourceFile, node);
      }

      return node;
    };

    // Add markers to every statement in a block (one level; addMarkersToNode
    // recurses into nested structures itself).
    const addMarkersToBlock = (block: ts.Block): ts.Block => {
      const markedStatements = block.statements.map(stmt => addMarkersToNode(stmt));
      return factory.updateBlock(block, markedStatements);
    };

    // Process all top-level statements
    const newStatements = factory.createNodeArray(
      transformedFile.statements.map(stmt => addMarkersToNode(stmt))
    );

    return factory.updateSourceFile(transformedFile, newStatements);
  }
|
|
1916
|
+
|
|
1314
1917
|
public shouldTransformFile(fileName: string): boolean {
|
|
1315
1918
|
return shouldTransformFile(fileName, this.config);
|
|
1316
1919
|
}
|
|
1317
1920
|
|
|
1921
|
+
/**
|
|
1922
|
+
* Get pre-compiled ignore patterns, caching them for performance.
|
|
1923
|
+
*/
|
|
1924
|
+
private getCompiledPatterns(): CompiledIgnorePatterns {
|
|
1925
|
+
if (!this.compiledPatterns) {
|
|
1926
|
+
this.compiledPatterns = getCompiledIgnorePatterns(this.config);
|
|
1927
|
+
}
|
|
1928
|
+
return this.compiledPatterns;
|
|
1929
|
+
}
|
|
1930
|
+
|
|
1318
1931
|
/**
|
|
1319
1932
|
* Check if a TypeNode represents a type that shouldn't be validated.
|
|
1320
1933
|
* This includes:
|
|
@@ -1405,12 +2018,8 @@ export class TypicalTransformer {
|
|
|
1405
2018
|
* Also handles union types: "Document | Element" is ignored if "Document" or "Element" is in ignoreTypes.
|
|
1406
2019
|
*/
|
|
1407
2020
|
private isIgnoredType(typeName: string, typeChecker?: ts.TypeChecker, type?: ts.Type): boolean {
|
|
1408
|
-
|
|
1409
|
-
|
|
1410
|
-
const domPatterns = this.config.ignoreDOMTypes !== false ? DOM_TYPES_TO_IGNORE : [];
|
|
1411
|
-
const patterns = [...userPatterns, ...domPatterns];
|
|
1412
|
-
|
|
1413
|
-
if (patterns.length === 0) return false;
|
|
2021
|
+
const compiled = this.getCompiledPatterns();
|
|
2022
|
+
if (compiled.allPatterns.length === 0) return false;
|
|
1414
2023
|
|
|
1415
2024
|
// For union types, check each constituent
|
|
1416
2025
|
if (type && type.isUnion()) {
|
|
@@ -1419,12 +2028,12 @@ export class TypicalTransformer {
|
|
|
1419
2028
|
);
|
|
1420
2029
|
if (nonNullTypes.length === 0) return false;
|
|
1421
2030
|
// All non-null types must be ignored
|
|
1422
|
-
return nonNullTypes.every(t => this.isIgnoredSingleType(t,
|
|
2031
|
+
return nonNullTypes.every(t => this.isIgnoredSingleType(t, compiled.allPatterns, typeChecker));
|
|
1423
2032
|
}
|
|
1424
2033
|
|
|
1425
2034
|
// For non-union types, check directly
|
|
1426
2035
|
if (type && typeChecker) {
|
|
1427
|
-
return this.isIgnoredSingleType(type,
|
|
2036
|
+
return this.isIgnoredSingleType(type, compiled.allPatterns, typeChecker);
|
|
1428
2037
|
}
|
|
1429
2038
|
|
|
1430
2039
|
// Fallback: string-based matching for union types like "Document | Element | null"
|
|
@@ -1432,25 +2041,42 @@ export class TypicalTransformer {
|
|
|
1432
2041
|
const nonNullParts = typeParts.filter(t => t !== 'null' && t !== 'undefined');
|
|
1433
2042
|
if (nonNullParts.length === 0) return false;
|
|
1434
2043
|
|
|
1435
|
-
return nonNullParts.every(part => this.
|
|
2044
|
+
return nonNullParts.every(part => this.matchesIgnorePatternCompiled(part, compiled.allPatterns));
|
|
1436
2045
|
}
|
|
1437
2046
|
|
|
1438
2047
|
/**
|
|
1439
2048
|
* Check if a single type (not a union) should be ignored.
|
|
1440
2049
|
* Checks both the type name and its base classes.
|
|
2050
|
+
* Uses Set-based cycle detection to handle recursive type hierarchies.
|
|
2051
|
+
* @param patterns Pre-compiled RegExp patterns
|
|
2052
|
+
* @param visited Set of type IDs already visited (for cycle detection)
|
|
1441
2053
|
*/
|
|
1442
|
-
private isIgnoredSingleType(
|
|
1443
|
-
|
|
1444
|
-
|
|
2054
|
+
private isIgnoredSingleType(
|
|
2055
|
+
type: ts.Type,
|
|
2056
|
+
patterns: RegExp[],
|
|
2057
|
+
typeChecker?: ts.TypeChecker,
|
|
2058
|
+
visited: Set<number> = new Set()
|
|
2059
|
+
): boolean {
|
|
2060
|
+
// Use type ID for cycle detection (more precise than depth counter)
|
|
2061
|
+
const typeId = (type as { id?: number }).id;
|
|
2062
|
+
if (typeId !== undefined) {
|
|
2063
|
+
if (visited.has(typeId)) {
|
|
2064
|
+
if (process.env.DEBUG) {
|
|
2065
|
+
console.log(`TYPICAL: Cycle detected for type "${type.symbol?.name || '?'}" (id: ${typeId}), skipping`);
|
|
2066
|
+
}
|
|
2067
|
+
return false; // Already visited this type, not ignored
|
|
2068
|
+
}
|
|
2069
|
+
visited.add(typeId);
|
|
2070
|
+
}
|
|
1445
2071
|
|
|
1446
2072
|
const typeName = type.symbol?.name || '';
|
|
1447
2073
|
|
|
1448
2074
|
if (process.env.DEBUG) {
|
|
1449
|
-
console.log(`TYPICAL: isIgnoredSingleType checking type: "${typeName}" (
|
|
2075
|
+
console.log(`TYPICAL: isIgnoredSingleType checking type: "${typeName}" (visited: ${visited.size})`);
|
|
1450
2076
|
}
|
|
1451
2077
|
|
|
1452
2078
|
// Check direct name match
|
|
1453
|
-
if (this.
|
|
2079
|
+
if (this.matchesIgnorePatternCompiled(typeName, patterns)) {
|
|
1454
2080
|
if (process.env.DEBUG) {
|
|
1455
2081
|
console.log(`TYPICAL: Type "${typeName}" matched ignore pattern directly`);
|
|
1456
2082
|
}
|
|
@@ -1464,7 +2090,7 @@ export class TypicalTransformer {
|
|
|
1464
2090
|
console.log(`TYPICAL: Type "${typeName}" has ${baseTypes.length} base types: ${baseTypes.map(t => t.symbol?.name || '?').join(', ')}`);
|
|
1465
2091
|
}
|
|
1466
2092
|
for (const baseType of baseTypes) {
|
|
1467
|
-
if (this.isIgnoredSingleType(baseType, patterns, typeChecker,
|
|
2093
|
+
if (this.isIgnoredSingleType(baseType, patterns, typeChecker, visited)) {
|
|
1468
2094
|
if (process.env.DEBUG) {
|
|
1469
2095
|
console.log(`TYPICAL: Type "${typeName}" ignored because base type "${baseType.symbol?.name}" is ignored`);
|
|
1470
2096
|
}
|
|
@@ -1484,7 +2110,7 @@ export class TypicalTransformer {
|
|
|
1484
2110
|
if (process.env.DEBUG) {
|
|
1485
2111
|
console.log(`TYPICAL: Type "${typeName}" extends "${baseTypeName}" (from heritage clause)`);
|
|
1486
2112
|
}
|
|
1487
|
-
if (this.
|
|
2113
|
+
if (this.matchesIgnorePatternCompiled(baseTypeName, patterns)) {
|
|
1488
2114
|
if (process.env.DEBUG) {
|
|
1489
2115
|
console.log(`TYPICAL: Type "${typeName}" ignored because it extends "${baseTypeName}"`);
|
|
1490
2116
|
}
|
|
@@ -1493,7 +2119,7 @@ export class TypicalTransformer {
|
|
|
1493
2119
|
// Recursively check the heritage type
|
|
1494
2120
|
if (typeChecker) {
|
|
1495
2121
|
const heritageTypeObj = typeChecker.getTypeAtLocation(heritageType);
|
|
1496
|
-
if (this.isIgnoredSingleType(heritageTypeObj, patterns, typeChecker,
|
|
2122
|
+
if (this.isIgnoredSingleType(heritageTypeObj, patterns, typeChecker, visited)) {
|
|
1497
2123
|
return true;
|
|
1498
2124
|
}
|
|
1499
2125
|
|
|
@@ -1506,7 +2132,7 @@ export class TypicalTransformer {
|
|
|
1506
2132
|
if (process.env.DEBUG) {
|
|
1507
2133
|
console.log(`TYPICAL: Type "${typeName}" mixin arg: "${argType.symbol?.name}" (from call expression)`);
|
|
1508
2134
|
}
|
|
1509
|
-
if (this.isIgnoredSingleType(argType, patterns, typeChecker,
|
|
2135
|
+
if (this.isIgnoredSingleType(argType, patterns, typeChecker, visited)) {
|
|
1510
2136
|
if (process.env.DEBUG) {
|
|
1511
2137
|
console.log(`TYPICAL: Type "${typeName}" ignored because mixin argument "${argType.symbol?.name}" is ignored`);
|
|
1512
2138
|
}
|
|
@@ -1526,16 +2152,11 @@ export class TypicalTransformer {
|
|
|
1526
2152
|
}
|
|
1527
2153
|
|
|
1528
2154
|
/**
|
|
1529
|
-
* Check if a single type name matches any ignore pattern.
|
|
2155
|
+
* Check if a single type name matches any pre-compiled ignore pattern.
|
|
2156
|
+
* @param patterns Pre-compiled RegExp patterns
|
|
1530
2157
|
*/
|
|
1531
|
-
private
|
|
1532
|
-
return patterns.some(pattern =>
|
|
1533
|
-
// Convert glob pattern to regex: "React.*" -> /^React\..*$/
|
|
1534
|
-
const regexStr = '^' + pattern
|
|
1535
|
-
.replace(/[.+^${}()|[\]\\]/g, '\\$&') // Escape special regex chars except *
|
|
1536
|
-
.replace(/\*/g, '.*') + '$';
|
|
1537
|
-
return new RegExp(regexStr).test(typeName);
|
|
1538
|
-
});
|
|
2158
|
+
private matchesIgnorePatternCompiled(typeName: string, patterns: RegExp[]): boolean {
|
|
2159
|
+
return patterns.some(pattern => pattern.test(typeName));
|
|
1539
2160
|
}
|
|
1540
2161
|
|
|
1541
2162
|
/**
|
|
@@ -1979,13 +2600,20 @@ export class TypicalTransformer {
|
|
|
1979
2600
|
[]
|
|
1980
2601
|
);
|
|
1981
2602
|
|
|
1982
|
-
|
|
2603
|
+
let declaration: ts.Statement = factory.createVariableStatement(
|
|
1983
2604
|
undefined,
|
|
1984
2605
|
factory.createVariableDeclarationList(
|
|
1985
2606
|
[factory.createVariableDeclaration(name, undefined, undefined, createCall)],
|
|
1986
2607
|
ctx.ts.NodeFlags.Const
|
|
1987
2608
|
)
|
|
1988
2609
|
);
|
|
2610
|
+
|
|
2611
|
+
// Add source map marker pointing to the type node that triggered this validator
|
|
2612
|
+
// This ensures all the expanded typia validation code maps back to the original type
|
|
2613
|
+
if (typeNode.pos >= 0) {
|
|
2614
|
+
declaration = addSourceMapMarker(declaration, ctx.sourceFile, typeNode);
|
|
2615
|
+
}
|
|
2616
|
+
|
|
1989
2617
|
statements.push(declaration);
|
|
1990
2618
|
}
|
|
1991
2619
|
}
|