@atmyapp/cli 0.0.2 → 0.0.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +138 -55
- package/dist/cli/commands/migrate.js +55 -10
- package/dist/cli/utils/content-processor.js +16 -7
- package/dist/cli/utils/definition-processor.js +7 -4
- package/dist/cli/utils/index.d.ts +2 -0
- package/dist/cli/utils/index.js +2 -0
- package/dist/cli/utils/parallel-schema-processor.d.ts +5 -0
- package/dist/cli/utils/parallel-schema-processor.js +147 -0
- package/dist/cli/utils/schema-processor.js +138 -16
- package/dist/cli/utils/worker-file-processor.d.ts +7 -0
- package/dist/cli/utils/worker-file-processor.js +255 -0
- package/dist/cli/utils/worker-pool.d.ts +25 -0
- package/dist/cli/utils/worker-pool.js +126 -0
- package/package.json +2 -2
|
@@ -0,0 +1,147 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
|
3
|
+
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
|
4
|
+
return new (P || (P = Promise))(function (resolve, reject) {
|
|
5
|
+
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
|
6
|
+
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
|
7
|
+
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
|
|
8
|
+
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
|
9
|
+
});
|
|
10
|
+
};
|
|
11
|
+
var __importDefault = (this && this.__importDefault) || function (mod) {
|
|
12
|
+
return (mod && mod.__esModule) ? mod : { "default": mod };
|
|
13
|
+
};
|
|
14
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
15
|
+
exports.scanFilesOptimized = scanFilesOptimized;
|
|
16
|
+
exports.processFilesParallel = processFilesParallel;
|
|
17
|
+
exports.optimizedMigrationPipeline = optimizedMigrationPipeline;
|
|
18
|
+
const fast_glob_1 = __importDefault(require("fast-glob"));
|
|
19
|
+
const worker_pool_1 = require("./worker-pool");
|
|
20
|
+
const path_1 = __importDefault(require("path"));
|
|
21
|
+
// Enhanced version of scanFiles with better performance.
// Globs `patterns` relative to the current working directory, skipping
// node_modules / test / dist / .ama trees, and resolves to absolute paths.
async function scanFilesOptimized(patterns, logger) {
    logger.info("🔍 Scanning files with optimized parallel processing...");
    logger.verbose_log(`Using patterns: ${patterns.join(", ")}`);
    const globOptions = {
        ignore: ["**/node_modules/**", "**/test/**", "**/dist/**", "**/.ama/**"],
        absolute: true,
        cwd: process.cwd(),
        suppressErrors: true, // Don't fail on permission errors
        followSymbolicLinks: false, // Skip symlinks for better performance
    };
    const matchedFiles = await (0, fast_glob_1.default)(patterns, globOptions);
    logger.verbose_log(`Found ${matchedFiles.length} files matching patterns`);
    return matchedFiles;
}
|
|
37
|
+
// Parallel processing of files using worker threads
/**
 * Extracts ATMYAPP content definitions from `files`, fanning the work out to
 * a WorkerPool (or falling back to sequential processing under Jest).
 *
 * @param files           Absolute paths of candidate source files.
 * @param tsconfigPath    tsconfig path forwarded to workers for schema generation.
 * @param continueOnError When false, the first failed file aborts the whole run.
 * @param logger          Logger providing info/warn/error/success/verbose_log.
 * @param maxWorkers      Optional cap on worker threads (pool default otherwise).
 * @returns Promise of { contents, errors, successCount, failureCount }.
 *   NOTE(review): successCount counts extracted definitions while failureCount
 *   counts failed files — confirm callers expect these mixed units.
 */
function processFilesParallel(files, tsconfigPath, continueOnError, logger, maxWorkers) {
    return __awaiter(this, void 0, void 0, function* () {
        const contents = [];
        const errors = [];
        let successCount = 0; // definitions extracted so far (not files)
        let failureCount = 0; // files that failed processing
        logger.info(`📚 Processing ${files.length} files in parallel...`);
        // Filter files that likely contain ATMYAPP exports for better performance
        const relevantFiles = yield filterRelevantFiles(files, logger);
        if (relevantFiles.length === 0) {
            logger.warn("No files with ATMYAPP exports found");
            return { contents, errors, successCount, failureCount };
        }
        logger.info(`🎯 Processing ${relevantFiles.length} relevant files (filtered from ${files.length})`);
        // In test environment, fall back to sequential processing
        // to avoid worker thread module loading issues
        if (process.env.NODE_ENV === "test" || process.env.JEST_WORKER_ID) {
            logger.verbose_log("Using fallback sequential processing in test environment");
            // Import sequential processing functions
            // NOTE(review): `scanFiles` is destructured but never used here.
            const { scanFiles, createProject, processFiles, } = require("./schema-processor");
            const project = createProject(relevantFiles, tsconfigPath, logger);
            const result = processFiles(project.getSourceFiles(), tsconfigPath, continueOnError, logger);
            return result;
        }
        // Create worker tasks
        const tasks = relevantFiles.map((file, index) => ({
            id: `task-${index}-${path_1.default.basename(file)}`,
            filePath: file,
            tsconfigPath,
        }));
        // Process files using worker pool
        // NOTE(review): the pool is not explicitly terminated here; presumably
        // WorkerPool.processFiles tears its threads down internally — confirm.
        const workerPool = new worker_pool_1.WorkerPool(logger, maxWorkers);
        try {
            const results = yield workerPool.processFiles(tasks);
            // Aggregate results
            for (const result of results) {
                if (result.success) {
                    contents.push(...result.contents);
                    successCount += result.contents.length;
                    if (result.contents.length > 0) {
                        logger.verbose_log(`✅ Processed ${result.contents.length} definitions from ${result.id}`);
                    }
                }
                else {
                    failureCount++;
                    const errorMessage = `❌ ${result.id} - ${result.error}`;
                    errors.push(errorMessage);
                    // Fail fast unless the caller asked to continue on errors.
                    if (!continueOnError) {
                        throw new Error(errorMessage);
                    }
                }
            }
            logger.success(`✅ Parallel processing completed: ${successCount} definitions from ${relevantFiles.length} files`);
        }
        catch (error) {
            logger.error("Parallel processing failed:", error);
            throw error;
        }
        return { contents, errors, successCount, failureCount };
    });
}
|
|
99
|
+
// Pre-filter files to only process those likely to contain ATMYAPP exports,
// using a cheap text scan so no TypeScript parsing is wasted on irrelevant files.
async function filterRelevantFiles(files, logger) {
    logger.verbose_log("🔍 Pre-filtering files for ATMYAPP exports...");
    const fs = require("fs").promises;
    const matched = [];
    // Read files in fixed-size batches so we never hold too many
    // concurrent reads in flight at once.
    const BATCH_SIZE = 50;
    for (let start = 0; start < files.length; start += BATCH_SIZE) {
        const batch = files.slice(start, start + BATCH_SIZE);
        const hits = await Promise.all(batch.map(async (candidate) => {
            try {
                // Quick text search for an ATMYAPP export marker.
                const text = await fs.readFile(candidate, "utf8");
                return /export\s+type\s+ATMYAPP\s*=/.test(text) ? candidate : null;
            }
            catch (_readError) {
                // Unreadable files are simply skipped.
                logger.verbose_log(`Skipping unreadable file: ${candidate}`);
                return null;
            }
        }));
        for (const hit of hits) {
            if (hit) {
                matched.push(hit);
            }
        }
    }
    logger.verbose_log(`📊 Filtered to ${matched.length} relevant files from ${files.length} total`);
    return matched;
}
|
|
135
|
+
// Optimized file processing pipeline: scan for candidate files, then hand
// them to the parallel processor, returning the aggregated result.
async function optimizedMigrationPipeline(patterns, tsconfigPath, continueOnError, logger, maxWorkers) {
    // Step 1: Scan files with optimization
    const candidates = await scanFilesOptimized(patterns, logger);
    if (candidates.length === 0) {
        logger.warn("No files found matching patterns");
        return { contents: [], errors: [], successCount: 0, failureCount: 0 };
    }
    // Step 2: Process files in parallel
    return await processFilesParallel(candidates, tsconfigPath, continueOnError, logger, maxWorkers);
}
|
|
@@ -124,9 +124,87 @@ function extractDefinitionTypes(atmyappType, logger) {
|
|
|
124
124
|
}
|
|
125
125
|
return elementTypes;
|
|
126
126
|
}
|
|
127
|
+
// Extract event information directly from TypeScript AST
/**
 * Fallback extractor used when JSON-schema generation yields no properties:
 * looks up the type alias `definitionType` in `file` and, when it is an
 * `AmaCustomEventDef<"id", ["col", ...]>` or `AmaEventDef<"id">` type
 * reference, reads the event id and column names straight out of the literal
 * type arguments.
 *
 * @param file           ts-morph SourceFile containing the alias.
 * @param definitionType Name of the type alias to inspect.
 * @param logger         Logger used for verbose diagnostics.
 * @returns { id, columns } on success (columns is [] for basic events),
 *          or null when the alias is missing or not a recognized event shape.
 */
function extractEventInfoFromAST(file, definitionType, logger) {
    try {
        // Find the type alias declaration for this definition type
        const typeAlias = file.getTypeAlias(definitionType);
        if (!typeAlias) {
            logger.verbose_log(`Could not find type alias for ${definitionType}`);
            return null;
        }
        const typeNode = typeAlias.getTypeNode();
        if (!typeNode) {
            logger.verbose_log(`Type alias ${definitionType} has no type node`);
            return null;
        }
        // Check if this is a type reference (like AmaCustomEventDef<...> or AmaEventDef<...>)
        if (ts_morph_1.Node.isTypeReference(typeNode)) {
            const typeName = typeNode.getTypeName();
            const typeArguments = typeNode.getTypeArguments();
            // Check if this is AmaCustomEventDef or AmaEventDef
            if (ts_morph_1.Node.isIdentifier(typeName)) {
                const typeNameText = typeName.getText();
                if (typeNameText === "AmaCustomEventDef") {
                    if (typeArguments.length >= 2) {
                        // First argument should be the event ID (string literal)
                        const idArg = typeArguments[0];
                        let eventId = null;
                        if (ts_morph_1.Node.isLiteralTypeNode(idArg)) {
                            const literal = idArg.getLiteral();
                            if (ts_morph_1.Node.isStringLiteral(literal)) {
                                eventId = literal.getLiteralValue();
                            }
                        }
                        // Second argument should be the columns (tuple of string literals)
                        const columnsArg = typeArguments[1];
                        let columns = [];
                        if (ts_morph_1.Node.isTupleTypeNode(columnsArg)) {
                            // Non-string tuple members are silently ignored.
                            columnsArg.getElements().forEach((element) => {
                                if (ts_morph_1.Node.isLiteralTypeNode(element)) {
                                    const literal = element.getLiteral();
                                    if (ts_morph_1.Node.isStringLiteral(literal)) {
                                        columns.push(literal.getLiteralValue());
                                    }
                                }
                            });
                        }
                        // Custom events require both an id and at least one column.
                        if (eventId && columns.length > 0) {
                            logger.verbose_log(`AST extraction successful for ${definitionType}: id=${eventId}, columns=[${columns.join(", ")}]`);
                            return { id: eventId, columns };
                        }
                    }
                }
                else if (typeNameText === "AmaEventDef") {
                    // Handle AmaEventDef (basic events) - only has ID argument
                    if (typeArguments.length >= 1) {
                        const idArg = typeArguments[0];
                        let eventId = null;
                        if (ts_morph_1.Node.isLiteralTypeNode(idArg)) {
                            const literal = idArg.getLiteral();
                            if (ts_morph_1.Node.isStringLiteral(literal)) {
                                eventId = literal.getLiteralValue();
                            }
                        }
                        if (eventId) {
                            logger.verbose_log(`AST extraction successful for basic event ${definitionType}: id=${eventId}`);
                            return { id: eventId, columns: [] }; // Basic events have no predefined columns
                        }
                    }
                }
            }
        }
        logger.verbose_log(`Failed to extract event info from AST for ${definitionType}`);
        return null;
    }
    catch (error) {
        // Any ts-morph failure degrades to "no event info" rather than crashing.
        logger.verbose_log(`Error during AST extraction for ${definitionType}: ${error}`);
        return null;
    }
}
|
|
127
205
|
// Processes an ATMYAPP export to extract content definitions
|
|
128
206
|
function processAtmyappExport(atmyappType, file, tsconfigPath, logger) {
|
|
129
|
-
var _a, _b, _c, _d, _e, _f, _g, _h, _j;
|
|
207
|
+
var _a, _b, _c, _d, _e, _f, _g, _h, _j, _k, _l, _m, _o, _p, _q, _r, _s, _t;
|
|
130
208
|
const contents = [];
|
|
131
209
|
logger.verbose_log(`Processing ATMYAPP export in ${file.getFilePath()}`);
|
|
132
210
|
// Extract individual definition types from the array
|
|
@@ -165,55 +243,99 @@ function processAtmyappExport(atmyappType, file, tsconfigPath, logger) {
|
|
|
165
243
|
continue;
|
|
166
244
|
}
|
|
167
245
|
if (!schema.properties) {
|
|
246
|
+
// For event definitions, the schema generator might fail due to generics
|
|
247
|
+
// Try to extract event information directly from the TypeScript AST
|
|
248
|
+
logger.verbose_log(`Schema has no properties. Attempting AST-based extraction for ${definitionType}`);
|
|
249
|
+
// Try to extract event definition from TypeScript AST
|
|
250
|
+
const eventInfo = extractEventInfoFromAST(file, definitionType, logger);
|
|
251
|
+
if (eventInfo) {
|
|
252
|
+
logger.verbose_log(`Successfully extracted event via AST: ${eventInfo.id} with columns: ${eventInfo.columns.join(", ")}`);
|
|
253
|
+
contents.push({
|
|
254
|
+
path: eventInfo.id,
|
|
255
|
+
structure: {
|
|
256
|
+
type: "event",
|
|
257
|
+
properties: {
|
|
258
|
+
id: { const: eventInfo.id },
|
|
259
|
+
columns: { const: eventInfo.columns },
|
|
260
|
+
type: { const: "event" },
|
|
261
|
+
},
|
|
262
|
+
},
|
|
263
|
+
});
|
|
264
|
+
continue;
|
|
265
|
+
}
|
|
168
266
|
logger.warn(`Invalid schema structure for ${definitionType}`);
|
|
169
267
|
continue;
|
|
170
268
|
}
|
|
171
269
|
const properties = schema.properties;
|
|
270
|
+
// Debug: Log the actual schema structure
|
|
271
|
+
logger.verbose_log(`Schema for ${definitionType}: ${JSON.stringify(properties, null, 2)}`);
|
|
172
272
|
// Check if this is an event definition
|
|
173
273
|
const isEventDef = ((_a = properties.type) === null || _a === void 0 ? void 0 : _a.const) === "event" ||
|
|
174
|
-
((
|
|
274
|
+
((_b = properties.type) === null || _b === void 0 ? void 0 : _b.const) === "basic_event" ||
|
|
275
|
+
(((_c = properties.__is_ATMYAPP_Object) === null || _c === void 0 ? void 0 : _c.const) === true &&
|
|
175
276
|
properties.id &&
|
|
176
|
-
properties.columns);
|
|
277
|
+
(properties.columns || ((_d = properties.type) === null || _d === void 0 ? void 0 : _d.const) === "basic_event"));
|
|
177
278
|
if (isEventDef) {
|
|
178
|
-
// Handle AmaEventDef - use id as path and extract event structure
|
|
279
|
+
// Handle AmaCustomEventDef and AmaEventDef - use id as path and extract event structure
|
|
179
280
|
let eventId = null;
|
|
180
281
|
let columns = [];
|
|
181
|
-
// Extract event ID
|
|
182
|
-
if ((
|
|
282
|
+
// Extract event ID - try different possible structures
|
|
283
|
+
if ((_e = properties.id) === null || _e === void 0 ? void 0 : _e.const) {
|
|
183
284
|
eventId = properties.id.const;
|
|
184
285
|
}
|
|
185
|
-
|
|
186
|
-
|
|
286
|
+
else if (((_f = properties.id) === null || _f === void 0 ? void 0 : _f.enum) && properties.id.enum.length === 1) {
|
|
287
|
+
eventId = properties.id.enum[0];
|
|
288
|
+
}
|
|
289
|
+
else if (((_g = properties.id) === null || _g === void 0 ? void 0 : _g.type) === "string" && ((_h = properties.id) === null || _h === void 0 ? void 0 : _h.title)) {
|
|
290
|
+
// Fallback: try to extract from title or other metadata
|
|
291
|
+
eventId = properties.id.title;
|
|
292
|
+
}
|
|
293
|
+
// Extract columns - try different possible structures
|
|
294
|
+
// For basic events, columns might not exist (they use Record<string, string>)
|
|
295
|
+
if ((_j = properties.columns) === null || _j === void 0 ? void 0 : _j.const) {
|
|
187
296
|
columns = properties.columns.const;
|
|
188
297
|
}
|
|
189
|
-
else if ((
|
|
298
|
+
else if ((_l = (_k = properties.columns) === null || _k === void 0 ? void 0 : _k.items) === null || _l === void 0 ? void 0 : _l.const) {
|
|
190
299
|
columns = properties.columns.items.const;
|
|
191
300
|
}
|
|
192
|
-
else if (((
|
|
301
|
+
else if (((_m = properties.columns) === null || _m === void 0 ? void 0 : _m.items) &&
|
|
193
302
|
Array.isArray(properties.columns.items)) {
|
|
194
303
|
// Handle array of const items - extract const value from each item
|
|
195
304
|
columns = properties.columns.items
|
|
196
305
|
.map((item) => item.const)
|
|
197
306
|
.filter(Boolean);
|
|
198
307
|
}
|
|
308
|
+
else if ((_p = (_o = properties.columns) === null || _o === void 0 ? void 0 : _o.items) === null || _p === void 0 ? void 0 : _p.enum) {
|
|
309
|
+
// Handle tuple type where each position has enum with single value
|
|
310
|
+
columns = properties.columns.items.enum;
|
|
311
|
+
}
|
|
312
|
+
else if (((_q = properties.columns) === null || _q === void 0 ? void 0 : _q.enum) &&
|
|
313
|
+
Array.isArray(properties.columns.enum[0])) {
|
|
314
|
+
// Handle case where columns is an enum with array values
|
|
315
|
+
columns = properties.columns.enum[0];
|
|
316
|
+
}
|
|
317
|
+
// Debug: Log what we extracted
|
|
318
|
+
logger.verbose_log(`Extracted from ${definitionType}: eventId=${eventId}, columns=${JSON.stringify(columns)}`);
|
|
199
319
|
if (!eventId) {
|
|
200
320
|
logger.warn(`Could not extract event ID from ${definitionType}`);
|
|
201
321
|
continue;
|
|
202
322
|
}
|
|
203
|
-
|
|
323
|
+
// For basic events, empty columns array is acceptable
|
|
324
|
+
const isBasicEvent = ((_r = properties.type) === null || _r === void 0 ? void 0 : _r.const) === "basic_event";
|
|
325
|
+
if (columns.length === 0 && !isBasicEvent) {
|
|
204
326
|
logger.warn(`Could not extract columns from ${definitionType}`);
|
|
205
327
|
continue;
|
|
206
328
|
}
|
|
207
|
-
logger.verbose_log(`Successfully extracted event: ${eventId} with columns: ${columns.join(", ")}`);
|
|
329
|
+
logger.verbose_log(`Successfully extracted ${isBasicEvent ? "basic " : ""}event: ${eventId}${columns.length > 0 ? ` with columns: ${columns.join(", ")}` : ""}`);
|
|
208
330
|
// Create event content with special structure
|
|
209
331
|
contents.push({
|
|
210
332
|
path: eventId, // Use event ID as path
|
|
211
333
|
structure: {
|
|
212
|
-
type: "event",
|
|
334
|
+
type: isBasicEvent ? "basic_event" : "event",
|
|
213
335
|
properties: {
|
|
214
336
|
id: { const: eventId },
|
|
215
337
|
columns: { const: columns },
|
|
216
|
-
type: { const: "event" },
|
|
338
|
+
type: { const: isBasicEvent ? "basic_event" : "event" },
|
|
217
339
|
},
|
|
218
340
|
},
|
|
219
341
|
});
|
|
@@ -223,10 +345,10 @@ function processAtmyappExport(atmyappType, file, tsconfigPath, logger) {
|
|
|
223
345
|
let path = null;
|
|
224
346
|
let structure = null;
|
|
225
347
|
// Look for path in different possible locations
|
|
226
|
-
if ((
|
|
348
|
+
if ((_s = properties.path) === null || _s === void 0 ? void 0 : _s.const) {
|
|
227
349
|
path = properties.path.const;
|
|
228
350
|
}
|
|
229
|
-
else if ((
|
|
351
|
+
else if ((_t = properties._path) === null || _t === void 0 ? void 0 : _t.const) {
|
|
230
352
|
path = properties._path.const;
|
|
231
353
|
}
|
|
232
354
|
// Look for structure/data in different possible locations
|
|
@@ -0,0 +1,255 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
|
3
|
+
if (k2 === undefined) k2 = k;
|
|
4
|
+
var desc = Object.getOwnPropertyDescriptor(m, k);
|
|
5
|
+
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
|
6
|
+
desc = { enumerable: true, get: function() { return m[k]; } };
|
|
7
|
+
}
|
|
8
|
+
Object.defineProperty(o, k2, desc);
|
|
9
|
+
}) : (function(o, m, k, k2) {
|
|
10
|
+
if (k2 === undefined) k2 = k;
|
|
11
|
+
o[k2] = m[k];
|
|
12
|
+
}));
|
|
13
|
+
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
|
14
|
+
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
|
15
|
+
}) : function(o, v) {
|
|
16
|
+
o["default"] = v;
|
|
17
|
+
});
|
|
18
|
+
var __importStar = (this && this.__importStar) || (function () {
|
|
19
|
+
var ownKeys = function(o) {
|
|
20
|
+
ownKeys = Object.getOwnPropertyNames || function (o) {
|
|
21
|
+
var ar = [];
|
|
22
|
+
for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
|
|
23
|
+
return ar;
|
|
24
|
+
};
|
|
25
|
+
return ownKeys(o);
|
|
26
|
+
};
|
|
27
|
+
return function (mod) {
|
|
28
|
+
if (mod && mod.__esModule) return mod;
|
|
29
|
+
var result = {};
|
|
30
|
+
if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
|
|
31
|
+
__setModuleDefault(result, mod);
|
|
32
|
+
return result;
|
|
33
|
+
};
|
|
34
|
+
})();
|
|
35
|
+
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
|
36
|
+
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
|
37
|
+
return new (P || (P = Promise))(function (resolve, reject) {
|
|
38
|
+
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
|
39
|
+
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
|
40
|
+
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
|
|
41
|
+
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
|
42
|
+
});
|
|
43
|
+
};
|
|
44
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
45
|
+
exports.processFileInWorker = processFileInWorker;
|
|
46
|
+
const ts_morph_1 = require("ts-morph");
|
|
47
|
+
const ts = __importStar(require("typescript"));
|
|
48
|
+
const path_1 = require("path");
|
|
49
|
+
const typescript_json_schema_1 = require("typescript-json-schema");
|
|
50
|
+
const fs_1 = require("fs");
|
|
51
|
+
// Cache for TypeScript programs to avoid recompilation
// NOTE(review): entries are never evicted, so memory grows with the number of
// distinct (file, tsconfig) pairs this worker processes — fine for short-lived
// workers, but confirm for long runs.
const programCache = new Map();
// Optimized function to process a single file in a worker
/**
 * Worker-thread entry point: loads `filePath` into a fresh ts-morph project
 * (configured from `tsconfigPath` when that file exists, otherwise from a
 * default ESNext/React-JSX compiler configuration) and extracts content
 * definitions from every exported `ATMYAPP` type alias found in the file.
 *
 * @param filePath     Absolute path of the source file to analyze.
 * @param tsconfigPath tsconfig path, resolved against process.cwd().
 * @returns Promise of extracted definitions; [] when the file has no exported
 *          ATMYAPP alias.
 */
function processFileInWorker(filePath, tsconfigPath) {
    return __awaiter(this, void 0, void 0, function* () {
        const contents = [];
        // Create or reuse TypeScript project
        const resolvedTsConfigPath = (0, path_1.resolve)(process.cwd(), tsconfigPath);
        const projectOptions = {
            tsConfigFilePath: (0, fs_1.existsSync)(resolvedTsConfigPath)
                ? resolvedTsConfigPath
                : undefined,
            skipAddingFilesFromTsConfig: true,
            // Fallback compiler options used only when no tsconfig exists on disk.
            compilerOptions: !(0, fs_1.existsSync)(resolvedTsConfigPath)
                ? {
                    target: ts.ScriptTarget.ESNext,
                    module: ts.ModuleKind.ESNext,
                    moduleResolution: ts.ModuleResolutionKind.NodeJs,
                    esModuleInterop: true,
                    jsx: ts.JsxEmit.React,
                    skipLibCheck: true,
                }
                : undefined,
        };
        const project = new ts_morph_1.Project(projectOptions);
        const sourceFile = project.addSourceFileAtPath(filePath);
        // Look for ATMYAPP exports
        const atmyappExports = sourceFile.getTypeAliases().filter((alias) => {
            const name = alias.getName();
            const isExported = alias.isExported();
            return name === "ATMYAPP" && isExported;
        });
        if (atmyappExports.length === 0) {
            return contents;
        }
        // Process each ATMYAPP export
        for (const atmyappExport of atmyappExports) {
            const fileContents = yield processAtmyappExportOptimized(atmyappExport, sourceFile, tsconfigPath);
            contents.push(...fileContents);
        }
        return contents;
    });
}
|
|
94
|
+
// Optimized version that reuses TypeScript programs
/**
 * Turns one exported `ATMYAPP` type alias into content definitions.
 * Generates a JSON schema for each element type of the alias (reusing a
 * per-(file, tsconfig) cached TypeScript program), then classifies each
 * schema as an event definition or a regular content definition.
 *
 * @param atmyappType  ts-morph TypeAliasDeclaration of the ATMYAPP export.
 * @param file         SourceFile that contains the alias.
 * @param tsconfigPath tsconfig path, resolved against process.cwd().
 * @returns Promise of extracted definitions (possibly empty).
 */
function processAtmyappExportOptimized(atmyappType, file, tsconfigPath) {
    return __awaiter(this, void 0, void 0, function* () {
        var _a, _b, _c, _d;
        const contents = [];
        const filePath = file.getFilePath();
        // Extract definition types
        const definitionTypes = extractDefinitionTypes(atmyappType);
        if (definitionTypes.length === 0) {
            return contents;
        }
        // Create or reuse TypeScript program
        const resolvedTsConfigPath = (0, path_1.resolve)(process.cwd(), tsconfigPath);
        const cacheKey = `${filePath}:${resolvedTsConfigPath}`;
        let program = programCache.get(cacheKey);
        if (!program) {
            const compilerOptions = (0, fs_1.existsSync)(resolvedTsConfigPath)
                ? { configFile: resolvedTsConfigPath }
                : {
                    target: ts.ScriptTarget.ES2015,
                    module: ts.ModuleKind.ESNext,
                    strict: true,
                    esModuleInterop: true,
                    skipLibCheck: true,
                    jsx: ts.JsxEmit.Preserve,
                };
            program = (0, typescript_json_schema_1.getProgramFromFiles)([filePath], compilerOptions);
            programCache.set(cacheKey, program);
        }
        // Batch process all definition types for this file
        const schemaPromises = definitionTypes.map((definitionType) => __awaiter(this, void 0, void 0, function* () {
            try {
                const schema = (0, typescript_json_schema_1.generateSchema)(program, definitionType, {
                    required: true,
                    noExtraProps: true,
                    aliasRef: true,
                    ref: false,
                    defaultNumberType: "number",
                    ignoreErrors: true,
                    skipLibCheck: true,
                });
                return { definitionType, schema };
            }
            catch (error) {
                // Schema generation failure is recorded but handled below as "no schema".
                return { definitionType, schema: null, error };
            }
        }));
        const schemaResults = yield Promise.all(schemaPromises);
        // Process schema results
        for (const result of schemaResults) {
            // NOTE(review): result.error is silently dropped here — a definition
            // whose schema generation threw is skipped without any diagnostic.
            if (!result.schema || !result.schema.properties) {
                continue;
            }
            const properties = result.schema.properties;
            // An event definition is recognized either by an explicit literal
            // type tag ("event"/"basic_event") or by the ATMYAPP object marker
            // plus an id and columns (or basic_event tag).
            const isEventDef = ((_a = properties.type) === null || _a === void 0 ? void 0 : _a.const) === "event" ||
                ((_b = properties.type) === null || _b === void 0 ? void 0 : _b.const) === "basic_event" ||
                (((_c = properties.__is_ATMYAPP_Object) === null || _c === void 0 ? void 0 : _c.const) === true &&
                    properties.id &&
                    (properties.columns || ((_d = properties.type) === null || _d === void 0 ? void 0 : _d.const) === "basic_event"));
            if (isEventDef) {
                // Handle event definitions
                const eventContent = processEventDefinition(properties, result.definitionType);
                if (eventContent) {
                    contents.push(eventContent);
                }
            }
            else {
                // Handle regular content definitions
                const contentDefinition = processContentDefinition(properties, result.definitionType);
                if (contentDefinition) {
                    contents.push(contentDefinition);
                }
            }
        }
        return contents;
    });
}
|
|
171
|
+
// Returns the names of the element types in an ATMYAPP alias: every member
// of a tuple type, or the single element type of an array type. Anything else
// yields an empty list.
function extractDefinitionTypes(atmyappType) {
    const typeNode = atmyappType.getTypeNode();
    const names = [];
    if (ts_morph_1.Node.isTupleTypeNode(typeNode)) {
        for (const element of typeNode.getElements()) {
            names.push(element.getText());
        }
    }
    else if (ts_morph_1.Node.isArrayTypeNode(typeNode)) {
        names.push(typeNode.getElementTypeNode().getText());
    }
    return names;
}
|
|
187
|
+
// Builds an event content entry from a generated JSON schema's `properties`.
// Supports both custom events (literal columns required) and basic events
// (type tag "basic_event", columns optional). Returns null when the schema
// does not carry enough literal information.
function processEventDefinition(properties, definitionType) {
    // Event id comes from a literal `const` on the id property.
    let eventId = null;
    if (properties.id?.const) {
        eventId = properties.id.const;
    }
    // Columns may appear in several shapes depending on how the schema
    // generator rendered the tuple type.
    let columns = [];
    const columnsSchema = properties.columns;
    if (columnsSchema?.const) {
        columns = columnsSchema.const;
    }
    else if (columnsSchema?.items?.const) {
        columns = columnsSchema.items.const;
    }
    else if (columnsSchema?.items && Array.isArray(columnsSchema.items)) {
        // Tuple rendered as an array of per-position schemas with `const`.
        columns = columnsSchema.items
            .map((entry) => entry.const)
            .filter(Boolean);
    }
    const isBasicEvent = properties.type?.const === "basic_event";
    // An event id is always mandatory.
    if (!eventId) {
        return null;
    }
    // Custom events additionally require at least one column.
    if (!isBasicEvent && columns.length === 0) {
        return null;
    }
    const kind = isBasicEvent ? "basic_event" : "event";
    return {
        path: eventId,
        structure: {
            type: kind,
            properties: {
                id: { const: eventId },
                columns: { const: columns },
                type: { const: kind },
            },
        },
    };
}
|
|
230
|
+
// Builds a regular content entry from a generated JSON schema's `properties`.
// Returns { path, structure } or null when either part is missing.
function processContentDefinition(properties, definitionType) {
    // Path may live under `path` or the underscored `_path` variant; the
    // truthiness chain mirrors the original if/else-if checks exactly.
    const contentPath = properties.path?.const || properties._path?.const || null;
    // The payload schema may be exposed as `structure`, `data`, or `_data`.
    const structure = properties.structure || properties.data || properties._data || null;
    if (!contentPath || !structure) {
        return null;
    }
    return { path: contentPath, structure };
}
|
|
@@ -0,0 +1,25 @@
|
|
|
1
|
+
import { Logger } from "../logger";
|
|
2
|
+
/** A unit of work handed to the pool: one source file to analyze. */
export interface WorkerTask {
    /** Unique task identifier, used in logs and to correlate results. */
    id: string;
    /** Absolute path of the file to process. */
    filePath: string;
    /** tsconfig path forwarded to the worker for schema generation. */
    tsconfigPath: string;
}
/** Outcome reported by a worker for a single task. */
export interface WorkerResult {
    /** Mirrors the id of the originating WorkerTask. */
    id: string;
    /** True when the file was processed without error. */
    success: boolean;
    /** Extracted content definitions; empty when none were found. */
    contents: any[];
    /** Error message, populated only when success is false. */
    error?: string;
}
/**
 * Pool of worker threads that processes WorkerTasks concurrently and
 * resolves with one WorkerResult per task.
 */
export declare class WorkerPool {
    private workers;
    private taskQueue;
    private activeWorkers;
    private results;
    private logger;
    private maxWorkers;
    constructor(logger: Logger, maxWorkers?: number);
    processFiles(tasks: WorkerTask[]): Promise<WorkerResult[]>;
    private setupWorker;
    private assignNextTask;
    private cleanup;
}
|