@atmyapp/cli 0.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +633 -0
- package/dist/cli/commands/migrate.d.ts +5 -0
- package/dist/cli/commands/migrate.js +76 -0
- package/dist/cli/commands/use.d.ts +2 -0
- package/dist/cli/commands/use.js +109 -0
- package/dist/cli/index.d.ts +2 -0
- package/dist/cli/index.js +16 -0
- package/dist/cli/logger/index.d.ts +12 -0
- package/dist/cli/logger/index.js +34 -0
- package/dist/cli/test-migrate.d.ts +2 -0
- package/dist/cli/test-migrate.js +10 -0
- package/dist/cli/types/migrate.d.ts +34 -0
- package/dist/cli/types/migrate.js +2 -0
- package/dist/cli/utils/config.d.ts +10 -0
- package/dist/cli/utils/config.js +38 -0
- package/dist/cli/utils/content-processor.d.ts +6 -0
- package/dist/cli/utils/content-processor.js +141 -0
- package/dist/cli/utils/definition-processor.d.ts +61 -0
- package/dist/cli/utils/definition-processor.js +248 -0
- package/dist/cli/utils/file-operations.d.ts +4 -0
- package/dist/cli/utils/file-operations.js +24 -0
- package/dist/cli/utils/index.d.ts +9 -0
- package/dist/cli/utils/index.js +27 -0
- package/dist/cli/utils/schema-processor.d.ts +7 -0
- package/dist/cli/utils/schema-processor.js +247 -0
- package/dist/cli/utils/type-transformers.d.ts +8 -0
- package/dist/cli/utils/type-transformers.js +94 -0
- package/dist/cli/utils/upload.d.ts +3 -0
- package/dist/cli/utils/upload.js +62 -0
- package/package.json +50 -0
|
@@ -0,0 +1,248 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
exports.builtInOutputTransformers = exports.builtInValidators = exports.builtInProcessors = exports.definitionPipeline = void 0;
|
|
4
|
+
exports.registerBuiltInProcessors = registerBuiltInProcessors;
|
|
5
|
+
exports.registerBuiltInValidators = registerBuiltInValidators;
|
|
6
|
+
exports.registerBuiltInOutputTransformers = registerBuiltInOutputTransformers;
|
|
7
|
+
// Registry for processors, transformers, and validators.
// Content definitions are validated first, then pushed through every
// registered processor in order; the final serialized output can be
// post-processed by output transformers.
class DefinitionProcessingPipeline {
    constructor() {
        this.processors = [];
        this.outputTransformers = [];
        this.validators = [];
    }
    // Register a definition processor
    addProcessor(processor) {
        this.processors.push(processor);
    }
    // Register an output transformer
    addOutputTransformer(transformer) {
        this.outputTransformers.push(transformer);
    }
    // Register a validation rule
    addValidator(validator) {
        this.validators.push(validator);
    }
    // Process definitions through the pipeline.
    // Returns { processedContents, validationResults }; invalid items are
    // reported and skipped, and a processor throwing (or returning null)
    // drops the item from the output.
    processDefinitions(contents, config, logger) {
        const accepted = [];
        const reports = [];
        logger.verbose_log(`Processing ${contents.length} definitions through pipeline`);
        for (let index = 0; index < contents.length; index++) {
            const content = contents[index];
            const context = {
                logger,
                config,
                allContents: contents,
                currentIndex: index,
            };
            // Validate content before any processor touches it.
            const report = this.validateContent(content, context);
            reports.push(report);
            if (!report.isValid) {
                logger.error(`Validation failed for ${content.path}: ${report.errors.join(", ")}`);
                continue; // Skip processing invalid content
            }
            if (report.warnings.length > 0) {
                for (const warning of report.warnings) {
                    logger.warn(`Warning for ${content.path}: ${warning}`);
                }
            }
            // Run the item through every processor in registration order.
            let current = content;
            for (const processor of this.processors) {
                if (current === null)
                    break;
                logger.verbose_log(`Applying processor: ${processor.name} to ${content.path}`);
                try {
                    current = processor.process(current, context);
                }
                catch (error) {
                    logger.error(`Processor ${processor.name} failed for ${content.path}:`, error);
                    current = null;
                    break;
                }
            }
            if (current !== null) {
                accepted.push(current);
                logger.verbose_log(`Successfully processed ${content.path}`);
            }
        }
        return { processedContents: accepted, validationResults: reports };
    }
    // Transform the final output. Transformer failures are logged and the
    // previous value is carried forward.
    transformOutput(output, config, logger) {
        const context = {
            logger,
            config,
            allContents: [],
            currentIndex: 0,
        };
        let result = output;
        for (const transformer of this.outputTransformers) {
            logger.verbose_log(`Applying output transformer: ${transformer.name}`);
            try {
                result = transformer.transform(result, context);
            }
            catch (error) {
                logger.error(`Output transformer ${transformer.name} failed:`, error);
            }
        }
        return result;
    }
    // Validate a single content item against every registered validator,
    // merging all errors and warnings into one result.
    validateContent(content, context) {
        const combined = {
            isValid: true,
            errors: [],
            warnings: [],
        };
        for (const validator of this.validators) {
            try {
                const outcome = validator.validate(content, context);
                if (!outcome.isValid) {
                    combined.isValid = false;
                }
                combined.errors.push(...outcome.errors);
                combined.warnings.push(...outcome.warnings);
            }
            catch (error) {
                // A throwing validator counts as a validation failure.
                combined.isValid = false;
                combined.errors.push(`Validator ${validator.name} failed: ${error instanceof Error ? error.message : "Unknown error"}`);
            }
        }
        return combined;
    }
    // Clear all registered processors, transformers, and validators
    clear() {
        this.processors = [];
        this.outputTransformers = [];
        this.validators = [];
    }
    // Get pipeline statistics
    getStats() {
        return {
            processors: this.processors.length,
            transformers: this.outputTransformers.length,
            validators: this.validators.length,
        };
    }
}
|
|
128
|
+
// Global pipeline instance
// Shared singleton used by the register* helpers below and by the migrate command.
exports.definitionPipeline = new DefinitionProcessingPipeline();
// Built-in processors
exports.builtInProcessors = {
    // Processor to determine content type
    // Sets content.type to "event" | "image" | "file" | "jsonx" from the
    // schema structure first, falling back to the file extension.
    // NOTE: mutates the content object in place and returns it.
    typeDetector: {
        name: "type-detector",
        process: (content, context) => {
            var _a, _b, _c, _d, _e, _f, _g, _h;
            const { logger } = context;
            // Extract file extension
            // (for a path without a dot, split().pop() yields the whole path,
            // which simply won't match any known extension below)
            const fileExt = (_a = content.path.split(".").pop()) === null || _a === void 0 ? void 0 : _a.toLowerCase();
            // Check for event types first
            if (((_b = content.structure) === null || _b === void 0 ? void 0 : _b.type) === "event" ||
                ((_e = (_d = (_c = content.structure) === null || _c === void 0 ? void 0 : _c.properties) === null || _d === void 0 ? void 0 : _d.type) === null || _e === void 0 ? void 0 : _e.const) === "event" ||
                ((_f = content.structure) === null || _f === void 0 ? void 0 : _f.__amatype) === "AmaEventDef") {
                content.type = "event";
            }
            else if (((_g = content.structure) === null || _g === void 0 ? void 0 : _g.__amatype) === "AmaImageDef") {
                content.type = "image";
            }
            else if (((_h = content.structure) === null || _h === void 0 ? void 0 : _h.__amatype) === "AmaFileDef") {
                content.type = "file";
            }
            else if (["jpg", "jpeg", "png", "gif", "svg", "webp"].includes(fileExt || "")) {
                content.type = "image";
            }
            else if (["pdf", "doc", "docx", "txt", "md"].includes(fileExt || "")) {
                content.type = "file";
            }
            else {
                // Default bucket for structured JSON-like definitions.
                content.type = "jsonx";
            }
            logger.verbose_log(`Detected type "${content.type}" for ${content.path}`);
            return content;
        },
    },
    // Processor to normalize paths
    // Converts backslashes to forward slashes and strips leading slashes,
    // mutating content.path in place.
    pathNormalizer: {
        name: "path-normalizer",
        process: (content, context) => {
            const { logger } = context;
            // Normalize path separators
            const normalizedPath = content.path.replace(/\\/g, "/");
            // Remove leading slashes
            const cleanPath = normalizedPath.replace(/^\/+/, "");
            if (cleanPath !== content.path) {
                logger.verbose_log(`Normalized path from "${content.path}" to "${cleanPath}"`);
                content.path = cleanPath;
            }
            return content;
        },
    },
};
// Built-in validators
exports.builtInValidators = {
    // Validator to check if path is provided
    // Fails when content.path is not a string or is blank/whitespace.
    pathValidator: {
        name: "path-validator",
        validate: (content, context) => {
            const result = {
                isValid: true,
                errors: [],
                warnings: [],
            };
            if (typeof content.path !== "string") {
                result.isValid = false;
                result.errors.push("Content must have a valid path");
            }
            else if (content.path.trim() === "") {
                result.isValid = false;
                result.errors.push("Content path cannot be empty");
            }
            return result;
        },
    },
    // Validator to check for duplicate paths
    // Fails when any OTHER item in context.allContents shares this path
    // (both duplicates will fail, each seeing the other).
    duplicatePathValidator: {
        name: "duplicate-path-validator",
        validate: (content, context) => {
            const result = {
                isValid: true,
                errors: [],
                warnings: [],
            };
            const { allContents, currentIndex } = context;
            const duplicates = allContents.filter((other, index) => index !== currentIndex && other.path === content.path);
            if (duplicates.length > 0) {
                result.isValid = false;
                result.errors.push(`Duplicate path found: ${content.path}`);
            }
            return result;
        },
    },
};
// Built-in output transformers
exports.builtInOutputTransformers = {
    // Transformer to add metadata
    // Returns a shallow copy of the output with a `metadata` key; entries
    // from config.metadata override the generated defaults.
    metadataEnricher: {
        name: "metadata-enricher",
        transform: (output, context) => {
            const { logger, config } = context;
            // Add processing metadata
            // NOTE(review): assumes output.definitions is always present
            // (only output.events is guarded with `|| {}`) — confirm callers.
            const metadata = Object.assign({ generatedAt: new Date().toISOString(), totalDefinitions: Object.keys(output.definitions).length, totalEvents: Object.keys(output.events || {}).length, version: "1.0.0" }, (config.metadata || {}));
            logger.verbose_log(`Adding metadata to output: ${JSON.stringify(metadata)}`);
            return Object.assign(Object.assign({}, output), { metadata });
        },
    },
};
// Helper functions to register built-in components
// pathNormalizer runs before typeDetector so detection sees clean paths.
function registerBuiltInProcessors() {
    exports.definitionPipeline.addProcessor(exports.builtInProcessors.pathNormalizer);
    exports.definitionPipeline.addProcessor(exports.builtInProcessors.typeDetector);
}
function registerBuiltInValidators() {
    exports.definitionPipeline.addValidator(exports.builtInValidators.pathValidator);
    exports.definitionPipeline.addValidator(exports.builtInValidators.duplicatePathValidator);
}
function registerBuiltInOutputTransformers() {
    exports.definitionPipeline.addOutputTransformer(exports.builtInOutputTransformers.metadataEnricher);
}
|
|
@@ -0,0 +1,24 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
exports.ensureAmaDirectory = ensureAmaDirectory;
|
|
4
|
+
exports.saveOutputToFile = saveOutputToFile;
|
|
5
|
+
const fs_1 = require("fs");
|
|
6
|
+
const path_1 = require("path");
|
|
7
|
+
// Ensures the .ama directory exists
// Creates ./.ama (relative to the current working directory) when missing;
// otherwise just logs that it is already present.
function ensureAmaDirectory(logger) {
    const amaDir = "./.ama";
    const absoluteDir = (0, path_1.resolve)(process.cwd(), amaDir);
    if ((0, fs_1.existsSync)(amaDir)) {
        logger.verbose_log(`.ama directory already exists at ${absoluteDir}`);
        return;
    }
    logger.verbose_log(`Creating .ama directory at ${absoluteDir}`);
    (0, fs_1.mkdirSync)(amaDir, { recursive: true });
}
|
|
18
|
+
// Saves the output definition to a local file
// Serializes `output` as pretty-printed JSON into ./.ama/definitions.json;
// assumes the .ama directory already exists (see ensureAmaDirectory).
function saveOutputToFile(output, logger) {
    const outputPath = "./.ama/definitions.json";
    logger.verbose_log(`Saving definitions to ${(0, path_1.resolve)(process.cwd(), outputPath)}`);
    const serialized = JSON.stringify(output, null, 2);
    (0, fs_1.writeFileSync)(outputPath, serialized);
    logger.success(`✅ Successfully generated ${outputPath}`);
}
|
|
@@ -0,0 +1,9 @@
|
|
|
1
|
+
export * from "./config";
|
|
2
|
+
export * from "./type-transformers";
|
|
3
|
+
export * from "./schema-processor";
|
|
4
|
+
export * from "./content-processor";
|
|
5
|
+
export * from "./file-operations";
|
|
6
|
+
export * from "./upload";
|
|
7
|
+
export * from "./definition-processor";
|
|
8
|
+
export * from "../types/migrate";
|
|
9
|
+
export * from "../logger";
|
|
@@ -0,0 +1,27 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
|
3
|
+
if (k2 === undefined) k2 = k;
|
|
4
|
+
var desc = Object.getOwnPropertyDescriptor(m, k);
|
|
5
|
+
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
|
6
|
+
desc = { enumerable: true, get: function() { return m[k]; } };
|
|
7
|
+
}
|
|
8
|
+
Object.defineProperty(o, k2, desc);
|
|
9
|
+
}) : (function(o, m, k, k2) {
|
|
10
|
+
if (k2 === undefined) k2 = k;
|
|
11
|
+
o[k2] = m[k];
|
|
12
|
+
}));
|
|
13
|
+
var __exportStar = (this && this.__exportStar) || function(m, exports) {
|
|
14
|
+
for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);
|
|
15
|
+
};
|
|
16
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
17
|
+
// Re-export all utilities for easier imports
|
|
18
|
+
__exportStar(require("./config"), exports);
|
|
19
|
+
__exportStar(require("./type-transformers"), exports);
|
|
20
|
+
__exportStar(require("./schema-processor"), exports);
|
|
21
|
+
__exportStar(require("./content-processor"), exports);
|
|
22
|
+
__exportStar(require("./file-operations"), exports);
|
|
23
|
+
__exportStar(require("./upload"), exports);
|
|
24
|
+
__exportStar(require("./definition-processor"), exports);
|
|
25
|
+
// Re-export types and logger
|
|
26
|
+
__exportStar(require("../types/migrate"), exports);
|
|
27
|
+
__exportStar(require("../logger"), exports);
|
|
@@ -0,0 +1,7 @@
|
|
|
1
|
+
import { Project, SourceFile, TypeAliasDeclaration } from "ts-morph";
|
|
2
|
+
import { Logger } from "../logger";
|
|
3
|
+
import { Content, ProcessingResult } from "../types/migrate";
|
|
4
|
+
export declare function scanFiles(patterns: string[], logger: Logger): Promise<string[]>;
|
|
5
|
+
export declare function createProject(files: string[], tsconfigPath: string, logger: Logger): Project;
|
|
6
|
+
export declare function processAtmyappExport(atmyappType: TypeAliasDeclaration, file: SourceFile, tsconfigPath: string, logger: Logger): Content[];
|
|
7
|
+
export declare function processFiles(sourceFiles: SourceFile[], tsconfigPath: string, continueOnError: boolean, logger: Logger): ProcessingResult;
|
|
@@ -0,0 +1,247 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
|
3
|
+
if (k2 === undefined) k2 = k;
|
|
4
|
+
var desc = Object.getOwnPropertyDescriptor(m, k);
|
|
5
|
+
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
|
6
|
+
desc = { enumerable: true, get: function() { return m[k]; } };
|
|
7
|
+
}
|
|
8
|
+
Object.defineProperty(o, k2, desc);
|
|
9
|
+
}) : (function(o, m, k, k2) {
|
|
10
|
+
if (k2 === undefined) k2 = k;
|
|
11
|
+
o[k2] = m[k];
|
|
12
|
+
}));
|
|
13
|
+
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
|
14
|
+
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
|
15
|
+
}) : function(o, v) {
|
|
16
|
+
o["default"] = v;
|
|
17
|
+
});
|
|
18
|
+
var __importStar = (this && this.__importStar) || (function () {
|
|
19
|
+
var ownKeys = function(o) {
|
|
20
|
+
ownKeys = Object.getOwnPropertyNames || function (o) {
|
|
21
|
+
var ar = [];
|
|
22
|
+
for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
|
|
23
|
+
return ar;
|
|
24
|
+
};
|
|
25
|
+
return ownKeys(o);
|
|
26
|
+
};
|
|
27
|
+
return function (mod) {
|
|
28
|
+
if (mod && mod.__esModule) return mod;
|
|
29
|
+
var result = {};
|
|
30
|
+
if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
|
|
31
|
+
__setModuleDefault(result, mod);
|
|
32
|
+
return result;
|
|
33
|
+
};
|
|
34
|
+
})();
|
|
35
|
+
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
|
36
|
+
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
|
37
|
+
return new (P || (P = Promise))(function (resolve, reject) {
|
|
38
|
+
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
|
39
|
+
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
|
40
|
+
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
|
|
41
|
+
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
|
42
|
+
});
|
|
43
|
+
};
|
|
44
|
+
var __importDefault = (this && this.__importDefault) || function (mod) {
|
|
45
|
+
return (mod && mod.__esModule) ? mod : { "default": mod };
|
|
46
|
+
};
|
|
47
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
48
|
+
exports.scanFiles = scanFiles;
|
|
49
|
+
exports.createProject = createProject;
|
|
50
|
+
exports.processAtmyappExport = processAtmyappExport;
|
|
51
|
+
exports.processFiles = processFiles;
|
|
52
|
+
const fast_glob_1 = __importDefault(require("fast-glob"));
|
|
53
|
+
const ts_morph_1 = require("ts-morph");
|
|
54
|
+
const ts = __importStar(require("typescript"));
|
|
55
|
+
const path_1 = require("path");
|
|
56
|
+
const typescript_json_schema_1 = require("typescript-json-schema");
|
|
57
|
+
const fs_1 = require("fs");
|
|
58
|
+
// Scans for TypeScript files based on config patterns
// Resolves the given glob `patterns` (relative to the current working
// directory) to absolute file paths via fast-glob, always excluding
// node_modules, test, and dist directories. Returns a promise of the
// matched absolute paths. (Downleveled async function — the generator
// body is driven by __awaiter.)
function scanFiles(patterns, logger) {
    return __awaiter(this, void 0, void 0, function* () {
        logger.info("🔍 Scanning files...");
        logger.verbose_log(`Using patterns: ${patterns.join(", ")}`);
        const files = yield (0, fast_glob_1.default)(patterns, {
            ignore: ["**/node_modules/**", "**/test/**", "**/dist/**"],
            absolute: true,
            cwd: process.cwd(),
        });
        logger.verbose_log(`Found ${files.length} files matching patterns`);
        return files;
    });
}
|
|
72
|
+
// Creates and configures the TypeScript project
// Builds a ts-morph Project from `tsconfigPath` when that file exists,
// otherwise falls back to permissive default compiler options, then adds
// the scanned `files` as source files.
// Fix: the original called existsSync(resolvedTsConfigPath) three separate
// times; besides the redundant filesystem probes, the tsconfig could appear
// or vanish between checks and yield contradictory project options. The
// existence is now probed exactly once.
function createProject(files, tsconfigPath, logger) {
    const resolvedTsConfigPath = (0, path_1.resolve)(process.cwd(), tsconfigPath);
    // Single filesystem probe, reused for logging and both option branches.
    const hasTsConfig = (0, fs_1.existsSync)(resolvedTsConfigPath);
    if (!hasTsConfig) {
        logger.warn(`tsconfig at ${resolvedTsConfigPath} not found, using default compiler options`);
    }
    else {
        logger.verbose_log(`Using tsconfig from ${resolvedTsConfigPath}`);
    }
    const projectOptions = {
        tsConfigFilePath: hasTsConfig ? resolvedTsConfigPath : undefined,
        // Only the explicitly scanned files are added below.
        skipAddingFilesFromTsConfig: true,
        // Fallback options are supplied only when no tsconfig was found.
        compilerOptions: hasTsConfig
            ? undefined
            : {
                target: ts.ScriptTarget.ESNext,
                module: ts.ModuleKind.ESNext,
                moduleResolution: ts.ModuleResolutionKind.NodeJs,
                esModuleInterop: true,
                jsx: ts.JsxEmit.React,
                skipLibCheck: true,
            },
    };
    logger.verbose_log("Creating ts-morph Project");
    const project = new ts_morph_1.Project(projectOptions);
    logger.verbose_log(`Adding ${files.length} source files to project`);
    project.addSourceFilesAtPaths(files);
    return project;
}
|
|
103
|
+
// Extracts individual definition types from ATMYAPP array
// Accepts either a tuple type ([TypeA, TypeB]) or an array type (Type[])
// and returns the source text of each element type; warns and returns []
// for any other type node shape.
function extractDefinitionTypes(atmyappType, logger) {
    const typeNode = atmyappType.getTypeNode();
    const isTuple = ts_morph_1.Node.isTupleTypeNode(typeNode);
    const isArray = ts_morph_1.Node.isArrayTypeNode(typeNode);
    if (!isTuple && !isArray) {
        logger.warn(`ATMYAPP export should be an array/tuple type in ${atmyappType.getSourceFile().getFilePath()}`);
        return [];
    }
    const found = [];
    const record = (typeText) => {
        found.push(typeText);
        logger.verbose_log(`Found definition type: ${typeText}`);
    };
    if (isTuple) {
        // Handle tuple types: [Type1, Type2, ...]
        for (const element of typeNode.getElements()) {
            record(element.getText());
        }
    }
    else {
        // Handle array types: Type[]
        record(typeNode.getElementTypeNode().getText());
    }
    return found;
}
|
|
127
|
+
// Processes an ATMYAPP export to extract content definitions
// For each definition type listed in the ATMYAPP tuple/array, generates a
// JSON schema via typescript-json-schema and extracts a { path, structure }
// pair from the schema's top-level properties. Definitions whose schema
// lacks a recoverable path or structure are warned about and skipped;
// per-definition errors are logged and do not abort the remaining types.
function processAtmyappExport(atmyappType, file, tsconfigPath, logger) {
    var _a, _b;
    const contents = [];
    logger.verbose_log(`Processing ATMYAPP export in ${file.getFilePath()}`);
    // Extract individual definition types from the array
    const definitionTypes = extractDefinitionTypes(atmyappType, logger);
    if (definitionTypes.length === 0) {
        logger.warn(`No definition types found in ATMYAPP export in ${file.getFilePath()}`);
        return contents;
    }
    const resolvedTsConfigPath = (0, path_1.resolve)(process.cwd(), tsconfigPath);
    // NOTE(review): when the tsconfig exists it is passed as
    // { configFile: ... } to getProgramFromFiles' compilerOptions argument —
    // confirm typescript-json-schema honors this key; otherwise the project's
    // real compiler options may be silently ignored here.
    const compilerOptions = (0, fs_1.existsSync)(resolvedTsConfigPath)
        ? { configFile: resolvedTsConfigPath }
        : {
            target: ts.ScriptTarget.ES2015,
            module: ts.ModuleKind.ESNext,
            strict: true,
            esModuleInterop: true,
            skipLibCheck: true,
            jsx: ts.JsxEmit.Preserve,
        };
    // One program is built for the whole file and reused for every definition type.
    const program = (0, typescript_json_schema_1.getProgramFromFiles)([file.getFilePath()], compilerOptions);
    // Process each definition type
    for (const definitionType of definitionTypes) {
        try {
            logger.verbose_log(`Generating schema for definition type: ${definitionType}`);
            const schema = (0, typescript_json_schema_1.generateSchema)(program, definitionType, {
                required: true,
                noExtraProps: true,
                aliasRef: true,
                ref: false,
                defaultNumberType: "number",
                ignoreErrors: true,
                skipLibCheck: true,
            });
            if (!schema) {
                logger.warn(`Failed to generate schema for ${definitionType}`);
                continue;
            }
            if (!schema.properties) {
                logger.warn(`Invalid schema structure for ${definitionType}`);
                continue;
            }
            const properties = schema.properties;
            // Extract path from AmaContentRef structure
            let path = null;
            let structure = null;
            // Look for path in different possible locations
            // (supports both `path` and `_path` const properties)
            if ((_a = properties.path) === null || _a === void 0 ? void 0 : _a.const) {
                path = properties.path.const;
            }
            else if ((_b = properties._path) === null || _b === void 0 ? void 0 : _b.const) {
                path = properties._path.const;
            }
            // Look for structure/data in different possible locations
            if (properties.structure) {
                structure = properties.structure;
            }
            else if (properties.data) {
                structure = properties.data;
            }
            else if (properties._data) {
                structure = properties._data;
            }
            if (!path) {
                logger.warn(`Could not extract path from ${definitionType}`);
                continue;
            }
            if (!structure) {
                logger.warn(`Could not extract structure from ${definitionType}`);
                continue;
            }
            logger.verbose_log(`Successfully extracted content: ${path}`);
            contents.push({
                path,
                structure,
            });
        }
        catch (err) {
            logger.error(`Error processing definition type ${definitionType}:`, err);
        }
    }
    return contents;
}
|
|
212
|
+
// Processes all files to extract contents
// Walks every source file, finds exported `ATMYAPP` type aliases, and
// collects the content definitions they declare. Returns
// { contents, errors, successCount, failureCount }. When continueOnError
// is false the first processing error is rethrown.
function processFiles(sourceFiles, tsconfigPath, continueOnError, logger) {
    const contents = [];
    const errors = [];
    let successCount = 0;
    let failureCount = 0;
    logger.info(`📚 Processing ${sourceFiles.length} source files...`);
    for (const file of sourceFiles) {
        logger.verbose_log(`Examining file: ${file.getFilePath()}`);
        // Look for exported ATMYAPP type aliases
        const atmyappExports = file
            .getTypeAliases()
            .filter((alias) => alias.getName() === "ATMYAPP" && alias.isExported());
        logger.verbose_log(`Found ${atmyappExports.length} ATMYAPP exports in ${file.getFilePath()}`);
        for (const atmyappExport of atmyappExports) {
            try {
                const fileContents = processAtmyappExport(atmyappExport, file, tsconfigPath, logger);
                contents.push(...fileContents);
                successCount += fileContents.length;
                logger.verbose_log(`Successfully processed ${fileContents.length} definitions from ATMYAPP export`);
            }
            catch (err) {
                failureCount++;
                const errorMessage = `❌ ${file.getFilePath()} - ATMYAPP export - ${err instanceof Error ? err.message : "Unknown error"}`;
                errors.push(errorMessage);
                logger.error(errorMessage);
                if (!continueOnError) {
                    throw err;
                }
            }
        }
    }
    return { contents, errors, successCount, failureCount };
}
|
|
@@ -0,0 +1,8 @@
|
|
|
1
|
+
import { TypeTransformer } from "../types/migrate";
|
|
2
|
+
/**
|
|
3
|
+
* Extracts constant values from a JSON Schema-like definition.
|
|
4
|
+
* Assumes all final fields are constants and skips any fields without const values.
|
|
5
|
+
*/
|
|
6
|
+
export declare function extractConstants(schema: any): any;
|
|
7
|
+
export declare function registerTypeTransformer(transformer: TypeTransformer): void;
|
|
8
|
+
export declare function processSpecialTypes(schema: any): any;
|
|
@@ -0,0 +1,94 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
exports.extractConstants = extractConstants;
|
|
4
|
+
exports.registerTypeTransformer = registerTypeTransformer;
|
|
5
|
+
exports.processSpecialTypes = processSpecialTypes;
|
|
6
|
+
/**
 * Extracts constant values from a JSON Schema-like definition.
 * Assumes all final fields are constants and skips any fields without const values.
 * Returns a plain object of { key: constValue } (recursing into nested
 * object schemas), or null when nothing usable was found.
 */
function extractConstants(schema) {
    // Bail out on non-objects and schemas without a properties map.
    if (!schema || typeof schema !== "object" || !schema.properties) {
        return null;
    }
    const collected = {};
    for (const key of Object.keys(schema.properties)) {
        const propDef = schema.properties[key];
        if (typeof propDef !== "object" || propDef === null) {
            continue;
        }
        if ("const" in propDef) {
            // Constant leaf: record its value.
            collected[key] = propDef.const;
            continue;
        }
        const isNestedObjectSchema = "type" in propDef && propDef.type === "object" && "properties" in propDef;
        if (isNestedObjectSchema) {
            // Recurse; only keep the branch if it yielded any constants.
            const nested = extractConstants(propDef);
            if (nested) {
                collected[key] = nested;
            }
        }
    }
    return Object.keys(collected).length > 0 ? collected : null;
}
|
|
36
|
+
// Registry of type transformers
// Fix: the AmaImageDef and AmaFileDef transformers were copy-paste
// duplicates differing only in the __amatype string; they are now produced
// by a single factory so new AMA definition kinds need one line, not a
// duplicated block.
//
// Builds a transformer that recognizes a schema object whose
// properties.__amatype.const equals `amatype` (and that carries a
// properties.__config), collapsing it to { __amatype, config }.
function createAmaDefTransformer(amatype) {
    return {
        canTransform: (obj) => {
            var _a, _b, _c;
            const amatypeConst = (_b = (_a = obj === null || obj === void 0 ? void 0 : obj.properties) === null || _a === void 0 ? void 0 : _a.__amatype) === null || _b === void 0 ? void 0 : _b.const;
            return amatypeConst === amatype &&
                ((_c = obj === null || obj === void 0 ? void 0 : obj.properties) === null || _c === void 0 ? void 0 : _c.__config);
        },
        transform: (obj) => ({
            __amatype: obj.properties.__amatype.const,
            config: extractConstants(obj.properties.__config),
        }),
    };
}
const typeTransformers = [
    // Transformer for AMA image types
    createAmaDefTransformer("AmaImageDef"),
    // Transformer for AMA file types
    createAmaDefTransformer("AmaFileDef"),
];
// Register a new type transformer
// Appended transformers are consulted after the built-ins, in order.
function registerTypeTransformer(transformer) {
    typeTransformers.push(transformer);
}
// Recursively process the JSON structure to transform special types
// Primitives pass through untouched; arrays are mapped element-wise; the
// first transformer whose canTransform matches replaces the whole object,
// otherwise each property is processed recursively (returns a new object —
// the input is not mutated).
function processSpecialTypes(schema) {
    if (!schema || typeof schema !== "object") {
        return schema;
    }
    // If it's an array, process each item
    if (Array.isArray(schema)) {
        return schema.map(processSpecialTypes);
    }
    // Check if this object should be transformed
    for (const transformer of typeTransformers) {
        if (transformer.canTransform(schema)) {
            return transformer.transform(schema);
        }
    }
    // Process object properties recursively
    const result = {};
    for (const key in schema) {
        result[key] = processSpecialTypes(schema[key]);
    }
    return result;
}
|