@atmyapp/cli 0.0.8 → 0.0.9
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +47 -1
- package/dist/cli/commands/snapshot.d.ts +2 -0
- package/dist/cli/commands/snapshot.js +287 -0
- package/dist/cli/index.js +2 -0
- package/dist/cli/types/migrate.d.ts +5 -0
- package/dist/cli/utils/collection-transformer.js +28 -0
- package/dist/cli/utils/content-processor.js +14 -9
- package/dist/cli/utils/parallel-schema-processor.js +9 -1
- package/dist/cli/utils/schema-processor.d.ts +1 -0
- package/dist/cli/utils/schema-processor.js +209 -0
- package/package.json +2 -2
package/README.md
CHANGED
|
@@ -19,6 +19,8 @@
|
|
|
19
19
|
- [Content Definitions](#content-definitions)
|
|
20
20
|
- [Event Definitions](#event-definitions)
|
|
21
21
|
- [Image & File Definitions](#image--file-definitions)
|
|
22
|
+
- [Icon Definitions](#icon-definitions)
|
|
23
|
+
- [MDX Fields](#mdx-fields)
|
|
22
24
|
- [💡 Examples](#-examples)
|
|
23
25
|
- [🔧 Configuration](#-configuration)
|
|
24
26
|
- [🏗️ Architecture](#-architecture)
|
|
@@ -290,6 +292,38 @@ export type UserIcon = AmaIconDef<"/icons/user">;
|
|
|
290
292
|
export type ATMYAPP = [MenuIcon, SearchIcon, UserIcon];
|
|
291
293
|
```
|
|
292
294
|
|
|
295
|
+
### MDX Fields
|
|
296
|
+
|
|
297
|
+
Define MDX (Markdown with JSX) fields with component configurations using `AmaMdxFieldDef`, `AmaMdxConfigDef`, and `AmaComponentDef`:
|
|
298
|
+
|
|
299
|
+
```typescript
|
|
300
|
+
import {
|
|
301
|
+
AmaMdxFieldDef,
|
|
302
|
+
AmaMdxConfigDef,
|
|
303
|
+
AmaComponentDef,
|
|
304
|
+
} from "@atmyapp/core";
|
|
305
|
+
|
|
306
|
+
// 1. Define MDX components and their props
|
|
307
|
+
type Callout = AmaComponentDef<"Callout", {
|
|
308
|
+
title: string;
|
|
309
|
+
type: "info" | "warning" | "error";
|
|
310
|
+
}>;
|
|
311
|
+
|
|
312
|
+
// 2. Create an MDX configuration
|
|
313
|
+
type BlogMdxConfig = AmaMdxConfigDef<"blogComponents", [Callout]>;
|
|
314
|
+
|
|
315
|
+
// 3. Use the config in your content definition
|
|
316
|
+
interface BlogPost {
|
|
317
|
+
title: string;
|
|
318
|
+
content: AmaMdxFieldDef<BlogMdxConfig>;
|
|
319
|
+
}
|
|
320
|
+
|
|
321
|
+
export type BlogPostContent = AmaContentDef<"/blog/posts", BlogPost>;
|
|
322
|
+
|
|
323
|
+
// 4. Export the configuration and definitions
|
|
324
|
+
export type ATMYAPP = [BlogMdxConfig, BlogPostContent];
|
|
325
|
+
```
|
|
326
|
+
|
|
293
327
|
## 💡 Examples
|
|
294
328
|
|
|
295
329
|
### 🏪 E-commerce Setup
|
|
@@ -364,13 +398,24 @@ import {
|
|
|
364
398
|
AmaCustomEventDef,
|
|
365
399
|
AmaImageDef,
|
|
366
400
|
AmaIconDef,
|
|
401
|
+
AmaMdxFieldDef,
|
|
402
|
+
AmaMdxConfigDef,
|
|
403
|
+
AmaComponentDef,
|
|
367
404
|
} from "@atmyapp/core";
|
|
368
405
|
|
|
406
|
+
// MDX Components for blog
|
|
407
|
+
type Callout = AmaComponentDef<"Callout", {
|
|
408
|
+
title: string;
|
|
409
|
+
type: "info" | "warning" | "error";
|
|
410
|
+
}>;
|
|
411
|
+
|
|
412
|
+
type BlogMdxConfig = AmaMdxConfigDef<"blogComponents", [Callout]>;
|
|
413
|
+
|
|
369
414
|
// Blog content types
|
|
370
415
|
interface BlogPost {
|
|
371
416
|
title: string;
|
|
372
417
|
slug: string;
|
|
373
|
-
content:
|
|
418
|
+
content: AmaMdxFieldDef<BlogMdxConfig>;
|
|
374
419
|
excerpt: string;
|
|
375
420
|
publishedAt: string;
|
|
376
421
|
author: {
|
|
@@ -428,6 +473,7 @@ export type ATMYAPP = [
|
|
|
428
473
|
BlogPosts,
|
|
429
474
|
FeaturedPost,
|
|
430
475
|
Categories,
|
|
476
|
+
BlogMdxConfig,
|
|
431
477
|
BlogHeroImage,
|
|
432
478
|
ShareIcon,
|
|
433
479
|
LikeIcon,
|
|
@@ -0,0 +1,287 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
|
3
|
+
if (k2 === undefined) k2 = k;
|
|
4
|
+
var desc = Object.getOwnPropertyDescriptor(m, k);
|
|
5
|
+
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
|
6
|
+
desc = { enumerable: true, get: function() { return m[k]; } };
|
|
7
|
+
}
|
|
8
|
+
Object.defineProperty(o, k2, desc);
|
|
9
|
+
}) : (function(o, m, k, k2) {
|
|
10
|
+
if (k2 === undefined) k2 = k;
|
|
11
|
+
o[k2] = m[k];
|
|
12
|
+
}));
|
|
13
|
+
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
|
14
|
+
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
|
15
|
+
}) : function(o, v) {
|
|
16
|
+
o["default"] = v;
|
|
17
|
+
});
|
|
18
|
+
var __importStar = (this && this.__importStar) || (function () {
|
|
19
|
+
var ownKeys = function(o) {
|
|
20
|
+
ownKeys = Object.getOwnPropertyNames || function (o) {
|
|
21
|
+
var ar = [];
|
|
22
|
+
for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
|
|
23
|
+
return ar;
|
|
24
|
+
};
|
|
25
|
+
return ownKeys(o);
|
|
26
|
+
};
|
|
27
|
+
return function (mod) {
|
|
28
|
+
if (mod && mod.__esModule) return mod;
|
|
29
|
+
var result = {};
|
|
30
|
+
if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
|
|
31
|
+
__setModuleDefault(result, mod);
|
|
32
|
+
return result;
|
|
33
|
+
};
|
|
34
|
+
})();
|
|
35
|
+
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
|
36
|
+
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
|
37
|
+
return new (P || (P = Promise))(function (resolve, reject) {
|
|
38
|
+
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
|
39
|
+
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
|
40
|
+
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
|
|
41
|
+
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
|
42
|
+
});
|
|
43
|
+
};
|
|
44
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
45
|
+
exports.snapshotCommand = snapshotCommand;
|
|
46
|
+
const commander_1 = require("commander");
|
|
47
|
+
const logger_1 = require("../logger");
|
|
48
|
+
const http_1 = require("../utils/http");
|
|
49
|
+
const fs = __importStar(require("fs"));
|
|
50
|
+
const path = __importStar(require("path"));
|
|
51
|
+
const child_process_1 = require("child_process");
|
|
52
|
+
const DEFAULT_OUTPUT_PATH = ".ama/local";
|
|
53
|
+
const API_BASE = "https://ama-core.maciekgamro.workers.dev";
|
|
54
|
+
/**
|
|
55
|
+
* Get the work-container API URL for snapshot operations
|
|
56
|
+
*/
|
|
57
|
+
function getSnapshotApiUrl(projectId, endpoint) {
|
|
58
|
+
return `${API_BASE}/v0/work-container/projects/${projectId}/${endpoint}`;
|
|
59
|
+
}
|
|
60
|
+
/**
|
|
61
|
+
* Create a new snapshot
|
|
62
|
+
*/
|
|
63
|
+
function createSnapshot(session, options, logger) {
|
|
64
|
+
return __awaiter(this, void 0, void 0, function* () {
|
|
65
|
+
var _a, _b;
|
|
66
|
+
const url = getSnapshotApiUrl(session.projectId, "snapshot");
|
|
67
|
+
logger.info(`Creating snapshot for project ${session.projectId}...`);
|
|
68
|
+
logger.verbose_log(`POST ${url}`);
|
|
69
|
+
const response = yield fetch(url, {
|
|
70
|
+
method: "POST",
|
|
71
|
+
headers: {
|
|
72
|
+
Authorization: `Bearer ${session.token}`,
|
|
73
|
+
"Content-Type": "application/json",
|
|
74
|
+
},
|
|
75
|
+
body: JSON.stringify({
|
|
76
|
+
branch: (_a = options.branch) !== null && _a !== void 0 ? _a : "main",
|
|
77
|
+
forceFullSync: (_b = options.forceSync) !== null && _b !== void 0 ? _b : false,
|
|
78
|
+
}),
|
|
79
|
+
});
|
|
80
|
+
if (!response.ok) {
|
|
81
|
+
const errorText = yield response.text();
|
|
82
|
+
throw new Error(`Failed to create snapshot: ${response.status} - ${errorText}`);
|
|
83
|
+
}
|
|
84
|
+
return response.json();
|
|
85
|
+
});
|
|
86
|
+
}
|
|
87
|
+
/**
|
|
88
|
+
* Get the latest snapshot info
|
|
89
|
+
*/
|
|
90
|
+
function getLatestSnapshot(session, options, logger) {
|
|
91
|
+
return __awaiter(this, void 0, void 0, function* () {
|
|
92
|
+
var _a;
|
|
93
|
+
const branch = (_a = options.branch) !== null && _a !== void 0 ? _a : "main";
|
|
94
|
+
const url = `${getSnapshotApiUrl(session.projectId, "latest-snapshot")}?branch=${branch}`;
|
|
95
|
+
logger.verbose_log(`GET ${url}`);
|
|
96
|
+
const response = yield fetch(url, {
|
|
97
|
+
method: "GET",
|
|
98
|
+
headers: {
|
|
99
|
+
Authorization: `Bearer ${session.token}`,
|
|
100
|
+
},
|
|
101
|
+
});
|
|
102
|
+
if (!response.ok) {
|
|
103
|
+
const errorText = yield response.text();
|
|
104
|
+
throw new Error(`Failed to get latest snapshot: ${response.status} - ${errorText}`);
|
|
105
|
+
}
|
|
106
|
+
return response.json();
|
|
107
|
+
});
|
|
108
|
+
}
|
|
109
|
+
/**
|
|
110
|
+
* Download a snapshot from the given URL
|
|
111
|
+
*/
|
|
112
|
+
function downloadSnapshotFile(downloadUrl, token, outputPath, logger) {
|
|
113
|
+
return __awaiter(this, void 0, void 0, function* () {
|
|
114
|
+
logger.info("Downloading snapshot...");
|
|
115
|
+
logger.verbose_log(`GET ${downloadUrl}`);
|
|
116
|
+
const response = yield fetch(downloadUrl, {
|
|
117
|
+
headers: {
|
|
118
|
+
Authorization: `Bearer ${token}`,
|
|
119
|
+
},
|
|
120
|
+
});
|
|
121
|
+
if (!response.ok) {
|
|
122
|
+
const errorText = yield response.text();
|
|
123
|
+
throw new Error(`Failed to download snapshot: ${response.status} - ${errorText}`);
|
|
124
|
+
}
|
|
125
|
+
// Ensure output directory exists
|
|
126
|
+
const outputDir = path.dirname(outputPath);
|
|
127
|
+
if (!fs.existsSync(outputDir)) {
|
|
128
|
+
fs.mkdirSync(outputDir, { recursive: true });
|
|
129
|
+
}
|
|
130
|
+
// Write the ZIP file
|
|
131
|
+
const arrayBuffer = yield response.arrayBuffer();
|
|
132
|
+
const buffer = Buffer.from(arrayBuffer);
|
|
133
|
+
fs.writeFileSync(outputPath, buffer);
|
|
134
|
+
return outputPath;
|
|
135
|
+
});
|
|
136
|
+
}
|
|
137
|
+
/**
|
|
138
|
+
* Extract a ZIP file to a directory
|
|
139
|
+
*/
|
|
140
|
+
function extractZip(zipPath, extractDir, logger) {
|
|
141
|
+
logger.info("Extracting snapshot...");
|
|
142
|
+
// Ensure extract directory exists and is empty
|
|
143
|
+
if (fs.existsSync(extractDir)) {
|
|
144
|
+
// Remove existing contents
|
|
145
|
+
fs.rmSync(extractDir, { recursive: true, force: true });
|
|
146
|
+
}
|
|
147
|
+
fs.mkdirSync(extractDir, { recursive: true });
|
|
148
|
+
// Use tar on Unix or PowerShell on Windows
|
|
149
|
+
const isWindows = process.platform === "win32";
|
|
150
|
+
try {
|
|
151
|
+
if (isWindows) {
|
|
152
|
+
// Use PowerShell's Expand-Archive
|
|
153
|
+
(0, child_process_1.execSync)(`powershell -Command "Expand-Archive -Path '${zipPath}' -DestinationPath '${extractDir}' -Force"`, { stdio: "pipe" });
|
|
154
|
+
}
|
|
155
|
+
else {
|
|
156
|
+
// Use unzip on Unix
|
|
157
|
+
(0, child_process_1.execSync)(`unzip -o "${zipPath}" -d "${extractDir}"`, { stdio: "pipe" });
|
|
158
|
+
}
|
|
159
|
+
}
|
|
160
|
+
catch (error) {
|
|
161
|
+
throw new Error(`Failed to extract snapshot: ${error instanceof Error ? error.message : error}`);
|
|
162
|
+
}
|
|
163
|
+
}
|
|
164
|
+
/**
|
|
165
|
+
* Format bytes to human readable string
|
|
166
|
+
*/
|
|
167
|
+
function formatBytes(bytes) {
|
|
168
|
+
if (bytes === 0)
|
|
169
|
+
return "0 Bytes";
|
|
170
|
+
const k = 1024;
|
|
171
|
+
const sizes = ["Bytes", "KB", "MB", "GB"];
|
|
172
|
+
const i = Math.floor(Math.log(bytes) / Math.log(k));
|
|
173
|
+
return parseFloat((bytes / Math.pow(k, i)).toFixed(2)) + " " + sizes[i];
|
|
174
|
+
}
|
|
175
|
+
function snapshotCommand() {
|
|
176
|
+
const command = new commander_1.Command("snapshot").description("Manage storage snapshots for local fallback");
|
|
177
|
+
// Subcommand: create
|
|
178
|
+
command
|
|
179
|
+
.command("create")
|
|
180
|
+
.description("Create a new storage snapshot (may take a few minutes)")
|
|
181
|
+
.option("-t, --token <token>", "Authentication token")
|
|
182
|
+
.option("-u, --url <url>", "Project base URL")
|
|
183
|
+
.option("-p, --project-id <id>", "Project identifier")
|
|
184
|
+
.option("-b, --branch <branch>", "Branch name", "main")
|
|
185
|
+
.option("--force-sync", "Force full sync instead of incremental")
|
|
186
|
+
.option("--verbose", "Enable verbose logging")
|
|
187
|
+
.action((options) => __awaiter(this, void 0, void 0, function* () {
|
|
188
|
+
var _a;
|
|
189
|
+
const logger = new logger_1.Logger(Boolean(options.verbose));
|
|
190
|
+
try {
|
|
191
|
+
const session = (0, http_1.resolveSession)({
|
|
192
|
+
token: options.token,
|
|
193
|
+
projectId: options.projectId,
|
|
194
|
+
url: options.url,
|
|
195
|
+
});
|
|
196
|
+
const result = yield createSnapshot(session, options, logger);
|
|
197
|
+
if (!result.success || !result.data) {
|
|
198
|
+
throw new Error((_a = result.error) !== null && _a !== void 0 ? _a : "Failed to create snapshot");
|
|
199
|
+
}
|
|
200
|
+
const { data } = result;
|
|
201
|
+
logger.success("Snapshot created successfully!");
|
|
202
|
+
logger.info(` Project: ${data.projectId}`);
|
|
203
|
+
logger.info(` Branch: ${data.branch}`);
|
|
204
|
+
logger.info(` Collections: ${data.stats.collectionsProcessed}`);
|
|
205
|
+
logger.info(` Entries: ${data.stats.entriesIncluded}`);
|
|
206
|
+
logger.info(` Blobs: ${data.stats.blobsIncluded}`);
|
|
207
|
+
logger.info(` Files: ${data.stats.filesIncluded}`);
|
|
208
|
+
logger.info(` Size: ${formatBytes(data.stats.totalSizeBytes)}`);
|
|
209
|
+
logger.info(` Duration: ${data.duration}ms`);
|
|
210
|
+
logger.info(` Download URL expires: ${data.expiresAt}`);
|
|
211
|
+
logger.info("");
|
|
212
|
+
logger.info("To download this snapshot, run:");
|
|
213
|
+
logger.info(" ama snapshot download");
|
|
214
|
+
}
|
|
215
|
+
catch (error) {
|
|
216
|
+
const message = error instanceof Error ? error.message : "Unknown error";
|
|
217
|
+
logger.error(`Snapshot creation failed: ${message}`, error);
|
|
218
|
+
process.exit(1);
|
|
219
|
+
}
|
|
220
|
+
}));
|
|
221
|
+
// Subcommand: download
|
|
222
|
+
command
|
|
223
|
+
.command("download")
|
|
224
|
+
.description("Download the latest snapshot to local storage")
|
|
225
|
+
.option("-t, --token <token>", "Authentication token")
|
|
226
|
+
.option("-u, --url <url>", "Project base URL")
|
|
227
|
+
.option("-p, --project-id <id>", "Project identifier")
|
|
228
|
+
.option("-b, --branch <branch>", "Branch name", "main")
|
|
229
|
+
.option("-o, --output <path>", "Output directory", DEFAULT_OUTPUT_PATH)
|
|
230
|
+
.option("--verbose", "Enable verbose logging")
|
|
231
|
+
.action((options) => __awaiter(this, void 0, void 0, function* () {
|
|
232
|
+
var _a, _b, _c;
|
|
233
|
+
const logger = new logger_1.Logger(Boolean(options.verbose));
|
|
234
|
+
try {
|
|
235
|
+
const session = (0, http_1.resolveSession)({
|
|
236
|
+
token: options.token,
|
|
237
|
+
projectId: options.projectId,
|
|
238
|
+
url: options.url,
|
|
239
|
+
});
|
|
240
|
+
// Get the latest snapshot info
|
|
241
|
+
logger.info("Checking for latest snapshot...");
|
|
242
|
+
const latestResult = yield getLatestSnapshot(session, options, logger);
|
|
243
|
+
let downloadUrl;
|
|
244
|
+
if (latestResult.data && !latestResult.data.isExpired) {
|
|
245
|
+
// Use existing snapshot
|
|
246
|
+
logger.info(`Found existing snapshot from: ${latestResult.data.createdAt}`);
|
|
247
|
+
downloadUrl = latestResult.data.downloadUrl;
|
|
248
|
+
}
|
|
249
|
+
else {
|
|
250
|
+
// Need to create a new snapshot first
|
|
251
|
+
if ((_a = latestResult.data) === null || _a === void 0 ? void 0 : _a.isExpired) {
|
|
252
|
+
logger.warn("Latest snapshot has expired, creating a new one...");
|
|
253
|
+
}
|
|
254
|
+
else {
|
|
255
|
+
logger.info("No existing snapshot found, creating a new one...");
|
|
256
|
+
}
|
|
257
|
+
const createResult = yield createSnapshot(session, options, logger);
|
|
258
|
+
if (!createResult.success || !createResult.data) {
|
|
259
|
+
throw new Error((_b = createResult.error) !== null && _b !== void 0 ? _b : "Failed to create snapshot");
|
|
260
|
+
}
|
|
261
|
+
downloadUrl = createResult.data.downloadUrl;
|
|
262
|
+
logger.info(`Snapshot created (${formatBytes(createResult.data.stats.totalSizeBytes)})`);
|
|
263
|
+
}
|
|
264
|
+
// Download the snapshot
|
|
265
|
+
const outputDir = path.resolve(process.cwd(), (_c = options.output) !== null && _c !== void 0 ? _c : DEFAULT_OUTPUT_PATH);
|
|
266
|
+
const zipPath = path.join(outputDir, "snapshot.zip");
|
|
267
|
+
yield downloadSnapshotFile(downloadUrl, session.token, zipPath, logger);
|
|
268
|
+
logger.success(`Downloaded to: ${zipPath}`);
|
|
269
|
+
// Extract the snapshot
|
|
270
|
+
extractZip(zipPath, outputDir, logger);
|
|
271
|
+
// Remove the ZIP file after extraction
|
|
272
|
+
fs.unlinkSync(zipPath);
|
|
273
|
+
logger.success(`Snapshot extracted to: ${outputDir}`);
|
|
274
|
+
logger.info("");
|
|
275
|
+
logger.info("Your local storage is now ready. Configure your client with:");
|
|
276
|
+
logger.info(' clientMode: "local"');
|
|
277
|
+
logger.info(" or");
|
|
278
|
+
logger.info(' clientMode: "with-fallback"');
|
|
279
|
+
}
|
|
280
|
+
catch (error) {
|
|
281
|
+
const message = error instanceof Error ? error.message : "Unknown error";
|
|
282
|
+
logger.error(`Snapshot download failed: ${message}`, error);
|
|
283
|
+
process.exit(1);
|
|
284
|
+
}
|
|
285
|
+
}));
|
|
286
|
+
return command;
|
|
287
|
+
}
|
package/dist/cli/index.js
CHANGED
|
@@ -6,6 +6,7 @@ const migrate_1 = require("./commands/migrate");
|
|
|
6
6
|
const use_1 = require("./commands/use");
|
|
7
7
|
const upload_1 = require("./commands/upload");
|
|
8
8
|
const generate_1 = require("./commands/generate");
|
|
9
|
+
const snapshot_1 = require("./commands/snapshot");
|
|
9
10
|
const program = new commander_1.Command()
|
|
10
11
|
.name("ama")
|
|
11
12
|
.description("AtMyApp CLI Tool")
|
|
@@ -14,6 +15,7 @@ program.addCommand((0, use_1.useCommand)());
|
|
|
14
15
|
program.addCommand((0, migrate_1.migrateCommand)());
|
|
15
16
|
program.addCommand((0, upload_1.uploadCommand)());
|
|
16
17
|
program.addCommand((0, generate_1.generateCommand)());
|
|
18
|
+
program.addCommand((0, snapshot_1.snapshotCommand)());
|
|
17
19
|
program.parseAsync(process.argv).catch((err) => {
|
|
18
20
|
console.error("Error:", err.message);
|
|
19
21
|
process.exit(1);
|
|
@@ -15,6 +15,11 @@ export interface OutputDefinition {
|
|
|
15
15
|
events: Record<string, EventConfig>;
|
|
16
16
|
args: any[];
|
|
17
17
|
metadata?: any;
|
|
18
|
+
mdx?: Record<string, {
|
|
19
|
+
components: Record<string, {
|
|
20
|
+
props?: Record<string, string>;
|
|
21
|
+
}>;
|
|
22
|
+
}>;
|
|
18
23
|
}
|
|
19
24
|
export interface MigrateOptions {
|
|
20
25
|
dryRun: boolean;
|
|
@@ -29,6 +29,22 @@ function detectAmaAssetField(schema) {
|
|
|
29
29
|
}
|
|
30
30
|
return Object.assign({ format: assetMapping.format, semanticType: assetMapping.semanticType }, (imageOptions ? { imageOptions } : {}));
|
|
31
31
|
}
|
|
32
|
+
function detectAmaMdxField(schema) {
|
|
33
|
+
var _a, _b, _c, _d, _e, _f;
|
|
34
|
+
const amaType = (_b = (_a = schema === null || schema === void 0 ? void 0 : schema.properties) === null || _a === void 0 ? void 0 : _a.__amatype) === null || _b === void 0 ? void 0 : _b.const;
|
|
35
|
+
if (amaType !== "AmaMdxDef") {
|
|
36
|
+
return null;
|
|
37
|
+
}
|
|
38
|
+
const mdxConfigConst = (_d = (_c = schema === null || schema === void 0 ? void 0 : schema.properties) === null || _c === void 0 ? void 0 : _c.mdxConfig) === null || _d === void 0 ? void 0 : _d.const;
|
|
39
|
+
if (typeof mdxConfigConst === "string") {
|
|
40
|
+
return { mdxConfig: mdxConfigConst };
|
|
41
|
+
}
|
|
42
|
+
const mdxConfigEnum = (_f = (_e = schema === null || schema === void 0 ? void 0 : schema.properties) === null || _e === void 0 ? void 0 : _e.mdxConfig) === null || _f === void 0 ? void 0 : _f.enum;
|
|
43
|
+
if (Array.isArray(mdxConfigEnum) && typeof mdxConfigEnum[0] === "string") {
|
|
44
|
+
return { mdxConfig: mdxConfigEnum[0] };
|
|
45
|
+
}
|
|
46
|
+
return null;
|
|
47
|
+
}
|
|
32
48
|
function ensureDescription(description, fallback) {
|
|
33
49
|
if (typeof description === "string" && description.trim().length > 0) {
|
|
34
50
|
return description.trim();
|
|
@@ -136,6 +152,18 @@ function convertField(schema, fieldName, logger, breadcrumb) {
|
|
|
136
152
|
}
|
|
137
153
|
return base;
|
|
138
154
|
}
|
|
155
|
+
const mdxField = detectAmaMdxField(schema);
|
|
156
|
+
if (mdxField) {
|
|
157
|
+
const description = ensureDescription(schema.description, `Generated description for ${breadcrumb}`);
|
|
158
|
+
return {
|
|
159
|
+
type: "string",
|
|
160
|
+
description,
|
|
161
|
+
format: "mdx",
|
|
162
|
+
storeInBlob: true,
|
|
163
|
+
__amatype: "AmaMdxDef",
|
|
164
|
+
mdxConfig: mdxField.mdxConfig,
|
|
165
|
+
};
|
|
166
|
+
}
|
|
139
167
|
let type = inferType(schema);
|
|
140
168
|
if (!type) {
|
|
141
169
|
logger.error(`Collection conversion failed for field "${breadcrumb}": could not determine field type.`);
|
|
@@ -105,17 +105,26 @@ function generateOutput(contents, config, logger) {
|
|
|
105
105
|
// Separate events from regular definitions
|
|
106
106
|
const events = {};
|
|
107
107
|
const definitions = {};
|
|
108
|
+
const mdx = {};
|
|
108
109
|
transformedContents.forEach((content) => {
|
|
109
|
-
var _a, _b, _c, _d;
|
|
110
|
+
var _a, _b, _c, _d, _e, _f;
|
|
111
|
+
if (content.type === "mdxConfig") {
|
|
112
|
+
const name = (_a = content.structure) === null || _a === void 0 ? void 0 : _a.name;
|
|
113
|
+
const components = (_b = content.structure) === null || _b === void 0 ? void 0 : _b.components;
|
|
114
|
+
if (typeof name === "string" && components && typeof components === "object") {
|
|
115
|
+
mdx[name] = { components };
|
|
116
|
+
}
|
|
117
|
+
return;
|
|
118
|
+
}
|
|
110
119
|
const contentType = determineContentType(content);
|
|
111
120
|
if (contentType === "event") {
|
|
112
121
|
logger.verbose_log(`Processing event: ${content.path}`);
|
|
113
122
|
// Extract event ID from path or structure
|
|
114
123
|
let eventId = content.path;
|
|
115
|
-
if ((
|
|
124
|
+
if ((_e = (_d = (_c = content.structure) === null || _c === void 0 ? void 0 : _c.properties) === null || _d === void 0 ? void 0 : _d.id) === null || _e === void 0 ? void 0 : _e.const) {
|
|
116
125
|
eventId = content.structure.properties.id.const;
|
|
117
126
|
}
|
|
118
|
-
else if ((
|
|
127
|
+
else if ((_f = content.structure) === null || _f === void 0 ? void 0 : _f.id) {
|
|
119
128
|
eventId = content.structure.id;
|
|
120
129
|
}
|
|
121
130
|
const eventConfig = extractEventConfig(content);
|
|
@@ -136,12 +145,8 @@ function generateOutput(contents, config, logger) {
|
|
|
136
145
|
}
|
|
137
146
|
});
|
|
138
147
|
logger.verbose_log("Generating base output definition");
|
|
139
|
-
const baseOutput = {
|
|
140
|
-
|
|
141
|
-
definitions,
|
|
142
|
-
events,
|
|
143
|
-
args: config.args || {},
|
|
144
|
-
};
|
|
148
|
+
const baseOutput = Object.assign({ description: config.description || "AMA Definitions", definitions,
|
|
149
|
+
events, args: config.args || {} }, (Object.keys(mdx).length > 0 ? { mdx } : {}));
|
|
145
150
|
// Transform the final output through the pipeline
|
|
146
151
|
logger.verbose_log("Applying output transformations");
|
|
147
152
|
const finalOutput = definition_processor_1.definitionPipeline.transformOutput(baseOutput, config, logger);
|
|
@@ -93,6 +93,13 @@ function processFilesParallel(files, tsconfigPath, continueOnError, logger, maxW
|
|
|
93
93
|
logger.error("Parallel processing failed:", error);
|
|
94
94
|
throw error;
|
|
95
95
|
}
|
|
96
|
+
const { createProject, extractMdxConfigsFromSourceFiles } = require("./schema-processor");
|
|
97
|
+
const project = createProject(relevantFiles, tsconfigPath, logger);
|
|
98
|
+
const mdxConfigs = extractMdxConfigsFromSourceFiles(project.getSourceFiles(), logger);
|
|
99
|
+
if (mdxConfigs.length > 0) {
|
|
100
|
+
contents.push(...mdxConfigs);
|
|
101
|
+
successCount += mdxConfigs.length;
|
|
102
|
+
}
|
|
96
103
|
return { contents, errors, successCount, failureCount };
|
|
97
104
|
});
|
|
98
105
|
}
|
|
@@ -114,7 +121,8 @@ function filterRelevantFiles(files, logger) {
|
|
|
114
121
|
// Quick text search for ATMYAPP export
|
|
115
122
|
const content = yield fs.readFile(file, "utf8");
|
|
116
123
|
// Simple regex to check for ATMYAPP exports
|
|
117
|
-
if (/export\s+type\s+ATMYAPP\s*=/.test(content)
|
|
124
|
+
if (/export\s+type\s+ATMYAPP\s*=/.test(content) ||
|
|
125
|
+
/AmaMdxConfigDef/.test(content)) {
|
|
118
126
|
return file;
|
|
119
127
|
}
|
|
120
128
|
return null;
|
|
@@ -3,5 +3,6 @@ import { Logger } from "../logger";
|
|
|
3
3
|
import { Content, ProcessingResult } from "../types/migrate";
|
|
4
4
|
export declare function scanFiles(patterns: string[], logger: Logger): Promise<string[]>;
|
|
5
5
|
export declare function createProject(files: string[], tsconfigPath: string, logger: Logger): Project;
|
|
6
|
+
export declare function extractMdxConfigsFromSourceFiles(sourceFiles: SourceFile[], logger: Logger): Content[];
|
|
6
7
|
export declare function processAtmyappExport(atmyappType: TypeAliasDeclaration, file: SourceFile, tsconfigPath: string, logger: Logger): Content[];
|
|
7
8
|
export declare function processFiles(sourceFiles: SourceFile[], tsconfigPath: string, continueOnError: boolean, logger: Logger): ProcessingResult;
|
|
@@ -47,6 +47,7 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
|
|
|
47
47
|
Object.defineProperty(exports, "__esModule", { value: true });
|
|
48
48
|
exports.scanFiles = scanFiles;
|
|
49
49
|
exports.createProject = createProject;
|
|
50
|
+
exports.extractMdxConfigsFromSourceFiles = extractMdxConfigsFromSourceFiles;
|
|
50
51
|
exports.processAtmyappExport = processAtmyappExport;
|
|
51
52
|
exports.processFiles = processFiles;
|
|
52
53
|
const fast_glob_1 = __importDefault(require("fast-glob"));
|
|
@@ -202,6 +203,209 @@ function extractEventInfoFromAST(file, definitionType, logger) {
|
|
|
202
203
|
return null;
|
|
203
204
|
}
|
|
204
205
|
}
|
|
206
|
+
function getTypeAliasByName(file, name) {
|
|
207
|
+
const project = file.getProject();
|
|
208
|
+
for (const sourceFile of project.getSourceFiles()) {
|
|
209
|
+
const alias = sourceFile.getTypeAlias(name);
|
|
210
|
+
if (alias) {
|
|
211
|
+
return alias;
|
|
212
|
+
}
|
|
213
|
+
}
|
|
214
|
+
return undefined;
|
|
215
|
+
}
|
|
216
|
+
function extractStringLiteral(typeNode) {
|
|
217
|
+
if (!ts_morph_1.Node.isLiteralTypeNode(typeNode)) {
|
|
218
|
+
return null;
|
|
219
|
+
}
|
|
220
|
+
const literal = typeNode.getLiteral();
|
|
221
|
+
return ts_morph_1.Node.isStringLiteral(literal) ? literal.getLiteralValue() : null;
|
|
222
|
+
}
|
|
223
|
+
function mapPrimitiveType(typeNode) {
|
|
224
|
+
switch (typeNode.getKind()) {
|
|
225
|
+
case ts.SyntaxKind.StringKeyword:
|
|
226
|
+
return "string";
|
|
227
|
+
case ts.SyntaxKind.NumberKeyword:
|
|
228
|
+
return "number";
|
|
229
|
+
case ts.SyntaxKind.BooleanKeyword:
|
|
230
|
+
return "boolean";
|
|
231
|
+
case ts.SyntaxKind.ObjectKeyword:
|
|
232
|
+
return "object";
|
|
233
|
+
default:
|
|
234
|
+
break;
|
|
235
|
+
}
|
|
236
|
+
if (ts_morph_1.Node.isLiteralTypeNode(typeNode)) {
|
|
237
|
+
const literal = typeNode.getLiteral();
|
|
238
|
+
if (ts_morph_1.Node.isStringLiteral(literal)) {
|
|
239
|
+
return "string";
|
|
240
|
+
}
|
|
241
|
+
if (ts_morph_1.Node.isNumericLiteral(literal)) {
|
|
242
|
+
return "number";
|
|
243
|
+
}
|
|
244
|
+
if (literal.getKind() === ts.SyntaxKind.TrueKeyword || literal.getKind() === ts.SyntaxKind.FalseKeyword) {
|
|
245
|
+
return "boolean";
|
|
246
|
+
}
|
|
247
|
+
}
|
|
248
|
+
return null;
|
|
249
|
+
}
|
|
250
|
+
function mapPropType(typeNode, file) {
|
|
251
|
+
var _a;
|
|
252
|
+
if (ts_morph_1.Node.isUnionTypeNode(typeNode)) {
|
|
253
|
+
const nonNullable = typeNode
|
|
254
|
+
.getTypeNodes()
|
|
255
|
+
.filter((node) => node.getKind() !== ts.SyntaxKind.UndefinedKeyword &&
|
|
256
|
+
node.getKind() !== ts.SyntaxKind.NullKeyword);
|
|
257
|
+
if (nonNullable.length === 1) {
|
|
258
|
+
return mapPropType(nonNullable[0], file);
|
|
259
|
+
}
|
|
260
|
+
return "object";
|
|
261
|
+
}
|
|
262
|
+
if (ts_morph_1.Node.isArrayTypeNode(typeNode)) {
|
|
263
|
+
const elementType = mapPrimitiveType(typeNode.getElementTypeNode());
|
|
264
|
+
return elementType ? `${elementType}[]` : "object";
|
|
265
|
+
}
|
|
266
|
+
if (ts_morph_1.Node.isTypeReference(typeNode)) {
|
|
267
|
+
const typeName = typeNode.getTypeName();
|
|
268
|
+
if (ts_morph_1.Node.isIdentifier(typeName) && typeName.getText() === "Array") {
|
|
269
|
+
const args = typeNode.getTypeArguments();
|
|
270
|
+
if (args.length > 0) {
|
|
271
|
+
const elementType = mapPrimitiveType(args[0]);
|
|
272
|
+
return elementType ? `${elementType}[]` : "object";
|
|
273
|
+
}
|
|
274
|
+
}
|
|
275
|
+
return "object";
|
|
276
|
+
}
|
|
277
|
+
if (ts_morph_1.Node.isTypeLiteral(typeNode)) {
|
|
278
|
+
return "object";
|
|
279
|
+
}
|
|
280
|
+
return (_a = mapPrimitiveType(typeNode)) !== null && _a !== void 0 ? _a : "object";
|
|
281
|
+
}
|
|
282
|
+
function extractPropsFromTypeNode(typeNode, file) {
|
|
283
|
+
if (!typeNode) {
|
|
284
|
+
return undefined;
|
|
285
|
+
}
|
|
286
|
+
if (ts_morph_1.Node.isTypeReference(typeNode)) {
|
|
287
|
+
const typeName = typeNode.getTypeName();
|
|
288
|
+
if (ts_morph_1.Node.isIdentifier(typeName)) {
|
|
289
|
+
const alias = getTypeAliasByName(file, typeName.getText());
|
|
290
|
+
if (alias) {
|
|
291
|
+
return extractPropsFromTypeNode(alias.getTypeNode(), file);
|
|
292
|
+
}
|
|
293
|
+
}
|
|
294
|
+
}
|
|
295
|
+
if (!ts_morph_1.Node.isTypeLiteral(typeNode)) {
|
|
296
|
+
return undefined;
|
|
297
|
+
}
|
|
298
|
+
const props = {};
|
|
299
|
+
typeNode.getMembers().forEach((member) => {
|
|
300
|
+
if (!ts_morph_1.Node.isPropertySignature(member)) {
|
|
301
|
+
return;
|
|
302
|
+
}
|
|
303
|
+
const name = member.getName();
|
|
304
|
+
const propTypeNode = member.getTypeNode();
|
|
305
|
+
if (!propTypeNode || typeof name !== "string" || name.length === 0) {
|
|
306
|
+
return;
|
|
307
|
+
}
|
|
308
|
+
props[name] = mapPropType(propTypeNode, file);
|
|
309
|
+
});
|
|
310
|
+
return Object.keys(props).length > 0 ? props : undefined;
|
|
311
|
+
}
|
|
312
|
+
function extractComponentFromTypeNode(typeNode, file, logger) {
|
|
313
|
+
if (!ts_morph_1.Node.isTypeReference(typeNode)) {
|
|
314
|
+
return null;
|
|
315
|
+
}
|
|
316
|
+
const typeName = typeNode.getTypeName();
|
|
317
|
+
if (!ts_morph_1.Node.isIdentifier(typeName)) {
|
|
318
|
+
return null;
|
|
319
|
+
}
|
|
320
|
+
const typeNameText = typeName.getText();
|
|
321
|
+
if (typeNameText !== "AmaComponentDef") {
|
|
322
|
+
const alias = getTypeAliasByName(file, typeNameText);
|
|
323
|
+
if (!alias) {
|
|
324
|
+
return null;
|
|
325
|
+
}
|
|
326
|
+
const aliasTypeNode = alias.getTypeNode();
|
|
327
|
+
if (!aliasTypeNode) {
|
|
328
|
+
return null;
|
|
329
|
+
}
|
|
330
|
+
return extractComponentFromTypeNode(aliasTypeNode, file, logger);
|
|
331
|
+
}
|
|
332
|
+
const typeArguments = typeNode.getTypeArguments();
|
|
333
|
+
if (typeArguments.length === 0) {
|
|
334
|
+
return null;
|
|
335
|
+
}
|
|
336
|
+
const componentName = extractStringLiteral(typeArguments[0]);
|
|
337
|
+
if (!componentName) {
|
|
338
|
+
logger.warn(`MDX component name is not a string literal in ${file.getFilePath()}`);
|
|
339
|
+
return null;
|
|
340
|
+
}
|
|
341
|
+
const props = extractPropsFromTypeNode(typeArguments[1], file);
|
|
342
|
+
return Object.assign({ name: componentName }, (props ? { props } : {}));
|
|
343
|
+
}
|
|
344
|
+
/**
 * Extracts MDX configuration definitions from a single source file's AST.
 *
 * Scans the file's type aliases for the shape
 * `type X = AmaMdxConfigDef<"name", [Component1, Component2, ...]>` and
 * returns a map of config name -> `{ components }`, where `components` maps
 * each component name to `{ props? }`.
 *
 * @param {object} file - ts-morph SourceFile to scan.
 * @param {{ warn(msg: string): void }} logger - Sink for non-fatal warnings.
 * @returns {Object<string, { components: object }>} Configs keyed by name.
 */
function extractMdxConfigsFromAST(file, logger) {
    // Null-prototype dictionaries: with a plain `{}`, a config or component
    // literally named "__proto__" would silently mutate the object's
    // prototype instead of being recorded as an entry.
    const configs = Object.create(null);
    for (const alias of file.getTypeAliases()) {
        const aliasTypeNode = alias.getTypeNode();
        if (!aliasTypeNode || !ts_morph_1.Node.isTypeReference(aliasTypeNode)) {
            continue;
        }
        const referencedName = aliasTypeNode.getTypeName();
        if (!ts_morph_1.Node.isIdentifier(referencedName) || referencedName.getText() !== "AmaMdxConfigDef") {
            continue;
        }
        const typeArguments = aliasTypeNode.getTypeArguments();
        if (typeArguments.length < 2) {
            continue;
        }
        // First type argument: the config's name (must be a string literal).
        const configName = extractStringLiteral(typeArguments[0]);
        if (!configName) {
            logger.warn(`MDX config name is not a string literal in ${file.getFilePath()}`);
            continue;
        }
        // Second type argument: a tuple (or homogeneous array) of
        // AmaComponentDef references describing the allowed components.
        const componentsArg = typeArguments[1];
        const componentTypeNodes = [];
        if (ts_morph_1.Node.isTupleTypeNode(componentsArg)) {
            componentTypeNodes.push(...componentsArg.getElements());
        }
        else if (ts_morph_1.Node.isArrayTypeNode(componentsArg)) {
            componentTypeNodes.push(componentsArg.getElementTypeNode());
        }
        if (componentTypeNodes.length === 0) {
            // Config with no components is still valid — record it empty.
            configs[configName] = { components: {} };
            continue;
        }
        const components = Object.create(null);
        for (const componentNode of componentTypeNodes) {
            const component = extractComponentFromTypeNode(componentNode, file, logger);
            if (!component) {
                // Unresolvable entries are skipped; warnings are emitted by
                // extractComponentFromTypeNode where applicable.
                continue;
            }
            components[component.name] = component.props ? { props: component.props } : {};
        }
        configs[configName] = { components };
    }
    return configs;
}
|
|
388
|
+
/**
 * Merges MDX configs discovered across all source files into content entries.
 *
 * Each unique config becomes a synthetic content definition at
 * `__mdx_config__/<name>` with `type: "mdxConfig"`. When the same config name
 * appears in more than one file, the first occurrence wins and later ones are
 * skipped with a warning.
 *
 * @param {object[]} sourceFiles - ts-morph SourceFiles to scan.
 * @param {{ warn(msg: string): void }} logger - Sink for duplicate warnings.
 * @returns {Array<{ path: string, structure: object, type: string }>}
 *   One content entry per unique MDX config.
 */
function extractMdxConfigsFromSourceFiles(sourceFiles, logger) {
    // Null-prototype map: a plain `{}` makes `mergedConfigs[name]` truthy for
    // inherited keys ("constructor", "toString", ...), which would falsely
    // flag such config names as duplicates and drop them; "__proto__" would
    // mutate the prototype instead of storing an entry.
    const mergedConfigs = Object.create(null);
    sourceFiles.forEach((file) => {
        const fileConfigs = extractMdxConfigsFromAST(file, logger);
        Object.entries(fileConfigs).forEach(([name, config]) => {
            if (mergedConfigs[name]) {
                logger.warn(`Duplicate MDX config "${name}" found in ${file.getFilePath()}`);
                return;
            }
            mergedConfigs[name] = config;
        });
    });
    return Object.entries(mergedConfigs).map(([name, config]) => ({
        path: `__mdx_config__/${name}`,
        structure: {
            name,
            components: config.components,
        },
        type: "mdxConfig",
    }));
}
|
|
205
409
|
// Processes an ATMYAPP export to extract content definitions
|
|
206
410
|
function processAtmyappExport(atmyappType, file, tsconfigPath, logger) {
|
|
207
411
|
var _a, _b, _c, _d, _e, _f, _g, _h, _j, _k, _l, _m, _o, _p, _q, _r, _s, _t;
|
|
@@ -416,5 +620,10 @@ function processFiles(sourceFiles, tsconfigPath, continueOnError, logger) {
|
|
|
416
620
|
}
|
|
417
621
|
});
|
|
418
622
|
});
|
|
623
|
+
const mdxConfigs = extractMdxConfigsFromSourceFiles(sourceFiles, logger);
|
|
624
|
+
if (mdxConfigs.length > 0) {
|
|
625
|
+
contents.push(...mdxConfigs);
|
|
626
|
+
successCount += mdxConfigs.length;
|
|
627
|
+
}
|
|
419
628
|
return { contents, errors, successCount, failureCount };
|
|
420
629
|
}
|
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "@atmyapp/cli",
|
|
3
|
-
"version": "0.0.8",
|
|
3
|
+
"version": "0.0.9",
|
|
4
4
|
"main": "dist/index.js",
|
|
5
5
|
"types": "dist/index.d.ts",
|
|
6
6
|
"scripts": {
|
|
@@ -28,7 +28,7 @@
|
|
|
28
28
|
"license": "ISC",
|
|
29
29
|
"description": "",
|
|
30
30
|
"devDependencies": {
|
|
31
|
-
"@atmyapp/core": "^0.0.
|
|
31
|
+
"@atmyapp/core": "^0.0.17",
|
|
32
32
|
"@types/jest": "^29.5.14",
|
|
33
33
|
"@typescript-eslint/eslint-plugin": "^8.32.1",
|
|
34
34
|
"@typescript-eslint/parser": "^8.32.1",
|