@atmyapp/cli 0.0.7 → 0.0.9
This diff compares publicly available versions of the package as released to a supported registry. It is provided for informational purposes only and reflects the changes between those versions as they appear in the public registry.
- package/README.md +47 -1
- package/dist/cli/commands/generate.d.ts +2 -0
- package/dist/cli/commands/generate.js +119 -0
- package/dist/cli/commands/migrate.js +15 -15
- package/dist/cli/commands/snapshot.d.ts +2 -0
- package/dist/cli/commands/snapshot.js +287 -0
- package/dist/cli/commands/upload.d.ts +2 -0
- package/dist/cli/commands/upload.js +334 -0
- package/dist/cli/commands/use.js +21 -9
- package/dist/cli/index.js +6 -0
- package/dist/cli/logger/index.d.ts +5 -3
- package/dist/cli/logger/index.js +55 -9
- package/dist/cli/types/migrate.d.ts +5 -0
- package/dist/cli/utils/collection-transformer.js +28 -0
- package/dist/cli/utils/config.d.ts +5 -5
- package/dist/cli/utils/config.js +0 -1
- package/dist/cli/utils/content-processor.js +14 -9
- package/dist/cli/utils/http.d.ts +36 -0
- package/dist/cli/utils/http.js +242 -0
- package/dist/cli/utils/parallel-schema-processor.js +9 -1
- package/dist/cli/utils/schema-processor.d.ts +1 -0
- package/dist/cli/utils/schema-processor.js +209 -0
- package/package.json +3 -2
package/README.md
CHANGED
@@ -19,6 +19,8 @@
 - [Content Definitions](#content-definitions)
 - [Event Definitions](#event-definitions)
 - [Image & File Definitions](#image--file-definitions)
+- [Icon Definitions](#icon-definitions)
+- [MDX Fields](#mdx-fields)
 - [💡 Examples](#-examples)
 - [🔧 Configuration](#-configuration)
 - [🏗️ Architecture](#-architecture)
@@ -290,6 +292,38 @@ export type UserIcon = AmaIconDef<"/icons/user">;
 export type ATMYAPP = [MenuIcon, SearchIcon, UserIcon];
 ```

+### MDX Fields
+
+Define MDX (Markdown with JSX) fields with component configurations using `AmaMdxFieldDef`, `AmaMdxConfigDef`, and `AmaComponentDef`:
+
+```typescript
+import {
+  AmaMdxFieldDef,
+  AmaMdxConfigDef,
+  AmaComponentDef,
+} from "@atmyapp/core";
+
+// 1. Define MDX components and their props
+type Callout = AmaComponentDef<"Callout", {
+  title: string;
+  type: "info" | "warning" | "error";
+}>;
+
+// 2. Create an MDX configuration
+type BlogMdxConfig = AmaMdxConfigDef<"blogComponents", [Callout]>;
+
+// 3. Use the config in your content definition
+interface BlogPost {
+  title: string;
+  content: AmaMdxFieldDef<BlogMdxConfig>;
+}
+
+export type BlogPostContent = AmaContentDef<"/blog/posts", BlogPost>;
+
+// 4. Export the configuration and definitions
+export type ATMYAPP = [BlogMdxConfig, BlogPostContent];
+```
+
 ## 💡 Examples

 ### 🏪 E-commerce Setup
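The `AmaComponentDef` / `AmaMdxConfigDef` types in the hunk above describe which JSX components an MDX field accepts and what props they take. As a rough illustration (not taken from the package), an MDX document that the `blogComponents` configuration would permit might look like the following, shown here as a TypeScript string; only the `Callout` component name and its `title`/`type` props come from the README, the surrounding prose is invented:

```typescript
// Hypothetical sample only: the <Callout> usage mirrors the Callout
// AmaComponentDef added to the README; everything else is illustrative.
const exampleMdxDocument = `
# Getting started

<Callout title="Heads up" type="warning">
  This guide assumes @atmyapp/cli 0.0.9 or later.
</Callout>

Regular **Markdown** continues to work alongside the configured components.
`;
```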
@@ -364,13 +398,24 @@ import {
   AmaCustomEventDef,
   AmaImageDef,
   AmaIconDef,
+  AmaMdxFieldDef,
+  AmaMdxConfigDef,
+  AmaComponentDef,
 } from "@atmyapp/core";

+// MDX Components for blog
+type Callout = AmaComponentDef<"Callout", {
+  title: string;
+  type: "info" | "warning" | "error";
+}>;
+
+type BlogMdxConfig = AmaMdxConfigDef<"blogComponents", [Callout]>;
+
 // Blog content types
 interface BlogPost {
   title: string;
   slug: string;
-  content:
+  content: AmaMdxFieldDef<BlogMdxConfig>;
   excerpt: string;
   publishedAt: string;
   author: {
@@ -428,6 +473,7 @@ export type ATMYAPP = [
   BlogPosts,
   FeaturedPost,
   Categories,
+  BlogMdxConfig,
   BlogHeroImage,
   ShareIcon,
   LikeIcon,
package/dist/cli/commands/generate.js
ADDED
@@ -0,0 +1,119 @@
"use strict";
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.generateCommand = generateCommand;
const commander_1 = require("commander");
const path_1 = __importDefault(require("path"));
const fs_1 = require("fs");
const logger_1 = require("../logger");
const http_1 = require("../utils/http");
const DEFAULT_BRANCH = "main";
function generateCommand() {
    return new commander_1.Command("generate")
        .description("Generate a placeholder file in project storage")
        .requiredOption("--path <path>", "POSIX project-relative path to generate")
        .option("-t, --token <token>", "Authentication token override")
        .option("-u, --url <url>", "Project base URL override")
        .option("-p, --project-id <id>", "Project identifier override")
        .option("--branch <name>", "Branch to target", DEFAULT_BRANCH)
        .option("--save <file>", "Save generated content to a local file path")
        .option("--json", "Print raw JSON response")
        .option("--warnings-as-error", "Treat validation warnings as errors")
        .option("--verbose", "Enable verbose logging")
        .action((options) => __awaiter(this, void 0, void 0, function* () {
        var _a, _b;
        const logger = new logger_1.Logger(Boolean(options.verbose));
        try {
            const session = (0, http_1.resolveSession)({
                token: options.token,
                projectId: options.projectId,
                url: options.url,
            });
            const targetPath = normalizePlaceholderPath(options.path);
            const endpoint = buildGenerateEndpoint(session);
            const url = (0, http_1.projectUrl)(session, endpoint, {
                branch: (_a = options.branch) !== null && _a !== void 0 ? _a : DEFAULT_BRANCH,
            });
            const fetcher = (0, http_1.createAmaFetch)(session);
            const { data, error } = yield fetcher(url, {
                method: "POST",
                headers: {
                    "Content-Type": "application/json",
                },
                body: JSON.stringify({ path: targetPath }),
            });
            if (error) {
                const errorMessage = typeof error === "string"
                    ? error
                    : error instanceof Error
                        ? error.message
                        : (_b = error.message) !== null && _b !== void 0 ? _b : (error.status
                            ? `HTTP ${error.status}`
                            : "Unknown error");
                throw new Error(`Request failed: ${errorMessage}`);
            }
            if (!data) {
                throw new Error("No response received from the server.");
            }
            if (!data.success || !data.data) {
                throw new Error(data.error || "Placeholder generation failed.");
            }
            const result = data.data;
            if (options.json) {
                console.log(JSON.stringify(data, null, 2));
            }
            else {
                logger.success(`Generated placeholder for ${result.path}.`);
                if (result.warnings && result.warnings.length > 0) {
                    logger.warn(`Warnings (${result.warnings.length}):\n${result.warnings
                        .map((warning) => ` • ${warning}`)
                        .join("\n")}`);
                }
            }
            if (options.save) {
                yield persistContent(options.save, result.content, logger);
            }
            if (options.warningsAsError &&
                result.warnings &&
                result.warnings.length > 0) {
                throw new Error("Generation returned warnings treated as errors.");
            }
            logger.success("Placeholder generation completed.");
        }
        catch (error) {
            const message = error instanceof Error ? error.message : String(error);
            logger.error(`Generation failed: ${message}`);
            process.exit(1);
        }
    }));
}
function normalizePlaceholderPath(input) {
    const trimmed = input.trim();
    if (!trimmed) {
        throw new Error("Path is required and cannot be empty.");
    }
    return trimmed.replace(/\\/g, "/");
}
function buildGenerateEndpoint(session) {
    return `v0/projects/${encodeURIComponent(session.projectId)}/storage/ghost/placeholders`;
}
function persistContent(destination, content, logger) {
    return __awaiter(this, void 0, void 0, function* () {
        const absolutePath = path_1.default.resolve(process.cwd(), destination);
        const directory = path_1.default.dirname(absolutePath);
        yield fs_1.promises.mkdir(directory, { recursive: true });
        yield fs_1.promises.writeFile(absolutePath, content, "utf8");
        logger.success(`Saved generated content to ${absolutePath}.`);
    });
}
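Reading the new command above: `generate` POSTs `{ path }` to the project's ghost-placeholders endpoint and expects an envelope with `success`, an optional `error`, and a `data` payload from which it reads `path`, `content`, and `warnings`. A minimal sketch of that shape as inferred from the code follows; any field beyond those is an assumption, and the invocation uses the `ama` binary name only because the snapshot command's own hint text refers to it:

```typescript
// Response envelope as inferred from how generate.js consumes the reply.
// Field names beyond success/error/path/content/warnings are assumptions.
interface GeneratePlaceholderResponse {
  success: boolean;
  error?: string;
  data?: {
    path: string;        // normalized POSIX path that was generated
    content: string;     // placeholder content; written to disk when --save is used
    warnings?: string[]; // validation warnings; fatal when --warnings-as-error is set
  };
}

// Hypothetical invocation (the path is illustrative):
//   ama generate --path content/home.json --save ./home.json --branch main
```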
package/dist/cli/commands/migrate.js
CHANGED
@@ -31,16 +31,16 @@ function migrateCommand() {
         const startTime = Date.now();
         const logger = new utils_1.Logger(options.verbose);
         try {
-            logger.info("
+            logger.info("Starting migration process.");
             logger.verbose_log(`Options: ${JSON.stringify(options)}`);
             if (options.parallel) {
-                logger.info("
+                logger.info("Parallel processing enabled.");
                 if (options.maxWorkers) {
-                    logger.info(
+                    logger.info(`Using ${options.maxWorkers} worker threads.`);
                 }
             }
             else {
-                logger.info("
+                logger.info("Using sequential processing.");
             }
             const config = (0, utils_1.getConfig)();
             const patterns = config.include || ["**/*.ts", "**/*.tsx"];
@@ -49,23 +49,23 @@ function migrateCommand() {
             let processingResult;
             if (options.parallel !== false) {
                 // Use optimized parallel processing pipeline
-                logger.info("
+                logger.info("Using optimized parallel processing pipeline.");
                 processingResult = yield (0, parallel_schema_processor_1.optimizedMigrationPipeline)(patterns, options.tsconfig, options.continueOnError, logger, options.maxWorkers);
             }
             else {
                 // Fallback to original sequential processing
-                logger.info("
+                logger.info("Using original sequential processing.");
                 const files = yield (0, utils_1.scanFiles)(patterns, logger);
-                logger.info(
+                logger.info(`Found ${files.length} files to process.`);
                 const project = (0, utils_1.createProject)(files, options.tsconfig, logger);
                 processingResult = (0, utils_1.processFiles)(project.getSourceFiles(), options.tsconfig, options.continueOnError, logger);
             }
             const { contents, errors, successCount, failureCount } = processingResult;
             // Report processing results
             const processingTime = ((Date.now() - startTime) / 1000).toFixed(2);
-            logger.success(
+            logger.success(`Successfully processed ${successCount} AMA contents in ${processingTime}s.`);
             if (failureCount > 0) {
-                logger.warn(
+                logger.warn(`Failed to process ${failureCount} items.`);
                 if (options.verbose && errors.length > 0) {
                     logger.info("Errors encountered:");
                     errors.forEach((err) => logger.error(`  ${err}`));
@@ -77,14 +77,14 @@ function migrateCommand() {
             }
             // Generate and save output
             const outputStartTime = Date.now();
-            logger.info("
+            logger.info("Generating output definitions...");
             const output = (0, utils_1.generateOutput)(contents, config, logger);
             const outputTime = ((Date.now() - outputStartTime) / 1000).toFixed(2);
             logger.verbose_log(`Output generation took ${outputTime}s`);
             (0, utils_1.saveOutputToFile)(output, logger);
             // Upload definitions unless dry-run is enabled
             if (!options.dryRun) {
-                logger.info("
+                logger.info("Uploading definitions to AtMyApp platform.");
                 const uploadStartTime = Date.now();
                 const uploadSuccess = yield (0, utils_1.uploadDefinitions)(output, config, logger);
                 const uploadTime = ((Date.now() - uploadStartTime) / 1000).toFixed(2);
@@ -95,13 +95,13 @@ function migrateCommand() {
                 }
             }
             else {
-                logger.info("
+                logger.info("Dry run mode enabled. Skipping upload to server.");
             }
             const totalTime = ((Date.now() - startTime) / 1000).toFixed(2);
-            logger.success(
+            logger.success(`Migration completed successfully in ${totalTime}s.`);
             // Performance summary
             if (options.verbose) {
-                logger.info("
+                logger.info("Performance summary:");
                 logger.info(`  Total time: ${totalTime}s`);
                 logger.info(`  Processing time: ${processingTime}s`);
                 logger.info(`  Files processed: ${successCount}`);
@@ -114,7 +114,7 @@ function migrateCommand() {
         catch (error) {
            const totalTime = ((Date.now() - startTime) / 1000).toFixed(2);
            const message = error instanceof Error ? error.message : "Unknown error";
-            logger.error(
+            logger.error(`Fatal error after ${totalTime}s: ${message}`, error);
            process.exit(1);
         }
     }));
package/dist/cli/commands/snapshot.js
ADDED
@@ -0,0 +1,287 @@
"use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    var desc = Object.getOwnPropertyDescriptor(m, k);
    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
        desc = { enumerable: true, get: function() { return m[k]; } };
    }
    Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
    Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
    o["default"] = v;
});
var __importStar = (this && this.__importStar) || (function () {
    var ownKeys = function(o) {
        ownKeys = Object.getOwnPropertyNames || function (o) {
            var ar = [];
            for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
            return ar;
        };
        return ownKeys(o);
    };
    return function (mod) {
        if (mod && mod.__esModule) return mod;
        var result = {};
        if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
        __setModuleDefault(result, mod);
        return result;
    };
})();
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.snapshotCommand = snapshotCommand;
const commander_1 = require("commander");
const logger_1 = require("../logger");
const http_1 = require("../utils/http");
const fs = __importStar(require("fs"));
const path = __importStar(require("path"));
const child_process_1 = require("child_process");
const DEFAULT_OUTPUT_PATH = ".ama/local";
const API_BASE = "https://ama-core.maciekgamro.workers.dev";
/**
 * Get the work-container API URL for snapshot operations
 */
function getSnapshotApiUrl(projectId, endpoint) {
    return `${API_BASE}/v0/work-container/projects/${projectId}/${endpoint}`;
}
/**
 * Create a new snapshot
 */
function createSnapshot(session, options, logger) {
    return __awaiter(this, void 0, void 0, function* () {
        var _a, _b;
        const url = getSnapshotApiUrl(session.projectId, "snapshot");
        logger.info(`Creating snapshot for project ${session.projectId}...`);
        logger.verbose_log(`POST ${url}`);
        const response = yield fetch(url, {
            method: "POST",
            headers: {
                Authorization: `Bearer ${session.token}`,
                "Content-Type": "application/json",
            },
            body: JSON.stringify({
                branch: (_a = options.branch) !== null && _a !== void 0 ? _a : "main",
                forceFullSync: (_b = options.forceSync) !== null && _b !== void 0 ? _b : false,
            }),
        });
        if (!response.ok) {
            const errorText = yield response.text();
            throw new Error(`Failed to create snapshot: ${response.status} - ${errorText}`);
        }
        return response.json();
    });
}
/**
 * Get the latest snapshot info
 */
function getLatestSnapshot(session, options, logger) {
    return __awaiter(this, void 0, void 0, function* () {
        var _a;
        const branch = (_a = options.branch) !== null && _a !== void 0 ? _a : "main";
        const url = `${getSnapshotApiUrl(session.projectId, "latest-snapshot")}?branch=${branch}`;
        logger.verbose_log(`GET ${url}`);
        const response = yield fetch(url, {
            method: "GET",
            headers: {
                Authorization: `Bearer ${session.token}`,
            },
        });
        if (!response.ok) {
            const errorText = yield response.text();
            throw new Error(`Failed to get latest snapshot: ${response.status} - ${errorText}`);
        }
        return response.json();
    });
}
/**
 * Download a snapshot from the given URL
 */
function downloadSnapshotFile(downloadUrl, token, outputPath, logger) {
    return __awaiter(this, void 0, void 0, function* () {
        logger.info("Downloading snapshot...");
        logger.verbose_log(`GET ${downloadUrl}`);
        const response = yield fetch(downloadUrl, {
            headers: {
                Authorization: `Bearer ${token}`,
            },
        });
        if (!response.ok) {
            const errorText = yield response.text();
            throw new Error(`Failed to download snapshot: ${response.status} - ${errorText}`);
        }
        // Ensure output directory exists
        const outputDir = path.dirname(outputPath);
        if (!fs.existsSync(outputDir)) {
            fs.mkdirSync(outputDir, { recursive: true });
        }
        // Write the ZIP file
        const arrayBuffer = yield response.arrayBuffer();
        const buffer = Buffer.from(arrayBuffer);
        fs.writeFileSync(outputPath, buffer);
        return outputPath;
    });
}
/**
 * Extract a ZIP file to a directory
 */
function extractZip(zipPath, extractDir, logger) {
    logger.info("Extracting snapshot...");
    // Ensure extract directory exists and is empty
    if (fs.existsSync(extractDir)) {
        // Remove existing contents
        fs.rmSync(extractDir, { recursive: true, force: true });
    }
    fs.mkdirSync(extractDir, { recursive: true });
    // Use tar on Unix or PowerShell on Windows
    const isWindows = process.platform === "win32";
    try {
        if (isWindows) {
            // Use PowerShell's Expand-Archive
            (0, child_process_1.execSync)(`powershell -Command "Expand-Archive -Path '${zipPath}' -DestinationPath '${extractDir}' -Force"`, { stdio: "pipe" });
        }
        else {
            // Use unzip on Unix
            (0, child_process_1.execSync)(`unzip -o "${zipPath}" -d "${extractDir}"`, { stdio: "pipe" });
        }
    }
    catch (error) {
        throw new Error(`Failed to extract snapshot: ${error instanceof Error ? error.message : error}`);
    }
}
/**
 * Format bytes to human readable string
 */
function formatBytes(bytes) {
    if (bytes === 0)
        return "0 Bytes";
    const k = 1024;
    const sizes = ["Bytes", "KB", "MB", "GB"];
    const i = Math.floor(Math.log(bytes) / Math.log(k));
    return parseFloat((bytes / Math.pow(k, i)).toFixed(2)) + " " + sizes[i];
}
function snapshotCommand() {
    const command = new commander_1.Command("snapshot").description("Manage storage snapshots for local fallback");
    // Subcommand: create
    command
        .command("create")
        .description("Create a new storage snapshot (may take a few minutes)")
        .option("-t, --token <token>", "Authentication token")
        .option("-u, --url <url>", "Project base URL")
        .option("-p, --project-id <id>", "Project identifier")
        .option("-b, --branch <branch>", "Branch name", "main")
        .option("--force-sync", "Force full sync instead of incremental")
        .option("--verbose", "Enable verbose logging")
        .action((options) => __awaiter(this, void 0, void 0, function* () {
        var _a;
        const logger = new logger_1.Logger(Boolean(options.verbose));
        try {
            const session = (0, http_1.resolveSession)({
                token: options.token,
                projectId: options.projectId,
                url: options.url,
            });
            const result = yield createSnapshot(session, options, logger);
            if (!result.success || !result.data) {
                throw new Error((_a = result.error) !== null && _a !== void 0 ? _a : "Failed to create snapshot");
            }
            const { data } = result;
            logger.success("Snapshot created successfully!");
            logger.info(`  Project: ${data.projectId}`);
            logger.info(`  Branch: ${data.branch}`);
            logger.info(`  Collections: ${data.stats.collectionsProcessed}`);
            logger.info(`  Entries: ${data.stats.entriesIncluded}`);
            logger.info(`  Blobs: ${data.stats.blobsIncluded}`);
            logger.info(`  Files: ${data.stats.filesIncluded}`);
            logger.info(`  Size: ${formatBytes(data.stats.totalSizeBytes)}`);
            logger.info(`  Duration: ${data.duration}ms`);
            logger.info(`  Download URL expires: ${data.expiresAt}`);
            logger.info("");
            logger.info("To download this snapshot, run:");
            logger.info("  ama snapshot download");
        }
        catch (error) {
            const message = error instanceof Error ? error.message : "Unknown error";
            logger.error(`Snapshot creation failed: ${message}`, error);
            process.exit(1);
        }
    }));
    // Subcommand: download
    command
        .command("download")
        .description("Download the latest snapshot to local storage")
        .option("-t, --token <token>", "Authentication token")
        .option("-u, --url <url>", "Project base URL")
        .option("-p, --project-id <id>", "Project identifier")
        .option("-b, --branch <branch>", "Branch name", "main")
        .option("-o, --output <path>", "Output directory", DEFAULT_OUTPUT_PATH)
        .option("--verbose", "Enable verbose logging")
        .action((options) => __awaiter(this, void 0, void 0, function* () {
        var _a, _b, _c;
        const logger = new logger_1.Logger(Boolean(options.verbose));
        try {
            const session = (0, http_1.resolveSession)({
                token: options.token,
                projectId: options.projectId,
                url: options.url,
            });
            // Get the latest snapshot info
            logger.info("Checking for latest snapshot...");
            const latestResult = yield getLatestSnapshot(session, options, logger);
            let downloadUrl;
            if (latestResult.data && !latestResult.data.isExpired) {
                // Use existing snapshot
                logger.info(`Found existing snapshot from: ${latestResult.data.createdAt}`);
                downloadUrl = latestResult.data.downloadUrl;
            }
            else {
                // Need to create a new snapshot first
                if ((_a = latestResult.data) === null || _a === void 0 ? void 0 : _a.isExpired) {
                    logger.warn("Latest snapshot has expired, creating a new one...");
                }
                else {
                    logger.info("No existing snapshot found, creating a new one...");
                }
                const createResult = yield createSnapshot(session, options, logger);
                if (!createResult.success || !createResult.data) {
                    throw new Error((_b = createResult.error) !== null && _b !== void 0 ? _b : "Failed to create snapshot");
                }
                downloadUrl = createResult.data.downloadUrl;
                logger.info(`Snapshot created (${formatBytes(createResult.data.stats.totalSizeBytes)})`);
            }
            // Download the snapshot
            const outputDir = path.resolve(process.cwd(), (_c = options.output) !== null && _c !== void 0 ? _c : DEFAULT_OUTPUT_PATH);
            const zipPath = path.join(outputDir, "snapshot.zip");
            yield downloadSnapshotFile(downloadUrl, session.token, zipPath, logger);
            logger.success(`Downloaded to: ${zipPath}`);
            // Extract the snapshot
            extractZip(zipPath, outputDir, logger);
            // Remove the ZIP file after extraction
            fs.unlinkSync(zipPath);
            logger.success(`Snapshot extracted to: ${outputDir}`);
            logger.info("");
            logger.info("Your local storage is now ready. Configure your client with:");
            logger.info('  clientMode: "local"');
            logger.info("  or");
            logger.info('  clientMode: "with-fallback"');
        }
        catch (error) {
            const message = error instanceof Error ? error.message : "Unknown error";
            logger.error(`Snapshot download failed: ${message}`, error);
            process.exit(1);
        }
    }));
    return command;
}