@atmyapp/cli 0.0.6 → 0.0.8
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/cli/commands/generate.d.ts +2 -0
- package/dist/cli/commands/generate.js +119 -0
- package/dist/cli/commands/migrate.js +15 -15
- package/dist/cli/commands/upload.d.ts +2 -0
- package/dist/cli/commands/upload.js +334 -0
- package/dist/cli/commands/use.js +21 -9
- package/dist/cli/index.js +4 -0
- package/dist/cli/logger/index.d.ts +5 -3
- package/dist/cli/logger/index.js +55 -9
- package/dist/cli/utils/collection-transformer.d.ts +7 -0
- package/dist/cli/utils/collection-transformer.js +263 -0
- package/dist/cli/utils/config.d.ts +5 -5
- package/dist/cli/utils/config.js +0 -1
- package/dist/cli/utils/content-processor.js +3 -0
- package/dist/cli/utils/definition-processor.d.ts +1 -0
- package/dist/cli/utils/definition-processor.js +21 -0
- package/dist/cli/utils/http.d.ts +36 -0
- package/dist/cli/utils/http.js +242 -0
- package/package.json +3 -2

package/dist/cli/commands/generate.js
ADDED
@@ -0,0 +1,119 @@
+"use strict";
+var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
+    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
+    return new (P || (P = Promise))(function (resolve, reject) {
+        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
+        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
+        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
+        step((generator = generator.apply(thisArg, _arguments || [])).next());
+    });
+};
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.generateCommand = generateCommand;
+const commander_1 = require("commander");
+const path_1 = __importDefault(require("path"));
+const fs_1 = require("fs");
+const logger_1 = require("../logger");
+const http_1 = require("../utils/http");
+const DEFAULT_BRANCH = "main";
+function generateCommand() {
+    return new commander_1.Command("generate")
+        .description("Generate a placeholder file in project storage")
+        .requiredOption("--path <path>", "POSIX project-relative path to generate")
+        .option("-t, --token <token>", "Authentication token override")
+        .option("-u, --url <url>", "Project base URL override")
+        .option("-p, --project-id <id>", "Project identifier override")
+        .option("--branch <name>", "Branch to target", DEFAULT_BRANCH)
+        .option("--save <file>", "Save generated content to a local file path")
+        .option("--json", "Print raw JSON response")
+        .option("--warnings-as-error", "Treat validation warnings as errors")
+        .option("--verbose", "Enable verbose logging")
+        .action((options) => __awaiter(this, void 0, void 0, function* () {
+        var _a, _b;
+        const logger = new logger_1.Logger(Boolean(options.verbose));
+        try {
+            const session = (0, http_1.resolveSession)({
+                token: options.token,
+                projectId: options.projectId,
+                url: options.url,
+            });
+            const targetPath = normalizePlaceholderPath(options.path);
+            const endpoint = buildGenerateEndpoint(session);
+            const url = (0, http_1.projectUrl)(session, endpoint, {
+                branch: (_a = options.branch) !== null && _a !== void 0 ? _a : DEFAULT_BRANCH,
+            });
+            const fetcher = (0, http_1.createAmaFetch)(session);
+            const { data, error } = yield fetcher(url, {
+                method: "POST",
+                headers: {
+                    "Content-Type": "application/json",
+                },
+                body: JSON.stringify({ path: targetPath }),
+            });
+            if (error) {
+                const errorMessage = typeof error === "string"
+                    ? error
+                    : error instanceof Error
+                        ? error.message
+                        : (_b = error.message) !== null && _b !== void 0 ? _b : (error.status
+                            ? `HTTP ${error.status}`
+                            : "Unknown error");
+                throw new Error(`Request failed: ${errorMessage}`);
+            }
+            if (!data) {
+                throw new Error("No response received from the server.");
+            }
+            if (!data.success || !data.data) {
+                throw new Error(data.error || "Placeholder generation failed.");
+            }
+            const result = data.data;
+            if (options.json) {
+                console.log(JSON.stringify(data, null, 2));
+            }
+            else {
+                logger.success(`Generated placeholder for ${result.path}.`);
+                if (result.warnings && result.warnings.length > 0) {
+                    logger.warn(`Warnings (${result.warnings.length}):\n${result.warnings
+                        .map((warning) => ` • ${warning}`)
+                        .join("\n")}`);
+                }
+            }
+            if (options.save) {
+                yield persistContent(options.save, result.content, logger);
+            }
+            if (options.warningsAsError &&
+                result.warnings &&
+                result.warnings.length > 0) {
+                throw new Error("Generation returned warnings treated as errors.");
+            }
+            logger.success("Placeholder generation completed.");
+        }
+        catch (error) {
+            const message = error instanceof Error ? error.message : String(error);
+            logger.error(`Generation failed: ${message}`);
+            process.exit(1);
+        }
+    }));
+}
+function normalizePlaceholderPath(input) {
+    const trimmed = input.trim();
+    if (!trimmed) {
+        throw new Error("Path is required and cannot be empty.");
+    }
+    return trimmed.replace(/\\/g, "/");
+}
+function buildGenerateEndpoint(session) {
+    return `v0/projects/${encodeURIComponent(session.projectId)}/storage/ghost/placeholders`;
+}
+function persistContent(destination, content, logger) {
+    return __awaiter(this, void 0, void 0, function* () {
+        const absolutePath = path_1.default.resolve(process.cwd(), destination);
+        const directory = path_1.default.dirname(absolutePath);
+        yield fs_1.promises.mkdir(directory, { recursive: true });
+        yield fs_1.promises.writeFile(absolutePath, content, "utf8");
+        logger.success(`Saved generated content to ${absolutePath}.`);
+    });
+}
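
The new `generate` command posts a normalized POSIX path to the project's ghost-placeholders endpoint and can print the raw JSON response, save the returned content locally, or fail on warnings. A usage sketch (the `ama` binary name comes from dist/cli/index.js below; the path and file names are illustrative):

    $ ama generate --path content/hero.json --save ./hero.json --verbose
    $ ama generate --path content/hero.json --json --warnings-as-error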

package/dist/cli/commands/migrate.js
CHANGED
@@ -31,16 +31,16 @@ function migrateCommand() {
         const startTime = Date.now();
         const logger = new utils_1.Logger(options.verbose);
         try {
-            logger.info("
+            logger.info("Starting migration process.");
             logger.verbose_log(`Options: ${JSON.stringify(options)}`);
             if (options.parallel) {
-                logger.info("
+                logger.info("Parallel processing enabled.");
                 if (options.maxWorkers) {
-                    logger.info(
+                    logger.info(`Using ${options.maxWorkers} worker threads.`);
                 }
             }
             else {
-                logger.info("
+                logger.info("Using sequential processing.");
             }
             const config = (0, utils_1.getConfig)();
             const patterns = config.include || ["**/*.ts", "**/*.tsx"];
@@ -49,23 +49,23 @@ function migrateCommand() {
             let processingResult;
             if (options.parallel !== false) {
                 // Use optimized parallel processing pipeline
-                logger.info("
+                logger.info("Using optimized parallel processing pipeline.");
                 processingResult = yield (0, parallel_schema_processor_1.optimizedMigrationPipeline)(patterns, options.tsconfig, options.continueOnError, logger, options.maxWorkers);
             }
             else {
                 // Fallback to original sequential processing
-                logger.info("
+                logger.info("Using original sequential processing.");
                 const files = yield (0, utils_1.scanFiles)(patterns, logger);
-                logger.info(
+                logger.info(`Found ${files.length} files to process.`);
                 const project = (0, utils_1.createProject)(files, options.tsconfig, logger);
                 processingResult = (0, utils_1.processFiles)(project.getSourceFiles(), options.tsconfig, options.continueOnError, logger);
             }
             const { contents, errors, successCount, failureCount } = processingResult;
             // Report processing results
             const processingTime = ((Date.now() - startTime) / 1000).toFixed(2);
-            logger.success(
+            logger.success(`Successfully processed ${successCount} AMA contents in ${processingTime}s.`);
             if (failureCount > 0) {
-                logger.warn(
+                logger.warn(`Failed to process ${failureCount} items.`);
                 if (options.verbose && errors.length > 0) {
                     logger.info("Errors encountered:");
                     errors.forEach((err) => logger.error(`  ${err}`));
@@ -77,14 +77,14 @@ function migrateCommand() {
             }
             // Generate and save output
             const outputStartTime = Date.now();
-            logger.info("
+            logger.info("Generating output definitions...");
             const output = (0, utils_1.generateOutput)(contents, config, logger);
             const outputTime = ((Date.now() - outputStartTime) / 1000).toFixed(2);
             logger.verbose_log(`Output generation took ${outputTime}s`);
             (0, utils_1.saveOutputToFile)(output, logger);
             // Upload definitions unless dry-run is enabled
             if (!options.dryRun) {
-                logger.info("
+                logger.info("Uploading definitions to AtMyApp platform.");
                 const uploadStartTime = Date.now();
                 const uploadSuccess = yield (0, utils_1.uploadDefinitions)(output, config, logger);
                 const uploadTime = ((Date.now() - uploadStartTime) / 1000).toFixed(2);
@@ -95,13 +95,13 @@ function migrateCommand() {
                 }
             }
             else {
-                logger.info("
+                logger.info("Dry run mode enabled. Skipping upload to server.");
             }
             const totalTime = ((Date.now() - startTime) / 1000).toFixed(2);
-            logger.success(
+            logger.success(`Migration completed successfully in ${totalTime}s.`);
             // Performance summary
             if (options.verbose) {
-                logger.info("
+                logger.info("Performance summary:");
                 logger.info(`  Total time: ${totalTime}s`);
                 logger.info(`  Processing time: ${processingTime}s`);
                 logger.info(`  Files processed: ${successCount}`);
@@ -114,7 +114,7 @@ function migrateCommand() {
         catch (error) {
            const totalTime = ((Date.now() - startTime) / 1000).toFixed(2);
            const message = error instanceof Error ? error.message : "Unknown error";
-           logger.error(
+           logger.error(`Fatal error after ${totalTime}s: ${message}`, error);
            process.exit(1);
         }
     }));
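
After these hunks, every stage of migrate logs a complete message: start, pipeline selection, per-stage timings, and the final summary. A hypothetical invocation (flag spellings are inferred from the camelCase option names such as options.maxWorkers and options.dryRun, and from the options.parallel !== false default; the option declarations sit outside these hunks):

    $ ama migrate --max-workers 4 --verbose
    $ ama migrate --no-parallel --dry-run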

package/dist/cli/commands/upload.js
ADDED
@@ -0,0 +1,334 @@
+"use strict";
+var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
+    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
+    return new (P || (P = Promise))(function (resolve, reject) {
+        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
+        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
+        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
+        step((generator = generator.apply(thisArg, _arguments || [])).next());
+    });
+};
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.uploadCommand = uploadCommand;
+const commander_1 = require("commander");
+const fast_glob_1 = __importDefault(require("fast-glob"));
+const path_1 = __importDefault(require("path"));
+const fs_1 = require("fs");
+const logger_1 = require("../logger");
+const http_1 = require("../utils/http");
+const DEFAULT_BRANCH = "main";
+const DEFAULT_IGNORE_PATTERNS = ["**/.git/**", "**/.ama/**"];
+function uploadCommand() {
+    return new commander_1.Command("upload")
+        .description("Upload files to project storage")
+        .argument("[inputs...]")
+        .option("-t, --token <token>", "Authentication token override")
+        .option("-u, --url <url>", "Project base URL override")
+        .option("-p, --project-id <id>", "Project identifier override")
+        .option("-b, --base-path <path>", "Base remote path (relative to project root)")
+        .option("--branch <name>", "Branch to write to", DEFAULT_BRANCH)
+        .option("--environment-name <name>", "Environment name for scoped writes")
+        .option("--commit <message>", "Commit message recorded with the upload")
+        .option("--delete <path>", "Remote file path to delete (repeatable)", collectValues, [])
+        .option("--no-stream", "Disable streaming progress output (enabled by default)")
+        .option("--verbose", "Enable verbose logging")
+        .action((inputPatterns, options) => __awaiter(this, void 0, void 0, function* () {
+        var _a, _b;
+        const logger = new logger_1.Logger(Boolean(options.verbose));
+        try {
+            const session = (0, http_1.resolveSession)({
+                token: options.token,
+                projectId: options.projectId,
+                url: options.url,
+            });
+            const basePath = normalizeBasePath(options.basePath);
+            const streamEnabled = options.stream !== false;
+            const files = yield prepareFiles(inputPatterns, basePath, logger);
+            const filesToDelete = normalizeDeletionPaths((_a = options.delete) !== null && _a !== void 0 ? _a : [], basePath, logger);
+            if (files.length === 0 && filesToDelete.length === 0) {
+                throw new Error("Nothing to upload. Provide file inputs or --delete paths to process.");
+            }
+            const body = {
+                files,
+            };
+            if (filesToDelete.length > 0) {
+                body.filesToDelete = filesToDelete;
+            }
+            if (options.commit) {
+                body.commitMessage = options.commit;
+            }
+            const query = {
+                branch: (_b = options.branch) !== null && _b !== void 0 ? _b : DEFAULT_BRANCH,
+            };
+            if (options.environmentName) {
+                query.environment_name = options.environmentName;
+            }
+            if (streamEnabled) {
+                query.stream = true;
+                yield performStreamingUpload(session, basePath, body, query, logger);
+            }
+            else {
+                yield performStandardUpload(session, basePath, body, query, logger);
+            }
+            logger.success("Upload completed successfully.");
+        }
+        catch (error) {
+            const message = error instanceof Error ? error.message : String(error);
+            logger.error(`Upload failed: ${message}`);
+            process.exit(1);
+        }
+    }));
+}
+function collectValues(value, previous) {
+    previous.push(value);
+    return previous;
+}
+function normalizeBasePath(basePath) {
+    if (!basePath) {
+        return undefined;
+    }
+    const trimmed = basePath.trim();
+    if (!trimmed) {
+        return undefined;
+    }
+    const normalized = toPosix(trimmed)
+        .replace(/^\.\/?/, "")
+        .replace(/\/+$/, "");
+    return normalized || undefined;
+}
+function toPosix(value) {
+    return value.replace(/\\/g, "/");
+}
+function prepareFiles(patterns, basePath, logger) {
+    return __awaiter(this, void 0, void 0, function* () {
+        if (!patterns || patterns.length === 0) {
+            return [];
+        }
+        const resolvedPaths = new Set();
+        for (const pattern of patterns) {
+            const expanded = yield expandInput(pattern);
+            if (expanded.length === 0) {
+                logger.warn(`No files matched pattern '${pattern}'.`);
+            }
+            for (const filePath of expanded) {
+                resolvedPaths.add(path_1.default.resolve(filePath));
+            }
+        }
+        const sortedPaths = Array.from(resolvedPaths).sort();
+        const files = [];
+        for (const absolutePath of sortedPaths) {
+            const buffer = yield fs_1.promises.readFile(absolutePath);
+            const relativePath = path_1.default.relative(process.cwd(), absolutePath) ||
+                path_1.default.basename(absolutePath);
+            const posixPath = toPosix(relativePath);
+            const remotePath = computeRemotePath(posixPath, basePath, logger);
+            logger.verbose_log(`Preparing file '${absolutePath}' as remote path '${remotePath}'.`);
+            files.push({
+                path: remotePath,
+                content: buffer.toString("base64"),
+            });
+        }
+        return files;
+    });
+}
+function expandInput(input) {
+    return __awaiter(this, void 0, void 0, function* () {
+        const cwd = process.cwd();
+        const absoluteCandidate = path_1.default.resolve(cwd, input);
+        try {
+            const stats = yield fs_1.promises.stat(absoluteCandidate);
+            if (stats.isDirectory()) {
+                const entries = yield (0, fast_glob_1.default)("**/*", {
+                    cwd: absoluteCandidate,
+                    dot: true,
+                    onlyFiles: true,
+                    followSymbolicLinks: false,
+                    ignore: DEFAULT_IGNORE_PATTERNS,
+                });
+                return entries.map((entry) => path_1.default.resolve(absoluteCandidate, entry));
+            }
+            if (stats.isFile()) {
+                return [absoluteCandidate];
+            }
+        }
+        catch (error) {
+            // Treat as glob when path resolution fails
+        }
+        const normalizedPattern = toPosix(input);
+        const matches = yield (0, fast_glob_1.default)(normalizedPattern, {
+            cwd,
+            dot: true,
+            onlyFiles: true,
+            followSymbolicLinks: false,
+            ignore: DEFAULT_IGNORE_PATTERNS,
+            unique: true,
+        });
+        return matches.map((match) => path_1.default.resolve(cwd, match));
+    });
+}
+function computeRemotePath(posixPath, basePath, logger) {
+    const cleaned = posixPath.replace(/^\.\//, "");
+    if (!basePath) {
+        return cleaned;
+    }
+    const relativeToBase = path_1.default.posix.relative(basePath, cleaned);
+    if (relativeToBase.startsWith("..")) {
+        logger.warn(`File '${cleaned}' is outside the base path '${basePath}'. Using absolute path.`);
+        return cleaned;
+    }
+    const normalized = relativeToBase.replace(/^\.\//, "");
+    const finalSegment = normalized || path_1.default.posix.basename(cleaned);
+    return `./${finalSegment}`;
+}
+function normalizeDeletionPaths(values, basePath, logger) {
+    if (!values || values.length === 0) {
+        return [];
+    }
+    const result = [];
+    for (const original of values) {
+        const trimmed = original.trim();
+        if (!trimmed) {
+            continue;
+        }
+        const posix = toPosix(trimmed);
+        if (posix.startsWith("./")) {
+            result.push(`./${posix.replace(/^\.\/+/, "")}`);
+            continue;
+        }
+        if (posix.startsWith("/")) {
+            result.push(posix.replace(/^\/+/, ""));
+            continue;
+        }
+        result.push(computeRemotePath(posix, basePath, logger));
+    }
+    return result;
+}
+function buildUploadEndpoint(session, basePath) {
+    const projectSegment = `v0/projects/${encodeURIComponent(session.projectId)}/storage/f`;
+    if (!basePath) {
+        return projectSegment;
+    }
+    const encodedBase = (0, http_1.encodePathSegment)(basePath);
+    return `${projectSegment}/${encodedBase}`;
+}
+function performStreamingUpload(session, basePath, body, query, logger) {
+    return __awaiter(this, void 0, void 0, function* () {
+        const endpoint = buildUploadEndpoint(session, basePath);
+        const url = (0, http_1.projectUrl)(session, endpoint, {
+            branch: query.branch,
+            environment_name: query.environment_name,
+            stream: true,
+        });
+        const summary = {
+            updated: new Set(),
+            deleted: new Set(),
+            errors: new Map(),
+            hadErrorEvent: false,
+        };
+        yield (0, http_1.streamSse)({
+            url,
+            fetchInit: {
+                method: "PUT",
+                headers: {
+                    "Content-Type": "application/json",
+                    Authorization: `Bearer ${session.token}`,
+                },
+                body: JSON.stringify(body),
+            },
+            onEvent: (event) => __awaiter(this, void 0, void 0, function* () {
+                if (event.data === "[DONE]") {
+                    return;
+                }
+                const payload = event.parsed;
+                if (!payload) {
+                    logger.verbose_log(`SSE: ${event.data}`);
+                    return;
+                }
+                if (payload.message) {
+                    logger.info(payload.message);
+                }
+                if (payload.updated) {
+                    payload.updated.forEach((item) => summary.updated.add(item));
+                }
+                if (payload.deleted) {
+                    payload.deleted.forEach((item) => summary.deleted.add(item));
+                }
+                if (payload.errors) {
+                    Object.entries(payload.errors).forEach(([key, value]) => {
+                        summary.errors.set(key, value);
+                        logger.error(`Error processing '${key}': ${value}`);
+                    });
+                }
+                if (payload.type === "error" || payload.success === false) {
+                    summary.hadErrorEvent = true;
+                }
+                if (payload.type === "complete") {
+                    logger.info("Upload stream completed.");
+                }
+            }),
+        });
+        if (summary.updated.size > 0) {
+            logger.success(`Updated ${summary.updated.size} file(s): ${Array.from(summary.updated).join(", ")}`);
+        }
+        if (summary.deleted.size > 0) {
+            logger.warn(`Deleted ${summary.deleted.size} file(s): ${Array.from(summary.deleted).join(", ")}`);
+        }
+        if (summary.errors.size > 0 || summary.hadErrorEvent) {
+            throw new Error("Upload completed with errors. Check the log for details.");
+        }
+    });
+}
+function performStandardUpload(session, basePath, body, query, logger) {
+    return __awaiter(this, void 0, void 0, function* () {
+        var _a;
+        const endpoint = buildUploadEndpoint(session, basePath);
+        const url = (0, http_1.projectUrl)(session, endpoint, {
+            branch: query.branch,
+            environment_name: query.environment_name,
+        });
+        const fetcher = (0, http_1.createAmaFetch)(session);
+        const { data, error } = yield fetcher(url, {
+            method: "PUT",
+            headers: {
+                "Content-Type": "application/json",
+            },
+            body: JSON.stringify(body),
+        });
+        if (error) {
+            const errorMessage = typeof error === "string"
+                ? error
+                : error instanceof Error
+                    ? error.message
+                    : (_a = error.message) !== null && _a !== void 0 ? _a : (error.status
+                        ? `HTTP ${error.status}`
+                        : "Unknown error");
+            throw new Error(`Request failed: ${errorMessage}`);
+        }
+        if (!data) {
+            throw new Error("No response data received from the server.");
+        }
+        if (!data.success) {
+            throw new Error(data.error || "Upload failed without a server error message.");
+        }
+        const payload = data.data;
+        if (!payload) {
+            logger.success("Server reported success with no additional details.");
+            return;
+        }
+        if (payload.updated && payload.updated.length > 0) {
+            logger.success(`Updated files: ${payload.updated.join(", ")}`);
+        }
+        if (payload.deleted && payload.deleted.length > 0) {
+            logger.warn(`Deleted files: ${payload.deleted.join(", ")}`);
+        }
+        if (payload.errors && Object.keys(payload.errors).length > 0) {
+            for (const [pathKey, value] of Object.entries(payload.errors)) {
+                logger.error(`Error for '${pathKey}': ${value}`);
+            }
+            throw new Error("Upload completed with file-specific errors.");
+        }
+    });
+}
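
The new `upload` command expands file, directory, and glob inputs (skipping the built-in .git/.ama ignore patterns), base64-encodes each file, and PUTs the batch to the project storage endpoint, streaming SSE progress unless --no-stream is given. A usage sketch (paths and the commit message are illustrative):

    $ ama upload src/content --base-path src/content --commit "Sync content"
    $ ama upload "assets/**/*.png" --delete old/banner.png --no-stream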

package/dist/cli/commands/use.js
CHANGED
@@ -41,13 +41,10 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
         step((generator = generator.apply(thisArg, _arguments || [])).next());
     });
 };
-var __importDefault = (this && this.__importDefault) || function (mod) {
-    return (mod && mod.__esModule) ? mod : { "default": mod };
-};
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.useCommand = useCommand;
 const commander_1 = require("commander");
-const
+const logger_1 = require("../logger");
 const config_1 = require("../utils/config");
 const fs = __importStar(require("fs"));
 const path = __importStar(require("path"));
@@ -56,7 +53,10 @@ function useCommand() {
         .description("Set authentication token for AMA project")
         .option("-t, --token <token>", "Authentication token")
         .option("-u, --url <url>", "Project base URL")
+        .option("-p, --project-id <id>", "Project identifier override")
+        .option("--verbose", "Enable verbose logging")
         .action((options) => __awaiter(this, void 0, void 0, function* () {
+        const logger = new logger_1.Logger(Boolean(options.verbose));
         const rlQuestion = (query) => {
             return new Promise((resolve) => {
                 const rl = require("readline").createInterface({
@@ -74,35 +74,47 @@ function useCommand() {
             const projectUrl = options.url || (yield rlQuestion("Enter the project URL: "));
             const authToken = options.token ||
                 (yield rlQuestion("Enter the authentication token: "));
+            const detectProjectId = (url) => {
+                const match = url.match(/\/projects\/([^/?#]+)/i);
+                return match === null || match === void 0 ? void 0 : match[1];
+            };
+            const detectedProjectId = detectProjectId(projectUrl);
+            const projectId = options.projectId || detectedProjectId;
+            if (!projectId) {
+                logger.warn("Project ID could not be detected from the URL. Rerun with --project-id to set it explicitly.");
+            }
             // Create .ama directory if it doesn't exist
             const amaDir = path.join(process.cwd(), ".ama");
             if (!fs.existsSync(amaDir)) {
                 fs.mkdirSync(amaDir, { recursive: true });
+                logger.verbose_log(`Created directory ${amaDir}.`);
             }
             // Add .gitignore if it doesn't exist or update it
             const gitignorePath = path.join(process.cwd(), ".gitignore");
             const gitignoreEntry = "\n# AMA configuration\n.ama/session.json\n";
             if (!fs.existsSync(gitignorePath)) {
                 fs.writeFileSync(gitignorePath, gitignoreEntry);
+                logger.verbose_log(`Created ${gitignorePath} with AMA ignore rules.`);
             }
             else {
                 const currentContent = fs.readFileSync(gitignorePath, "utf8");
                 if (!currentContent.includes(".ama/session.json")) {
                     fs.appendFileSync(gitignorePath, gitignoreEntry);
+                    logger.verbose_log(`Updated ${gitignorePath} with AMA ignore rules.`);
                 }
             }
-            const projectId = "proj_" + Math.random().toString(36).slice(2, 9);
             const configData = { token: authToken, projectId, url: projectUrl };
             (0, config_1.setConfig)(configData);
             // Save session data to .ama/session.json
             fs.writeFileSync(path.join(amaDir, "session.json"), JSON.stringify(configData, null, 2));
-
-
-
+            logger.success("Authentication details saved for the project.");
+            logger.info(`Session file stored at ${path.join(amaDir, "session.json")}.`);
+            logger.warn("Keep your .ama/session.json file private and exclude it from version control.");
+            logger.info("Session file has been added to the project .gitignore file.");
         }
         catch (error) {
             const message = error instanceof Error ? error.message : "Unknown error";
-
+            logger.error(`Configuration update failed: ${message}`, error);
             process.exit(1);
         }
     }));
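
After this change, `use` derives the project id from a /projects/<id> segment in the supplied URL, or from the new --project-id flag, instead of minting a random proj_ value. A usage sketch (the URL is illustrative; any URL containing a /projects/<id> segment would match):

    $ ama use --url https://example.com/projects/my-project --token $AMA_TOKEN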

package/dist/cli/index.js
CHANGED
@@ -4,12 +4,16 @@ Object.defineProperty(exports, "__esModule", { value: true });
 const commander_1 = require("commander");
 const migrate_1 = require("./commands/migrate");
 const use_1 = require("./commands/use");
+const upload_1 = require("./commands/upload");
+const generate_1 = require("./commands/generate");
 const program = new commander_1.Command()
     .name("ama")
     .description("AtMyApp CLI Tool")
     .version("1.0.0");
 program.addCommand((0, use_1.useCommand)());
 program.addCommand((0, migrate_1.migrateCommand)());
+program.addCommand((0, upload_1.uploadCommand)());
+program.addCommand((0, generate_1.generateCommand)());
 program.parseAsync(process.argv).catch((err) => {
     console.error("Error:", err.message);
     process.exit(1);
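
With the two addCommand calls above, commander's generated help should now cover all four subcommands (exact output depends on commander's formatting):

    $ ama --help   # lists use, migrate, upload, and generate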

package/dist/cli/logger/index.d.ts
CHANGED
@@ -1,12 +1,14 @@
 /**
- * Logger utility to handle verbose logging
+ * Logger utility to handle verbose logging with consistent formatting.
  */
 export declare class Logger {
-    private verbose;
+    private readonly verbose;
     constructor(verbose: boolean);
     info(message: string): void;
     success(message: string): void;
+    warn(message: string): void;
     error(message: string, error?: unknown): void;
     verbose_log(message: string): void;
-
+    private write;
+    private formatErrorDetail;
 }