@atmyapp/cli 0.0.7 → 0.0.8

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,2 @@
1
+ import { Command } from "commander";
2
+ export declare function generateCommand(): Command;
@@ -0,0 +1,119 @@
1
+ "use strict";
2
+ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
3
+ function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
4
+ return new (P || (P = Promise))(function (resolve, reject) {
5
+ function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
6
+ function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
7
+ function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
8
+ step((generator = generator.apply(thisArg, _arguments || [])).next());
9
+ });
10
+ };
11
+ var __importDefault = (this && this.__importDefault) || function (mod) {
12
+ return (mod && mod.__esModule) ? mod : { "default": mod };
13
+ };
14
+ Object.defineProperty(exports, "__esModule", { value: true });
15
+ exports.generateCommand = generateCommand;
16
+ const commander_1 = require("commander");
17
+ const path_1 = __importDefault(require("path"));
18
+ const fs_1 = require("fs");
19
+ const logger_1 = require("../logger");
20
+ const http_1 = require("../utils/http");
21
+ const DEFAULT_BRANCH = "main";
22
+ function generateCommand() {
23
+ return new commander_1.Command("generate")
24
+ .description("Generate a placeholder file in project storage")
25
+ .requiredOption("--path <path>", "POSIX project-relative path to generate")
26
+ .option("-t, --token <token>", "Authentication token override")
27
+ .option("-u, --url <url>", "Project base URL override")
28
+ .option("-p, --project-id <id>", "Project identifier override")
29
+ .option("--branch <name>", "Branch to target", DEFAULT_BRANCH)
30
+ .option("--save <file>", "Save generated content to a local file path")
31
+ .option("--json", "Print raw JSON response")
32
+ .option("--warnings-as-error", "Treat validation warnings as errors")
33
+ .option("--verbose", "Enable verbose logging")
34
+ .action((options) => __awaiter(this, void 0, void 0, function* () {
35
+ var _a, _b;
36
+ const logger = new logger_1.Logger(Boolean(options.verbose));
37
+ try {
38
+ const session = (0, http_1.resolveSession)({
39
+ token: options.token,
40
+ projectId: options.projectId,
41
+ url: options.url,
42
+ });
43
+ const targetPath = normalizePlaceholderPath(options.path);
44
+ const endpoint = buildGenerateEndpoint(session);
45
+ const url = (0, http_1.projectUrl)(session, endpoint, {
46
+ branch: (_a = options.branch) !== null && _a !== void 0 ? _a : DEFAULT_BRANCH,
47
+ });
48
+ const fetcher = (0, http_1.createAmaFetch)(session);
49
+ const { data, error } = yield fetcher(url, {
50
+ method: "POST",
51
+ headers: {
52
+ "Content-Type": "application/json",
53
+ },
54
+ body: JSON.stringify({ path: targetPath }),
55
+ });
56
+ if (error) {
57
+ const errorMessage = typeof error === "string"
58
+ ? error
59
+ : error instanceof Error
60
+ ? error.message
61
+ : (_b = error.message) !== null && _b !== void 0 ? _b : (error.status
62
+ ? `HTTP ${error.status}`
63
+ : "Unknown error");
64
+ throw new Error(`Request failed: ${errorMessage}`);
65
+ }
66
+ if (!data) {
67
+ throw new Error("No response received from the server.");
68
+ }
69
+ if (!data.success || !data.data) {
70
+ throw new Error(data.error || "Placeholder generation failed.");
71
+ }
72
+ const result = data.data;
73
+ if (options.json) {
74
+ console.log(JSON.stringify(data, null, 2));
75
+ }
76
+ else {
77
+ logger.success(`Generated placeholder for ${result.path}.`);
78
+ if (result.warnings && result.warnings.length > 0) {
79
+ logger.warn(`Warnings (${result.warnings.length}):\n${result.warnings
80
+ .map((warning) => ` • ${warning}`)
81
+ .join("\n")}`);
82
+ }
83
+ }
84
+ if (options.save) {
85
+ yield persistContent(options.save, result.content, logger);
86
+ }
87
+ if (options.warningsAsError &&
88
+ result.warnings &&
89
+ result.warnings.length > 0) {
90
+ throw new Error("Generation returned warnings treated as errors.");
91
+ }
92
+ logger.success("Placeholder generation completed.");
93
+ }
94
+ catch (error) {
95
+ const message = error instanceof Error ? error.message : String(error);
96
+ logger.error(`Generation failed: ${message}`);
97
+ process.exit(1);
98
+ }
99
+ }));
100
+ }
101
+ function normalizePlaceholderPath(input) {
102
+ const trimmed = input.trim();
103
+ if (!trimmed) {
104
+ throw new Error("Path is required and cannot be empty.");
105
+ }
106
+ return trimmed.replace(/\\/g, "/");
107
+ }
108
+ function buildGenerateEndpoint(session) {
109
+ return `v0/projects/${encodeURIComponent(session.projectId)}/storage/ghost/placeholders`;
110
+ }
111
+ function persistContent(destination, content, logger) {
112
+ return __awaiter(this, void 0, void 0, function* () {
113
+ const absolutePath = path_1.default.resolve(process.cwd(), destination);
114
+ const directory = path_1.default.dirname(absolutePath);
115
+ yield fs_1.promises.mkdir(directory, { recursive: true });
116
+ yield fs_1.promises.writeFile(absolutePath, content, "utf8");
117
+ logger.success(`Saved generated content to ${absolutePath}.`);
118
+ });
119
+ }
@@ -31,16 +31,16 @@ function migrateCommand() {
31
31
  const startTime = Date.now();
32
32
  const logger = new utils_1.Logger(options.verbose);
33
33
  try {
34
- logger.info("🚀 Starting migration process");
34
+ logger.info("Starting migration process.");
35
35
  logger.verbose_log(`Options: ${JSON.stringify(options)}`);
36
36
  if (options.parallel) {
37
- logger.info("Parallel processing enabled");
37
+ logger.info("Parallel processing enabled.");
38
38
  if (options.maxWorkers) {
39
- logger.info(`👥 Using ${options.maxWorkers} worker threads`);
39
+ logger.info(`Using ${options.maxWorkers} worker threads.`);
40
40
  }
41
41
  }
42
42
  else {
43
- logger.info("🔄 Using sequential processing");
43
+ logger.info("Using sequential processing.");
44
44
  }
45
45
  const config = (0, utils_1.getConfig)();
46
46
  const patterns = config.include || ["**/*.ts", "**/*.tsx"];
@@ -49,23 +49,23 @@ function migrateCommand() {
49
49
  let processingResult;
50
50
  if (options.parallel !== false) {
51
51
  // Use optimized parallel processing pipeline
52
- logger.info("🚀 Using optimized parallel processing pipeline");
52
+ logger.info("Using optimized parallel processing pipeline.");
53
53
  processingResult = yield (0, parallel_schema_processor_1.optimizedMigrationPipeline)(patterns, options.tsconfig, options.continueOnError, logger, options.maxWorkers);
54
54
  }
55
55
  else {
56
56
  // Fallback to original sequential processing
57
- logger.info("🔄 Using original sequential processing");
57
+ logger.info("Using original sequential processing.");
58
58
  const files = yield (0, utils_1.scanFiles)(patterns, logger);
59
- logger.info(`📚 Found ${files.length} files to process`);
59
+ logger.info(`Found ${files.length} files to process.`);
60
60
  const project = (0, utils_1.createProject)(files, options.tsconfig, logger);
61
61
  processingResult = (0, utils_1.processFiles)(project.getSourceFiles(), options.tsconfig, options.continueOnError, logger);
62
62
  }
63
63
  const { contents, errors, successCount, failureCount } = processingResult;
64
64
  // Report processing results
65
65
  const processingTime = ((Date.now() - startTime) / 1000).toFixed(2);
66
- logger.success(`✅ Successfully processed ${successCount} AMA contents in ${processingTime}s`);
66
+ logger.success(`Successfully processed ${successCount} AMA contents in ${processingTime}s.`);
67
67
  if (failureCount > 0) {
68
- logger.warn(`⚠️ Failed to process ${failureCount} items`);
68
+ logger.warn(`Failed to process ${failureCount} items.`);
69
69
  if (options.verbose && errors.length > 0) {
70
70
  logger.info("Errors encountered:");
71
71
  errors.forEach((err) => logger.error(` ${err}`));
@@ -77,14 +77,14 @@ function migrateCommand() {
77
77
  }
78
78
  // Generate and save output
79
79
  const outputStartTime = Date.now();
80
- logger.info("🔧 Generating output definitions...");
80
+ logger.info("Generating output definitions...");
81
81
  const output = (0, utils_1.generateOutput)(contents, config, logger);
82
82
  const outputTime = ((Date.now() - outputStartTime) / 1000).toFixed(2);
83
83
  logger.verbose_log(`Output generation took ${outputTime}s`);
84
84
  (0, utils_1.saveOutputToFile)(output, logger);
85
85
  // Upload definitions unless dry-run is enabled
86
86
  if (!options.dryRun) {
87
- logger.info("📤 Uploading definitions to AtMyApp platform");
87
+ logger.info("Uploading definitions to AtMyApp platform.");
88
88
  const uploadStartTime = Date.now();
89
89
  const uploadSuccess = yield (0, utils_1.uploadDefinitions)(output, config, logger);
90
90
  const uploadTime = ((Date.now() - uploadStartTime) / 1000).toFixed(2);
@@ -95,13 +95,13 @@ function migrateCommand() {
95
95
  }
96
96
  }
97
97
  else {
98
- logger.info("🏁 Dry run mode enabled. Skipping upload to server.");
98
+ logger.info("Dry run mode enabled. Skipping upload to server.");
99
99
  }
100
100
  const totalTime = ((Date.now() - startTime) / 1000).toFixed(2);
101
- logger.success(`🎉 Migration completed successfully in ${totalTime}s`);
101
+ logger.success(`Migration completed successfully in ${totalTime}s.`);
102
102
  // Performance summary
103
103
  if (options.verbose) {
104
- logger.info("📊 Performance Summary:");
104
+ logger.info("Performance summary:");
105
105
  logger.info(` Total time: ${totalTime}s`);
106
106
  logger.info(` Processing time: ${processingTime}s`);
107
107
  logger.info(` Files processed: ${successCount}`);
@@ -114,7 +114,7 @@ function migrateCommand() {
114
114
  catch (error) {
115
115
  const totalTime = ((Date.now() - startTime) / 1000).toFixed(2);
116
116
  const message = error instanceof Error ? error.message : "Unknown error";
117
- logger.error(`💥 Fatal error after ${totalTime}s: ${message}`, error);
117
+ logger.error(`Fatal error after ${totalTime}s: ${message}`, error);
118
118
  process.exit(1);
119
119
  }
120
120
  }));
@@ -0,0 +1,2 @@
1
+ import { Command } from "commander";
2
+ export declare function uploadCommand(): Command;
@@ -0,0 +1,334 @@
1
+ "use strict";
2
+ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
3
+ function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
4
+ return new (P || (P = Promise))(function (resolve, reject) {
5
+ function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
6
+ function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
7
+ function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
8
+ step((generator = generator.apply(thisArg, _arguments || [])).next());
9
+ });
10
+ };
11
+ var __importDefault = (this && this.__importDefault) || function (mod) {
12
+ return (mod && mod.__esModule) ? mod : { "default": mod };
13
+ };
14
+ Object.defineProperty(exports, "__esModule", { value: true });
15
+ exports.uploadCommand = uploadCommand;
16
+ const commander_1 = require("commander");
17
+ const fast_glob_1 = __importDefault(require("fast-glob"));
18
+ const path_1 = __importDefault(require("path"));
19
+ const fs_1 = require("fs");
20
+ const logger_1 = require("../logger");
21
+ const http_1 = require("../utils/http");
22
+ const DEFAULT_BRANCH = "main";
23
+ const DEFAULT_IGNORE_PATTERNS = ["**/.git/**", "**/.ama/**"];
24
+ function uploadCommand() {
25
+ return new commander_1.Command("upload")
26
+ .description("Upload files to project storage")
27
+ .argument("[inputs...]")
28
+ .option("-t, --token <token>", "Authentication token override")
29
+ .option("-u, --url <url>", "Project base URL override")
30
+ .option("-p, --project-id <id>", "Project identifier override")
31
+ .option("-b, --base-path <path>", "Base remote path (relative to project root)")
32
+ .option("--branch <name>", "Branch to write to", DEFAULT_BRANCH)
33
+ .option("--environment-name <name>", "Environment name for scoped writes")
34
+ .option("--commit <message>", "Commit message recorded with the upload")
35
+ .option("--delete <path>", "Remote file path to delete (repeatable)", collectValues, [])
36
+ .option("--no-stream", "Disable streaming progress output (enabled by default)")
37
+ .option("--verbose", "Enable verbose logging")
38
+ .action((inputPatterns, options) => __awaiter(this, void 0, void 0, function* () {
39
+ var _a, _b;
40
+ const logger = new logger_1.Logger(Boolean(options.verbose));
41
+ try {
42
+ const session = (0, http_1.resolveSession)({
43
+ token: options.token,
44
+ projectId: options.projectId,
45
+ url: options.url,
46
+ });
47
+ const basePath = normalizeBasePath(options.basePath);
48
+ const streamEnabled = options.stream !== false;
49
+ const files = yield prepareFiles(inputPatterns, basePath, logger);
50
+ const filesToDelete = normalizeDeletionPaths((_a = options.delete) !== null && _a !== void 0 ? _a : [], basePath, logger);
51
+ if (files.length === 0 && filesToDelete.length === 0) {
52
+ throw new Error("Nothing to upload. Provide file inputs or --delete paths to process.");
53
+ }
54
+ const body = {
55
+ files,
56
+ };
57
+ if (filesToDelete.length > 0) {
58
+ body.filesToDelete = filesToDelete;
59
+ }
60
+ if (options.commit) {
61
+ body.commitMessage = options.commit;
62
+ }
63
+ const query = {
64
+ branch: (_b = options.branch) !== null && _b !== void 0 ? _b : DEFAULT_BRANCH,
65
+ };
66
+ if (options.environmentName) {
67
+ query.environment_name = options.environmentName;
68
+ }
69
+ if (streamEnabled) {
70
+ query.stream = true;
71
+ yield performStreamingUpload(session, basePath, body, query, logger);
72
+ }
73
+ else {
74
+ yield performStandardUpload(session, basePath, body, query, logger);
75
+ }
76
+ logger.success("Upload completed successfully.");
77
+ }
78
+ catch (error) {
79
+ const message = error instanceof Error ? error.message : String(error);
80
+ logger.error(`Upload failed: ${message}`);
81
+ process.exit(1);
82
+ }
83
+ }));
84
+ }
85
+ function collectValues(value, previous) {
86
+ previous.push(value);
87
+ return previous;
88
+ }
89
+ function normalizeBasePath(basePath) {
90
+ if (!basePath) {
91
+ return undefined;
92
+ }
93
+ const trimmed = basePath.trim();
94
+ if (!trimmed) {
95
+ return undefined;
96
+ }
97
+ const normalized = toPosix(trimmed)
98
+ .replace(/^\.\/?/, "")
99
+ .replace(/\/+$/, "");
100
+ return normalized || undefined;
101
+ }
102
+ function toPosix(value) {
103
+ return value.replace(/\\/g, "/");
104
+ }
105
+ function prepareFiles(patterns, basePath, logger) {
106
+ return __awaiter(this, void 0, void 0, function* () {
107
+ if (!patterns || patterns.length === 0) {
108
+ return [];
109
+ }
110
+ const resolvedPaths = new Set();
111
+ for (const pattern of patterns) {
112
+ const expanded = yield expandInput(pattern);
113
+ if (expanded.length === 0) {
114
+ logger.warn(`No files matched pattern '${pattern}'.`);
115
+ }
116
+ for (const filePath of expanded) {
117
+ resolvedPaths.add(path_1.default.resolve(filePath));
118
+ }
119
+ }
120
+ const sortedPaths = Array.from(resolvedPaths).sort();
121
+ const files = [];
122
+ for (const absolutePath of sortedPaths) {
123
+ const buffer = yield fs_1.promises.readFile(absolutePath);
124
+ const relativePath = path_1.default.relative(process.cwd(), absolutePath) ||
125
+ path_1.default.basename(absolutePath);
126
+ const posixPath = toPosix(relativePath);
127
+ const remotePath = computeRemotePath(posixPath, basePath, logger);
128
+ logger.verbose_log(`Preparing file '${absolutePath}' as remote path '${remotePath}'.`);
129
+ files.push({
130
+ path: remotePath,
131
+ content: buffer.toString("base64"),
132
+ });
133
+ }
134
+ return files;
135
+ });
136
+ }
137
+ function expandInput(input) {
138
+ return __awaiter(this, void 0, void 0, function* () {
139
+ const cwd = process.cwd();
140
+ const absoluteCandidate = path_1.default.resolve(cwd, input);
141
+ try {
142
+ const stats = yield fs_1.promises.stat(absoluteCandidate);
143
+ if (stats.isDirectory()) {
144
+ const entries = yield (0, fast_glob_1.default)("**/*", {
145
+ cwd: absoluteCandidate,
146
+ dot: true,
147
+ onlyFiles: true,
148
+ followSymbolicLinks: false,
149
+ ignore: DEFAULT_IGNORE_PATTERNS,
150
+ });
151
+ return entries.map((entry) => path_1.default.resolve(absoluteCandidate, entry));
152
+ }
153
+ if (stats.isFile()) {
154
+ return [absoluteCandidate];
155
+ }
156
+ }
157
+ catch (error) {
158
+ // Treat as glob when path resolution fails
159
+ }
160
+ const normalizedPattern = toPosix(input);
161
+ const matches = yield (0, fast_glob_1.default)(normalizedPattern, {
162
+ cwd,
163
+ dot: true,
164
+ onlyFiles: true,
165
+ followSymbolicLinks: false,
166
+ ignore: DEFAULT_IGNORE_PATTERNS,
167
+ unique: true,
168
+ });
169
+ return matches.map((match) => path_1.default.resolve(cwd, match));
170
+ });
171
+ }
172
+ function computeRemotePath(posixPath, basePath, logger) {
173
+ const cleaned = posixPath.replace(/^\.\//, "");
174
+ if (!basePath) {
175
+ return cleaned;
176
+ }
177
+ const relativeToBase = path_1.default.posix.relative(basePath, cleaned);
178
+ if (relativeToBase.startsWith("..")) {
179
+ logger.warn(`File '${cleaned}' is outside the base path '${basePath}'. Using absolute path.`);
180
+ return cleaned;
181
+ }
182
+ const normalized = relativeToBase.replace(/^\.\//, "");
183
+ const finalSegment = normalized || path_1.default.posix.basename(cleaned);
184
+ return `./${finalSegment}`;
185
+ }
186
+ function normalizeDeletionPaths(values, basePath, logger) {
187
+ if (!values || values.length === 0) {
188
+ return [];
189
+ }
190
+ const result = [];
191
+ for (const original of values) {
192
+ const trimmed = original.trim();
193
+ if (!trimmed) {
194
+ continue;
195
+ }
196
+ const posix = toPosix(trimmed);
197
+ if (posix.startsWith("./")) {
198
+ result.push(`./${posix.replace(/^\.\/+/, "")}`);
199
+ continue;
200
+ }
201
+ if (posix.startsWith("/")) {
202
+ result.push(posix.replace(/^\/+/, ""));
203
+ continue;
204
+ }
205
+ result.push(computeRemotePath(posix, basePath, logger));
206
+ }
207
+ return result;
208
+ }
209
+ function buildUploadEndpoint(session, basePath) {
210
+ const projectSegment = `v0/projects/${encodeURIComponent(session.projectId)}/storage/f`;
211
+ if (!basePath) {
212
+ return projectSegment;
213
+ }
214
+ const encodedBase = (0, http_1.encodePathSegment)(basePath);
215
+ return `${projectSegment}/${encodedBase}`;
216
+ }
217
+ function performStreamingUpload(session, basePath, body, query, logger) {
218
+ return __awaiter(this, void 0, void 0, function* () {
219
+ const endpoint = buildUploadEndpoint(session, basePath);
220
+ const url = (0, http_1.projectUrl)(session, endpoint, {
221
+ branch: query.branch,
222
+ environment_name: query.environment_name,
223
+ stream: true,
224
+ });
225
+ const summary = {
226
+ updated: new Set(),
227
+ deleted: new Set(),
228
+ errors: new Map(),
229
+ hadErrorEvent: false,
230
+ };
231
+ yield (0, http_1.streamSse)({
232
+ url,
233
+ fetchInit: {
234
+ method: "PUT",
235
+ headers: {
236
+ "Content-Type": "application/json",
237
+ Authorization: `Bearer ${session.token}`,
238
+ },
239
+ body: JSON.stringify(body),
240
+ },
241
+ onEvent: (event) => __awaiter(this, void 0, void 0, function* () {
242
+ if (event.data === "[DONE]") {
243
+ return;
244
+ }
245
+ const payload = event.parsed;
246
+ if (!payload) {
247
+ logger.verbose_log(`SSE: ${event.data}`);
248
+ return;
249
+ }
250
+ if (payload.message) {
251
+ logger.info(payload.message);
252
+ }
253
+ if (payload.updated) {
254
+ payload.updated.forEach((item) => summary.updated.add(item));
255
+ }
256
+ if (payload.deleted) {
257
+ payload.deleted.forEach((item) => summary.deleted.add(item));
258
+ }
259
+ if (payload.errors) {
260
+ Object.entries(payload.errors).forEach(([key, value]) => {
261
+ summary.errors.set(key, value);
262
+ logger.error(`Error processing '${key}': ${value}`);
263
+ });
264
+ }
265
+ if (payload.type === "error" || payload.success === false) {
266
+ summary.hadErrorEvent = true;
267
+ }
268
+ if (payload.type === "complete") {
269
+ logger.info("Upload stream completed.");
270
+ }
271
+ }),
272
+ });
273
+ if (summary.updated.size > 0) {
274
+ logger.success(`Updated ${summary.updated.size} file(s): ${Array.from(summary.updated).join(", ")}`);
275
+ }
276
+ if (summary.deleted.size > 0) {
277
+ logger.warn(`Deleted ${summary.deleted.size} file(s): ${Array.from(summary.deleted).join(", ")}`);
278
+ }
279
+ if (summary.errors.size > 0 || summary.hadErrorEvent) {
280
+ throw new Error("Upload completed with errors. Check the log for details.");
281
+ }
282
+ });
283
+ }
284
+ function performStandardUpload(session, basePath, body, query, logger) {
285
+ return __awaiter(this, void 0, void 0, function* () {
286
+ var _a;
287
+ const endpoint = buildUploadEndpoint(session, basePath);
288
+ const url = (0, http_1.projectUrl)(session, endpoint, {
289
+ branch: query.branch,
290
+ environment_name: query.environment_name,
291
+ });
292
+ const fetcher = (0, http_1.createAmaFetch)(session);
293
+ const { data, error } = yield fetcher(url, {
294
+ method: "PUT",
295
+ headers: {
296
+ "Content-Type": "application/json",
297
+ },
298
+ body: JSON.stringify(body),
299
+ });
300
+ if (error) {
301
+ const errorMessage = typeof error === "string"
302
+ ? error
303
+ : error instanceof Error
304
+ ? error.message
305
+ : (_a = error.message) !== null && _a !== void 0 ? _a : (error.status
306
+ ? `HTTP ${error.status}`
307
+ : "Unknown error");
308
+ throw new Error(`Request failed: ${errorMessage}`);
309
+ }
310
+ if (!data) {
311
+ throw new Error("No response data received from the server.");
312
+ }
313
+ if (!data.success) {
314
+ throw new Error(data.error || "Upload failed without a server error message.");
315
+ }
316
+ const payload = data.data;
317
+ if (!payload) {
318
+ logger.success("Server reported success with no additional details.");
319
+ return;
320
+ }
321
+ if (payload.updated && payload.updated.length > 0) {
322
+ logger.success(`Updated files: ${payload.updated.join(", ")}`);
323
+ }
324
+ if (payload.deleted && payload.deleted.length > 0) {
325
+ logger.warn(`Deleted files: ${payload.deleted.join(", ")}`);
326
+ }
327
+ if (payload.errors && Object.keys(payload.errors).length > 0) {
328
+ for (const [pathKey, value] of Object.entries(payload.errors)) {
329
+ logger.error(`Error for '${pathKey}': ${value}`);
330
+ }
331
+ throw new Error("Upload completed with file-specific errors.");
332
+ }
333
+ });
334
+ }
@@ -41,13 +41,10 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge
41
41
  step((generator = generator.apply(thisArg, _arguments || [])).next());
42
42
  });
43
43
  };
44
- var __importDefault = (this && this.__importDefault) || function (mod) {
45
- return (mod && mod.__esModule) ? mod : { "default": mod };
46
- };
47
44
  Object.defineProperty(exports, "__esModule", { value: true });
48
45
  exports.useCommand = useCommand;
49
46
  const commander_1 = require("commander");
50
- const chalk_1 = __importDefault(require("chalk"));
47
+ const logger_1 = require("../logger");
51
48
  const config_1 = require("../utils/config");
52
49
  const fs = __importStar(require("fs"));
53
50
  const path = __importStar(require("path"));
@@ -56,7 +53,10 @@ function useCommand() {
56
53
  .description("Set authentication token for AMA project")
57
54
  .option("-t, --token <token>", "Authentication token")
58
55
  .option("-u, --url <url>", "Project base URL")
56
+ .option("-p, --project-id <id>", "Project identifier override")
57
+ .option("--verbose", "Enable verbose logging")
59
58
  .action((options) => __awaiter(this, void 0, void 0, function* () {
59
+ const logger = new logger_1.Logger(Boolean(options.verbose));
60
60
  const rlQuestion = (query) => {
61
61
  return new Promise((resolve) => {
62
62
  const rl = require("readline").createInterface({
@@ -74,35 +74,47 @@ function useCommand() {
74
74
  const projectUrl = options.url || (yield rlQuestion("Enter the project URL: "));
75
75
  const authToken = options.token ||
76
76
  (yield rlQuestion("Enter the authentication token: "));
77
+ const detectProjectId = (url) => {
78
+ const match = url.match(/\/projects\/([^/?#]+)/i);
79
+ return match === null || match === void 0 ? void 0 : match[1];
80
+ };
81
+ const detectedProjectId = detectProjectId(projectUrl);
82
+ const projectId = options.projectId || detectedProjectId;
83
+ if (!projectId) {
84
+ logger.warn("Project ID could not be detected from the URL. Rerun with --project-id to set it explicitly.");
85
+ }
77
86
  // Create .ama directory if it doesn't exist
78
87
  const amaDir = path.join(process.cwd(), ".ama");
79
88
  if (!fs.existsSync(amaDir)) {
80
89
  fs.mkdirSync(amaDir, { recursive: true });
90
+ logger.verbose_log(`Created directory ${amaDir}.`);
81
91
  }
82
92
  // Add .gitignore if it doesn't exist or update it
83
93
  const gitignorePath = path.join(process.cwd(), ".gitignore");
84
94
  const gitignoreEntry = "\n# AMA configuration\n.ama/session.json\n";
85
95
  if (!fs.existsSync(gitignorePath)) {
86
96
  fs.writeFileSync(gitignorePath, gitignoreEntry);
97
+ logger.verbose_log(`Created ${gitignorePath} with AMA ignore rules.`);
87
98
  }
88
99
  else {
89
100
  const currentContent = fs.readFileSync(gitignorePath, "utf8");
90
101
  if (!currentContent.includes(".ama/session.json")) {
91
102
  fs.appendFileSync(gitignorePath, gitignoreEntry);
103
+ logger.verbose_log(`Updated ${gitignorePath} with AMA ignore rules.`);
92
104
  }
93
105
  }
94
- const projectId = "proj_" + Math.random().toString(36).slice(2, 9);
95
106
  const configData = { token: authToken, projectId, url: projectUrl };
96
107
  (0, config_1.setConfig)(configData);
97
108
  // Save session data to .ama/session.json
98
109
  fs.writeFileSync(path.join(amaDir, "session.json"), JSON.stringify(configData, null, 2));
99
- console.log(chalk_1.default.green("🔐 Successfully authenticated and joined project"));
100
- console.log(chalk_1.default.yellow("⚠️ Warning: Keep your .ama/session.json file private and do not commit it to version control"));
101
- console.log(chalk_1.default.blue("ℹ️ Note: Session file has been automatically added to .gitignore"));
110
+ logger.success("Authentication details saved for the project.");
111
+ logger.info(`Session file stored at ${path.join(amaDir, "session.json")}.`);
112
+ logger.warn("Keep your .ama/session.json file private and exclude it from version control.");
113
+ logger.info("Session file has been added to the project .gitignore file.");
102
114
  }
103
115
  catch (error) {
104
116
  const message = error instanceof Error ? error.message : "Unknown error";
105
- console.error(chalk_1.default.red(`❌ Error: ${message}`));
117
+ logger.error(`Configuration update failed: ${message}`, error);
106
118
  process.exit(1);
107
119
  }
108
120
  }));
package/dist/cli/index.js CHANGED
@@ -4,12 +4,16 @@ Object.defineProperty(exports, "__esModule", { value: true });
4
4
  const commander_1 = require("commander");
5
5
  const migrate_1 = require("./commands/migrate");
6
6
  const use_1 = require("./commands/use");
7
+ const upload_1 = require("./commands/upload");
8
+ const generate_1 = require("./commands/generate");
7
9
  const program = new commander_1.Command()
8
10
  .name("ama")
9
11
  .description("AtMyApp CLI Tool")
10
12
  .version("1.0.0");
11
13
  program.addCommand((0, use_1.useCommand)());
12
14
  program.addCommand((0, migrate_1.migrateCommand)());
15
+ program.addCommand((0, upload_1.uploadCommand)());
16
+ program.addCommand((0, generate_1.generateCommand)());
13
17
  program.parseAsync(process.argv).catch((err) => {
14
18
  console.error("Error:", err.message);
15
19
  process.exit(1);
@@ -1,12 +1,14 @@
1
1
  /**
2
- * Logger utility to handle verbose logging
2
+ * Logger utility to handle verbose logging with consistent formatting.
3
3
  */
4
4
  export declare class Logger {
5
- private verbose;
5
+ private readonly verbose;
6
6
  constructor(verbose: boolean);
7
7
  info(message: string): void;
8
8
  success(message: string): void;
9
+ warn(message: string): void;
9
10
  error(message: string, error?: unknown): void;
10
11
  verbose_log(message: string): void;
11
- warn(message: string): void;
12
+ private write;
13
+ private formatErrorDetail;
12
14
  }
@@ -5,30 +5,76 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
5
5
  Object.defineProperty(exports, "__esModule", { value: true });
6
6
  exports.Logger = void 0;
7
7
  const chalk_1 = __importDefault(require("chalk"));
8
+ const LEVEL_CONFIG = {
9
+ info: { label: "INFO", style: (text) => chalk_1.default.blueBright.bold(text), method: "log" },
10
+ success: { label: "SUCCESS", style: (text) => chalk_1.default.greenBright.bold(text), method: "log" },
11
+ warn: { label: "WARN", style: (text) => chalk_1.default.yellowBright.bold(text), method: "warn" },
12
+ error: { label: "ERROR", style: (text) => chalk_1.default.redBright.bold(text), method: "error" },
13
+ verbose: { label: "VERBOSE", style: (text) => chalk_1.default.gray.bold(text), method: "log" },
14
+ };
8
15
  /**
9
- * Logger utility to handle verbose logging
16
+ * Logger utility to handle verbose logging with consistent formatting.
10
17
  */
11
18
  class Logger {
12
19
  constructor(verbose) {
13
20
  this.verbose = verbose;
14
21
  }
15
22
  info(message) {
16
- console.log(chalk_1.default.blue(message));
23
+ this.write("info", message);
17
24
  }
18
25
  success(message) {
19
- console.log(chalk_1.default.green(message));
26
+ this.write("success", message);
27
+ }
28
+ warn(message) {
29
+ this.write("warn", message);
20
30
  }
21
31
  error(message, error) {
22
- const errorMessage = error instanceof Error ? error.message : String(error);
23
- console.error(chalk_1.default.red(message), error ? chalk_1.default.red(errorMessage) : "");
32
+ const detail = this.formatErrorDetail(error);
33
+ const formatted = detail ? `${message}\n${chalk_1.default.gray(detail)}` : message;
34
+ this.write("error", formatted);
24
35
  }
25
36
  verbose_log(message) {
26
- if (this.verbose) {
27
- console.log(chalk_1.default.cyan(`[VERBOSE] ${message}`));
37
+ if (!this.verbose) {
38
+ return;
28
39
  }
40
+ this.write("verbose", message);
29
41
  }
30
- warn(message) {
31
- console.warn(chalk_1.default.yellow(message));
42
+ write(level, message) {
43
+ const { label, style, method } = LEVEL_CONFIG[level];
44
+ const rawLabel = `[${label}]`;
45
+ const styledLabel = style(rawLabel);
46
+ const lines = message.split(/\r?\n/);
47
+ const continuationPrefix = " ".repeat(rawLabel.length + 1);
48
+ for (const [index, line] of lines.entries()) {
49
+ const output = index === 0 ? `${styledLabel} ${line}` : `${continuationPrefix}${line}`;
50
+ if (method === "warn") {
51
+ console.warn(output);
52
+ }
53
+ else if (method === "error") {
54
+ console.error(output);
55
+ }
56
+ else {
57
+ console.log(output);
58
+ }
59
+ }
60
+ }
61
+ formatErrorDetail(error) {
62
+ var _a;
63
+ if (!error) {
64
+ return undefined;
65
+ }
66
+ if (error instanceof Error) {
67
+ return (_a = error.stack) !== null && _a !== void 0 ? _a : error.message;
68
+ }
69
+ if (typeof error === "string") {
70
+ return error;
71
+ }
72
+ try {
73
+ return JSON.stringify(error, null, 2);
74
+ }
75
+ catch (_b) {
76
+ return String(error);
77
+ }
32
78
  }
33
79
  }
34
80
  exports.Logger = Logger;
@@ -1,10 +1,10 @@
1
- type Config = {
1
+ export interface AmaConfig {
2
2
  token?: string;
3
3
  projectId?: string;
4
+ url?: string;
4
5
  include?: string[];
5
6
  description?: string;
6
7
  args?: Record<string, string>;
7
- };
8
- export declare function setConfig(config: Config): void;
9
- export declare function getConfig(): Config;
10
- export {};
8
+ }
9
+ export declare function setConfig(config: AmaConfig): void;
10
+ export declare function getConfig(): AmaConfig;
@@ -19,7 +19,6 @@ function ensureConfigDir() {
19
19
  }
20
20
  function setConfig(config) {
21
21
  ensureConfigDir();
22
- (0, fs_1.writeFileSync)(CONFIG_PATH, JSON.stringify(config, null, 2));
23
22
  try {
24
23
  (0, fs_1.writeFileSync)(CONFIG_PATH, JSON.stringify(config, null, 2));
25
24
  }
@@ -0,0 +1,36 @@
1
/** Fully-resolved credentials and endpoint for one AMA project. */
export interface AmaSession {
    /** Bearer token sent in the Authorization header. */
    token: string;
    /** Project identifier. */
    projectId: string;
    /** Project base URL (implementation strips trailing slashes). */
    url: string;
}
/** Optional per-invocation overrides (CLI flags) merged over stored config. */
export interface SessionOverrides {
    token?: string;
    projectId?: string;
    url?: string;
}
/** The configured better-fetch client type produced by createAmaFetch. */
export type AmaFetch = ReturnType<typeof createAmaFetch>;
/** One parsed server-sent event. */
export interface SseEvent<T = unknown> {
    /** Value of the `event:` field, if present. */
    event?: string;
    /** Value of the `id:` field, if present. */
    id?: string;
    /** Numeric value of the `retry:` field, if present and parseable. */
    retry?: number;
    /** All `data:` lines joined with "\n". */
    data: string;
    /** JSON.parse of `data` when it is valid JSON; otherwise absent. */
    parsed?: T;
    /** The trimmed raw event block as received. */
    raw: string;
}
/** Options for streamSse. */
export interface StreamSseOptions<T = unknown> {
    /** Absolute URL of the SSE endpoint. */
    url: string;
    /** Extra fetch options; an Accept: text/event-stream header is added if missing. */
    fetchInit?: RequestInit;
    /** Abort signal; takes precedence over fetchInit.signal. */
    signal?: AbortSignal;
    /** Invoked (and awaited) for every parsed event. */
    onEvent: (event: SseEvent<T>) => void | Promise<void>;
}
/** Merge overrides over stored config and validate; throws if token/url/projectId missing. */
export declare function resolveSession(overrides?: SessionOverrides): AmaSession;
/** Build an authenticated better-fetch client bound to the session's base URL. */
export declare function createAmaFetch(session: AmaSession): import("@better-fetch/fetch").BetterFetch<{
    baseURL: string;
    headers: {
        Authorization: string;
    };
    throw: false;
}, unknown, unknown, unknown>;
/** Join subPath onto the session URL and append non-null query parameters. */
export declare function projectUrl(session: AmaSession, subPath: string, query?: Record<string, string | number | boolean | null | undefined>): string;
/** Fetch an SSE endpoint and dispatch each parsed event to options.onEvent. */
export declare function streamSse<T = unknown>(options: StreamSseOptions<T>): Promise<void>;
/** Percent-encode each "/"-separated segment, leaving "" and "." segments intact. */
export declare function encodePathSegment(path: string): string;
@@ -0,0 +1,242 @@
1
+ "use strict";
2
+ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
3
+ function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
4
+ return new (P || (P = Promise))(function (resolve, reject) {
5
+ function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
6
+ function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
7
+ function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
8
+ step((generator = generator.apply(thisArg, _arguments || [])).next());
9
+ });
10
+ };
11
+ var __rest = (this && this.__rest) || function (s, e) {
12
+ var t = {};
13
+ for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0)
14
+ t[p] = s[p];
15
+ if (s != null && typeof Object.getOwnPropertySymbols === "function")
16
+ for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) {
17
+ if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i]))
18
+ t[p[i]] = s[p[i]];
19
+ }
20
+ return t;
21
+ };
22
+ Object.defineProperty(exports, "__esModule", { value: true });
23
+ exports.resolveSession = resolveSession;
24
+ exports.createAmaFetch = createAmaFetch;
25
+ exports.projectUrl = projectUrl;
26
+ exports.streamSse = streamSse;
27
+ exports.encodePathSegment = encodePathSegment;
28
+ const fetch_1 = require("@better-fetch/fetch");
29
+ const config_1 = require("./config");
30
/**
 * Build an AmaSession by layering CLI overrides over the stored config.
 * @param {object} [overrides] - Optional token/projectId/url overrides.
 * @returns {{token: string, projectId: string, url: string}} Session with a
 *   trailing-slash-free URL.
 * @throws {Error} When no config is readable or any required field is missing.
 */
function resolveSession(overrides = {}) {
    let stored;
    try {
        stored = (0, config_1.getConfig)();
    }
    catch (error) {
        throw new Error("AMA session not configured. Run 'ama use' first or supply --url, --token, and --project-id options.");
    }
    // Overrides win only when actually provided (null/undefined falls back).
    const pick = (override, fallback) => (override !== null && override !== undefined ? override : fallback);
    const token = pick(overrides.token, stored.token);
    const projectId = pick(overrides.projectId, stored.projectId);
    const url = pick(overrides.url, stored.url);
    if (!token) {
        throw new Error("Authentication token is missing. Run 'ama use' or provide it with --token.");
    }
    if (!url) {
        throw new Error("Project URL is missing. Run 'ama use' or provide it with --url.");
    }
    if (!projectId) {
        throw new Error("Project ID is missing. Ensure the URL contains /projects/{id} or pass --project-id.");
    }
    return {
        token,
        projectId,
        url: stripTrailingSlashes(url),
    };
}
57
/**
 * Create a better-fetch client preconfigured for the given session:
 * base URL with a guaranteed trailing slash, a Bearer Authorization header,
 * and non-throwing responses (`throw: false`).
 * @param {{token: string, url: string}} session - Resolved AMA session.
 */
function createAmaFetch(session) {
    const baseURL = ensureTrailingSlash(session.url);
    const headers = {
        Authorization: `Bearer ${session.token}`,
    };
    return (0, fetch_1.createFetch)({ baseURL, headers, throw: false });
}
66
/**
 * Build an absolute URL under the session's base URL.
 * @param {{url: string}} session - Resolved AMA session.
 * @param {string} subPath - Path relative to the base; leading slashes are
 *   stripped so it cannot escape the base path.
 * @param {Record<string, string|number|boolean|null|undefined>} [query]
 *   Optional query parameters; null/undefined values are skipped.
 * @returns {string} The serialized URL.
 */
function projectUrl(session, subPath, query) {
    const base = ensureTrailingSlash(session.url);
    const cleanPath = subPath.replace(/^\/+/, "");
    const url = new URL(cleanPath, base);
    if (query) {
        const params = new URLSearchParams();
        for (const [key, value] of Object.entries(query)) {
            if (value === undefined || value === null) {
                continue;
            }
            // Fix: the original ternary had identical branches
            // (`typeof value === "boolean" ? String(value) : String(value)`);
            // String() covers strings, numbers, and booleans alike.
            params.append(key, String(value));
        }
        const queryString = params.toString();
        if (queryString) {
            url.search = queryString;
        }
    }
    return url.toString();
}
85
/**
 * Fetch an SSE endpoint and dispatch each complete event to `onEvent`.
 * Adds an `Accept: text/event-stream` header when absent, defaults the
 * method to GET, and lets `options.signal` take precedence over
 * `fetchInit.signal`. The response body is decoded incrementally and any
 * trailing partial event is flushed once the stream ends.
 * @param {StreamSseOptions} options
 * @throws {Error} On a non-OK status (with best-effort body text) or when
 *   the response has no readable body.
 */
async function streamSse(options) {
    const { url, onEvent, signal, fetchInit = {} } = options;
    const { signal: initSignal, headers: initHeaders, ...restInit } = fetchInit;
    const headers = new Headers(initHeaders != null ? initHeaders : {});
    if (!headers.has("Accept")) {
        headers.set("Accept", "text/event-stream");
    }
    const requestInit = {
        method: restInit.method != null ? restInit.method : "GET",
        ...restInit,
        headers,
        signal: signal != null ? signal : initSignal,
    };
    const response = await fetch(url, requestInit);
    if (!response.ok) {
        const errorBody = await safeReadText(response);
        throw new Error(`Request failed with status ${response.status}: ${errorBody != null ? errorBody : "<no body>"}`);
    }
    if (!response.body) {
        throw new Error("Streaming response body is not available.");
    }
    const reader = response.body.getReader();
    const decoder = new TextDecoder("utf-8");
    let buffer = "";
    for (;;) {
        const { done, value } = await reader.read();
        if (done) {
            break;
        }
        buffer += decoder.decode(value, { stream: true });
        buffer = await dispatchSseBuffer(buffer, onEvent);
    }
    // Flush any bytes the decoder buffered, then force-dispatch the remainder.
    buffer += decoder.decode();
    await dispatchSseBuffer(buffer, onEvent, true);
}
118
/**
 * Percent-encode each "/"-separated segment of a POSIX-style path while
 * keeping the separators, empty segments (leading/double slashes), and "."
 * segments intact.
 * @param {string} path - Slash-separated path.
 * @returns {string} Path with each non-trivial segment URI-encoded.
 */
function encodePathSegment(path) {
    const encoded = [];
    for (const segment of path.split("/")) {
        const keepVerbatim = segment === "" || segment === ".";
        encoded.push(keepVerbatim ? segment : encodeURIComponent(segment));
    }
    return encoded.join("/");
}
124
/**
 * Remove every trailing "/" from `value` (e.g. "http://x///" -> "http://x").
 * @param {string} value
 * @returns {string}
 */
function stripTrailingSlashes(value) {
    let end = value.length;
    while (end > 0 && value.charAt(end - 1) === "/") {
        end -= 1;
    }
    return value.slice(0, end);
}
127
/**
 * Guarantee `value` ends with exactly one appended "/" if it has none.
 * @param {string} value
 * @returns {string}
 */
function ensureTrailingSlash(value) {
    if (value.endsWith("/")) {
        return value;
    }
    return `${value}/`;
}
130
/**
 * Consume complete SSE events from `buffer`, awaiting `onEvent` for each,
 * and return the unconsumed remainder. With `flush` set, any non-blank
 * remainder is dispatched as a final event and "" is returned.
 * @param {string} buffer - Accumulated stream text.
 * @param {(event: object) => void|Promise<void>} onEvent - Event sink.
 * @param {boolean} [flush=false] - Force-dispatch the trailing partial event.
 * @returns {Promise<string>} Leftover text to carry into the next chunk.
 */
async function dispatchSseBuffer(buffer, onEvent, flush = false) {
    let remaining = buffer;
    let delimiter = nextDelimiter(remaining);
    while (delimiter) {
        const rawEvent = remaining.slice(0, delimiter.index);
        remaining = remaining.slice(delimiter.index + delimiter.length);
        if (rawEvent.trim()) {
            const event = parseSseEvent(rawEvent);
            if (event) {
                await onEvent(event);
            }
        }
        delimiter = nextDelimiter(remaining);
    }
    if (flush && remaining.trim()) {
        const event = parseSseEvent(remaining);
        if (event) {
            await onEvent(event);
        }
        return "";
    }
    return remaining;
}
159
/**
 * Find the earliest SSE event delimiter (blank line) in `buffer`, checking
 * both LF ("\n\n") and CRLF ("\r\n\r\n") conventions.
 * @param {string} buffer
 * @returns {{index: number, length: number}|null} Position and byte length of
 *   the earliest delimiter, or null when none is present.
 */
function nextDelimiter(buffer) {
    const candidates = [
        { index: buffer.indexOf("\n\n"), length: 2 },
        { index: buffer.indexOf("\r\n\r\n"), length: 4 },
    ];
    let best = null;
    for (const candidate of candidates) {
        if (candidate.index === -1) {
            continue;
        }
        if (best === null || candidate.index < best.index) {
            best = candidate;
        }
    }
    return best;
}
175
/**
 * Parse one raw SSE event block into an event object.
 * Field handling follows the event-stream format: comment lines (":" prefix)
 * and blank lines are skipped, multiple `data:` lines are joined with "\n",
 * and a single leading whitespace run after the colon is stripped from values.
 * When the joined data is valid JSON it is also exposed as `parsed`.
 * @param {string} raw - Text of one event (between blank-line delimiters).
 * @returns {object|null} The event, or null for an all-whitespace block.
 */
function parseSseEvent(raw) {
    const trimmed = raw.trim();
    if (!trimmed) {
        return null;
    }
    const event = {
        data: "",
        raw: trimmed,
    };
    const dataLines = [];
    for (const line of trimmed.split(/\r?\n/)) {
        if (!line || line.startsWith(":")) {
            continue;
        }
        const colonAt = line.indexOf(":");
        const field = colonAt === -1 ? line : line.slice(0, colonAt).trim();
        const value = colonAt === -1 ? "" : line.slice(colonAt + 1).replace(/^\s+/, "");
        if (field === "event") {
            event.event = value;
        }
        else if (field === "data") {
            dataLines.push(value);
        }
        else if (field === "id") {
            event.id = value;
        }
        else if (field === "retry") {
            const numeric = Number(value);
            if (!Number.isNaN(numeric)) {
                event.retry = numeric;
            }
        }
        // Unknown fields are ignored, per the SSE processing model.
    }
    event.data = dataLines.join("\n");
    if (event.data) {
        try {
            event.parsed = JSON.parse(event.data);
        }
        catch (error) {
            // Non-JSON payloads are expected occasionally (e.g. [DONE])
        }
    }
    return event;
}
233
/**
 * Read a Response body as text without throwing.
 * @param {Response} response
 * @returns {Promise<string|null>} Body text, or null when reading fails
 *   (e.g. body already consumed or stream error).
 */
async function safeReadText(response) {
    try {
        return await response.text();
    }
    catch (error) {
        return null;
    }
}
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@atmyapp/cli",
3
- "version": "0.0.7",
3
+ "version": "0.0.8",
4
4
  "main": "dist/index.js",
5
5
  "types": "dist/index.d.ts",
6
6
  "scripts": {
@@ -28,7 +28,7 @@
28
28
  "license": "ISC",
29
29
  "description": "",
30
30
  "devDependencies": {
31
- "@atmyapp/core": "^0.0.6",
31
+ "@atmyapp/core": "^0.0.11",
32
32
  "@types/jest": "^29.5.14",
33
33
  "@typescript-eslint/eslint-plugin": "^8.32.1",
34
34
  "@typescript-eslint/parser": "^8.32.1",
@@ -40,6 +40,7 @@
40
40
  "typescript": "^5.8.3"
41
41
  },
42
42
  "dependencies": {
43
+ "@better-fetch/fetch": "^1.1.18",
43
44
  "chalk": "^4.1.2",
44
45
  "commander": "^14.0.0",
45
46
  "fast-glob": "^3.3.3",