@atmyapp/cli 0.0.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,109 @@
+ "use strict";
+ var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+     if (k2 === undefined) k2 = k;
+     var desc = Object.getOwnPropertyDescriptor(m, k);
+     if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+       desc = { enumerable: true, get: function() { return m[k]; } };
+     }
+     Object.defineProperty(o, k2, desc);
+ }) : (function(o, m, k, k2) {
+     if (k2 === undefined) k2 = k;
+     o[k2] = m[k];
+ }));
+ var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+     Object.defineProperty(o, "default", { enumerable: true, value: v });
+ }) : function(o, v) {
+     o["default"] = v;
+ });
+ var __importStar = (this && this.__importStar) || (function () {
+     var ownKeys = function(o) {
+         ownKeys = Object.getOwnPropertyNames || function (o) {
+             var ar = [];
+             for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
+             return ar;
+         };
+         return ownKeys(o);
+     };
+     return function (mod) {
+         if (mod && mod.__esModule) return mod;
+         var result = {};
+         if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
+         __setModuleDefault(result, mod);
+         return result;
+     };
+ })();
+ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
+     function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
+     return new (P || (P = Promise))(function (resolve, reject) {
+         function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
+         function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
+         function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
+         step((generator = generator.apply(thisArg, _arguments || [])).next());
+     });
+ };
+ var __importDefault = (this && this.__importDefault) || function (mod) {
+     return (mod && mod.__esModule) ? mod : { "default": mod };
+ };
+ Object.defineProperty(exports, "__esModule", { value: true });
+ exports.useCommand = useCommand;
+ const commander_1 = require("commander");
+ const chalk_1 = __importDefault(require("chalk"));
+ const config_1 = require("../utils/config");
+ const fs = __importStar(require("fs"));
+ const path = __importStar(require("path"));
+ function useCommand() {
+     return new commander_1.Command("use")
+         .description("Set authentication token for AMA project")
+         .option("-t, --token <token>", "Authentication token")
+         .option("-u, --url <url>", "Project base URL")
+         .action((options) => __awaiter(this, void 0, void 0, function* () {
+         const rlQuestion = (query) => {
+             return new Promise((resolve) => {
+                 const rl = require("readline").createInterface({
+                     input: process.stdin,
+                     output: process.stdout,
+                 });
+                 rl.question(query, (answer) => {
+                     rl.close();
+                     resolve(answer);
+                 });
+             });
+         };
+         try {
+             // Prompt user for URL and token if not provided
+             const projectUrl = options.url || (yield rlQuestion("Enter the project URL: "));
+             const authToken = options.token ||
+                 (yield rlQuestion("Enter the authentication token: "));
+             // Create .ama directory if it doesn't exist
+             const amaDir = path.join(process.cwd(), ".ama");
+             if (!fs.existsSync(amaDir)) {
+                 fs.mkdirSync(amaDir, { recursive: true });
+             }
+             // Add .gitignore if it doesn't exist or update it
+             const gitignorePath = path.join(process.cwd(), ".gitignore");
+             const gitignoreEntry = "\n# AMA configuration\n.ama/session.json\n";
+             if (!fs.existsSync(gitignorePath)) {
+                 fs.writeFileSync(gitignorePath, gitignoreEntry);
+             }
+             else {
+                 const currentContent = fs.readFileSync(gitignorePath, "utf8");
+                 if (!currentContent.includes(".ama/session.json")) {
+                     fs.appendFileSync(gitignorePath, gitignoreEntry);
+                 }
+             }
+             const projectId = "proj_" + Math.random().toString(36).slice(2, 9);
+             const configData = { token: authToken, projectId, url: projectUrl };
+             (0, config_1.setConfig)(configData);
+             // Save session data to .ama/session.json
+             fs.writeFileSync(path.join(amaDir, "session.json"), JSON.stringify(configData, null, 2));
+             console.log(chalk_1.default.green("🔐 Successfully authenticated and joined project"));
+             console.log(chalk_1.default.yellow("⚠️ Warning: Keep your .ama/session.json file private and do not commit it to version control"));
+             console.log(chalk_1.default.blue("ℹ️ Note: Session file has been automatically added to .gitignore"));
+         }
+         catch (error) {
+             const message = error instanceof Error ? error.message : "Unknown error";
+             console.error(chalk_1.default.red(`❌ Error: ${message}`));
+             process.exit(1);
+         }
+     }));
+ }
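
For orientation only (this is not part of the published files above): the `use` command persists the token, a randomly generated project id, and the project URL both through `setConfig` and directly into `.ama/session.json`. A minimal TypeScript sketch of that session shape, with hypothetical values:

interface SessionConfig {
    token: string;      // from --token or the interactive prompt
    projectId: string;  // "proj_" plus a random base-36 suffix
    url: string;        // from --url or the interactive prompt
}

const exampleSession: SessionConfig = {
    token: "<auth-token>",
    projectId: "proj_k3x9q2a",
    url: "https://example.invalid",
};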
@@ -0,0 +1,2 @@
+ #!/usr/bin/env node
+ export {};
@@ -0,0 +1,16 @@
+ #!/usr/bin/env node
+ "use strict";
+ Object.defineProperty(exports, "__esModule", { value: true });
+ const commander_1 = require("commander");
+ const migrate_1 = require("./commands/migrate");
+ const use_1 = require("./commands/use");
+ const program = new commander_1.Command()
+     .name("ama")
+     .description("AtMyApp CLI Tool")
+     .version("1.0.0");
+ program.addCommand((0, use_1.useCommand)());
+ program.addCommand((0, migrate_1.migrateCommand)());
+ program.parseAsync(process.argv).catch((err) => {
+     console.error("Error:", err.message);
+     process.exit(1);
+ });
@@ -0,0 +1,12 @@
+ /**
+  * Logger utility to handle verbose logging
+  */
+ export declare class Logger {
+     private verbose;
+     constructor(verbose: boolean);
+     info(message: string): void;
+     success(message: string): void;
+     error(message: string, error?: unknown): void;
+     verbose_log(message: string): void;
+     warn(message: string): void;
+ }
@@ -0,0 +1,34 @@
+ "use strict";
+ var __importDefault = (this && this.__importDefault) || function (mod) {
+     return (mod && mod.__esModule) ? mod : { "default": mod };
+ };
+ Object.defineProperty(exports, "__esModule", { value: true });
+ exports.Logger = void 0;
+ const chalk_1 = __importDefault(require("chalk"));
+ /**
+  * Logger utility to handle verbose logging
+  */
+ class Logger {
+     constructor(verbose) {
+         this.verbose = verbose;
+     }
+     info(message) {
+         console.log(chalk_1.default.blue(message));
+     }
+     success(message) {
+         console.log(chalk_1.default.green(message));
+     }
+     error(message, error) {
+         const errorMessage = error instanceof Error ? error.message : String(error);
+         console.error(chalk_1.default.red(message), error ? chalk_1.default.red(errorMessage) : "");
+     }
+     verbose_log(message) {
+         if (this.verbose) {
+             console.log(chalk_1.default.cyan(`[VERBOSE] ${message}`));
+         }
+     }
+     warn(message) {
+         console.warn(chalk_1.default.yellow(message));
+     }
+ }
+ exports.Logger = Logger;
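
Taken together, the Logger declaration and implementation form a small chalk-backed console wrapper. An illustrative usage sketch (the import path is assumed and the messages are hypothetical):

import { Logger } from "./logger";

const logger = new Logger(true);                       // verbose output enabled
logger.info("Scanning project files");                 // blue
logger.verbose_log("Loaded tsconfig.json");            // cyan, printed only when verbose is true
logger.warn("Definition has no explicit type");        // yellow
logger.error("Migration failed", new Error("boom"));   // red message plus error detail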
@@ -0,0 +1,2 @@
+ #!/usr/bin/env node
+ export {};
@@ -0,0 +1,10 @@
+ #!/usr/bin/env node
+ "use strict";
+ Object.defineProperty(exports, "__esModule", { value: true });
+ const migrate_1 = require("./commands/migrate");
+ // Create and execute the migrate command
+ const command = (0, migrate_1.migrateCommand)();
+ command.parse(process.argv);
+ // Sample usage:
+ // npm run build
+ // node dist/cli/test-migrate.js
@@ -0,0 +1,34 @@
+ export interface Content {
+     path: string;
+     structure: any;
+     type?: string;
+ }
+ export interface EventConfig {
+     columns: string[];
+ }
+ export interface OutputDefinition {
+     description: string;
+     definitions: Record<string, {
+         structure: any;
+         type?: string;
+     }>;
+     events: Record<string, EventConfig>;
+     args: any[];
+     metadata?: any;
+ }
+ export interface MigrateOptions {
+     dryRun: boolean;
+     verbose: boolean;
+     tsconfig: string;
+     continueOnError: boolean;
+ }
+ export interface TypeTransformer {
+     canTransform: (obj: any) => boolean;
+     transform: (obj: any) => any;
+ }
+ export interface ProcessingResult {
+     contents: Content[];
+     errors: string[];
+     successCount: number;
+     failureCount: number;
+ }
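
These interfaces drive the migrate pipeline: `Content` is the unit passed through processing, `EventConfig` captures event columns, and `OutputDefinition` is the final aggregate. A sketch of sample values (paths and fields are hypothetical, not taken from the package):

import { Content, EventConfig } from "./types/migrate";

// Event-style content; the output generator keys off structure.__amatype === "AmaEventDef".
const signupEvent: Content = {
    path: "events/signup",
    structure: { __amatype: "AmaEventDef", columns: ["userId", "plan"] },
};

// Plain content falls through to the default "jsonx" type.
const heroCopy: Content = {
    path: "content/hero",
    structure: { type: "object", properties: { title: { type: "string" } } },
    type: "jsonx",
};

const signupConfig: EventConfig = { columns: ["userId", "plan"] };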
@@ -0,0 +1,2 @@
+ "use strict";
+ Object.defineProperty(exports, "__esModule", { value: true });
@@ -0,0 +1,10 @@
+ type Config = {
+     token?: string;
+     projectId?: string;
+     include?: string[];
+     description?: string;
+     args?: Record<string, string>;
+ };
+ export declare function setConfig(config: Config): void;
+ export declare function getConfig(): Config;
+ export {};
@@ -0,0 +1,37 @@
+ "use strict";
+ var __importDefault = (this && this.__importDefault) || function (mod) {
+     return (mod && mod.__esModule) ? mod : { "default": mod };
+ };
+ Object.defineProperty(exports, "__esModule", { value: true });
+ exports.setConfig = setConfig;
+ exports.getConfig = getConfig;
+ const fs_1 = require("fs");
+ const path_1 = __importDefault(require("path"));
+ const CONFIG_PATH = path_1.default.join(process.cwd(), "./.ama/session.json");
+ const CONFIG_DIR = path_1.default.dirname(CONFIG_PATH);
+ function ensureConfigDir() {
+     try {
+         (0, fs_1.mkdirSync)(CONFIG_DIR, { recursive: true });
+     }
+     catch (error) {
+         throw new Error(`Failed to create config directory: ${error instanceof Error ? error.message : error}`);
+     }
+ }
+ function setConfig(config) {
+     ensureConfigDir();
+     try {
+         (0, fs_1.writeFileSync)(CONFIG_PATH, JSON.stringify(config, null, 2));
+     }
+     catch (error) {
+         throw new Error(`Failed to save config: ${error instanceof Error ? error.message : error}`);
+     }
+ }
+ function getConfig() {
+     ensureConfigDir();
+     try {
+         return JSON.parse((0, fs_1.readFileSync)(CONFIG_PATH, "utf-8"));
+     }
+     catch (error) {
+         throw new Error(`Failed to read config: ${error instanceof Error ? error.message : error}`);
+     }
+ }
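
A minimal usage sketch for these helpers (the import path is assumed and the values are hypothetical). Note that getConfig throws if `.ama/session.json` is missing or not valid JSON, so callers should be prepared to catch:

import { setConfig, getConfig } from "./utils/config";

// Writes <cwd>/.ama/session.json, creating the .ama directory if needed.
setConfig({ token: "<auth-token>", projectId: "proj_k3x9q2a" });

try {
    const config = getConfig();
    console.log(config.projectId);
} catch (err) {
    console.error("No saved session:", err instanceof Error ? err.message : err);
}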
@@ -0,0 +1,6 @@
+ import { Logger } from "../logger";
+ import { Content, OutputDefinition } from "../types/migrate";
+ import { definitionPipeline } from "./definition-processor";
+ export declare function determineContentType(content: Content): string;
+ export declare function generateOutput(contents: Content[], config: any, logger: Logger): OutputDefinition;
+ export { definitionPipeline };
@@ -0,0 +1,141 @@
+ "use strict";
+ Object.defineProperty(exports, "__esModule", { value: true });
+ exports.definitionPipeline = void 0;
+ exports.determineContentType = determineContentType;
+ exports.generateOutput = generateOutput;
+ const type_transformers_1 = require("./type-transformers");
+ const definition_processor_1 = require("./definition-processor");
+ Object.defineProperty(exports, "definitionPipeline", { enumerable: true, get: function () { return definition_processor_1.definitionPipeline; } });
+ // Initialize the pipeline with built-in components
+ function initializePipeline() {
+     // Always register built-in components
+     (0, definition_processor_1.registerBuiltInProcessors)();
+     (0, definition_processor_1.registerBuiltInValidators)();
+     (0, definition_processor_1.registerBuiltInOutputTransformers)();
+ }
+ // Determines the content type based on its structure and path
+ function determineContentType(content) {
+     var _a, _b, _c, _d, _e, _f, _g, _h;
+     // Extract file extension
+     const fileExt = (_a = content.path.split(".").pop()) === null || _a === void 0 ? void 0 : _a.toLowerCase();
+     // Check for event types
+     if (((_b = content.structure) === null || _b === void 0 ? void 0 : _b.type) === "event" ||
+         ((_e = (_d = (_c = content.structure) === null || _c === void 0 ? void 0 : _c.properties) === null || _d === void 0 ? void 0 : _d.type) === null || _e === void 0 ? void 0 : _e.const) === "event" ||
+         ((_f = content.structure) === null || _f === void 0 ? void 0 : _f.__amatype) === "AmaEventDef") {
+         return "event";
+     }
+     // Check for image types based on structure or extension
+     if (((_g = content.structure) === null || _g === void 0 ? void 0 : _g.__amatype) === "AmaImageDef") {
+         return "image";
+     }
+     // Check for file types
+     if (((_h = content.structure) === null || _h === void 0 ? void 0 : _h.__amatype) === "AmaFileDef") {
+         return "file";
+     }
+     // Default type for other content
+     return "jsonx";
+ }
+ // Extracts event configuration from event content
+ function extractEventConfig(content) {
+     var _a, _b, _c, _d, _e, _f, _g, _h, _j, _k, _l, _m;
+     try {
+         let columns = [];
+         let eventId = "";
+         // Try to extract from different possible structures
+         if ((_c = (_b = (_a = content.structure) === null || _a === void 0 ? void 0 : _a.properties) === null || _b === void 0 ? void 0 : _b.columns) === null || _c === void 0 ? void 0 : _c.const) {
+             columns = content.structure.properties.columns.const;
+         }
+         else if ((_g = (_f = (_e = (_d = content.structure) === null || _d === void 0 ? void 0 : _d.properties) === null || _e === void 0 ? void 0 : _e.columns) === null || _f === void 0 ? void 0 : _f.items) === null || _g === void 0 ? void 0 : _g.const) {
+             // Handle array of constants
+             columns = content.structure.properties.columns.items.const;
+         }
+         else if ((_h = content.structure) === null || _h === void 0 ? void 0 : _h.columns) {
+             columns = content.structure.columns;
+         }
+         // Extract event ID
+         if ((_l = (_k = (_j = content.structure) === null || _j === void 0 ? void 0 : _j.properties) === null || _k === void 0 ? void 0 : _k.id) === null || _l === void 0 ? void 0 : _l.const) {
+             eventId = content.structure.properties.id.const;
+         }
+         else if ((_m = content.structure) === null || _m === void 0 ? void 0 : _m.id) {
+             eventId = content.structure.id;
+         }
+         if (columns.length > 0) {
+             return { columns };
+         }
+     }
+     catch (error) {
+         // Silent failure, let the caller handle
+     }
+     return null;
+ }
+ // Generates the final output definition using the processing pipeline
+ function generateOutput(contents, config, logger) {
+     // Initialize the pipeline with built-in components
+     initializePipeline();
+     logger.verbose_log("Processing contents through definition pipeline");
+     // Process definitions through the pipeline
+     const { processedContents, validationResults } = definition_processor_1.definitionPipeline.processDefinitions(contents, config, logger);
+     // Log validation summary
+     const validationErrors = validationResults.reduce((sum, result) => sum + result.errors.length, 0);
+     const validationWarnings = validationResults.reduce((sum, result) => sum + result.warnings.length, 0);
+     if (validationErrors > 0) {
+         logger.warn(`Found ${validationErrors} validation errors`);
+     }
+     if (validationWarnings > 0) {
+         logger.warn(`Found ${validationWarnings} validation warnings`);
+     }
+     // Apply special type transformations to processed contents
+     logger.verbose_log("Applying special type transformations");
+     const transformedContents = processedContents.map((content) => {
+         logger.verbose_log(`Transforming special types for path: ${content.path}`);
+         return Object.assign(Object.assign({}, content), { structure: (0, type_transformers_1.processSpecialTypes)(content.structure) });
+     });
+     // Separate events from regular definitions
+     const events = {};
+     const definitions = {};
+     transformedContents.forEach((content) => {
+         var _a, _b, _c, _d;
+         const contentType = determineContentType(content);
+         if (contentType === "event") {
+             logger.verbose_log(`Processing event: ${content.path}`);
+             // Extract event ID from path or structure
+             let eventId = content.path;
+             if ((_c = (_b = (_a = content.structure) === null || _a === void 0 ? void 0 : _a.properties) === null || _b === void 0 ? void 0 : _b.id) === null || _c === void 0 ? void 0 : _c.const) {
+                 eventId = content.structure.properties.id.const;
+             }
+             else if ((_d = content.structure) === null || _d === void 0 ? void 0 : _d.id) {
+                 eventId = content.structure.id;
+             }
+             const eventConfig = extractEventConfig(content);
+             if (eventConfig) {
+                 events[eventId] = eventConfig;
+                 logger.verbose_log(`Added event "${eventId}" with columns: ${eventConfig.columns.join(", ")}`);
+             }
+             else {
+                 logger.warn(`Failed to extract event configuration for ${content.path}`);
+             }
+         }
+         else {
+             // Regular definition
+             definitions[content.path] = {
+                 type: content.type,
+                 structure: content.structure,
+             };
+         }
+     });
+     logger.verbose_log("Generating base output definition");
+     const baseOutput = {
+         description: config.description || "AMA Definitions",
+         definitions,
+         events,
+         args: config.args || {},
+     };
+     // Transform the final output through the pipeline
+     logger.verbose_log("Applying output transformations");
+     const finalOutput = definition_processor_1.definitionPipeline.transformOutput(baseOutput, config, logger);
+     // Log pipeline statistics
+     const stats = definition_processor_1.definitionPipeline.getStats();
+     logger.verbose_log(`Pipeline used ${stats.processors} processors, ${stats.validators} validators, ${stats.transformers} transformers`);
+     logger.verbose_log(`Generated ${Object.keys(finalOutput.definitions).length} definitions and ${Object.keys(finalOutput.events).length} events`);
+     return finalOutput;
+ }
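
To make the flow above concrete, a hedged calling sketch (the import paths and sample contents are assumptions, not taken from the package): event-typed contents land in `output.events`, keyed by their extracted id or, failing that, by their path; everything else lands in `output.definitions` keyed by path.

import { Logger } from "./logger";
import { Content } from "./types/migrate";
import { generateOutput } from "./migrate/output-generator";

const logger = new Logger(false);
const contents: Content[] = [
    { path: "events/signup", structure: { __amatype: "AmaEventDef", columns: ["userId"] } },
    { path: "content/hero", structure: { type: "object" }, type: "jsonx" },
];

const output = generateOutput(contents, { description: "Demo definitions" }, logger);
// Assuming the built-in processors leave these paths unchanged:
// output.events["events/signup"] holds { columns: ["userId"] } (no id field, so the path is the key);
// output.definitions["content/hero"] holds the untouched structure.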
@@ -0,0 +1,61 @@
+ import { Logger } from "../logger";
+ import { Content, OutputDefinition } from "../types/migrate";
+ export interface DefinitionProcessor {
+     name: string;
+     process: (content: Content, context: ProcessingContext) => Content | null;
+ }
+ export interface ProcessingContext {
+     logger: Logger;
+     config: any;
+     allContents: Content[];
+     currentIndex: number;
+ }
+ export interface OutputTransformer {
+     name: string;
+     transform: (output: OutputDefinition, context: ProcessingContext) => OutputDefinition;
+ }
+ export interface ValidationRule {
+     name: string;
+     validate: (content: Content, context: ProcessingContext) => ValidationResult;
+ }
+ export interface ValidationResult {
+     isValid: boolean;
+     errors: string[];
+     warnings: string[];
+ }
+ declare class DefinitionProcessingPipeline {
+     private processors;
+     private outputTransformers;
+     private validators;
+     addProcessor(processor: DefinitionProcessor): void;
+     addOutputTransformer(transformer: OutputTransformer): void;
+     addValidator(validator: ValidationRule): void;
+     processDefinitions(contents: Content[], config: any, logger: Logger): {
+         processedContents: Content[];
+         validationResults: ValidationResult[];
+     };
+     transformOutput(output: OutputDefinition, config: any, logger: Logger): OutputDefinition;
+     private validateContent;
+     clear(): void;
+     getStats(): {
+         processors: number;
+         transformers: number;
+         validators: number;
+     };
+ }
+ export declare const definitionPipeline: DefinitionProcessingPipeline;
+ export declare const builtInProcessors: {
+     typeDetector: DefinitionProcessor;
+     pathNormalizer: DefinitionProcessor;
+ };
+ export declare const builtInValidators: {
+     pathValidator: ValidationRule;
+     duplicatePathValidator: ValidationRule;
+ };
+ export declare const builtInOutputTransformers: {
+     metadataEnricher: OutputTransformer;
+ };
+ export declare function registerBuiltInProcessors(): void;
+ export declare function registerBuiltInValidators(): void;
+ export declare function registerBuiltInOutputTransformers(): void;
+ export {};
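
This declaration file is the extension surface of the pipeline: custom processors, validators, and output transformers can be registered on the shared `definitionPipeline` singleton. An illustrative sketch (the import path, names, and logic are hypothetical, written only against the declared types):

import { definitionPipeline, DefinitionProcessor, ValidationRule } from "./migrate/definition-processor";

// Hypothetical processor that strips a leading "./" from every definition path.
const pathTrimmer: DefinitionProcessor = {
    name: "path-trimmer",
    process: (content, context) => {
        context.logger.verbose_log(`Trimming path: ${content.path}`);
        return { ...content, path: content.path.replace(/^\.\//, "") };
    },
};

// Hypothetical validator that flags contents with no structure at all.
const structureRequired: ValidationRule = {
    name: "structure-required",
    validate: (content) => ({
        isValid: content.structure != null,
        errors: content.structure != null ? [] : [`${content.path} has no structure`],
        warnings: [],
    }),
};

definitionPipeline.addProcessor(pathTrimmer);
definitionPipeline.addValidator(structureRequired);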