@open-mercato/cli 0.4.2-canary-3f5eaf79f7 → 0.4.2-canary-a91dde3d1d
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/lib/db/commands.js +4 -2
- package/dist/lib/db/commands.js.map +2 -2
- package/dist/lib/generators/module-entities.js +4 -3
- package/dist/lib/generators/module-entities.js.map +2 -2
- package/package.json +2 -2
- package/src/lib/db/commands.ts +5 -2
- package/src/lib/generators/module-entities.ts +4 -3
package/dist/lib/db/commands.js
CHANGED
@@ -58,14 +58,16 @@ async function loadModuleEntities(entry, resolver) {
     path.join(roots.appBase, "db"),
     path.join(roots.pkgBase, "db")
   ];
-  const candidates = ["entities.ts", "schema.ts"];
+  const candidates = ["entities.ts", "entities.js", "schema.ts", "schema.js"];
   for (const base of bases) {
     for (const f of candidates) {
      const p = path.join(base, f);
      if (fs.existsSync(p)) {
        const sub = path.basename(base);
        const fromApp = base.startsWith(roots.appBase);
-       const importPath = isAppModule && fromApp ? `file://${p.replace(/\.ts$/, ".js")}` : `${fromApp ? imps.appBase : imps.pkgBase}/${sub}/${f.replace(/\.ts$/, "")}`;
+       const ext = path.extname(f);
+       const baseName = f.replace(ext, "");
+       const importPath = isAppModule && fromApp ? `file://${p.replace(/\.ts$/, ".js")}` : `${fromApp ? imps.appBase : imps.pkgBase}/${sub}/${baseName}`;
        try {
          const mod = await import(importPath);
          const entities = Object.values(mod).filter((v) => typeof v === "function");
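The net effect of this hunk: entity discovery in the published dist/ layout now matches compiled .js files as well as .ts sources, and the import specifier is built from the extension-less base name. A minimal standalone sketch of that resolution step, with hypothetical bases and importBase values rather than the package's real resolver output:

import fs from 'node:fs'
import path from 'node:path'

// Sketch: find the first entity/schema file under the candidate base dirs,
// accepting both .ts (src/monorepo) and .js (published dist) layouts,
// and return an extension-less import specifier for it.
function resolveEntityImport(bases: string[], importBase: string): string | null {
  const candidates = ['entities.ts', 'entities.js', 'schema.ts', 'schema.js']
  for (const base of bases) {
    for (const f of candidates) {
      if (!fs.existsSync(path.join(base, f))) continue
      const baseName = f.replace(path.extname(f), '') // 'entities' or 'schema'
      return `${importBase}/${path.basename(base)}/${baseName}`
    }
  }
  return null
}

// e.g. resolveEntityImport(['/pkgs/core/modules/auth/data'], '@open-mercato/core/modules/auth')
// could yield '@open-mercato/core/modules/auth/data/entities' (paths hypothetical)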
package/dist/lib/db/commands.js.map
CHANGED
@@ -1,7 +1,7 @@
 {
   "version": 3,
   "sources": ["../../../src/lib/db/commands.ts"],
-  "sourcesContent": ["…previous embedded copy of src/lib/db/commands.ts…"],
-  "mappings": "…",
+  "sourcesContent": ["…updated embedded copy of src/lib/db/commands.ts…"],
+  "mappings": "…",
   "names": []
 }

(sourcesContent embeds the full TypeScript source of commands.ts and is regenerated to match the updated file, including the extended candidates list and the ext/baseName extraction; the mappings string is rebuilt to match.)
package/dist/lib/generators/module-entities.js
CHANGED
@@ -28,7 +28,7 @@ async function generateModuleEntities(options) {
     const appDb = path.join(roots.appBase, "db");
     const pkgDb = path.join(roots.pkgBase, "db");
     const bases = [appData, pkgData, appDb, pkgDb];
-    const candidates = ["entities.override.ts", "entities.ts", "schema.ts"];
+    const candidates = ["entities.override.ts", "entities.override.js", "entities.ts", "entities.js", "schema.ts", "schema.js"];
     let found = null;
     for (const base of bases) {
       for (const f of candidates) {
@@ -46,11 +46,12 @@ async function generateModuleEntities(options) {
     const fromApp = found.base.startsWith(roots.appBase);
     const isAppModule = entry.from === "@app";
     let relImport;
+    const fileBaseName = found.file.replace(/\.(ts|js)$/, "");
     if (isAppModule && fromApp) {
-      relImport = `../../src/modules/${modId}/${sub}/${
+      relImport = `../../src/modules/${modId}/${sub}/${fileBaseName}`;
     } else {
       const baseImport = fromApp ? imp.appBase : imp.pkgBase;
-      relImport = `${baseImport}/${sub}/${
+      relImport = `${baseImport}/${sub}/${fileBaseName}`;
     }
     imports.push(`import * as ${importName} from '${relImport}'`);
     entitySources.push({ importName, moduleId: modId });
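Candidate ordering matters here: an app-level entities.override file, in either source or compiled form, takes precedence over the module's own entities/schema files. A small sketch of that precedence check, assuming a plain directory path instead of the resolver's module roots:

import fs from 'node:fs'
import path from 'node:path'

// Sketch: return the first matching entity source in priority order.
// Override files win over regular ones; .ts and .js forms are interchangeable.
function pickEntityFile(dir: string): string | null {
  const candidates = [
    'entities.override.ts', 'entities.override.js',
    'entities.ts', 'entities.js',
    'schema.ts', 'schema.js',
  ]
  for (const f of candidates) {
    if (fs.existsSync(path.join(dir, f))) return f
  }
  return null
}

// pickEntityFile('/app/src/modules/catalog/data') might return 'entities.override.js'
// in a built app where no .ts sources ship (example path hypothetical).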
package/dist/lib/generators/module-entities.js.map
CHANGED
@@ -1,7 +1,7 @@
 {
   "version": 3,
   "sources": ["../../../src/lib/generators/module-entities.ts"],
-  "sourcesContent": ["…previous embedded copy of src/lib/generators/module-entities.ts…"],
-  "mappings": "…",
+  "sourcesContent": ["…updated embedded copy of src/lib/generators/module-entities.ts…"],
+  "mappings": "…",
   "names": []
 }

(sourcesContent embeds the full TypeScript source of module-entities.ts and is regenerated to match the updated file, including the extended candidates list and the fileBaseName extraction; the mappings string is rebuilt to match.)
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@open-mercato/cli",
-  "version": "0.4.2-canary-3f5eaf79f7",
+  "version": "0.4.2-canary-a91dde3d1d",
   "type": "module",
   "main": "./dist/index.js",
   "exports": {
@@ -55,7 +55,7 @@
     "@mikro-orm/core": "^6.6.2",
     "@mikro-orm/migrations": "^6.6.2",
     "@mikro-orm/postgresql": "^6.6.2",
-    "@open-mercato/shared": "0.4.2-canary-
+    "@open-mercato/shared": "0.4.2-canary-a91dde3d1d",
     "pg": "^8.16.3",
     "typescript": "^5.9.3"
   },
package/src/lib/db/commands.ts
CHANGED
@@ -77,7 +77,8 @@ async function loadModuleEntities(entry: ModuleEntry, resolver: PackageResolver)
     path.join(roots.appBase, 'db'),
     path.join(roots.pkgBase, 'db'),
   ]
-  const candidates = ['entities.ts', 'schema.ts']
+  // Check both .ts (src/monorepo) and .js (dist/production) extensions
+  const candidates = ['entities.ts', 'entities.js', 'schema.ts', 'schema.js']
 
   for (const base of bases) {
     for (const f of candidates) {
@@ -85,10 +86,12 @@ async function loadModuleEntities(entry: ModuleEntry, resolver: PackageResolver)
       if (fs.existsSync(p)) {
        const sub = path.basename(base)
        const fromApp = base.startsWith(roots.appBase)
+       const ext = path.extname(f)
+       const baseName = f.replace(ext, '')
        // For @app modules, use file:// URL since @/ alias doesn't work in Node.js runtime
        const importPath = (isAppModule && fromApp)
          ? `file://${p.replace(/\.ts$/, '.js')}`
-         : `${fromApp ? imps.appBase : imps.pkgBase}/${sub}/${f.replace(/\.ts$/, '')}`
+         : `${fromApp ? imps.appBase : imps.pkgBase}/${sub}/${baseName}`
        try {
          const mod = await import(importPath)
          const entities = Object.values(mod).filter((v) => typeof v === 'function')
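For @app modules the loader imports via a file:// URL because the @/ path alias is not available under plain Node. A standalone sketch of that import-and-filter step, using Node's pathToFileURL instead of the hand-built URL in the diff (an assumption for the sketch, not the package's own helper):

import { pathToFileURL } from 'node:url'

// Sketch: import a compiled entity module and keep only class-like exports.
// If an @app module's file cannot be imported, return an empty list so
// MikroORM discovery can still handle it (mirrors the catch-and-continue above).
async function tryLoadEntities(filePath: string, isAppModule: boolean): Promise<Function[]> {
  const url = pathToFileURL(filePath.replace(/\.ts$/, '.js')).href
  try {
    const mod = await import(url)
    return Object.values(mod).filter((v): v is Function => typeof v === 'function')
  } catch (err) {
    if (isAppModule) return []
    throw err
  }
}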
package/src/lib/generators/module-entities.ts
CHANGED
@@ -41,7 +41,7 @@ export async function generateModuleEntities(options: ModuleEntitiesOptions): Pr
     const appDb = path.join(roots.appBase, 'db')
     const pkgDb = path.join(roots.pkgBase, 'db')
     const bases = [appData, pkgData, appDb, pkgDb]
-    const candidates = ['entities.override.ts', 'entities.ts', 'schema.ts']
+    const candidates = ['entities.override.ts', 'entities.override.js', 'entities.ts', 'entities.js', 'schema.ts', 'schema.js']
 
     let found: { base: string; file: string } | null = null
     for (const base of bases) {
@@ -63,12 +63,13 @@ export async function generateModuleEntities(options: ModuleEntitiesOptions): Pr
     // For @app modules, use relative path to ensure it works both in Next.js and Node.js CLI context
     // From .mercato/generated/, the relative path to src/modules/ is ../src/modules/
     let relImport: string
+    const fileBaseName = found.file.replace(/\.(ts|js)$/, '')
     if (isAppModule && fromApp) {
       // From .mercato/generated/, go up two levels (../..) to reach the app root, then into src/modules/
-      relImport = `../../src/modules/${modId}/${sub}/${
+      relImport = `../../src/modules/${modId}/${sub}/${fileBaseName}`
     } else {
       const baseImport = fromApp ? imp.appBase : imp.pkgBase
-      relImport = `${baseImport}/${sub}/${
+      relImport = `${baseImport}/${sub}/${fileBaseName}`
     }
     imports.push(`import * as ${importName} from '${relImport}'`)
     entitySources.push({ importName, moduleId: modId })
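For context, the file this generator writes has roughly the shape below; the module ids and import paths are hypothetical, but the enhanceEntities template comes from the generator itself, and the extension-less import specifiers are what the fileBaseName change produces:

// AUTO-GENERATED by mercato generate entities (illustrative output; module ids hypothetical)
import * as E_auth_0 from '@open-mercato/core/modules/auth/data/entities'
import * as E_catalog_1 from '../../src/modules/catalog/data/entities'

function enhanceEntities(namespace: Record<string, unknown>, moduleId: string): any[] {
  return Object.entries(namespace)
    .filter(([, value]) => typeof value === 'function')
    .map(([exportName, value]) => {
      const entity = value as { entityName?: string }
      // Tag each entity class with "<moduleId>.<ExportName>" unless it already carries one
      if (entity && typeof entity === 'function' && !Object.prototype.hasOwnProperty.call(entity, 'entityName')) {
        Object.defineProperty(entity, 'entityName', {
          value: `${moduleId}.${exportName}`,
          configurable: true,
          enumerable: false,
          writable: false,
        })
      }
      return entity
    })
}

export const entities = [
  ...enhanceEntities(E_auth_0, 'auth'),
  ...enhanceEntities(E_catalog_1, 'catalog'),
]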