@vira-ui/cli 1.1.1 → 1.2.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +454 -965
- package/dist/go/appYaml.js +30 -30
- package/dist/go/backendEnvExample.js +17 -17
- package/dist/go/backendReadme.js +14 -14
- package/dist/go/channelHelpers.js +25 -25
- package/dist/go/configGo.js +258 -258
- package/dist/go/dbGo.js +43 -43
- package/dist/go/dbYaml.js +7 -7
- package/dist/go/dockerCompose.js +48 -48
- package/dist/go/dockerComposeProd.js +78 -78
- package/dist/go/dockerfile.js +15 -15
- package/dist/go/eventHandlerTemplate.js +22 -22
- package/dist/go/eventsAPI.js +411 -411
- package/dist/go/goMod.js +16 -16
- package/dist/go/kafkaGo.js +67 -67
- package/dist/go/kafkaYaml.js +6 -6
- package/dist/go/kanbanHandlers.js +216 -216
- package/dist/go/mainGo.js +558 -558
- package/dist/go/readme.js +27 -27
- package/dist/go/redisGo.js +31 -31
- package/dist/go/redisYaml.js +4 -4
- package/dist/go/registryGo.js +38 -38
- package/dist/go/sqlcYaml.js +13 -13
- package/dist/go/stateStore.js +115 -115
- package/dist/go/typesGo.js +11 -11
- package/dist/index.js +636 -38
- package/dist/react/envExample.js +3 -3
- package/dist/react/envLocal.js +1 -1
- package/dist/react/indexCss.js +17 -17
- package/dist/react/indexHtml.js +12 -12
- package/dist/react/kanbanAppTsx.js +29 -29
- package/dist/react/kanbanBoard.js +58 -58
- package/dist/react/kanbanCard.js +60 -60
- package/dist/react/kanbanColumn.js +62 -62
- package/dist/react/kanbanModels.js +32 -32
- package/dist/react/mainTsx.js +12 -12
- package/dist/react/viteConfig.js +27 -27
- package/package.json +47 -45
- package/dist/go/useViraState.js +0 -160
- package/dist/go/useViraStream.js +0 -167
package/dist/index.js
CHANGED
@@ -91,7 +91,7 @@ const program = new commander_1.Command();
 program
     .name("vira")
     .description("ViraJS CLI - Create projects and generate code")
-    .version("1.1.1");
+    .version("1.2.0");
 const SUPPORTED_TEMPLATES = ["frontend", "fullstack", "kanban"];
 /**
  * Initialize a project in the current directory
@@ -292,6 +292,48 @@ make
     await generateMigration(name, options.dir);
     console.log(chalk_1.default.green(`✓ migration ${name} created in ${options.dir}`));
 });
+// Commands for running migrations
+const dbCommand = program
+    .command("db")
+    .description("Database migration commands");
+dbCommand
+    .command("migrate")
+    .alias("up")
+    .description("Run database migrations")
+    .option("-d, --dir <directory>", "Migrations directory", "migrations")
+    .option("--db-url <url>", "Database connection URL (or use DATABASE_URL env var)")
+    .option("--driver <driver>", "Database driver (postgres, mysql, sqlite3)", "postgres")
+    .action(async (options) => {
+    await runMigrations(options.dir || "migrations", options.dbUrl, options.driver || "postgres", "up");
+});
+dbCommand
+    .command("rollback")
+    .alias("down")
+    .description("Rollback last migration")
+    .option("-d, --dir <directory>", "Migrations directory", "migrations")
+    .option("--db-url <url>", "Database connection URL (or use DATABASE_URL env var)")
+    .option("--driver <driver>", "Database driver (postgres, mysql, sqlite3)", "postgres")
+    .action(async (options) => {
+    await runMigrations(options.dir || "migrations", options.dbUrl, options.driver || "postgres", "down");
+});
+dbCommand
+    .command("status")
+    .description("Show migration status")
+    .option("-d, --dir <directory>", "Migrations directory", "migrations")
+    .option("--db-url <url>", "Database connection URL (or use DATABASE_URL env var)")
+    .option("--driver <driver>", "Database driver (postgres, mysql, sqlite3)", "postgres")
+    .action(async (options) => {
+    await showMigrationStatus(options.dir || "migrations", options.dbUrl, options.driver || "postgres");
+});
+dbCommand
+    .command("seed")
+    .description("Run database seeds")
+    .option("-d, --dir <directory>", "Seeds directory", "seeds")
+    .option("--db-url <url>", "Database connection URL (or use DATABASE_URL env var)")
+    .option("--driver <driver>", "Database driver (postgres, mysql, sqlite3)", "postgres")
+    .action(async (options) => {
+    await runSeeds(options.dir || "seeds", options.dbUrl, options.driver || "postgres");
+});
 make
     .command("event")
     .description("Create Go event handler stub")
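
The new db subcommands delegate to runMigrations, showMigrationStatus, and runSeeds further down in this file, which shell out to goose with the driver and connection string passed through environment variables. A minimal standalone sketch of that invocation (illustrative only, not code shipped in the package), assuming goose is on PATH and DATABASE_URL is set:

    // Sketch: how the CLI ultimately drives goose (mirrors runMigrations below).
    const { execSync } = require("child_process");
    const path = require("path");
    function runGoose(migrationsDir, databaseUrl, driver = "postgres", command = "up") {
        const migrationsPath = path.resolve(process.cwd(), migrationsDir);
        execSync(`goose -dir "${migrationsPath}" ${command}`, {
            stdio: "inherit",
            env: { ...process.env, GOOSE_DRIVER: driver, GOOSE_DBSTRING: databaseUrl },
        });
    }
    // Example: runGoose("migrations", process.env.DATABASE_URL, "postgres", "up");
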
@@ -433,6 +475,8 @@ program
     .description("Sync artifacts between backend and frontend")
     .option("--types", "Sync TypeScript types from Go structs", true)
     .option("--backend <path>", "Path to Go types file", path.join("backend", "internal", "types", "types.go"))
+    .option("--from-models", "Generate TS types from Go models directory (backend/internal/models)", false)
+    .option("--models <path>", "Path to Go models directory", path.join("backend", "internal", "models"))
     .option("--frontend <path>", "Output TS types path (frontend)", path.join("frontend", "src", "vira-types.ts"))
     .option("--ui <path>", "Output TS types path (ui)", path.join("ui", "src", "vira-types.ts"))
     .option("-w, --watch", "Watch mode: automatically sync on file changes", false)
@@ -828,7 +872,29 @@ ${props.map((p) => ` ${p.name}${p.required ? "" : "?"}: ${p.type};`).join("\n")
 function buildComponentBody(name, vrpConfig, props, hasProps, useViraUI) {
     const propsUsage = props.map((p) => ` ${p.name}={props.${p.name}}`).join("\n");
     if (vrpConfig) {
-
+        // Check whether the channel contains a placeholder such as {id}
+        const hasPlaceholder = vrpConfig.channel.includes('{id}') || vrpConfig.channel.includes('${id}');
+        let channelCode = '';
+        if (hasPlaceholder && hasProps) {
+            // Find a prop that can serve as the id (clientId, userId, id, etc.)
+            const idProp = props.find(p => p.name.toLowerCase().includes('id') ||
+                p.name === 'id');
+            if (idProp) {
+                // Substitute {id} with the value from props
+                const placeholder = vrpConfig.channel.includes('${id}') ? '${id}' : '{id}';
+                const channelTemplate = vrpConfig.channel.replace(placeholder, `\${props.${idProp.name}}`);
+                channelCode = `  // Build the channel dynamically with ${idProp.name} from props
+  const channel = \`${channelTemplate}\`;
+  const { data, sendEvent, sendUpdate, sendDiff } = useViraState<${vrpConfig.stateType}>(channel, null);`;
+            }
+            else {
+                channelCode = `  const { data, sendEvent, sendUpdate, sendDiff } = useViraState<${vrpConfig.stateType}>('${vrpConfig.channel}', null);`;
+            }
+        }
+        else {
+            channelCode = `  const { data, sendEvent, sendUpdate, sendDiff } = useViraState<${vrpConfig.stateType}>('${vrpConfig.channel}', null);`;
+        }
+        return `${channelCode}

   // Use data from VRP state
   // Example: const value = data?.field ?? defaultValue;
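
To make the placeholder handling concrete: for a generated component with a clientId prop and the channel pattern client:{id}, the emitted component body would begin roughly as below. This is a sketch of the generator's output, not code in the CLI itself; the Client/ClientState names and the client:{id} channel are illustrative.

    import { useViraState } from '@vira-ui/react';

    export interface ClientState { id?: string; }

    export function ClientCard(props: { clientId: string }) {
      // Build the channel dynamically with clientId from props
      const channel = `client:${props.clientId}`;
      const { data, sendEvent, sendUpdate, sendDiff } = useViraState<ClientState>(channel, null);
      // ...
    }
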
@@ -841,7 +907,7 @@ function buildComponentBody(name, vrpConfig, props, hasProps, useViraUI) {
   // }, { debounce: 500 });

   return createElement('div', { className: '${name.toLowerCase()}' },
-
+    // Add your content here
   );`;
     }
     if (useViraUI) {
@@ -888,7 +954,12 @@ async function generateComponent(name, dir, config) {
 `;
     // VRP state interface
     if (vrpConfig) {
-
+        const typeName = vrpConfig.stateType.replace('State', '');
+        componentCode += `// TODO: If you have types synced from the backend, use them instead:
+// import type { ${typeName} } from '../vira-types';
+// export type ${vrpConfig.stateType} = ${typeName};
+
+export interface ${vrpConfig.stateType} {
   // Add your state fields here
   id?: string;
 }
@@ -967,10 +1038,15 @@ async function collectServiceVRPConfig(name, useVRP, interactive) {
  */
 function buildVRPService(name, vrpConfig) {
     const lowerName = name.toLowerCase();
+    // Try to derive the base type name from the synced types (e.g. Client instead of ClientState)
+    const typeName = vrpConfig.stateType.replace('State', '');
     return `// ${name} service using Vira Core DI container + VRP
-import { createService, useService } from '@vira-ui/core';
+import { createService, useService, batch } from '@vira-ui/core';
 import { useViraState } from '@vira-ui/react';
 import { v4 as uuid } from 'uuid';
+// TODO: If you have types synced from the backend, use them instead:
+// import type { ${typeName} } from '../vira-types';
+// export type ${vrpConfig.stateType} = ${typeName};

 export interface ${vrpConfig.stateType} {
   // Add your state fields here
@@ -1000,22 +1076,26 @@ createService('${lowerName}', () => ({
 // Hook for ${name} operations (combines service + VRP state)
 export function use${name}(id?: string) {
   const channel = id ? \`${vrpConfig.channel}:\${id}\` : '${vrpConfig.channel}';
-  const
+  const vrpState = id
     ? useViraState<${vrpConfig.stateType}>(channel, null)
     : useVrpList<${vrpConfig.stateType}>(channel);
   const ${lowerName}Service = useService<{ processData: (data: ${vrpConfig.stateType} | null) => any }>('${lowerName}');

+  // Pull the hook methods off the VRP state
+  const { data, sendEvent, sendDiff } = vrpState;
+  const sendUpdate = 'sendUpdate' in vrpState ? vrpState.sendUpdate : undefined;
+
   return {
     data,
     // 🎯 1️⃣ Create with a UUID generated on the frontend (VRP knows the id immediately, no need to wait for the backend)
     create(item: Omit<${vrpConfig.stateType}, 'id' | 'created_at' | 'updated_at'>) {
-      const
+      const itemId = uuid();
       const newItem: ${vrpConfig.stateType} = {
         ...item,
-        id,
+        id: itemId,
         created_at: new Date().toISOString(),
         updated_at: new Date().toISOString(),
-      };
+      } as ${vrpConfig.stateType};
       sendEvent('${lowerName}.created', {
         ...newItem,
         timestamp: new Date().toISOString()
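
Downstream, the generated hook is meant to be used roughly like this (usage sketch for a hypothetical Client entity; the hook name and field names are illustrative, not part of this diff):

    // Hypothetical usage of a generated useClient() hook
    const { data, create, sendUpdate } = useClient();   // list mode (no id)
    const { data: one } = useClient("42");              // single-entity mode
    // id, created_at and updated_at are filled in client-side before the event is sent:
    create({ name: "Acme" });
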
@@ -1039,16 +1119,17 @@ export function use${name}(id?: string) {
     },
     sendEvent,
     sendDiff,
+    sendUpdate,
     // Service methods
     processData() {
-
+      // data can be an array or a single object; handle both cases
+      const singleData = Array.isArray(data) ? null : (data as ${vrpConfig.stateType} | null);
+      return ${lowerName}Service.processData(singleData);
     },
   };
 }

 // 🎯 2️⃣ Service for bulk actions with VRP (reusable for any entity)
-// Use batch() from @vira-ui/core to optimise repeated updates
-import { batch } from '@vira-ui/core';

 createService('${lowerName}Bulk', () => ({
   bulkUpdate(ids: string[], payload: Partial<${vrpConfig.stateType}>, sendEvent: Function) {
@@ -1292,9 +1373,31 @@ describe('${name}Page', () => {
  * Sync TypeScript types from Go structs (scaffold-level parser)
  */
 async function syncTypes(options) {
-
-
-
+    let structs = [];
+    if (options.fromModels) {
+        const modelsDir = path.resolve(process.cwd(), options.models || path.join("backend", "internal", "models"));
+        structs = await parseGoStructsFromDir(modelsDir);
+    }
+    else {
+        const backendPath = path.resolve(process.cwd(), options.backend);
+        const exists = await fs.pathExists(backendPath);
+        if (!exists) {
+            // Friendly fallback: if types.go isn't present (many projects don't need it anymore),
+            // but models exist — generate from models.
+            const modelsDir = path.resolve(process.cwd(), options.models || path.join("backend", "internal", "models"));
+            if (await fs.pathExists(modelsDir)) {
+                structs = await parseGoStructsFromDir(modelsDir);
+            }
+            else {
+                throw new Error(`Go types file not found: ${backendPath}. ` +
+                    `Either create it, or run "vira sync --types --from-models" (and ensure ${modelsDir} exists).`);
+            }
+        }
+        else {
+            const goSource = await fs.readFile(backendPath, "utf8");
+            structs = parseGoStructs(goSource);
+        }
+    }
     const tsContent = renderTsTypes(structs);
     const targets = [
         path.resolve(process.cwd(), options.frontend),
@@ -1306,6 +1409,39 @@ async function syncTypes(options) {
     }
     console.log(chalk_1.default.green(`✓ Synced ${structs.length} type(s) to ${options.frontend} and ${options.ui}`));
 }
+async function parseGoStructsFromDir(modelsDir) {
+    const out = [];
+    const exists = await fs.pathExists(modelsDir);
+    if (!exists) {
+        throw new Error(`Models directory not found: ${modelsDir}`);
+    }
+    const stat = await fs.stat(modelsDir);
+    if (!stat.isDirectory()) {
+        throw new Error(`Models path is not a directory: ${modelsDir}`);
+    }
+    const files = (await fs.readdir(modelsDir))
+        .filter((f) => f.endsWith(".go") && !f.endsWith("_test.go"));
+    for (const file of files) {
+        const full = path.join(modelsDir, file);
+        const src = await fs.readFile(full, "utf8");
+        out.push(...parseGoStructs(src));
+    }
+    // De-dupe by struct name (first wins)
+    const seen = new Set();
+    const deduped = [];
+    for (const s of out) {
+        if (!s?.name)
+            continue;
+        // Only exported types (to avoid internal helpers)
+        if (s.name[0] !== s.name[0].toUpperCase())
+            continue;
+        if (seen.has(s.name))
+            continue;
+        seen.add(s.name);
+        deduped.push(s);
+    }
+    return deduped;
+}
 function parseGoStructs(source) {
     const structs = [];
     const structRegex = /type\s+(\w+)\s+struct\s*\{([^}]*)\}/gm;
@@ -1326,9 +1462,13 @@ function parseGoStructs(source) {
             const fieldName = tokens[0];
             const fieldType = tokens[1];
             let jsonTag;
-
+            // json tag can appear anywhere inside the struct tags: `db:"..." json:"..."`
+            const tagMatch = line.match(/json:"([^"]+)"/);
             if (tagMatch) {
-                jsonTag = tagMatch[1].
+                jsonTag = tagMatch[1].split(",")[0];
+                if (jsonTag === "-" || jsonTag === "") {
+                    continue; // skip non-exposed fields
+                }
             }
             fields.push({ name: fieldName, type: fieldType, json: jsonTag });
         }
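
The effect of the tag handling: the JSON name is taken from wherever json:"..." appears in the struct tag, only the part before the first comma is kept, and fields tagged json:"-" (or with an empty name) are skipped. A tiny sketch of that behaviour (the sample struct line is hypothetical):

    const line = 'CreatedAt time.Time `db:"created_at" json:"created_at,omitempty"`';
    const tagMatch = line.match(/json:"([^"]+)"/);
    const jsonTag = tagMatch ? tagMatch[1].split(",")[0] : undefined;
    // jsonTag === "created_at"; a json:"-" tag would cause the field to be dropped
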
@@ -1444,6 +1584,9 @@ function goTypeToTs(goType) {
 function toCamel(name) {
     if (!name)
         return name;
+    // "ID" -> "id", "URL" -> "url" (common Go acronym fields)
+    if (/^[A-Z0-9]+$/.test(name))
+        return name.toLowerCase();
     return name.charAt(0).toLowerCase() + name.slice(1);
 }
 /**
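
With the new acronym branch, all-caps field names map straight to lower case instead of only lower-casing the first character:

    console.log(toCamel("ID"), toCamel("URL"), toCamel("UserName"));
    // -> "id" "url" "userName"   (previously "iD", "uRL", "userName")
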
@@ -1570,16 +1713,463 @@ async function generateMigration(name, dir) {
     const baseName = `${timestamp}_${name}`;
     const targetDir = path.join(process.cwd(), dir);
     await fs.ensureDir(targetDir);
-
-
-    const
+    // Goose v3 has trouble with separate .up.sql/.down.sql files (duplicate version numbers)
+    // Use a single .sql file with both directives instead (more reliable)
+    const migrationPath = path.join(targetDir, `${baseName}.sql`);
+    const migrationTemplate = `-- +goose Up
+-- +goose StatementBegin
 -- TODO: add migration SQL here
-
-
+-- +goose StatementEnd
+
+-- +goose Down
+-- +goose StatementBegin
 -- TODO: rollback SQL here
+-- +goose StatementEnd
 `;
-    await fs.writeFile(
-
+    await fs.writeFile(migrationPath, migrationTemplate);
+}
+/**
+ * Get DATABASE_URL from environment variables or .env files
+ */
+async function getDatabaseUrl() {
+    // Check the environment variable first
+    if (process.env.DATABASE_URL) {
+        return process.env.DATABASE_URL;
+    }
+    // Try to read .env files
+    const possibleEnvPaths = [
+        path.join(process.cwd(), ".env"),
+        path.join(process.cwd(), "env"),
+        path.join(process.cwd(), "backend", ".env"),
+        path.join(process.cwd(), ".env.local"),
+    ];
+    for (const envPath of possibleEnvPaths) {
+        if (await fs.pathExists(envPath)) {
+            try {
+                const envContent = await fs.readFile(envPath, "utf8");
+                const lines = envContent.split("\n");
+                for (const line of lines) {
+                    // Skip comments and empty lines
+                    const trimmed = line.trim();
+                    if (!trimmed || trimmed.startsWith("#"))
+                        continue;
+                    // Parse DATABASE_URL=value
+                    const match = trimmed.match(/^DATABASE_URL\s*=\s*(.+)$/);
+                    if (match) {
+                        // Strip surrounding quotes if present
+                        let value = match[1].trim();
+                        if ((value.startsWith('"') && value.endsWith('"')) ||
+                            (value.startsWith("'") && value.endsWith("'"))) {
+                            value = value.slice(1, -1);
+                        }
+                        return value;
+                    }
+                }
+            }
+            catch (error) {
+                // Ignore errors while reading the .env file
+            }
+        }
+    }
+    // Try to assemble a URL from individual DB_* variables
+    const dbHost = process.env.DB_HOST || process.env.POSTGRES_HOST || "localhost";
+    const dbPort = process.env.DB_PORT || process.env.POSTGRES_PORT || "5432";
+    const dbUser = process.env.DB_USER || process.env.POSTGRES_USER || "postgres";
+    const dbPassword = process.env.DB_PASSWORD || process.env.POSTGRES_PASSWORD || "";
+    const dbName = process.env.DB_NAME || process.env.POSTGRES_DB || process.env.DB_DATABASE || "";
+    if (dbName) {
+        return `postgres://${dbUser}${dbPassword ? `:${dbPassword}` : ""}@${dbHost}:${dbPort}/${dbName}?sslmode=disable`;
+    }
+    return undefined;
+}
+/**
+ * Run database migrations via goose
+ */
+async function runMigrations(migrationsDir, dbUrl, driver, direction) {
+    const { execSync } = require("child_process");
+    const migrationsPath = path.resolve(process.cwd(), migrationsDir);
+    // Check that the migrations directory exists
+    if (!(await fs.pathExists(migrationsPath))) {
+        console.error(chalk_1.default.red(`✗ Migrations directory not found: ${migrationsPath}`));
+        console.log(chalk_1.default.yellow(`  Create migrations with: npx vira make migration <name>`));
+        process.exit(1);
+    }
+    // Resolve the database URL (checking .env files as well)
+    const databaseUrl = dbUrl || await getDatabaseUrl();
+    if (!databaseUrl) {
+        console.error(chalk_1.default.red("✗ Database URL not provided"));
+        console.log(chalk_1.default.yellow("  Use --db-url option, set DATABASE_URL environment variable, or create .env file"));
+        console.log(chalk_1.default.yellow("  Example: DATABASE_URL=postgres://user:pass@localhost/dbname?sslmode=disable"));
+        console.log(chalk_1.default.yellow("  Or create .env in project root with: DATABASE_URL=..."));
+        process.exit(1);
+    }
+    // Check whether goose is installed (cross-platform check)
+    const isWindows = process.platform === "win32";
+    const checkCommand = isWindows ? "where goose" : "which goose";
+    let gooseInstalled = false;
+    try {
+        execSync(checkCommand, { stdio: "ignore" });
+        gooseInstalled = true;
+    }
+    catch {
+        // goose not found
+    }
+    if (!gooseInstalled) {
+        console.log(chalk_1.default.blue("goose not found. Installing..."));
+        console.log(chalk_1.default.gray("  This will download goose and its dependencies (this is normal)"));
+        try {
+            // Hide the dependency download noise; show only progress
+            execSync("go install github.com/pressly/goose/v3/cmd/goose@latest", {
+                stdio: "inherit",
+                env: { ...process.env, GOFLAGS: "-mod=mod" }
+            });
+            console.log(chalk_1.default.green("✓ goose installed successfully"));
+        }
+        catch (error) {
+            console.error(chalk_1.default.red("✗ Failed to install goose"));
+            console.log(chalk_1.default.yellow("  Install manually: go install github.com/pressly/goose/v3/cmd/goose@latest"));
+            console.log(chalk_1.default.yellow("  Or download from: https://github.com/pressly/goose/releases"));
+            process.exit(1);
+        }
+    }
+    // Run the migrations
+    try {
+        const command = direction === "up" ? "up" : "down";
+        console.log(chalk_1.default.blue(`Running migrations ${direction}...`));
+        // Pass goose settings via environment variables (more reliable on Windows)
+        const env = {
+            ...process.env,
+            GOOSE_DRIVER: driver,
+            GOOSE_DBSTRING: databaseUrl,
+        };
+        execSync(`goose -dir "${migrationsPath}" ${command}`, {
+            stdio: "inherit",
+            cwd: process.cwd(),
+            env: env
+        });
+        console.log(chalk_1.default.green(`✓ Migrations ${direction} completed successfully`));
+    }
+    catch (error) {
+        console.error(chalk_1.default.red(`✗ Migration failed: ${error.message}`));
+        process.exit(1);
+    }
+}
+/**
+ * Show migration status
+ */
+async function showMigrationStatus(migrationsDir, dbUrl, driver) {
+    const { execSync } = require("child_process");
+    const migrationsPath = path.resolve(process.cwd(), migrationsDir);
+    if (!(await fs.pathExists(migrationsPath))) {
+        console.error(chalk_1.default.red(`✗ Migrations directory not found: ${migrationsPath}`));
+        process.exit(1);
+    }
+    const databaseUrl = dbUrl || await getDatabaseUrl();
+    if (!databaseUrl) {
+        console.error(chalk_1.default.red("✗ Database URL not provided"));
+        console.log(chalk_1.default.yellow("  Use --db-url option, set DATABASE_URL environment variable, or create .env file"));
+        process.exit(1);
+    }
+    try {
+        console.log(chalk_1.default.blue("Migration status:"));
+        // Pass goose settings via environment variables (more reliable on Windows)
+        const env = {
+            ...process.env,
+            GOOSE_DRIVER: driver,
+            GOOSE_DBSTRING: databaseUrl,
+        };
+        execSync(`goose -dir "${migrationsPath}" status`, {
+            stdio: "inherit",
+            cwd: process.cwd(),
+            env: env
+        });
+    }
+    catch (error) {
+        console.error(chalk_1.default.red(`✗ Failed to get migration status: ${error.message}`));
+        process.exit(1);
+    }
+}
+/**
+ * Run database seeds
+ */
+async function runSeeds(seedsDir, dbUrl, driver) {
+    const { execSync } = require("child_process");
+    const seedsPath = path.resolve(process.cwd(), seedsDir);
+    // Check that the seeds directory exists
+    if (!(await fs.pathExists(seedsPath))) {
+        console.error(chalk_1.default.red(`✗ Seeds directory not found: ${seedsPath}`));
+        console.log(chalk_1.default.yellow(`  Create seeds directory and add SQL files`));
+        process.exit(1);
+    }
+    // Resolve the database URL
+    const databaseUrl = dbUrl || await getDatabaseUrl();
+    if (!databaseUrl) {
+        console.error(chalk_1.default.red("✗ Database URL not provided"));
+        console.log(chalk_1.default.yellow("  Use --db-url option, set DATABASE_URL environment variable, or create .env file"));
+        process.exit(1);
+    }
+    // Collect the SQL files from the seeds directory
+    const files = await fs.readdir(seedsPath);
+    const sqlFiles = files
+        .filter((f) => f.endsWith(".sql"))
+        .sort(); // Sort by name for sequential execution
+    if (sqlFiles.length === 0) {
+        console.log(chalk_1.default.yellow(`⚠ No SQL files found in ${seedsPath}`));
+        return;
+    }
+    console.log(chalk_1.default.blue(`Found ${sqlFiles.length} seed file(s):`));
+    sqlFiles.forEach((file) => {
+        console.log(chalk_1.default.gray(`  - ${file}`));
+    });
+    // Execute each SQL file
+    try {
+        console.log(chalk_1.default.blue(`\nRunning seeds...`));
+        if (driver === "postgres") {
+            // Use psql for PostgreSQL
+            const isWindows = process.platform === "win32";
+            // Parse DATABASE_URL with the built-in URL class
+            let parsedUrl;
+            try {
+                parsedUrl = new URL(databaseUrl);
+            }
+            catch (error) {
+                console.error(chalk_1.default.red("✗ Invalid DATABASE_URL format for PostgreSQL"));
+                console.log(chalk_1.default.yellow("  Expected format: postgres://user:password@host:port/dbname"));
+                console.log(chalk_1.default.gray(`  Error: ${error instanceof Error ? error.message : String(error)}`));
+                process.exit(1);
+            }
+            // Validate the protocol
+            if (parsedUrl.protocol !== "postgres:" && parsedUrl.protocol !== "postgresql:") {
+                console.error(chalk_1.default.red("✗ Invalid database protocol"));
+                console.log(chalk_1.default.yellow("  Expected protocol: postgres:// or postgresql://"));
+                process.exit(1);
+            }
+            const user = parsedUrl.username || "postgres";
+            const password = parsedUrl.password || "";
+            const host = parsedUrl.hostname || "localhost";
+            const port = parsedUrl.port || "5432";
+            const dbname = parsedUrl.pathname ? parsedUrl.pathname.slice(1) : ""; // Strip the leading /
+            if (!dbname) {
+                console.error(chalk_1.default.red("✗ Database name not specified in DATABASE_URL"));
+                process.exit(1);
+            }
+            // Check for a locally installed psql
+            let useDocker = false;
+            let dockerContainer = "";
+            try {
+                execSync(isWindows ? "where psql" : "which psql", { stdio: "ignore" });
+            }
+            catch {
+                // psql not found locally, fall back to Docker
+                useDocker = true;
+                console.log(chalk_1.default.blue("  psql not found locally, trying Docker..."));
+                // Try to locate a PostgreSQL container
+                try {
+                    // List running containers
+                    const containersOutput = execSync("docker ps --format {{.Names}}", { encoding: "utf8" });
+                    const containers = containersOutput.trim().split("\n")
+                        .filter((c) => c.trim())
+                        .map((c) => c.trim().replace(/['"]/g, "")); // Strip quotes
+                    // Look for a container with postgres in its name
+                    const postgresContainer = containers.find((c) => c.toLowerCase().includes("postgres") ||
+                        c.toLowerCase().includes("db") ||
+                        c.toLowerCase().includes("database"));
+                    if (postgresContainer) {
+                        dockerContainer = postgresContainer;
+                        console.log(chalk_1.default.gray(`  Found Docker container: ${dockerContainer}`));
+                        // Make sure the container is actually running
+                        try {
+                            const checkOutput = execSync(`docker ps --filter "name=^${dockerContainer}$" --format {{.Names}}`, { encoding: "utf8", stdio: "pipe" });
+                            if (!checkOutput.trim()) {
+                                console.error(chalk_1.default.red(`✗ Container ${dockerContainer} is not running`));
+                                process.exit(1);
+                            }
+                        }
+                        catch {
+                            console.error(chalk_1.default.red(`✗ Container ${dockerContainer} is not running`));
+                            process.exit(1);
+                        }
+                    }
+                    else {
+                        // Try standard container names
+                        const standardNames = ["postgres", "postgresql", "db", "database", "vira-db", "vira_db"];
+                        for (const name of standardNames) {
+                            try {
+                                const checkOutput = execSync(`docker ps --filter "name=^${name}$" --format {{.Names}}`, { encoding: "utf8", stdio: "pipe" });
+                                if (checkOutput.trim()) {
+                                    dockerContainer = name;
+                                    console.log(chalk_1.default.gray(`  Using Docker container: ${dockerContainer}`));
+                                    break;
+                                }
+                            }
+                            catch {
+                                // Keep looking
+                            }
+                        }
+                    }
+                    if (!dockerContainer) {
+                        console.error(chalk_1.default.red("✗ psql not found and no PostgreSQL Docker container detected"));
+                        console.log(chalk_1.default.yellow("  Please install PostgreSQL client tools or ensure Docker container is running"));
+                        console.log(chalk_1.default.yellow("  Container name should contain 'postgres', 'db', or 'database'"));
+                        process.exit(1);
+                    }
+                }
+                catch (dockerError) {
+                    console.error(chalk_1.default.red("✗ Docker not available or PostgreSQL container not found"));
+                    console.log(chalk_1.default.yellow("  Please install PostgreSQL client tools (psql) or ensure Docker is running"));
+                    process.exit(1);
+                }
+            }
+            // Set the PGPASSWORD environment variable for psql
+            const env = {
+                ...process.env,
+                PGPASSWORD: password,
+            };
+            // Helper to run an SQL statement
+            const executeSQL = async (sql) => {
+                const tempFile = path.join(seedsPath, `.temp_${Date.now()}.sql`);
+                await fs.writeFile(tempFile, sql);
+                try {
+                    if (useDocker && dockerContainer) {
+                        const containerPath = `/tmp/temp_${Date.now()}.sql`;
+                        execSync(`docker cp "${tempFile}" "${dockerContainer}":${containerPath}`, { stdio: "ignore" });
+                        const escapedPassword = password.replace(/'/g, "'\\''");
+                        const output = execSync(`docker exec -e PGPASSWORD='${escapedPassword}' "${dockerContainer}" psql -U ${user} -d ${dbname} -t -A -f ${containerPath}`, { encoding: "utf8", stdio: "pipe" });
+                        execSync(`docker exec "${dockerContainer}" rm ${containerPath}`, { stdio: "ignore" });
+                        return output.trim();
+                    }
+                    else {
+                        const output = execSync(`psql -h ${host} -p ${port} -U ${user} -d ${dbname} -t -A -f "${tempFile}"`, { encoding: "utf8", stdio: "pipe", env: env });
+                        return output.trim();
+                    }
+                }
+                finally {
+                    await fs.remove(tempFile);
+                }
+            };
+            // Fetch the list of seed files that have already run
+            let executedSeeds = new Set();
+            try {
+                const checkTableSQL = `
+          SELECT EXISTS (
+            SELECT FROM information_schema.tables
+            WHERE table_schema = 'public'
+            AND table_name = 'seed_history'
+          );
+        `;
+                const tableExists = await executeSQL(checkTableSQL);
+                if (tableExists === "t") {
+                    const getExecutedSQL = `SELECT seed_file FROM seed_history WHERE success = true;`;
+                    const executedOutput = await executeSQL(getExecutedSQL);
+                    if (executedOutput) {
+                        executedSeeds = new Set(executedOutput.split("\n").filter((f) => f.trim()));
+                    }
+                }
+            }
+            catch (error) {
+                // Ignore errors during the check - the table may not exist yet
+                console.log(chalk_1.default.yellow("  ⚠ Could not check seed history, will execute all seeds"));
+            }
+            // Filter out seed files that have already run
+            const pendingSeeds = sqlFiles.filter((file) => !executedSeeds.has(file));
+            if (pendingSeeds.length === 0) {
+                console.log(chalk_1.default.green(`\n✓ All seeds have already been executed`));
+                return;
+            }
+            if (pendingSeeds.length < sqlFiles.length) {
+                console.log(chalk_1.default.blue(`\n  ${sqlFiles.length - pendingSeeds.length} seed(s) already executed, ${pendingSeeds.length} pending`));
+            }
+            for (const file of pendingSeeds) {
+                const filePath = path.join(seedsPath, file);
+                const startTime = Date.now();
+                console.log(chalk_1.default.blue(`  Running ${file}...`));
+                let success = false;
+                let errorMsg = "";
+                try {
+                    if (useDocker && dockerContainer) {
+                        // Copy the file into the container and execute it
+                        const fileName = path.basename(filePath);
+                        const containerPath = `/tmp/${fileName}`;
+                        // Copy the file into the container (quote the container name in case of spaces)
+                        execSync(`docker cp "${filePath}" "${dockerContainer}":${containerPath}`, { stdio: "inherit" });
+                        // Run the SQL inside the container (escape the password and quote names)
+                        const escapedPassword = password.replace(/'/g, "'\\''");
+                        execSync(`docker exec -e PGPASSWORD='${escapedPassword}' "${dockerContainer}" psql -U ${user} -d ${dbname} -f ${containerPath}`, {
+                            stdio: "inherit",
+                            cwd: process.cwd(),
+                        });
+                        // Remove the temporary file from the container
+                        try {
+                            execSync(`docker exec "${dockerContainer}" rm ${containerPath}`, { stdio: "ignore" });
+                        }
+                        catch {
+                            // Ignore cleanup errors
+                        }
+                    }
+                    else {
+                        // Use the local psql
+                        execSync(`psql -h ${host} -p ${port} -U ${user} -d ${dbname} -f "${filePath}"`, {
+                            stdio: "inherit",
+                            cwd: process.cwd(),
+                            env: env,
+                        });
+                    }
+                    const executionTime = Date.now() - startTime;
+                    success = true;
+                    console.log(chalk_1.default.green(`  ✓ ${file} completed (${executionTime}ms)`));
+                    // Record the successful run in seed_history
+                    try {
+                        const insertSQL = `
+              INSERT INTO seed_history (seed_file, execution_time_ms, success)
+              VALUES ('${file.replace(/'/g, "''")}', ${executionTime}, true)
+              ON CONFLICT (seed_file) DO UPDATE SET
+                executed_at = CURRENT_TIMESTAMP,
+                execution_time_ms = ${executionTime},
+                success = true,
+                error_message = NULL;
+            `;
+                        await executeSQL(insertSQL);
+                    }
+                    catch (historyError) {
+                        // Ignore history write errors
+                        console.log(chalk_1.default.yellow(`  ⚠ Could not record seed history: ${historyError instanceof Error ? historyError.message : String(historyError)}`));
+                    }
+                }
+                catch (error) {
+                    const executionTime = Date.now() - startTime;
+                    errorMsg = error.message || String(error);
+                    console.error(chalk_1.default.red(`  ✗ ${file} failed: ${errorMsg}`));
+                    // Record the failed run in seed_history
+                    try {
+                        const insertSQL = `
+              INSERT INTO seed_history (seed_file, execution_time_ms, success, error_message)
+              VALUES ('${file.replace(/'/g, "''")}', ${executionTime}, false, '${errorMsg.replace(/'/g, "''")}')
+              ON CONFLICT (seed_file) DO UPDATE SET
+                executed_at = CURRENT_TIMESTAMP,
+                execution_time_ms = ${executionTime},
+                success = false,
+                error_message = '${errorMsg.replace(/'/g, "''")}';
+            `;
+                        await executeSQL(insertSQL);
+                    }
+                    catch (historyError) {
+                        // Ignore history write errors
+                    }
+                    // Continue with the remaining seed files
+                }
+            }
+        }
+        else {
+            // Support for other drivers can be added later
+            console.error(chalk_1.default.red(`✗ Seeds are currently only supported for PostgreSQL`));
+            process.exit(1);
+        }
+        console.log(chalk_1.default.green(`\n✓ Seeds completed successfully`));
+    }
+    catch (error) {
+        console.error(chalk_1.default.red(`✗ Seeds failed: ${error.message}`));
+        process.exit(1);
+    }
 }
 async function generateEventHandler(name, dir) {
     const targetDir = path.join(process.cwd(), dir);
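
One caveat worth flagging in the seed runner above: it records each run in a seed_history table, but nothing in this hunk creates that table. A plausible shape for it, inferred from the columns the INSERT ... ON CONFLICT statements reference (this DDL is an assumption, not something shipped in this release), kept as a string so it could live in a migration or a first seed:

    // Hypothetical DDL for the seed_history table the seed runner expects.
    const createSeedHistory = `
    CREATE TABLE IF NOT EXISTS seed_history (
        seed_file         TEXT PRIMARY KEY,
        executed_at       TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
        execution_time_ms INTEGER,
        success           BOOLEAN NOT NULL DEFAULT false,
        error_message     TEXT
    );`;
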
@@ -1637,12 +2227,20 @@ async function generateCRUDHandler(name, dir, modelName) {
     // Try to determine the module path from go.mod
     let modulePath = "your-project/backend";
     try {
-
-
-
-
-
-
+        // Look for go.mod in the likely locations
+        const possiblePaths = [
+            path.join(process.cwd(), "go.mod"), // project root
+            path.join(process.cwd(), dir, "..", "..", "go.mod"), // backend/../go.mod
+            path.join(process.cwd(), "backend", "go.mod"), // backend/go.mod
+        ];
+        for (const goModPath of possiblePaths) {
+            if (await fs.pathExists(goModPath)) {
+                const goModContent = await fs.readFile(goModPath, "utf8");
+                const moduleMatch = goModContent.match(/^module\s+(.+)$/m);
+                if (moduleMatch) {
+                    modulePath = moduleMatch[1];
+                    break;
+                }
             }
         }
     }
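
For reference, the module lookup takes the first module directive it finds in go.mod; a quick sketch of what it extracts (the module path here is hypothetical):

    const goModContent = "module github.com/acme/shop/backend\n\ngo 1.22\n";
    const moduleMatch = goModContent.match(/^module\s+(.+)$/m);
    console.log(moduleMatch && moduleMatch[1]); // "github.com/acme/shop/backend"
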
@@ -1814,13 +2412,13 @@ func Create${handlerName}(w http.ResponseWriter, r *http.Request) {
 	// }

 	// 🎯 Production-ready: Log event for audit trail
-	event := ${handlerName}Event{
-
-
-
-
-
-	}
+	// event := ${handlerName}Event{
+	//     ID:        uuid.New().String(),
+	//     Type:      "created",
+	//     EntityID:  input.ID,
+	//     NewValue:  &input,
+	//     Timestamp: time.Now(),
+	// }
 	// logEvent(event) // Implement event logging to client_events table

 	// 🎯 Production-ready: Invalidate cache
@@ -1892,7 +2490,7 @@ func Update${handlerName}(w http.ResponseWriter, r *http.Request) {
 // Delete${handlerName} handles DELETE /${safeName}/{id}
 func Delete${handlerName}(w http.ResponseWriter, r *http.Request) {
 	vars := mux.Vars(r)
-	id := vars["id"]
+	id := vars["id"] // id will be used when implementing DB delete and event logging

 	// 🎯 Production-ready: Get value for event logging
 	// oldValue, _ := db.Get${handlerName}(id)