sonamu 0.3.1 → 0.4.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.pnp.cjs +11 -0
- package/dist/base-model-BzMJ2E_I.d.mts +43 -0
- package/dist/base-model-CWRKUX49.d.ts +43 -0
- package/dist/bin/cli.js +118 -89
- package/dist/bin/cli.js.map +1 -1
- package/dist/bin/cli.mjs +74 -45
- package/dist/bin/cli.mjs.map +1 -1
- package/dist/chunk-6HSW7OS3.js +1567 -0
- package/dist/chunk-6HSW7OS3.js.map +1 -0
- package/dist/chunk-FLPD24HS.mjs +231 -0
- package/dist/chunk-FLPD24HS.mjs.map +1 -0
- package/dist/{chunk-MPXE4IHO.mjs → chunk-PP2PSSAG.mjs} +5284 -5617
- package/dist/chunk-PP2PSSAG.mjs.map +1 -0
- package/dist/chunk-QK5XXJUX.mjs +280 -0
- package/dist/chunk-QK5XXJUX.mjs.map +1 -0
- package/dist/chunk-S6FYTR3V.mjs +1567 -0
- package/dist/chunk-S6FYTR3V.mjs.map +1 -0
- package/dist/chunk-U636LQJJ.js +231 -0
- package/dist/chunk-U636LQJJ.js.map +1 -0
- package/dist/chunk-W7KDVJLQ.js +280 -0
- package/dist/chunk-W7KDVJLQ.js.map +1 -0
- package/dist/{chunk-YXILRRDT.js → chunk-XT6LHCX5.js} +5252 -5585
- package/dist/chunk-XT6LHCX5.js.map +1 -0
- package/dist/database/drivers/knex/base-model.d.mts +16 -0
- package/dist/database/drivers/knex/base-model.d.ts +16 -0
- package/dist/database/drivers/knex/base-model.js +55 -0
- package/dist/database/drivers/knex/base-model.js.map +1 -0
- package/dist/database/drivers/knex/base-model.mjs +56 -0
- package/dist/database/drivers/knex/base-model.mjs.map +1 -0
- package/dist/database/drivers/kysely/base-model.d.mts +22 -0
- package/dist/database/drivers/kysely/base-model.d.ts +22 -0
- package/dist/database/drivers/kysely/base-model.js +64 -0
- package/dist/database/drivers/kysely/base-model.js.map +1 -0
- package/dist/database/drivers/kysely/base-model.mjs +65 -0
- package/dist/database/drivers/kysely/base-model.mjs.map +1 -0
- package/dist/index.d.mts +222 -928
- package/dist/index.d.ts +222 -928
- package/dist/index.js +13 -26
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +18 -31
- package/dist/index.mjs.map +1 -1
- package/dist/model-CAH_4oQh.d.mts +1042 -0
- package/dist/model-CAH_4oQh.d.ts +1042 -0
- package/import-to-require.js +27 -0
- package/package.json +24 -3
- package/src/api/caster.ts +6 -0
- package/src/api/code-converters.ts +3 -1
- package/src/api/sonamu.ts +41 -22
- package/src/bin/cli.ts +79 -46
- package/src/database/_batch_update.ts +16 -11
- package/src/database/base-model.abstract.ts +97 -0
- package/src/database/base-model.ts +214 -280
- package/src/database/code-generator.ts +72 -0
- package/src/database/db.abstract.ts +75 -0
- package/src/database/db.ts +21 -82
- package/src/database/drivers/knex/base-model.ts +55 -0
- package/src/database/drivers/knex/client.ts +209 -0
- package/src/database/drivers/knex/db.ts +227 -0
- package/src/database/drivers/knex/generator.ts +659 -0
- package/src/database/drivers/kysely/base-model.ts +89 -0
- package/src/database/drivers/kysely/client.ts +309 -0
- package/src/database/drivers/kysely/db.ts +238 -0
- package/src/database/drivers/kysely/generator.ts +714 -0
- package/src/database/types.ts +117 -0
- package/src/database/upsert-builder.ts +31 -18
- package/src/entity/entity-utils.ts +1 -1
- package/src/entity/migrator.ts +148 -711
- package/src/index.ts +1 -1
- package/src/syncer/syncer.ts +69 -27
- package/src/templates/generated_http.template.ts +14 -0
- package/src/templates/kysely_types.template.ts +205 -0
- package/src/templates/model.template.ts +2 -139
- package/src/templates/service.template.ts +3 -1
- package/src/testing/_relation-graph.ts +111 -0
- package/src/testing/fixture-manager.ts +216 -332
- package/src/types/types.ts +56 -6
- package/src/utils/utils.ts +56 -4
- package/src/utils/zod-error.ts +189 -0
- package/tsconfig.json +2 -2
- package/tsup.config.js +11 -10
- package/dist/chunk-MPXE4IHO.mjs.map +0 -1
- package/dist/chunk-YXILRRDT.js.map +0 -1
- /package/src/database/{knex-plugins → drivers/knex/plugins}/knex-on-duplicate-update.ts +0 -0
package/import-to-require.js
ADDED
@@ -0,0 +1,27 @@
+const fs = require("fs/promises");
+
+export const ImportToRequirePlugin = {
+  name: "import-to-require",
+  setup(build) {
+    if (build.initialOptions.define.TSUP_FORMAT === '"cjs"') {
+      // Read src/database/db.ts before the build and transform it
+      build.onLoad({ filter: /database\/db.ts/ }, async (args) => {
+        console.debug(`reading ${args.path}`);
+        let contents = await fs.readFile(args.path, "utf8");
+
+        // Find 'await import(' patterns and convert them to 'require('
+        contents = contents.replace(
+          /\bawait import\(([^)]+)\)/g,
+          (_, modulePath) => {
+            return `require(${modulePath})`;
+          }
+        );
+
+        return {
+          contents,
+          loader: "ts", // use the 'ts' loader so TypeScript is handled
+        };
+      });
+    }
+  },
+};
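The tsup.config.js that registers this plugin also changed in this release (+11 -10) but its contents are not shown in the diff. Below is a minimal sketch of how such a plugin is typically wired into tsup via its esbuildPlugins option; the entry points and formats are assumptions, not the package's actual config.

// tsup.config.ts-style sketch (assumed shape; the real file is tsup.config.js)
import { defineConfig } from "tsup";
import { ImportToRequirePlugin } from "./import-to-require";

export default defineConfig({
  entry: ["src/index.ts"],  // assumed entry point
  format: ["cjs", "esm"],   // TSUP_FORMAT is defined as '"cjs"' only during the CJS pass
  dts: true,
  // The plugin rewrites `await import(...)` to `require(...)` in database/db.ts for the CJS build.
  esbuildPlugins: [ImportToRequirePlugin as any],
});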
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "sonamu",
-  "version": "0.3.1",
+  "version": "0.4.2",
   "description": "Sonamu — TypeScript Fullstack API Framework",
   "keywords": [
     "typescript",
@@ -12,11 +12,22 @@
   "exports": {
     ".": {
       "import": "./dist/index.mjs",
-      "require": "./dist/index.js"
+      "require": "./dist/index.js",
+      "types": "./dist/index.d.ts"
+    },
+    "./kysely": {
+      "import": "./dist/database/drivers/kysely/base-model.mjs",
+      "require": "./dist/database/drivers/kysely/base-model.js",
+      "types": "./dist/database/drivers/kysely/base-model.d.mts"
+    },
+    "./knex": {
+      "import": "./dist/database/drivers/knex/base-model.mjs",
+      "require": "./dist/database/drivers/knex/base-model.js",
+      "types": "./dist/database/drivers/knex/base-model.d.mts"
     }
   },
   "scripts": {
-    "dev": "
+    "dev": "tsup --config ./tsup.config.js --watch",
     "build": "tsup --config ./tsup.config.js"
   },
   "license": "MIT",
@@ -39,6 +50,7 @@
     "glob": "^8.0.3",
     "inflection": "^1.13.2",
     "knex": "^3.1.0",
+    "kysely": "^0.27.4",
     "lodash": "^4.17.21",
     "luxon": "^3.0.3",
     "mysql2": "^3.6.1",
@@ -68,7 +80,16 @@
   "peerDependencies": {
     "fastify": "^4.23.2",
     "knex": "^3.1.0",
+    "kysely": "^0.27.4",
     "mysql2": "^3.6.1"
   },
+  "peerDependenciesMeta": {
+    "knex": {
+      "optional": true
+    },
+    "kysely": {
+      "optional": true
+    }
+  },
   "packageManager": "yarn@3.6.3"
 }
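With the new exports map, the driver-specific entry points resolve as sonamu/knex and sonamu/kysely, and both knex and kysely become optional peer dependencies. A hedged sketch of what application-side imports could look like follows; the symbols actually exported by these entry points are not shown in this diff, so the namespace imports are placeholders (see dist/database/drivers/*/base-model.d.mts for the real exports).

// Application code (hypothetical). Install only the driver you use; both are
// optional per peerDependenciesMeta above.
import { Sonamu } from "sonamu";              // main entry: dist/index.{mjs,js}
import * as knexDriver from "sonamu/knex";    // requires `knex` to be installed
// import * as kyselyDriver from "sonamu/kysely"; // or `kysely` instead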
package/src/api/caster.ts
CHANGED
@@ -71,6 +71,12 @@ export function caster(zodType: z.ZodType<any>, raw: any): any {
   } else if (zodType instanceof z.ZodNullable) {
     // nullable
     return caster(zodType._def.innerType, raw);
+  } else if (
+    zodType instanceof z.ZodDate &&
+    new Date(raw).toString() !== "Invalid Date"
+  ) {
+    // date
+    return new Date(raw);
   } else {
     // leave everything else unprocessed
     return raw;

package/src/api/code-converters.ts
CHANGED
@@ -52,7 +52,9 @@ export function getZodObjectFromApi(
     api.parameters.filter(
       (param) =>
         !ApiParamType.isContext(param.type) &&
-        !ApiParamType.isRefKnex(param.type)
+        !ApiParamType.isRefKnex(param.type) &&
+        !ApiParamType.isRefKysely(param.type) &&
+        !(param.optional === true && param.name.startsWith("_")) // exclude parameters whose names start with _
     ),
     references
   );
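The new branch in caster() only coerces a value to a Date when the schema is a z.ZodDate and the raw input parses to a valid Date; otherwise the value falls through for zod to reject. A standalone illustration of that check (zod only, outside the caster):

import { z } from "zod";

// Mirrors the added branch: coerce only when the schema expects a Date and parsing succeeds.
function castDate(zodType: z.ZodType<any>, raw: any): any {
  if (zodType instanceof z.ZodDate && new Date(raw).toString() !== "Invalid Date") {
    return new Date(raw);
  }
  return raw;
}

castDate(z.date(), "2024-05-01"); // -> Date instance
castDate(z.date(), "not-a-date"); // -> "not-a-date" (left as-is; zod will report the error)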
package/src/api/sonamu.ts
CHANGED
@@ -2,6 +2,8 @@ import chalk from "chalk";
 import { FastifyInstance, FastifyReply, FastifyRequest } from "fastify";
 import { IncomingMessage, Server, ServerResponse } from "http";
 import { ZodError } from "zod";
+import path from "path";
+import fs from "fs-extra";
 import { getZodObjectFromApi } from "./code-converters";
 import { Context } from "./context";
 import { BadRequestException } from "../exceptions/so-exceptions";
@@ -10,15 +12,14 @@ import { fastifyCaster } from "./caster";
 import { ApiParam, ApiParamType } from "../types/types";
 import { Syncer } from "../syncer/syncer";
 import { isLocal, isTest } from "../utils/controller";
-import { DB, SonamuDBConfig } from "../database/db";
-import { BaseModel } from "../database/base-model";
 import { findApiRootPath } from "../utils/utils";
-import path from "path";
-import fs from "fs-extra";
 import { ApiDecoratorOptions } from "./decorators";
-import {
+import { humanizeZodError } from "../utils/zod-error";
+import { DatabaseDriver, SonamuDBConfig } from "../database/types";
+import { DB } from "../database/db";
 
 export type SonamuConfig = {
+  projectName?: string;
   api: {
     dir: string;
   };
@@ -91,13 +92,24 @@ class SonamuClass {
   set dbConfig(dbConfig: SonamuDBConfig) {
     this._dbConfig = dbConfig;
   }
-  get dbConfig()
+  get dbConfig() {
     if (this._dbConfig === null) {
       throw new Error("Sonamu has not been initialized");
     }
     return this._dbConfig!;
   }
 
+  private _dbClient: DatabaseDriver | null = null;
+  set dbClient(_dbClient: DatabaseDriver) {
+    this._dbClient = _dbClient;
+  }
+  get dbClient() {
+    if (this._dbClient === null) {
+      throw new Error("Sonamu has not been initialized");
+    }
+    return this._dbClient!;
+  }
+
   private _syncer: Syncer | null = null;
   set syncer(syncer: Syncer) {
     this._syncer = syncer;
@@ -154,9 +166,12 @@ class SonamuClass {
     }
 
     // Load the DB config
-
+    const baseConfig = await DB.getBaseConfig(this.apiRootPath);
+    this.dbClient = baseConfig.client;
+    DB.init(baseConfig as any);
+    this.dbConfig = DB.fullConfig;
     !doSilent && console.log(chalk.green("DB Config Loaded!"));
-    attachOnDuplicateUpdate();
+    // attachOnDuplicateUpdate();
 
     // Load entities
     await EntityManager.autoload(doSilent);
@@ -237,11 +252,10 @@ class SonamuClass {
       reqBody = fastifyCaster(ReqType).parse(request[which] ?? {});
     } catch (e) {
       if (e instanceof ZodError) {
-
-
-
-
-        );
+        const messages = humanizeZodError(e)
+          .map((issue) => issue.message)
+          .join(" ");
+        throw new BadRequestException(messages);
       } else {
         throw e;
       }
@@ -253,15 +267,20 @@ class SonamuClass {
     // cache
     const { cacheKey, cacheTtl, cachedData } = await (async () => {
       if (config.cache) {
-
-
-
-
+        try {
+          const cacheKeyRes = config.cache.resolveKey(api.path, reqBody);
+          if (cacheKeyRes.cache === false) {
+            return { cacheKey: null, cachedData: null };
+          }
 
-
-
-
-
+          const cacheKey = cacheKeyRes.key;
+          const cacheTtl = cacheKeyRes.ttl;
+          const cachedData = await config.cache.get(cacheKey);
+          return { cacheKey, cacheTtl, cachedData };
+        } catch (e) {
+          console.error(e);
+        }
+        return { cacheKey: null, cachedData: null };
       }
       return { cacheKey: null, cachedData: null };
     })();
@@ -301,7 +320,7 @@ class SonamuClass {
   }
 
   async destroy(): Promise<void> {
-    await
+    await DB.destroy();
   }
 }
 export const Sonamu = new SonamuClass();
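The rewritten cache block expects config.cache.resolveKey(api.path, reqBody) to return either { cache: false } or { key, ttl }, then looks the key up with config.cache.get(key), swallowing cache errors. The actual cache option type lives in ApiDecoratorOptions and is not part of this diff; the object below is only a sketch that satisfies that call pattern, with an in-memory map as an assumed store.

const memoryStore = new Map<string, { data: unknown; expiresAt: number }>();

const cache = {
  // Decide per request whether and how to cache.
  resolveKey(path: string, reqBody: unknown): { cache: false } | { key: string; ttl: number } {
    if (path.includes("/admin/")) return { cache: false }; // opt out for some routes (illustrative rule)
    return { key: `${path}:${JSON.stringify(reqBody)}`, ttl: 60 };
  },
  // Return the cached payload or null on a miss.
  async get(key: string): Promise<unknown | null> {
    const hit = memoryStore.get(key);
    return hit && hit.expiresAt > Date.now() ? hit.data : null;
  },
};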
package/src/bin/cli.ts
CHANGED
@@ -6,19 +6,27 @@ import dotenv from "dotenv";
 dotenv.config();
 
 import path from "path";
-import { BaseModel } from "../database/base-model";
-import { EntityManager } from "../entity/entity-manager";
-import { Migrator } from "../entity/migrator";
-import { FixtureManager } from "../testing/fixture-manager";
 import { tsicli } from "tsicli";
 import { execSync } from "child_process";
 import fs from "fs-extra";
-import { Sonamu } from "../api";
-import knex, { Knex } from "knex";
 import inflection from "inflection";
 import prettier from "prettier";
-import { SMDManager } from "../smd/smd-manager";
 import process from "process";
+import _ from "lodash";
+import { Sonamu } from "../api";
+import { EntityManager } from "../entity/entity-manager";
+import { Migrator } from "../entity/migrator";
+import { FixtureManager } from "../testing/fixture-manager";
+import { SMDManager } from "../smd/smd-manager";
+import { DB } from "../database/db";
+import {
+  KnexConfig,
+  KyselyConfig,
+  SonamuKnexDBConfig,
+  SonamuKyselyDBConfig,
+} from "../database/types";
+import { KnexClient } from "../database/drivers/knex/client";
+import { KyselyClient } from "../database/drivers/kysely/client";
 
 let migrator: Migrator;
 
@@ -48,6 +56,7 @@ async function bootstrap() {
   ["migrate", "rollback"],
   ["migrate", "reset"],
   ["migrate", "clear"],
+  ["migrate", "status"],
   ["stub", "practice", "#name"],
   ["stub", "entity", "#name"],
   ["scaffold", "model", "#entityId"],
@@ -63,6 +72,7 @@ async function bootstrap() {
   migrate_rollback,
   migrate_clear,
   migrate_reset,
+  migrate_status,
   fixture_init,
   fixture_import,
   fixture_sync,
@@ -82,7 +92,6 @@ bootstrap().finally(async () => {
     await migrator.destroy();
   }
   await FixtureManager.destory();
-  await BaseModel.destroy();
 
   /* Global End */
   console.log(chalk.bgBlue(`END ${new Date()}\n`));
@@ -113,6 +122,14 @@ async function migrate_check() {
   await migrator.check();
 }
 
+async function migrate_status() {
+  await setupMigrator();
+
+  const status = await migrator.getStatus();
+  // status;
+  console.log(status);
+}
+
 async function migrate_rollback() {
   await setupMigrator();
 
@@ -132,79 +149,94 @@ async function migrate_reset() {
 }
 
 async function fixture_init() {
-  const
+  const _db = DB.getClient("development_master");
+  const srcConn = _db.connectionInfo;
+
   const targets = [
     {
       label: "(REMOTE) Fixture DB",
-
+      connKey: "fixture_remote",
     },
     {
       label: "(LOCAL) Fixture DB",
-
-      toSkip: (() => {
-        const remoteConn = Sonamu.dbConfig.fixture_remote
-          .connection as Knex.ConnectionConfig;
-        const localConn = Sonamu.dbConfig.fixture_local
-          .connection as Knex.ConnectionConfig;
-        return (
-          remoteConn.host === localConn.host &&
-          remoteConn.database === localConn.database
-        );
-      })(),
+      connKey: "fixture_local",
     },
     {
       label: "(LOCAL) Testing DB",
-
+      connKey: "test",
     },
   ] as {
     label: string;
-
-    toSkip?: boolean;
+    connKey: keyof SonamuKnexDBConfig | keyof SonamuKyselyDBConfig;
   }[];
 
   // 1. Dump the reference DB schema
   console.log("DUMP...");
   const dumpFilename = `/tmp/sonamu-fixture-init-${Date.now()}.sql`;
-  const srcConn = srcConfig.connection as Knex.ConnectionConfig;
   const migrationsDump = `/tmp/sonamu-fixture-init-migrations-${Date.now()}.sql`;
   execSync(
-    `mysqldump -h${srcConn.host} -u${srcConn.user} -p${srcConn.password} --single-transaction -d --no-create-db --triggers ${srcConn.database} > ${dumpFilename}`
+    `mysqldump -h${srcConn.host} -P${srcConn.port} -u${srcConn.user} -p${srcConn.password} --single-transaction -d --no-create-db --triggers ${srcConn.database} > ${dumpFilename}`
   );
-
-
+
+  // 2. Dump the migration tables if they exist
+  const dbClient = DB.baseConfig!.client;
+  const migrationTable = DB.migrationTable;
+  const [migrations] = await _db.raw<{ count: number }>(
+    "SELECT COUNT(*) as count FROM information_schema.tables WHERE table_schema = ? AND table_name = ?",
+    [srcConn.database, migrationTable]
   );
+  if (migrations.count > 0) {
+    execSync(
+      `mysqldump -h${srcConn.host} -P${srcConn.port} -u${srcConn.user} -p${srcConn.password} --single-transaction --no-create-db --triggers ${srcConn.database} ${migrationTable} ${migrationTable}_lock > ${migrationsDump}`
+    );
+  }
 
   // 2. For each target DB, check whether it exists, then load the dump
-  for await (const { label,
-  const
-
-  if (
+  for await (const { label, connKey } of targets) {
+    const config = DB.connectionInfo[connKey];
+
+    if (
+      label === "(LOCAL) Fixture DB" &&
+      targets.find(
+        (t) =>
+          t.label === "(REMOTE) Fixture DB" &&
+          DB.connectionInfo[t.connKey].host === config.host &&
+          DB.connectionInfo[t.connKey].database === config.database
+      )
+    ) {
       console.log(chalk.red(`${label}: Skipped!`));
       continue;
    }
 
-    const db =
-
-
-
-
-    }
-
-
+    const db = (() => {
+      if (dbClient === "knex") {
+        const config = _.cloneDeep(DB.fullConfig[connKey]) as KnexConfig;
+        config.connection.database = undefined;
+        return new KnexClient(config);
+      } else {
+        const config = _.cloneDeep(DB.fullConfig[connKey]) as KyselyConfig;
+        config.database = undefined;
+        return new KyselyClient(config);
+      }
+    })();
+
+    const [row] = await db.raw(`SHOW DATABASES LIKE "${config.database}"`);
     if (row) {
       console.log(
-        chalk.yellow(`${label}: Database "${
+        chalk.yellow(`${label}: Database "${config.database}" Already exists`)
       );
       await db.destroy();
       continue;
     }
 
     console.log(`SYNC to ${label}...`);
-    const mysqlCmd = `mysql -h${
-    execSync(`${mysqlCmd} -e 'DROP DATABASE IF EXISTS \`${
-    execSync(`${mysqlCmd} -e 'CREATE DATABASE \`${
-    execSync(`${mysqlCmd} ${
-
+    const mysqlCmd = `mysql -h${config.host} -P${srcConn.port} -u${config.user} -p${config.password}`;
+    execSync(`${mysqlCmd} -e 'DROP DATABASE IF EXISTS \`${config.database}\`'`);
+    execSync(`${mysqlCmd} -e 'CREATE DATABASE \`${config.database}\`'`);
+    execSync(`${mysqlCmd} ${config.database} < ${dumpFilename}`);
+    if (fs.existsSync(migrationsDump)) {
+      execSync(`${mysqlCmd} ${config.database} < ${migrationsDump}`);
+    }
 
     await db.destroy();
   }
@@ -253,6 +285,7 @@ async function stub_practice(name: string) {
   const fileName = `p${currentSeqNo}-${name}.ts`;
   const dstPath = path.join(practiceDir, fileName);
 
+  // FIXME
   const code = [
     `import { BaseModel } from "sonamu";`,
     "",
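fixture_init now goes through the driver-agnostic DB facade (getClient, connectionInfo, raw, destroy) instead of creating knex instances directly. A small sketch of that call pattern outside the CLI follows; the root import is an assumption (inside this repo the import is ../database/db), and the connection key mirrors the ones used above.

import { DB } from "sonamu"; // assumption: DB is re-exported from the package root

async function countTables() {
  const client = DB.getClient("development_master");
  const { database } = client.connectionInfo;
  const [row] = await client.raw<{ count: number }>(
    "SELECT COUNT(*) as count FROM information_schema.tables WHERE table_schema = ?",
    [database]
  );
  console.log(`${database}: ${row.count} tables`);
  await client.destroy();
}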
package/src/database/_batch_update.ts
CHANGED
@@ -4,6 +4,11 @@
  */
 
 import { Knex } from "knex";
+import { DB } from "./db";
+import { KnexClient } from "./drivers/knex/client";
+import { KyselyClient } from "./drivers/kysely/client";
+import { Transaction } from "kysely";
+import { Database } from "./types";
 
 export type RowWithId<Id extends string> = {
   [key in Id]: any;
@@ -11,7 +16,7 @@ export type RowWithId<Id extends string> = {
 
 /**
  * Batch update rows in a table. Technically its a patch since it only updates the specified columns. Any omitted columns will not be affected
- * @param
+ * @param db
  * @param tableName
 * @param ids
 * @param rows
@@ -19,12 +24,12 @@ export type RowWithId<Id extends string> = {
  * @param trx
  */
 export async function batchUpdate<Id extends string>(
-
+  db: KnexClient | KyselyClient,
   tableName: string,
   ids: Id[],
   rows: RowWithId<Id>[],
   chunkSize = 50,
-  trx: Knex.Transaction | null = null
+  trx: Knex.Transaction | Transaction<Database> | null = null
 ) {
   const chunks: RowWithId<Id>[][] = [];
   for (let i = 0; i < rows.length; i += chunkSize) {
@@ -33,18 +38,18 @@ export async function batchUpdate<Id extends string>(
 
   const executeUpdate = async (
     chunk: RowWithId<Id>[],
-    transaction:
+    transaction: KyselyClient | KnexClient
   ) => {
-    const sql = generateBatchUpdateSQL(
-    return
+    const sql = generateBatchUpdateSQL(db, tableName, chunk, ids);
+    return transaction.raw(sql);
   };
 
   if (trx) {
     for (const chunk of chunks) {
-      await executeUpdate(chunk, trx);
+      await executeUpdate(chunk, DB.toClient(trx));
     }
   } else {
-    await
+    await db.trx(async (newTrx) => {
       for (const chunk of chunks) {
         await executeUpdate(chunk, newTrx);
       }
@@ -70,7 +75,7 @@ function generateKeySetFromData(data: Record<string, any>[]) {
 }
 
 function generateBatchUpdateSQL<Id extends string>(
-
+  db: KnexClient | KyselyClient,
   tableName: string,
   data: Record<string, any>[],
   identifiers: Id[]
@@ -112,10 +117,10 @@ function generateBatchUpdateSQL<Id extends string>(
     data.map((row) => row[col])
   );
 
-  const sql =
+  const sql = db.createRawQuery(
    `UPDATE \`${tableName}\` SET ${cases.join(", ")} WHERE ${whereInClauses}`,
    [...bindings, ...whereInBindings]
  );
 
-  return sql
+  return sql;
 }
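batchUpdate() now takes a KnexClient or KyselyClient as its first argument and accepts either a Knex or a Kysely transaction. Below is a hypothetical call site under those assumptions; whether the helper and DB facade are re-exported from the package root is not shown in this diff, and the table and column names are illustrative only.

import { DB, batchUpdate } from "sonamu"; // assumption: both re-exported from the root

async function renameUsers() {
  const db = DB.getClient("development_master");
  await batchUpdate(
    db,        // KnexClient | KyselyClient
    "users",   // table
    ["id"],    // identifier column(s) used in the WHERE ... IN clause
    [
      { id: 1, nickname: "alpha" }, // only the listed columns are patched
      { id: 2, nickname: "beta" },
    ],
    50         // chunk size (the default)
  );
  await db.destroy();
}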
package/src/database/base-model.abstract.ts
ADDED
@@ -0,0 +1,97 @@
+import { DateTime } from "luxon";
+import _ from "lodash";
+import { Knex } from "knex";
+import { RawBuilder } from "kysely";
+import { BaseListParams } from "../utils/model";
+import { DBPreset, DatabaseDriver, DriverSpec } from "./types";
+import { SubsetQuery } from "../types/types";
+
+export abstract class BaseModelAbstract<D extends DatabaseDriver> {
+  public modelName: string = "Unknown";
+
+  abstract runSubsetQuery<T extends BaseListParams, U extends string>(options: {
+    params: T;
+    baseTable?: string;
+    subset: U;
+    subsetQuery: SubsetQuery;
+    build: (buildParams: {
+      qb: DriverSpec[D]["queryBuilder"];
+      db: DriverSpec[D]["adapter"];
+      select: SubsetQuery["select"];
+      joins: SubsetQuery["joins"];
+      virtual: string[];
+    }) => any;
+    debug?: boolean | "list" | "count";
+    db?: DriverSpec[D]["adapter"];
+    optimizeCountQuery?: boolean;
+  }): Promise<{
+    rows: any[];
+    total?: number;
+    subsetQuery: SubsetQuery;
+    qb: DriverSpec[D]["queryBuilder"];
+  }>;
+  abstract getDB(which: DBPreset): DriverSpec[D]["adapter"];
+  abstract destroy(): Promise<void>;
+  abstract useLoaders(
+    db: DriverSpec[D]["adapter"],
+    rows: any[],
+    loaders: SubsetQuery["loaders"]
+  ): Promise<any[]>;
+  abstract getJoinClause(
+    db: DriverSpec[D]["adapter"],
+    join: SubsetQuery["joins"][number]
+  ): string | Knex.Raw<any> | RawBuilder<unknown>;
+
+  myNow(timestamp?: number): string {
+    const dt: DateTime =
+      timestamp === undefined
+        ? DateTime.local()
+        : DateTime.fromSeconds(timestamp);
+    return dt.toFormat("yyyy-MM-dd HH:mm:ss");
+  }
+
+  hydrate<T>(rows: T[]): T[] {
+    return rows.map((row: any) => {
+      // Prevents a nullable relation from being hydrated as an object whose fields are all null
+      const nestedKeys = Object.keys(row).filter((key) => key.includes("__"));
+      const groups = _.groupBy(nestedKeys, (key) => key.split("__")[0]);
+      const nullKeys = Object.keys(groups).filter(
+        (key) =>
+          groups[key].length > 1 &&
+          groups[key].every((field) => row[field] === null)
+      );
+
+      const hydrated = Object.keys(row).reduce((r, field) => {
+        if (!field.includes("__")) {
+          if (Array.isArray(row[field]) && _.isObject(row[field][0])) {
+            r[field] = this.hydrate(row[field]);
+            return r;
+          } else {
+            r[field] = row[field];
+            return r;
+          }
+        }
+
+        const parts = field.split("__");
+        const objPath =
+          parts[0] +
+          parts
+            .slice(1)
+            .map((part) => `[${part}]`)
+            .join("");
+        _.set(
+          r,
+          objPath,
+          row[field] && Array.isArray(row[field]) && _.isObject(row[field][0])
+            ? this.hydrate(row[field])
+            : row[field]
+        );
+
+        return r;
+      }, {} as any);
+      nullKeys.map((nullKey) => (hydrated[nullKey] = null));
+
+      return hydrated;
+    });
+  }
+}
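hydrate() turns the flat, "__"-delimited columns that a joined query returns into nested objects, recursing into arrays of objects, and collapses a nullable relation to null when every one of its columns came back null. A worked example based on the code above (the row shape is illustrative):

const flatRows = [
  {
    id: 1,
    title: "Hello",
    author__id: 10,
    author__name: "kim",
    category__id: null,   // every category__* column is null...
    category__name: null,
  },
];

// model.hydrate(flatRows) produces:
// [
//   {
//     id: 1,
//     title: "Hello",
//     author: { id: 10, name: "kim" },
//     category: null,     // ...so the nested object collapses to null
//   },
// ]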