sonamu 0.3.1 → 0.4.1
This diff compares the contents of two publicly released versions of the package as published to a supported registry. It is provided for informational purposes only and reflects the changes between those versions as they appear in the public registry.
- package/.pnp.cjs +11 -0
- package/dist/base-model-BzMJ2E_I.d.mts +43 -0
- package/dist/base-model-CWRKUX49.d.ts +43 -0
- package/dist/bin/cli.js +118 -89
- package/dist/bin/cli.js.map +1 -1
- package/dist/bin/cli.mjs +74 -45
- package/dist/bin/cli.mjs.map +1 -1
- package/dist/chunk-FLPD24HS.mjs +231 -0
- package/dist/chunk-FLPD24HS.mjs.map +1 -0
- package/dist/chunk-I2MMJRJN.mjs +1550 -0
- package/dist/chunk-I2MMJRJN.mjs.map +1 -0
- package/dist/{chunk-MPXE4IHO.mjs → chunk-PP2PSSAG.mjs} +5284 -5617
- package/dist/chunk-PP2PSSAG.mjs.map +1 -0
- package/dist/chunk-QK5XXJUX.mjs +280 -0
- package/dist/chunk-QK5XXJUX.mjs.map +1 -0
- package/dist/chunk-U636LQJJ.js +231 -0
- package/dist/chunk-U636LQJJ.js.map +1 -0
- package/dist/chunk-W7KDVJLQ.js +280 -0
- package/dist/chunk-W7KDVJLQ.js.map +1 -0
- package/dist/{chunk-YXILRRDT.js → chunk-XT6LHCX5.js} +5252 -5585
- package/dist/chunk-XT6LHCX5.js.map +1 -0
- package/dist/chunk-Z2P7XTXE.js +1550 -0
- package/dist/chunk-Z2P7XTXE.js.map +1 -0
- package/dist/database/drivers/knex/base-model.d.mts +16 -0
- package/dist/database/drivers/knex/base-model.d.ts +16 -0
- package/dist/database/drivers/knex/base-model.js +55 -0
- package/dist/database/drivers/knex/base-model.js.map +1 -0
- package/dist/database/drivers/knex/base-model.mjs +56 -0
- package/dist/database/drivers/knex/base-model.mjs.map +1 -0
- package/dist/database/drivers/kysely/base-model.d.mts +22 -0
- package/dist/database/drivers/kysely/base-model.d.ts +22 -0
- package/dist/database/drivers/kysely/base-model.js +64 -0
- package/dist/database/drivers/kysely/base-model.js.map +1 -0
- package/dist/database/drivers/kysely/base-model.mjs +65 -0
- package/dist/database/drivers/kysely/base-model.mjs.map +1 -0
- package/dist/index.d.mts +220 -926
- package/dist/index.d.ts +220 -926
- package/dist/index.js +13 -26
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +18 -31
- package/dist/index.mjs.map +1 -1
- package/dist/model-CAH_4oQh.d.mts +1042 -0
- package/dist/model-CAH_4oQh.d.ts +1042 -0
- package/import-to-require.js +27 -0
- package/package.json +23 -2
- package/src/api/caster.ts +6 -0
- package/src/api/code-converters.ts +3 -1
- package/src/api/sonamu.ts +41 -22
- package/src/bin/cli.ts +78 -46
- package/src/database/_batch_update.ts +16 -11
- package/src/database/base-model.abstract.ts +97 -0
- package/src/database/base-model.ts +214 -280
- package/src/database/code-generator.ts +72 -0
- package/src/database/db.abstract.ts +75 -0
- package/src/database/db.ts +21 -82
- package/src/database/drivers/knex/base-model.ts +55 -0
- package/src/database/drivers/knex/client.ts +209 -0
- package/src/database/drivers/knex/db.ts +227 -0
- package/src/database/drivers/knex/generator.ts +659 -0
- package/src/database/drivers/kysely/base-model.ts +89 -0
- package/src/database/drivers/kysely/client.ts +309 -0
- package/src/database/drivers/kysely/db.ts +238 -0
- package/src/database/drivers/kysely/generator.ts +714 -0
- package/src/database/types.ts +117 -0
- package/src/database/upsert-builder.ts +31 -18
- package/src/entity/entity-utils.ts +1 -1
- package/src/entity/migrator.ts +98 -693
- package/src/index.ts +1 -1
- package/src/syncer/syncer.ts +69 -27
- package/src/templates/generated_http.template.ts +14 -0
- package/src/templates/kysely_types.template.ts +205 -0
- package/src/templates/model.template.ts +2 -139
- package/src/templates/service.template.ts +3 -1
- package/src/testing/_relation-graph.ts +111 -0
- package/src/testing/fixture-manager.ts +216 -332
- package/src/types/types.ts +56 -6
- package/src/utils/utils.ts +56 -4
- package/src/utils/zod-error.ts +189 -0
- package/tsconfig.json +2 -2
- package/tsup.config.js +11 -10
- package/dist/chunk-MPXE4IHO.mjs.map +0 -1
- package/dist/chunk-YXILRRDT.js.map +0 -1
- package/src/database/{knex-plugins → drivers/knex/plugins}/knex-on-duplicate-update.ts +0 -0
package/src/database/drivers/kysely/base-model.ts

@@ -0,0 +1,89 @@
+import { Kysely, SelectQueryBuilder } from "kysely";
+import { SubsetQuery, isCustomJoinClause } from "../../../types/types";
+import { BaseModelClassAbstract } from "../../base-model";
+import { DB } from "../../db";
+import { DBPreset, Database } from "../../types";
+import { KyselyClient } from "./client";
+import { UpsertBuilder } from "../../upsert-builder";
+import { UndirectedOrderByExpression } from "kysely/dist/cjs/parser/order-by-parser";
+import { EntityManager } from "../../../entity/entity-manager";
+import inflection from "inflection";
+
+type TB = keyof Database;
+
+export class BaseModelClass extends BaseModelClassAbstract<"kysely"> {
+  getDB(which: DBPreset): Kysely<Database> {
+    return DB.getDB(which) as Kysely<Database>;
+  }
+
+  async destroy(): Promise<void> {
+    return DB.destroy();
+  }
+
+  getUpsertBuilder() {
+    return new UpsertBuilder<"kysely">();
+  }
+
+  protected applyJoins(
+    clonedQb: KyselyClient,
+    joins: SubsetQuery["joins"]
+  ): KyselyClient {
+    for (const join of joins) {
+      if (isCustomJoinClause(join)) {
+        throw new Error("Custom join clause is not supported in Kysely");
+      }
+
+      if (join.join === "inner") {
+        clonedQb.innerJoin(`${join.table} as ${join.as}`, join.from, join.to);
+      } else if (join.join === "outer") {
+        clonedQb.leftJoin(`${join.table} as ${join.as}`, join.from, join.to);
+      }
+    }
+
+    return clonedQb;
+  }
+
+  protected async executeCountQuery(
+    qb: SelectQueryBuilder<Database, TB, any>
+  ): Promise<number> {
+    const result = await qb
+      .clearSelect()
+      .select((eb) => eb.fn.count("id" as any).as("total"))
+      .executeTakeFirstOrThrow();
+    return Number(result.total);
+  }
+
+  parseOrderBy(
+    orderBy: string
+  ): [
+    UndirectedOrderByExpression<Database, keyof Database, {}>,
+    "asc" | "desc",
+  ] {
+    const [_column, order] = orderBy.split("-");
+    // FIXME: handle the case of two or more joins
+    const [table, column] = _column.includes(".")
+      ? _column.split(".")
+      : [inflection.tableize(this.modelName), _column];
+
+    if (order !== "asc" && order !== "desc") {
+      throw new Error("parseOrderBy: Invalid order");
+    }
+    if (!column) {
+      throw new Error("parseOrderBy: Invalid column");
+    }
+
+    const entity = EntityManager.get(inflection.classify(table));
+    if (!entity.props.find((p) => p.name === column)) {
+      throw new Error("parseOrderBy: Column does not exist on the current entity: ");
+    }
+
+    return [
+      `${table}.${column}` as unknown as UndirectedOrderByExpression<
+        Database,
+        keyof Database,
+        {}
+      >,
+      order as "asc" | "desc",
+    ];
+  }
+}
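For context on the orderBy convention the new Kysely `BaseModelClass` expects, here is a small standalone sketch of the same "column-direction" parsing. It mirrors the logic above but omits the `EntityManager` validation step; the function and model names are illustrative and not part of the package.

```ts
import inflection from "inflection";

// Hypothetical helper mirroring parseOrderBy's string convention:
// "created_at-desc" → bare column, qualified with the model's table name;
// "posts.id-asc"    → already-qualified column, kept as-is.
function sketchParseOrderBy(
  modelName: string,
  orderBy: string
): [string, "asc" | "desc"] {
  const [rawColumn, order] = orderBy.split("-");
  const [table, column] = rawColumn.includes(".")
    ? rawColumn.split(".")
    : [inflection.tableize(modelName), rawColumn]; // "User" → "users"
  if (order !== "asc" && order !== "desc") {
    throw new Error("Invalid order");
  }
  return [`${table}.${column}`, order];
}

console.log(sketchParseOrderBy("User", "created_at-desc")); // ["users.created_at", "desc"]
console.log(sketchParseOrderBy("User", "posts.id-asc"));    // ["posts.id", "asc"]
```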
package/src/database/drivers/kysely/client.ts

@@ -0,0 +1,309 @@
+import {
+  ComparisonOperatorExpression,
+  FileMigrationProvider,
+  Kysely,
+  Migrator,
+  MysqlDialect,
+  Transaction,
+  sql,
+} from "kysely";
+import {
+  Database,
+  DatabaseClient,
+  DriverSpec,
+  KyselyConfig,
+  WhereClause,
+} from "../../types";
+import _ from "lodash";
+import { asArray } from "../../../utils/model";
+import { createPool } from "mysql2";
+
+type TB = keyof Database;
+type TE = TB & string;
+
+// Extended Transaction type definition
+export type ExtendedKyselyTrx = Transaction<Database> &
+  DatabaseClient<"kysely">;
+
+export class KyselyClient implements DatabaseClient<"kysely"> {
+  private kysely: Kysely<Database>;
+
+  get connectionInfo() {
+    return {
+      host: this.config.host,
+      port: this.config.port,
+      database: this.config.database,
+      user: this.config.user,
+      password: this.config.password,
+    };
+  }
+
+  private _qb?: DriverSpec["kysely"]["queryBuilder"];
+  set qb(qb: DriverSpec["kysely"]["queryBuilder"]) {
+    this._qb = qb;
+  }
+  get qb() {
+    if (!this._qb) {
+      throw new Error("QueryBuilder is not initialized");
+    }
+    return this._qb;
+  }
+
+  private _migrator?: Migrator;
+  set migrator(migrator: Migrator) {
+    this._migrator = migrator;
+  }
+  get migrator() {
+    if (!this._migrator) {
+      throw new Error("Migrator is not initialized");
+    }
+    return this._migrator;
+  }
+
+  get sql() {
+    const bindings = this.qb.compile().parameters.map((p) => JSON.stringify(p));
+    return this.qb.compile().sql.replace(/\?/g, () => bindings.shift()!);
+  }
+
+  constructor(
+    private config: KyselyConfig,
+    kysely?: Kysely<Database>
+  ) {
+    const { onCreateConnection, migration, ...rest } = this.config;
+
+    this.kysely =
+      kysely ??
+      new Kysely({
+        dialect: new MysqlDialect({
+          onCreateConnection,
+          pool: createPool(rest),
+        }),
+      });
+
+    this.migrator = new Migrator({
+      db: this.kysely,
+      provider: new FileMigrationProvider(migration as any),
+    });
+  }
+
+  from(table: string) {
+    this.qb = this.kysely.selectFrom(table as TB);
+    return this;
+  }
+
+  innerJoin(table: string, k1: string, k2: string) {
+    this.qb = this.qb.innerJoin(table as TB, k1 as TB, k2 as TB);
+    return this;
+  }
+
+  leftJoin(table: string, k1: string, k2: string) {
+    this.qb = this.qb.leftJoin(table as TB, k1 as TB, k2 as TB);
+    return this;
+  }
+
+  clearSelect() {
+    this.qb = this.qb.clearSelect();
+    return this;
+  }
+
+  select(columns: string | string[]) {
+    this.qb = this.qb.select(asArray(columns) as TE[]);
+    return this;
+  }
+
+  selectAll() {
+    this.qb = this.qb.selectAll();
+    return this;
+  }
+
+  where(ops: WhereClause | WhereClause[]) {
+    if (typeof ops[0] === "string") {
+      ops = [ops as WhereClause];
+    }
+    for (const [lhs, op, rhs] of asArray(ops)) {
+      this.qb = this.qb.where(
+        lhs as any,
+        op as ComparisonOperatorExpression,
+        rhs
+      );
+    }
+    return this;
+  }
+
+  orWhere(ops: WhereClause | WhereClause[]) {
+    this.qb = this.qb.where((eb) =>
+      eb.or(
+        ops.map(([lhs, op, rhs]) =>
+          eb(lhs as any, op as ComparisonOperatorExpression, rhs)
+        )
+      )
+    );
+    return this;
+  }
+
+  async insert(table: string, data: any[]) {
+    await this.kysely
+      .insertInto(table as TB)
+      .values(data)
+      .execute();
+  }
+
+  async upsert(table: string, data: any[]) {
+    const q = this.kysely
+      .insertInto(table as TB)
+      .values(data)
+      .onDuplicateKeyUpdate(() => {
+        const updates: Record<string, any> = {};
+        // Build the update set from the keys of the first record
+        Object.keys(data[0]).forEach((key) => {
+          updates[key] = sql`VALUES(${sql.raw(key)})`; // use the VALUES() syntax
+        });
+        return updates;
+      });
+    await q.execute();
+  }
+
+  limit(limit: number) {
+    this.qb = this.qb.limit(limit);
+    return this;
+  }
+
+  offset(offset: number) {
+    this.qb = this.qb.offset(offset);
+    return this;
+  }
+
+  count(column: string, alias?: string) {
+    this.qb = this.qb.select((eb) =>
+      eb.fn.count(column as any).as(alias ?? column)
+    );
+    return this;
+  }
+
+  distinct(column: string) {
+    this.qb = this.qb.distinctOn(column as any);
+    return this;
+  }
+
+  first() {
+    this.qb = this.qb.limit(1);
+    return this;
+  }
+
+  async execute(trx?: ExtendedKyselyTrx): Promise<any[]> {
+    if (trx) {
+      const { rows } = await trx.executeQuery(this.qb.compile());
+      return rows as any[];
+    }
+    return this.qb.execute();
+  }
+
+  async pluck(column: string): Promise<any[]> {
+    const result = await this.execute();
+    return result.map((row) => row[column]);
+  }
+
+  createRawQuery(query: string, bindings?: any[]) {
+    if (bindings?.length) {
+      query = query.replace(
+        /\?/g,
+        () => sql.lit(bindings.shift()).compile(this.kysely).sql
+      );
+    }
+    return sql.raw(query).compile(this.kysely).sql;
+  }
+
+  async raw<R>(query: string, bindings?: any[]): Promise<R[]> {
+    if (bindings?.length) {
+      query = query.replace(
+        /\?/g,
+        () => sql.lit(bindings.shift()).compile(this.kysely).sql
+      );
+    }
+    const { rows } = await sql.raw(query).execute(this.kysely);
+    return rows as R[];
+  }
+
+  async truncate(table: string) {
+    await sql`truncate table ${sql.table(table)}`.execute(this.kysely);
+  }
+
+  trx<T>(callback: (trx: KyselyClient) => Promise<T>) {
+    return this.kysely
+      .transaction()
+      .execute(async (trx) => callback(new KyselyClient(this.config, trx)));
+  }
+
+  destroy() {
+    return this.kysely.destroy();
+  }
+
+  clearQueryParts(parts: ("order" | "offset" | "limit")[]) {
+    for (const part of parts) {
+      switch (part) {
+        case "order":
+          this.qb = this.qb.clearOrderBy();
+          break;
+        case "offset":
+          this.qb = this.qb.clearOffset();
+          break;
+        case "limit":
+          this.qb = this.qb.clearLimit();
+          break;
+      }
+    }
+    return this;
+  }
+
+  clone() {
+    const client = new KyselyClient(this.config);
+    client.qb = this.qb;
+    return client;
+  }
+
+  // Migrator
+
+  async getMigrations() {
+    const result = await this.migrator.getMigrations();
+    return result.filter((r) => !r.executedAt).map((r) => r.name);
+  }
+
+  async status() {
+    const pendings = await this.getMigrations();
+    return 0 - pendings.length;
+  }
+
+  async migrate() {
+    const { results, error } = await this.migrator.migrateToLatest();
+    if (error) {
+      throw error;
+    }
+
+    return [0, results?.map((r) => r.migrationName)];
+  }
+
+  async rollback() {
+    const { results, error } = await this.migrator.migrateDown();
+    if (error) {
+      throw error;
+    }
+
+    return [0, results?.map((r) => r.migrationName)];
+  }
+
+  async rollbackAll() {
+    while (true) {
+      const { error, results } = await this.migrator.migrateDown();
+
+      if (error) {
+        console.error("Error while rollbackAll:", error);
+        throw error;
+      }
+
+      if (!results || results.length === 0) {
+        console.log("RollbackAll completed");
+        break;
+      }
+    }
+  }
+}
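To make the shape of the new fluent client easier to read, a minimal usage sketch follows. The import path, table and column names, and the where tuples are illustrative assumptions; inside the package, KyselyClient instances are created internally (see DBKyselyClass.getClient in the next hunk).

```ts
// Assumption: this snippet sits alongside the new driver files.
import { KyselyClient } from "./client";

async function findRecentUsers(client: KyselyClient) {
  // Builder methods assign to client.qb and return `this`, so calls chain.
  const rows = await client
    .from("users")
    .select(["users.id", "users.email"])
    .where([["users.created_at", ">", "2024-01-01"]]) // array of [lhs, op, rhs] tuples
    .limit(10)
    .execute();

  console.log(client.sql); // compiled SQL with bindings interpolated, handy for debugging
  return rows;
}

// Statement helpers bypass the builder and run immediately, e.g.:
// await client.insert("users", [{ email: "a@example.com" }]);
// await client.trx(async (trx) => { /* calls on `trx` share one transaction */ });
```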
package/src/database/drivers/kysely/db.ts

@@ -0,0 +1,238 @@
+import _ from "lodash";
+import { promises } from "fs";
+import path from "path";
+import { createPool } from "mysql2";
+import {
+  DBPreset,
+  Database,
+  KyselyBaseConfig,
+  KyselyConfig,
+  SonamuKyselyDBConfig,
+} from "../../types";
+import { FileMigrationProviderProps, Kysely, MysqlDialect, sql } from "kysely";
+import { KyselyClient } from "./client";
+import { DBClass } from "../../db.abstract";
+import { Sonamu } from "../../../api";
+import { KyselyGenerator } from "./generator";
+
+export class DBKyselyClass extends DBClass {
+  public migrationTable = "kysely_migration";
+  public generator: KyselyGenerator = new KyselyGenerator();
+  public baseConfig?: KyselyBaseConfig;
+
+  public declare _fullConfig?: SonamuKyselyDBConfig;
+  set fullConfig(config: SonamuKyselyDBConfig) {
+    this._fullConfig = config;
+  }
+  get fullConfig() {
+    if (!this._fullConfig) {
+      throw new Error("DB Config has not been initialized");
+    }
+    return this._fullConfig;
+  }
+
+  private wdb?: Kysely<Database>;
+  private rdb?: Kysely<Database>;
+
+  private _tdb: KyselyClient | null = null;
+  set tdb(tdb: KyselyClient) {
+    this._tdb = tdb;
+  }
+  get tdb(): KyselyClient {
+    if (this._tdb === null) {
+      throw new Error("tdb has not been initialized");
+    }
+    return this._tdb;
+  }
+
+  private _fdb: KyselyClient | null = null;
+  set fdb(fdb: KyselyClient) {
+    this._fdb = fdb;
+  }
+  get fdb(): KyselyClient {
+    if (this._fdb === null) {
+      throw new Error("fdb has not been initialized");
+    }
+    return this._fdb;
+  }
+
+  get connectionInfo() {
+    return _.mapValues(this.fullConfig, (config) => ({
+      host: config.host ?? "localhost",
+      port: config.port ?? 3306,
+      database: config.database,
+      user: config.user,
+      password: config.password,
+    }));
+  }
+
+  constructor() {
+    super();
+  }
+
+  init(config: KyselyBaseConfig) {
+    this.baseConfig = config;
+    this.fullConfig = this.generateDBConfig(config);
+  }
+
+  async testInit() {
+    if (this._tdb !== null) {
+      return;
+    }
+
+    if (this.fullConfig.test && this.fullConfig.production_master) {
+      const tConnInfo = this.fullConfig.test;
+      const pConnInfo = this.fullConfig.production_master;
+
+      if (
+        `${tConnInfo.host ?? "localhost"}:${tConnInfo.port ?? 3306}/${
+          tConnInfo.database
+        }` ===
+        `${pConnInfo.host ?? "localhost"}:${pConnInfo.port ?? 3306}/${pConnInfo.database}`
+      ) {
+        throw new Error(
+          `The test DB and the production DB are configured to use the same database.`
+        );
+      }
+    }
+
+    this.tdb = new KyselyClient(this.fullConfig.test);
+    this.fdb = new KyselyClient(this.fullConfig.fixture_local);
+  }
+
+  get config(): SonamuKyselyDBConfig {
+    return this.fullConfig;
+  }
+
+  getDB(which: DBPreset) {
+    const instanceName = which === "w" ? "wdb" : "rdb";
+
+    if (!this[instanceName]) {
+      const _config: KyselyConfig = this.getCurrentConfig(which);
+      const { onCreateConnection, migration, ...config } = _config;
+
+      this[instanceName] = new Kysely<Database>({
+        dialect: new MysqlDialect({
+          onCreateConnection,
+          pool: createPool(config),
+        }),
+      });
+    }
+
+    return this[instanceName]!;
+  }
+
+  getClient(mode: keyof SonamuKyselyDBConfig) {
+    return new KyselyClient(this.fullConfig[mode]);
+  }
+
+  async destroy(): Promise<void> {
+    if (this.wdb !== undefined) {
+      await this.wdb.destroy();
+      this.wdb = undefined;
+    }
+    if (this.rdb !== undefined) {
+      await this.rdb.destroy();
+      this.rdb = undefined;
+    }
+  }
+
+  async testDestroy() {
+    if (this._tdb) {
+      await this._tdb.destroy();
+      this._tdb = null;
+    }
+    if (this._fdb) {
+      await this._fdb.destroy();
+      this._fdb = null;
+    }
+  }
+
+  raw(db: Kysely<Database>, query: string) {
+    return sql`${query}`.execute(db);
+  }
+
+  private generateDBConfig(config: KyselyBaseConfig): SonamuKyselyDBConfig {
+    const defaultKyselyConfig = _.merge(
+      {
+        migration: {
+          fs: promises,
+          path,
+          migrationFolder: path.join(Sonamu.apiRootPath, "/dist/migrations"),
+        } as FileMigrationProviderProps,
+        port: 3306,
+        host: "localhost",
+        database: config.database,
+      },
+      config.defaultOptions
+    );
+
+    // Local environment configuration
+    const test = _.merge({}, defaultKyselyConfig, {
+      database: `${config.database}_test`,
+    });
+
+    const fixture_local = _.merge({}, defaultKyselyConfig, {
+      database: `${config.database}_fixture`,
+    });
+
+    // Development environment configuration
+    const devMasterOptions = config.environments?.development;
+    const devSlaveOptions = config.environments?.development_slave;
+    const development_master = _.merge(
+      {},
+      defaultKyselyConfig,
+      devMasterOptions
+    );
+    const development_slave = _.merge(
+      {},
+      defaultKyselyConfig,
+      devMasterOptions,
+      devSlaveOptions
+    );
+    const fixture_remote = _.merge({}, defaultKyselyConfig, devMasterOptions, {
+      database: `${config.database}_fixture`,
+    });
+
+    // Production environment configuration
+    const prodMasterOptions = config.environments?.production ?? {};
+    const prodSlaveOptions = config.environments?.production_slave ?? {};
+    const production_master = _.merge(
+      {},
+      defaultKyselyConfig,
+      prodMasterOptions
+    );
+    const production_slave = _.merge(
+      {},
+      defaultKyselyConfig,
+      prodMasterOptions,
+      prodSlaveOptions
+    );
+
+    return {
+      test,
+      fixture_local,
+      fixture_remote,
+      development_master,
+      development_slave,
+      production_master,
+      production_slave,
+    };
+  }
+
+  /**
+   * Returns the configs for the given keys, de-duplicated. (Configs sharing the same host/port/database are treated as duplicates.)
+   */
+  getUniqueConfigs(keys: (keyof SonamuKyselyDBConfig)[]) {
+    const targets = keys.map((key) => ({
+      connKey: key,
+      options: this.fullConfig[key as keyof SonamuKyselyDBConfig],
+    }));
+
+    return _.uniqBy(
+      targets,
+      ({ options }) =>
+        `${options.host ?? "localhost"}:${options.port ?? 3306}/${options.database}`
+    );
+  }
+}
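As a final orientation aid, here is a sketch of how DBKyselyClass expands a single base config into the seven per-environment presets. The option values are made up, the `as any` cast papers over the exact KyselyBaseConfig type (defined in src/database/types.ts), and Sonamu is assumed to be initialized, since generateDBConfig reads Sonamu.apiRootPath for the migration folder.

```ts
// Assumption: this snippet sits alongside the new driver files and Sonamu has been booted.
import { DBKyselyClass } from "./db";

const db = new DBKyselyClass();
db.init({
  database: "myservice",
  defaultOptions: { user: "app", password: "secret" },
  environments: {
    development: { host: "dev-db.internal" },
    production: { host: "prod-db.internal" },
    production_slave: { host: "prod-db-ro.internal" },
  },
} as any); // illustrative shape; the real KyselyBaseConfig type is in src/database/types.ts

// init() → generateDBConfig() derives these presets from the base config:
//   test               → "myservice_test" on localhost
//   fixture_local      → "myservice_fixture" on localhost
//   fixture_remote     → "myservice_fixture" on the development host
//   development_master / development_slave
//   production_master  / production_slave
console.log(db.connectionInfo.test.database); // "myservice_test"
```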