sonamu 0.3.1 → 0.4.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.pnp.cjs +11 -0
- package/dist/base-model-BzMJ2E_I.d.mts +43 -0
- package/dist/base-model-CWRKUX49.d.ts +43 -0
- package/dist/bin/cli.js +118 -89
- package/dist/bin/cli.js.map +1 -1
- package/dist/bin/cli.mjs +74 -45
- package/dist/bin/cli.mjs.map +1 -1
- package/dist/chunk-6HSW7OS3.js +1567 -0
- package/dist/chunk-6HSW7OS3.js.map +1 -0
- package/dist/chunk-FLPD24HS.mjs +231 -0
- package/dist/chunk-FLPD24HS.mjs.map +1 -0
- package/dist/{chunk-MPXE4IHO.mjs → chunk-PP2PSSAG.mjs} +5284 -5617
- package/dist/chunk-PP2PSSAG.mjs.map +1 -0
- package/dist/chunk-QK5XXJUX.mjs +280 -0
- package/dist/chunk-QK5XXJUX.mjs.map +1 -0
- package/dist/chunk-S6FYTR3V.mjs +1567 -0
- package/dist/chunk-S6FYTR3V.mjs.map +1 -0
- package/dist/chunk-U636LQJJ.js +231 -0
- package/dist/chunk-U636LQJJ.js.map +1 -0
- package/dist/chunk-W7KDVJLQ.js +280 -0
- package/dist/chunk-W7KDVJLQ.js.map +1 -0
- package/dist/{chunk-YXILRRDT.js → chunk-XT6LHCX5.js} +5252 -5585
- package/dist/chunk-XT6LHCX5.js.map +1 -0
- package/dist/database/drivers/knex/base-model.d.mts +16 -0
- package/dist/database/drivers/knex/base-model.d.ts +16 -0
- package/dist/database/drivers/knex/base-model.js +55 -0
- package/dist/database/drivers/knex/base-model.js.map +1 -0
- package/dist/database/drivers/knex/base-model.mjs +56 -0
- package/dist/database/drivers/knex/base-model.mjs.map +1 -0
- package/dist/database/drivers/kysely/base-model.d.mts +22 -0
- package/dist/database/drivers/kysely/base-model.d.ts +22 -0
- package/dist/database/drivers/kysely/base-model.js +64 -0
- package/dist/database/drivers/kysely/base-model.js.map +1 -0
- package/dist/database/drivers/kysely/base-model.mjs +65 -0
- package/dist/database/drivers/kysely/base-model.mjs.map +1 -0
- package/dist/index.d.mts +222 -928
- package/dist/index.d.ts +222 -928
- package/dist/index.js +13 -26
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +18 -31
- package/dist/index.mjs.map +1 -1
- package/dist/model-CAH_4oQh.d.mts +1042 -0
- package/dist/model-CAH_4oQh.d.ts +1042 -0
- package/import-to-require.js +27 -0
- package/package.json +24 -3
- package/src/api/caster.ts +6 -0
- package/src/api/code-converters.ts +3 -1
- package/src/api/sonamu.ts +41 -22
- package/src/bin/cli.ts +79 -46
- package/src/database/_batch_update.ts +16 -11
- package/src/database/base-model.abstract.ts +97 -0
- package/src/database/base-model.ts +214 -280
- package/src/database/code-generator.ts +72 -0
- package/src/database/db.abstract.ts +75 -0
- package/src/database/db.ts +21 -82
- package/src/database/drivers/knex/base-model.ts +55 -0
- package/src/database/drivers/knex/client.ts +209 -0
- package/src/database/drivers/knex/db.ts +227 -0
- package/src/database/drivers/knex/generator.ts +659 -0
- package/src/database/drivers/kysely/base-model.ts +89 -0
- package/src/database/drivers/kysely/client.ts +309 -0
- package/src/database/drivers/kysely/db.ts +238 -0
- package/src/database/drivers/kysely/generator.ts +714 -0
- package/src/database/types.ts +117 -0
- package/src/database/upsert-builder.ts +31 -18
- package/src/entity/entity-utils.ts +1 -1
- package/src/entity/migrator.ts +148 -711
- package/src/index.ts +1 -1
- package/src/syncer/syncer.ts +69 -27
- package/src/templates/generated_http.template.ts +14 -0
- package/src/templates/kysely_types.template.ts +205 -0
- package/src/templates/model.template.ts +2 -139
- package/src/templates/service.template.ts +3 -1
- package/src/testing/_relation-graph.ts +111 -0
- package/src/testing/fixture-manager.ts +216 -332
- package/src/types/types.ts +56 -6
- package/src/utils/utils.ts +56 -4
- package/src/utils/zod-error.ts +189 -0
- package/tsconfig.json +2 -2
- package/tsup.config.js +11 -10
- package/dist/chunk-MPXE4IHO.mjs.map +0 -1
- package/dist/chunk-YXILRRDT.js.map +0 -1
- package/src/database/{knex-plugins → drivers/knex/plugins}/knex-on-duplicate-update.ts +0 -0
package/src/database/db.abstract.ts
ADDED

```diff
@@ -0,0 +1,75 @@
+import { Knex } from "knex";
+import { Kysely } from "kysely";
+import path from "path";
+import { KnexClient } from "./drivers/knex/client";
+import { KyselyClient } from "./drivers/kysely/client";
+import {
+  SonamuDBConfig,
+  DBPreset,
+  Database,
+  SonamuDBBaseConfig,
+  KnexConfig,
+} from "./types";
+
+// db.ts에 포함시킬 경우 순환참조 발생
+
+export abstract class DBClass {
+  public _fullConfig?: SonamuDBConfig;
+  set fullConfig(config: SonamuDBConfig) {
+    this._fullConfig = config;
+  }
+  get fullConfig() {
+    if (!this._fullConfig) {
+      throw new Error("FixtureManager has not been initialized");
+    }
+    return this._fullConfig;
+  }
+
+  abstract tdb: KnexClient | KyselyClient;
+  abstract fdb: KnexClient | KyselyClient;
+
+  abstract testInit(): Promise<void>;
+  abstract getDB(which: DBPreset): Knex | Kysely<Database>;
+  abstract destroy(): Promise<void>;
+  abstract raw(db: Knex | Kysely<Database>, query: string): any;
+
+  async getBaseConfig(rootPath: string): Promise<SonamuDBBaseConfig> {
+    const baseConfigPath = path.join(rootPath, "/dist/configs/db.js");
+    const module = await import(baseConfigPath);
+    const config = module.default?.default ?? module.default ?? module;
+    return config;
+  }
+
+  getCurrentConfig(which?: DBPreset) {
+    switch (process.env.NODE_ENV ?? "development") {
+      case "development":
+      case "staging":
+        return which === "w"
+          ? this.fullConfig["development_master"]
+          : this.fullConfig["development_slave"] ??
+              this.fullConfig["development_master"];
+        break;
+      case "production":
+        return which === "w"
+          ? this.fullConfig["production_master"]
+          : this.fullConfig["production_slave"] ??
+              this.fullConfig["production_master"];
+        break;
+      case "test":
+        return this.fullConfig["test"];
+        break;
+      default:
+        throw new Error(
+          `현재 ENV ${process.env.NODE_ENV}에는 설정 가능한 DB설정이 없습니다.`
+        );
+    }
+  }
+
+  toClient(db: Knex | Kysely<Database>): KnexClient | KyselyClient {
+    if (db instanceof Kysely) {
+      return new KyselyClient(this.getCurrentConfig(), db);
+    } else {
+      return new KnexClient(this.getCurrentConfig() as KnexConfig, db);
+    }
+  }
+}
```
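`getBaseConfig` above unwraps the dynamically imported config with `module.default?.default ?? module.default ?? module`. A hedged illustration of why the double `default` can occur; the path below is a placeholder, not taken from the diff:

```ts
// When a TS module using `export default config` is compiled to CJS and then loaded via a
// dynamic import() from an ESM context, the whole CJS `module.exports` object (which itself
// carries a `.default` property) becomes the ESM default export. The chain below therefore
// resolves the value for ESM builds, CJS builds, and plain `module.exports = config` alike.
const mod = await import("/app/api/dist/configs/db.js"); // placeholder path
const config = mod.default?.default ?? mod.default ?? mod;
```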
package/src/database/db.ts
CHANGED
```diff
@@ -1,84 +1,23 @@
-export type DBPreset = "w" | "r";
-import knex, { Knex } from "knex";
 import path from "path";
-import {
-import {
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-    Sonamu.apiRootPath,
-    "/dist/configs/db.js"
-  );
-  try {
-    const knexfileModule = await import(dbConfigPath);
-    return (knexfileModule.default?.default ??
-      knexfileModule.default ??
-      knexfileModule) as SonamuDBConfig;
-  } catch {}
-
-  throw new ServiceUnavailableException(
-    `DB설정 파일을 찾을 수 없습니다. ${dbConfigPath}`
-  );
-}
-
-getDB(which: DBPreset): Knex {
-  const dbConfig = Sonamu.dbConfig;
-
-  const instanceName = which === "w" ? "wdb" : "rdb";
-
-  if (!this[instanceName]) {
-    let config: Knex.Config;
-    switch (process.env.NODE_ENV ?? "development") {
-      case "development":
-      case "staging":
-        config =
-          which === "w"
-            ? dbConfig["development_master"]
-            : dbConfig["development_slave"] ?? dbConfig["development_master"];
-        break;
-      case "production":
-        config =
-          which === "w"
-            ? dbConfig["production_master"]
-            : dbConfig["production_slave"] ?? dbConfig["production_master"];
-        break;
-      case "test":
-        config = dbConfig["test"];
-        break;
-      default:
-        throw new Error(
-          `현재 ENV ${process.env.NODE_ENV}에는 설정 가능한 DB설정이 없습니다.`
-        );
-    }
-    this[instanceName] = knex(config);
-  }
-
-  return this[instanceName]!;
-}
-
-async destroy(): Promise<void> {
-  if (this.wdb !== undefined) {
-    await this.wdb.destroy();
-    this.wdb = undefined;
-  }
-  if (this.rdb !== undefined) {
-    await this.rdb.destroy();
-    this.rdb = undefined;
-  }
+import { findApiRootPath } from "../utils/utils";
+import { DBKnexClass } from "./drivers/knex/db";
+import { DBKyselyClass } from "./drivers/kysely/db";
+import { SonamuDBBaseConfig } from "./types";
+
+const dbConfigPath: string = path.join(
+  findApiRootPath(),
+  "/dist/configs/db.js"
+);
+const knexfileModule = await import(dbConfigPath);
+
+export const DB = (() => {
+  const config = (knexfileModule.default?.default ??
+    knexfileModule.default ??
+    knexfileModule) as SonamuDBBaseConfig;
+  if (config.client === "knex") {
+    return new DBKnexClass();
+  } else if (config.client === "kysely") {
+    return new DBKyselyClass();
   }
-
-
+  throw new Error("지원하지 않는 DB 클라이언트입니다.");
+})();
```
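The rewritten `db.ts` now chooses the driver once, at module load, from the `client` field of the compiled config at `dist/configs/db.js`. Below is a hedged sketch of a matching source config for the knex driver; only `client` is read here, while `database` / `defaultOptions` / `environments` are the keys `drivers/knex/db.ts` (later in this diff) reads from a `KnexBaseConfig`. The full types live in `src/database/types.ts`, which this section does not show, so treat the exact shape as an assumption.

```ts
// Illustrative src/configs/db.ts (compiled to dist/configs/db.js); all values are placeholders.
const config = {
  client: "knex" as const, // "kysely" would make DB resolve to DBKyselyClass instead
  database: "myapp",       // base schema name; `${database}_test` / `_fixture` are derived from it
  defaultOptions: {
    connection: { user: "app", password: "secret" },
  },
  environments: {
    production: {
      connection: { host: "prod-db.internal" },
    },
  },
};

export default config;
```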
package/src/database/drivers/knex/base-model.ts
ADDED

```diff
@@ -0,0 +1,55 @@
+// base-model.knex.ts
+import { Knex } from "knex";
+import { SubsetQuery, isCustomJoinClause } from "../../../types/types";
+import { BaseModelClassAbstract } from "../../base-model";
+import { DB } from "../../db";
+import { KnexClient } from "./client";
+import { DBPreset } from "../../types";
+import { UpsertBuilder } from "../../upsert-builder";
+
+export class BaseModelClass extends BaseModelClassAbstract<"knex"> {
+  getDB(which: DBPreset): Knex {
+    return DB.getDB(which) as Knex;
+  }
+
+  async destroy(): Promise<void> {
+    return DB.destroy();
+  }
+
+  getUpsertBuilder() {
+    return new UpsertBuilder<"knex">();
+  }
+
+  protected applyJoins(
+    clonedQb: KnexClient,
+    joins: SubsetQuery["joins"]
+  ): KnexClient {
+    for (const join of joins) {
+      if (isCustomJoinClause(join)) {
+        if (join.join === "inner") {
+          clonedQb.qb = clonedQb.qb.innerJoin(
+            `${join.table} as ${join.as}`,
+            join.custom as any
+          );
+        } else {
+          clonedQb.qb = clonedQb.qb.leftJoin(
+            `${join.table} as ${join.as}`,
+            join.custom as any
+          );
+        }
+      } else {
+        if (join.join === "inner") {
+          clonedQb.innerJoin(`${join.table} as ${join.as}`, join.from, join.to);
+        } else if (join.join === "outer") {
+          clonedQb.leftJoin(`${join.table} as ${join.as}`, join.from, join.to);
+        }
+      }
+    }
+    return clonedQb;
+  }
+
+  protected async executeCountQuery(qb: Knex.QueryBuilder): Promise<number> {
+    const result = await qb.clear("select").count("* as total").first();
+    return Number(result?.total) ?? 0;
+  }
+}
```
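`applyJoins` above distinguishes plain subset joins from custom join clauses via `isCustomJoinClause`. A hedged illustration of the two shapes, inferred only from the fields the loop reads; the authoritative definitions are `SubsetQuery["joins"]` and `isCustomJoinClause` in `src/types/types.ts`, which this section does not include:

```ts
import { Knex } from "knex";

// Plain join: resolved as innerJoin/leftJoin(`${table} as ${as}`, from, to).
const plainJoin = {
  join: "inner" as const, // "outer" maps to leftJoin
  table: "posts",
  as: "posts",
  from: "users.id",
  to: "posts.user_id",
};

// Custom join clause: knex receives join.custom directly, so a knex join
// callback is one plausible value (hypothetical example).
const customJoin = {
  join: "outer" as const,
  table: "orders",
  as: "orders",
  custom: (clause: Knex.JoinClause) => clause.on("users.id", "=", "orders.user_id"),
};
```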
package/src/database/drivers/knex/client.ts
ADDED

```diff
@@ -0,0 +1,209 @@
+import knex, { Knex } from "knex";
+import { DatabaseClient, KnexConfig, WhereClause } from "../../types";
+import { asArray } from "../../../utils/model";
+import _ from "lodash";
+import { KnexGenerator } from "./generator";
+
+// 확장된 Transaction 타입 정의
+export type ExtendedKnexTrx = Knex.Transaction & DatabaseClient<"knex">;
+
+export class KnexClient implements DatabaseClient<"knex"> {
+  private knex: Knex;
+  generator: KnexGenerator = new KnexGenerator();
+
+  get connectionInfo() {
+    return {
+      host: this.knex.client.config.connection?.host ?? "localhost",
+      port: this.knex.client.config.connection?.port ?? 3306,
+      database: this.knex.client.config.connection?.database ?? "",
+      user: this.knex.client.config.connection?.user ?? "",
+      password: this.knex.client.config.connection?.password ?? "",
+    };
+  }
+
+  private _qb?: Knex.QueryBuilder;
+  set qb(qb: Knex.QueryBuilder) {
+    this._qb = qb;
+  }
+  get qb() {
+    if (!this._qb) {
+      throw new Error("QueryBuilder is not initialized");
+    }
+    return this._qb;
+  }
+
+  get sql() {
+    return this.qb.toQuery();
+  }
+
+  constructor(
+    private config: KnexConfig,
+    _knex?: Knex
+  ) {
+    this.knex = _knex ?? knex(this.config);
+  }
+
+  from(table: string): KnexClient {
+    this.qb = this.knex.from(table);
+    return this;
+  }
+
+  innerJoin(table: string, k1: string, k2: string) {
+    this.qb = this.qb.innerJoin(table, k1, k2);
+    return this;
+  }
+
+  leftJoin(table: string, k1: string, k2: string) {
+    this.qb = this.qb.leftJoin(table, k1, k2);
+    return this;
+  }
+
+  clearSelect() {
+    this.qb = this.qb.clearSelect();
+    return this;
+  }
+
+  select(columns: string | string[]) {
+    this.qb = this.qb.select(asArray(columns));
+    return this;
+  }
+
+  selectAll() {
+    this.qb = this.qb.select("*");
+    return this;
+  }
+
+  where(ops: WhereClause | WhereClause[]) {
+    if (typeof ops[0] === "string") {
+      ops = [ops as WhereClause];
+    }
+    for (const [lhs, op, rhs] of asArray(ops)) {
+      this.qb = this.qb.where(lhs, op, rhs);
+    }
+    return this;
+  }
+
+  orWhere(ops: WhereClause | WhereClause[]) {
+    this.qb = this.qb.orWhere((qb) => {
+      for (const [lhs, op, rhs] of asArray(ops)) {
+        qb.andWhere(lhs, op, rhs);
+      }
+    });
+    return this;
+  }
+
+  async insert(table: string, data: any[]) {
+    await this.knex(table).insert(data);
+  }
+
+  async upsert(table: string, data: any[]) {
+    const q = this.knex(table).insert(data);
+    const updateFields = Array.isArray(data) ? Object.keys(data[0]) : data;
+    await q.onDuplicateUpdate.apply(q, updateFields);
+  }
+
+  limit(limit: number) {
+    this.qb = this.qb.limit(limit);
+    return this;
+  }
+
+  offset(offset: number) {
+    this.qb = this.qb.offset(offset);
+    return this;
+  }
+
+  count(column: string, alias?: string) {
+    this.qb = this.qb.count(alias ? `${column} as ${alias}` : column);
+    return this;
+  }
+
+  distinct(column: string) {
+    this.qb = this.qb.distinct(column);
+    return this;
+  }
+
+  first() {
+    this.qb = this.qb.limit(1);
+    return this;
+  }
+
+  async execute(trx?: ExtendedKnexTrx): Promise<any[]> {
+    if (trx) {
+      return this.qb.transacting(trx);
+    }
+    return this.qb;
+  }
+
+  async pluck(column: string): Promise<any[]> {
+    return this.qb.pluck(column);
+  }
+
+  createRawQuery(query: string, bindings?: any[]) {
+    if (bindings?.length) {
+      return this.knex.raw(query, bindings).toQuery();
+    }
+    return this.knex.raw(query).toQuery();
+  }
+
+  async raw<R>(query: string, bindings?: any[]): Promise<R[]> {
+    if (bindings?.length) {
+      return (await this.knex.raw(query, bindings))[0];
+    }
+    return (await this.knex.raw(query))[0];
+  }
+
+  async truncate(table: string) {
+    await this.knex(table).truncate();
+  }
+
+  trx(callback: (trx: KnexClient) => Promise<any>) {
+    return this.knex.transaction((trx) =>
+      callback(new KnexClient(this.config, trx))
+    );
+  }
+
+  destroy() {
+    return this.knex.destroy();
+  }
+
+  clearQueryParts(parts: ("order" | "offset" | "limit")[]) {
+    this.qb = parts.reduce((acc, part) => acc.clear(part), this.qb.clone());
+    return this;
+  }
+
+  clone() {
+    const client = new KnexClient(this.config);
+    client.qb = this.qb.clone();
+    return client;
+  }
+
+  // Migrator
+
+  async getMigrations() {
+    const [, result] = (await this.knex.migrate.list()) as [
+      unknown,
+      {
+        file: string;
+        directory: string;
+      }[],
+    ];
+
+    return result.map((r) => r.file.replace(".js", ""));
+  }
+
+  async status() {
+    return this.knex.migrate.status();
+  }
+
+  async migrate() {
+    return this.knex.migrate.latest();
+  }
+
+  async rollback() {
+    return this.knex.migrate.rollback();
+  }
+
+  async rollbackAll() {
+    return this.knex.migrate.rollback(undefined, true);
+  }
+}
```
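`KnexClient` keeps the active builder on `this.qb` and returns `this` from every fluent call. A minimal usage sketch, assuming a MySQL connection (all connection values are placeholders) and that `WhereClause` is the `[column, operator, value]` tuple the `where` loop destructures:

```ts
import { KnexClient } from "./drivers/knex/client";

const client = new KnexClient({
  client: "mysql2",
  connection: { host: "localhost", port: 3306, user: "root", password: "", database: "myapp" },
});

// Build and run a query; each call reassigns this.qb and returns the client.
const rows = await client
  .from("users")
  .select(["users.id", "users.name"])
  .where([["users.id", ">", 100]])
  .limit(20)
  .execute();

console.log(client.sql); // SQL for the query that was just built

// Transactions hand the callback a trx-bound KnexClient.
await client.trx(async (trx) => {
  await trx.insert("users", [{ name: "alice" }]);
});

await client.destroy();
```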
package/src/database/drivers/knex/db.ts
ADDED

```diff
@@ -0,0 +1,227 @@
+import _ from "lodash";
+import { DBPreset, KnexBaseConfig, SonamuKnexDBConfig } from "../../types";
+import knex, { Knex } from "knex";
+import { KnexClient } from "./client";
+import { DBClass } from "../../db.abstract";
+import { attachOnDuplicateUpdate } from "./plugins/knex-on-duplicate-update";
+import { KnexGenerator } from "./generator";
+
+export class DBKnexClass extends DBClass {
+  public migrationTable = "knex_migrations";
+  public generator: KnexGenerator = new KnexGenerator();
+  public baseConfig?: KnexBaseConfig;
+
+  public declare _fullConfig?: SonamuKnexDBConfig;
+  set fullConfig(config: SonamuKnexDBConfig) {
+    this._fullConfig = config;
+  }
+  get fullConfig() {
+    if (!this._fullConfig) {
+      throw new Error("DB Config has not been initialized");
+    }
+    return this._fullConfig;
+  }
+
+  private wdb?: Knex;
+  private rdb?: Knex;
+
+  private _tdb: KnexClient | null = null;
+  set tdb(tdb: KnexClient) {
+    this._tdb = tdb;
+  }
+  get tdb(): KnexClient {
+    if (this._tdb === null) {
+      throw new Error("tdb has not been initialized");
+    }
+    return this._tdb;
+  }
+
+  private _fdb: KnexClient | null = null;
+  set fdb(fdb: KnexClient) {
+    this._fdb = fdb;
+  }
+  get fdb(): KnexClient {
+    if (this._fdb === null) {
+      throw new Error("fdb has not been initialized");
+    }
+    return this._fdb;
+  }
+
+  get connectionInfo() {
+    return _.mapValues(this.fullConfig, ({ connection }) => ({
+      host: connection.host ?? "localhost",
+      port: connection.port ?? 3306,
+      database: connection.database,
+      user: connection.user,
+      password: connection.password,
+    }));
+  }
+
+  constructor() {
+    super();
+    attachOnDuplicateUpdate();
+  }
+
+  init(config: KnexBaseConfig) {
+    this.baseConfig = config;
+    this.fullConfig = this.generateDBConfig(config);
+  }
+
+  async testInit() {
+    if (this._tdb !== null) {
+      return;
+    }
+
+    if (this.fullConfig.test && this.fullConfig.production_master) {
+      const tConn = this.connectionInfo.test;
+      const pConn = this.connectionInfo.production_master;
+
+      if (
+        `${tConn.host ?? "localhost"}:${tConn.port ?? 3306}/${
+          tConn.database
+        }` ===
+        `${pConn.host ?? "localhost"}:${pConn.port ?? 3306}/${pConn.database}`
+      ) {
+        throw new Error(
+          `테스트DB와 프로덕션DB에 동일한 데이터베이스가 사용되었습니다.`
+        );
+      }
+    }
+
+    this.tdb = new KnexClient(this.fullConfig.test);
+    this.fdb = new KnexClient(this.fullConfig.fixture_local);
+  }
+
+  getDB(which: DBPreset) {
+    const instanceName = which === "w" ? "wdb" : "rdb";
+
+    if (!this[instanceName]) {
+      const config = this.getCurrentConfig(which);
+      this[instanceName] = knex(config);
+    }
+
+    return this[instanceName]!;
+  }
+
+  getClient(mode: keyof SonamuKnexDBConfig) {
+    return new KnexClient(this.fullConfig[mode]);
+  }
+
+  async destroy(): Promise<void> {
+    if (this.wdb !== undefined) {
+      await this.wdb.destroy();
+      this.wdb = undefined;
+    }
+    if (this.rdb !== undefined) {
+      await this.rdb.destroy();
+      this.rdb = undefined;
+    }
+  }
+
+  async testDestroy() {
+    if (this._tdb) {
+      await this._tdb.destroy();
+      this._tdb = null;
+    }
+    if (this._fdb) {
+      await this._fdb.destroy();
+      this._fdb = null;
+    }
+  }
+
+  raw(db: Knex, query: string) {
+    return db.raw(query);
+  }
+
+  private generateDBConfig(config: KnexBaseConfig): SonamuKnexDBConfig {
+    const defaultKnexConfig = _.merge(
+      {
+        client: "mysql2",
+        pool: {
+          min: 1,
+          max: 5,
+        },
+        migrations: {
+          extension: "js",
+          directory: "./dist/migrations",
+        },
+        connection: {
+          host: "localhost",
+          port: 3306,
+          database: config.database,
+        },
+      },
+      config.defaultOptions
+    );
+
+    // 로컬 환경 설정
+    const test = _.merge({}, defaultKnexConfig, {
+      connection: {
+        database: `${config.database}_test`,
+      },
+    });
+
+    const fixture_local = _.merge({}, defaultKnexConfig, {
+      connection: {
+        database: `${config.database}_fixture`,
+      },
+    });
+
+    // 개발 환경 설정
+    const devMasterOptions = config.environments?.development;
+    const devSlaveOptions = config.environments?.development_slave;
+    const development_master = _.merge({}, defaultKnexConfig, devMasterOptions);
+    const development_slave = _.merge(
+      {},
+      defaultKnexConfig,
+      devMasterOptions,
+      devSlaveOptions
+    );
+    const fixture_remote = _.merge({}, defaultKnexConfig, devMasterOptions, {
+      connection: {
+        database: `${config.database}_fixture`,
+      },
+    });
+
+    // 프로덕션 환경 설정
+    const prodMasterOptions = config.environments?.production ?? {};
+    const prodSlaveOptions = config.environments?.production_slave ?? {};
+    const production_master = _.merge({}, defaultKnexConfig, prodMasterOptions);
+    const production_slave = _.merge(
+      {},
+      defaultKnexConfig,
+      prodMasterOptions,
+      prodSlaveOptions
+    );
+
+    return {
+      test,
+      fixture_local,
+      fixture_remote,
+      development_master,
+      development_slave,
+      production_master,
+      production_slave,
+    };
+  }
+
+  /**
+   * keys에 해당하는 설정들을 중복없이 가져옵니다. (host/port/database가 같은 설정은 중복으로 처리합니다.)
+   */
+  getUniqueConfigs(keys: (keyof SonamuKnexDBConfig)[]) {
+    const targets = keys.map((key) => ({
+      connKey: key,
+      options: this.fullConfig[key as keyof SonamuKnexDBConfig],
+    }));
+
+    return _.uniqBy(targets, ({ options }) => {
+      const conn = options.connection as Knex.ConnectionConfig & {
+        port?: number;
+      };
+
+      return `${conn.host ?? "localhost"}:${conn.port ?? 3306}/${
+        conn.database
+      }`;
+    });
+  }
+}
```
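For orientation, a hedged sketch of what `generateDBConfig` above produces for a minimal `KnexBaseConfig`; the input only uses keys the method actually reads, and every name or host below is illustrative rather than taken from the diff:

```ts
// Assuming the resolved DB singleton is the knex driver (DBKnexClass).
const base = {
  client: "knex" as const,
  database: "myapp",
  environments: {
    development: { connection: { host: "dev-db.local", user: "app", password: "secret" } },
    production: { connection: { host: "prod-db.internal", user: "app", password: "secret" } },
  },
};

// After DB.init(base), fullConfig holds seven knex configs, each merged onto the
// mysql2 defaults (pool min 1 / max 5, migrations in ./dist/migrations):
//   test                → database "myapp_test" on localhost
//   fixture_local       → database "myapp_fixture" on localhost
//   fixture_remote      → database "myapp_fixture" on dev-db.local
//   development_master / development_slave → "myapp" on dev-db.local
//   production_master  / production_slave  → "myapp" on prod-db.internal
```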