@technicity/data-service-generator 0.22.2 → 0.23.0-next.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/generation/generate.js +338 -170
- package/dist/runtime/IRuntime.d.ts +2 -1
- package/dist/runtime/RuntimePostgreSQL.d.ts +30 -0
- package/dist/runtime/RuntimePostgreSQL.js +73 -0
- package/dist/runtime/lib/PostgreSQL.d.ts +14 -0
- package/dist/runtime/lib/PostgreSQL.js +110 -0
- package/dist/runtime/lib/getSqlAst.js +8 -6
- package/dist/runtime/lib/shared.js +51 -11
- package/dist/runtime/lib/stringifyWhere.js +7 -0
- package/package.json +5 -1
|
@@ -30,24 +30,27 @@ exports.generate = generate;
|
|
|
30
30
|
const path = __importStar(require("node:path"));
|
|
31
31
|
const fs = __importStar(require("node:fs"));
|
|
32
32
|
const os = __importStar(require("node:os"));
|
|
33
|
+
const node_async_hooks_1 = require("node:async_hooks");
|
|
33
34
|
const child_process = __importStar(require("node:child_process"));
|
|
34
35
|
const node_crypto_1 = __importDefault(require("node:crypto"));
|
|
35
36
|
const prettier = __importStar(require("prettier"));
|
|
36
37
|
const changeCase = __importStar(require("change-case"));
|
|
37
38
|
const fse = __importStar(require("fs-extra"));
|
|
38
39
|
const _ = __importStar(require("lodash/fp"));
|
|
40
|
+
const memoize_1 = __importDefault(require("lodash/memoize"));
|
|
39
41
|
const json_schema_to_typescript_1 = require("json-schema-to-typescript");
|
|
40
42
|
const getDuplicates_1 = require("../lib/getDuplicates");
|
|
41
43
|
const isNotNullOrUndefined_1 = require("../lib/isNotNullOrUndefined");
|
|
44
|
+
const pg_1 = require("pg");
|
|
42
45
|
const MySQL_1 = require("../runtime/lib/MySQL");
|
|
43
46
|
const capitalizeFirstLetter_1 = require("../lib/capitalizeFirstLetter");
|
|
44
|
-
|
|
45
|
-
|
|
46
|
-
|
|
47
|
-
|
|
48
|
-
|
|
49
|
-
|
|
50
|
-
|
|
47
|
+
const ctxStorage = new node_async_hooks_1.AsyncLocalStorage();
|
|
48
|
+
function getCtx() {
|
|
49
|
+
const c = ctxStorage.getStore();
|
|
50
|
+
if (!c)
|
|
51
|
+
throw new Error("generate() context missing");
|
|
52
|
+
return c;
|
|
53
|
+
}
|
|
51
54
|
const json2TsOpts = {
|
|
52
55
|
bannerComment: ""
|
|
53
56
|
};
|
|
@@ -62,159 +65,176 @@ async function generate(input) {
|
|
|
62
65
|
const specialCaseUuidColumn = input.specialCaseUuidColumn ?? true;
|
|
63
66
|
const includeMappedFields = input.includeMappedFields ?? true;
|
|
64
67
|
const supplementClientOpts = input.supplementClientOpts ?? true;
|
|
65
|
-
|
|
66
|
-
|
|
67
|
-
|
|
68
|
-
|
|
69
|
-
throw new Error("No tables found");
|
|
70
|
-
}
|
|
71
|
-
if (input.tables != null) {
|
|
72
|
-
tables = tables.filter((x) => input.tables?.includes(x));
|
|
73
|
-
}
|
|
74
|
-
if (input.excludeTables != null) {
|
|
75
|
-
tables = tables.filter((x) => !input.excludeTables?.includes(x));
|
|
76
|
-
}
|
|
77
|
-
const data = await Promise.all(tables.flatMap((x) => [
|
|
78
|
-
getGetOneData(x, includeMappedFields),
|
|
79
|
-
getGetListData(x),
|
|
80
|
-
getGetListPaginatedData(x),
|
|
81
|
-
getPostOneData(x, specialCaseUuidColumn, includeMappedFields),
|
|
82
|
-
getPatchOneData(x, specialCaseUuidColumn, includeMappedFields),
|
|
83
|
-
getPatchListData(x),
|
|
84
|
-
getDeleteOneData(x),
|
|
85
|
-
getDeleteListData(x)
|
|
86
|
-
]));
|
|
87
|
-
const artifacts = await getArtifacts(tables, includeMappedFields, specialCaseUuidColumn);
|
|
88
|
-
const artifactsSource = getArtifactsSource(artifacts);
|
|
89
|
-
const sdkSource = await getSDKSource(data, specialCaseUuidColumn, supplementClientOpts, artifacts, input.outputSqliteSchema);
|
|
90
|
-
const sdkFilename = "index.ts";
|
|
91
|
-
const sourceIRuntimeFilePath = fs.existsSync(path.join(__dirname, "../runtime", "IRuntime.ts"))
|
|
92
|
-
? path.join(__dirname, "../runtime", "IRuntime.ts")
|
|
93
|
-
: path.join(__dirname, "../runtime", "IRuntime.js");
|
|
94
|
-
const IRuntimeFilename = path.basename(sourceIRuntimeFilePath);
|
|
95
|
-
const artifactsFilename = "artifacts.ts";
|
|
96
|
-
const tsConfigJSON = {
|
|
97
|
-
compilerOptions: {
|
|
98
|
-
module: "commonjs",
|
|
99
|
-
moduleResolution: "node",
|
|
100
|
-
target: "es2020",
|
|
101
|
-
declaration: true,
|
|
102
|
-
outDir: "./sdk-ts"
|
|
103
|
-
},
|
|
104
|
-
include: [sdkFilename, artifactsFilename, IRuntimeFilename]
|
|
105
|
-
};
|
|
106
|
-
const packageJSON = {
|
|
107
|
-
name: "temp",
|
|
108
|
-
version: "1.0.0",
|
|
109
|
-
// Deps need to be included so that they're inlined by ncc
|
|
110
|
-
dependencies: require("../../package.json").dependencies,
|
|
111
|
-
devDependencies: {
|
|
112
|
-
"@types/node": require("../../package.json").devDependencies["@types/node"],
|
|
113
|
-
typescript: require("../../package.json").devDependencies.typescript
|
|
114
|
-
},
|
|
115
|
-
// Not `resolutions` because npm used for install
|
|
116
|
-
overrides: {
|
|
117
|
-
// Fix for: `Cannot find type definition file for 'glob'`
|
|
118
|
-
glob: ">9.0.0"
|
|
119
|
-
}
|
|
68
|
+
const ctx = {
|
|
69
|
+
runId: node_crypto_1.default.randomUUID(),
|
|
70
|
+
dialect: input.dialect,
|
|
71
|
+
query: undefined
|
|
120
72
|
};
|
|
121
|
-
|
|
122
|
-
|
|
123
|
-
|
|
124
|
-
|
|
125
|
-
|
|
126
|
-
fs.writeFileSync(path.join(tmpDirPath, artifactsFilename), artifactsSource);
|
|
127
|
-
fse.copyFileSync(sourceIRuntimeFilePath, path.join(tmpDirPath, IRuntimeFilename));
|
|
128
|
-
const typesDirPath = path.join(tmpDirPath, "types");
|
|
129
|
-
fse.mkdirpSync(typesDirPath);
|
|
130
|
-
fs.writeFileSync(path.join(typesDirPath, "_shared.ts"), getTypeShared());
|
|
131
|
-
for (let x of data) {
|
|
132
|
-
if (x.kind === "getOne") {
|
|
133
|
-
fs.writeFileSync(path.join(typesDirPath, x.typeFieldsName + ".ts"), x.typeFields);
|
|
134
|
-
fs.writeFileSync(path.join(typesDirPath, x.typeReturnBaseName + ".ts"), x.typeReturnBase);
|
|
73
|
+
return ctxStorage.run(ctx, async () => {
|
|
74
|
+
init(input);
|
|
75
|
+
let tables = await getTableNames();
|
|
76
|
+
if (tables.length === 0) {
|
|
77
|
+
throw new Error("No tables found");
|
|
135
78
|
}
|
|
136
|
-
if (
|
|
137
|
-
|
|
138
|
-
fs.writeFileSync(path.join(typesDirPath, x.typeOrderByName + ".ts"), x.typeOrderBy);
|
|
79
|
+
if (input.tables != null) {
|
|
80
|
+
tables = tables.filter((x) => input.tables?.includes(x));
|
|
139
81
|
}
|
|
140
|
-
if (
|
|
141
|
-
|
|
82
|
+
if (input.excludeTables != null) {
|
|
83
|
+
tables = tables.filter((x) => !input.excludeTables?.includes(x));
|
|
142
84
|
}
|
|
143
|
-
|
|
144
|
-
|
|
85
|
+
const data = await Promise.all(tables.flatMap((x) => [
|
|
86
|
+
getGetOneData(x, includeMappedFields),
|
|
87
|
+
getGetListData(x),
|
|
88
|
+
getGetListPaginatedData(x),
|
|
89
|
+
getPostOneData(x, specialCaseUuidColumn, includeMappedFields),
|
|
90
|
+
getPatchOneData(x, specialCaseUuidColumn, includeMappedFields),
|
|
91
|
+
getPatchListData(x),
|
|
92
|
+
getDeleteOneData(x),
|
|
93
|
+
getDeleteListData(x)
|
|
94
|
+
]));
|
|
95
|
+
const artifacts = await getArtifacts(tables, includeMappedFields, specialCaseUuidColumn);
|
|
96
|
+
const artifactsSource = getArtifactsSource(artifacts);
|
|
97
|
+
const sdkSource = await getSDKSource(data, specialCaseUuidColumn, supplementClientOpts, artifacts, input.outputSqliteSchema);
|
|
98
|
+
const sdkFilename = "index.ts";
|
|
99
|
+
const sourceIRuntimeFilePath = fs.existsSync(path.join(__dirname, "../runtime", "IRuntime.ts"))
|
|
100
|
+
? path.join(__dirname, "../runtime", "IRuntime.ts")
|
|
101
|
+
: path.join(__dirname, "../runtime", "IRuntime.js");
|
|
102
|
+
const IRuntimeFilename = path.basename(sourceIRuntimeFilePath);
|
|
103
|
+
const artifactsFilename = "artifacts.ts";
|
|
104
|
+
const tsConfigJSON = {
|
|
105
|
+
compilerOptions: {
|
|
106
|
+
module: "commonjs",
|
|
107
|
+
moduleResolution: "node",
|
|
108
|
+
target: "es2020",
|
|
109
|
+
declaration: true,
|
|
110
|
+
outDir: "./sdk-ts"
|
|
111
|
+
},
|
|
112
|
+
include: [sdkFilename, artifactsFilename, IRuntimeFilename]
|
|
113
|
+
};
|
|
114
|
+
const packageJSON = {
|
|
115
|
+
name: "temp",
|
|
116
|
+
version: "1.0.0",
|
|
117
|
+
// Deps need to be included so that they're inlined by ncc
|
|
118
|
+
dependencies: require("../../package.json").dependencies,
|
|
119
|
+
devDependencies: {
|
|
120
|
+
"@types/node": require("../../package.json").devDependencies["@types/node"],
|
|
121
|
+
typescript: require("../../package.json").devDependencies.typescript
|
|
122
|
+
},
|
|
123
|
+
// Not `resolutions` because npm used for install
|
|
124
|
+
overrides: {
|
|
125
|
+
// Fix for: `Cannot find type definition file for 'glob'`
|
|
126
|
+
glob: ">9.0.0"
|
|
127
|
+
}
|
|
128
|
+
};
|
|
129
|
+
const tmpDirPath = path.join(os.tmpdir(),
|
|
130
|
+
// _ because - in filename is not supported by mysql2sqlite
|
|
131
|
+
`dsg_${node_crypto_1.default.randomUUID()}`.replace(/-/g, "_"));
|
|
132
|
+
fse.mkdirpSync(tmpDirPath);
|
|
133
|
+
fs.writeFileSync(path.join(tmpDirPath, sdkFilename), sdkSource);
|
|
134
|
+
fs.writeFileSync(path.join(tmpDirPath, artifactsFilename), artifactsSource);
|
|
135
|
+
fse.copyFileSync(sourceIRuntimeFilePath, path.join(tmpDirPath, IRuntimeFilename));
|
|
136
|
+
const typesDirPath = path.join(tmpDirPath, "types");
|
|
137
|
+
fse.mkdirpSync(typesDirPath);
|
|
138
|
+
fs.writeFileSync(path.join(typesDirPath, "_shared.ts"), getTypeShared());
|
|
139
|
+
for (let x of data) {
|
|
140
|
+
if (x.kind === "getOne") {
|
|
141
|
+
fs.writeFileSync(path.join(typesDirPath, x.typeFieldsName + ".ts"), x.typeFields);
|
|
142
|
+
fs.writeFileSync(path.join(typesDirPath, x.typeReturnBaseName + ".ts"), x.typeReturnBase);
|
|
143
|
+
}
|
|
144
|
+
if (x.kind === "getList") {
|
|
145
|
+
fs.writeFileSync(path.join(typesDirPath, x.typeWhereName + ".ts"), x.typeWhere);
|
|
146
|
+
fs.writeFileSync(path.join(typesDirPath, x.typeOrderByName + ".ts"), x.typeOrderBy);
|
|
147
|
+
}
|
|
148
|
+
if (x.kind === "postOne") {
|
|
149
|
+
fs.writeFileSync(path.join(typesDirPath, x.typeDataName + ".ts"), x.typeData);
|
|
150
|
+
}
|
|
151
|
+
if (x.kind === "patchOne") {
|
|
152
|
+
fs.writeFileSync(path.join(typesDirPath, x.typeDataName + ".ts"), x.typeData);
|
|
153
|
+
}
|
|
145
154
|
}
|
|
146
|
-
|
|
147
|
-
|
|
148
|
-
|
|
149
|
-
|
|
150
|
-
|
|
151
|
-
|
|
152
|
-
|
|
153
|
-
|
|
154
|
-
|
|
155
|
-
|
|
156
|
-
|
|
157
|
-
|
|
158
|
-
|
|
159
|
-
|
|
160
|
-
|
|
161
|
-
|
|
162
|
-
|
|
163
|
-
.
|
|
164
|
-
|
|
165
|
-
|
|
166
|
-
|
|
167
|
-
|
|
168
|
-
|
|
169
|
-
|
|
170
|
-
|
|
171
|
-
|
|
172
|
-
|
|
173
|
-
|
|
174
|
-
|
|
175
|
-
|
|
176
|
-
|
|
177
|
-
|
|
155
|
+
fs.writeFileSync(path.join(typesDirPath, "index.ts"), getTypeTypesIndex(data));
|
|
156
|
+
fs.writeFileSync(path.join(tmpDirPath, "package.json"), JSON.stringify(packageJSON, null, 2));
|
|
157
|
+
fs.writeFileSync(path.join(tmpDirPath, "tsconfig.json"), JSON.stringify(tsConfigJSON, null, 2));
|
|
158
|
+
fse.copySync(__dirname, path.join(tmpDirPath, "src"));
|
|
159
|
+
const tmpBuildOutputPath = path.join(tmpDirPath, "sdk-ts");
|
|
160
|
+
const outdir = path.resolve(input.outdir);
|
|
161
|
+
const sdkOutputPath = path.join(outdir, "sdk-ts");
|
|
162
|
+
const nccVersion = "^0.33.0";
|
|
163
|
+
child_process.execSync("npm i", { cwd: tmpDirPath, stdio: "inherit" });
|
|
164
|
+
child_process.execSync(`npm_config_yes=true npx -p @vercel/ncc@${nccVersion} ncc build ./${sdkFilename} -o ${tmpBuildOutputPath} -e ./artifacts`, { cwd: tmpDirPath, stdio: "inherit" });
|
|
165
|
+
// TODO: workaround for artifacts.js not being output by ncc
|
|
166
|
+
fs.writeFileSync(path.join(tmpBuildOutputPath, "artifacts.js"), artifactsSource
|
|
167
|
+
.replace("export const artifacts: IArtifacts = ", "module.exports.artifacts = ")
|
|
168
|
+
.split("\n")
|
|
169
|
+
// Remove import
|
|
170
|
+
.slice(2)
|
|
171
|
+
.join("\n"));
|
|
172
|
+
// TODO: workaround for IRuntime.d.ts not being included
|
|
173
|
+
// copyFileSync hangs for some reason, so use writeFileSync + readFileSync instead
|
|
174
|
+
fs.writeFileSync(path.join(tmpBuildOutputPath, "IRuntime.d.ts"), fs.existsSync(path.join(__dirname, "../runtime", "IRuntime.d.ts"))
|
|
175
|
+
? fs.readFileSync(path.join(__dirname, "../runtime", "IRuntime.d.ts"), "utf-8")
|
|
176
|
+
: fs.readFileSync(sourceIRuntimeFilePath, "utf-8"));
|
|
177
|
+
if (getCtx().dialect === "mysql" && input.outputSqliteSchema) {
|
|
178
|
+
// Since mysql2sqlite outputs a malformed string if a column
|
|
179
|
+
// has the name `enum`, temporarily change the name to something else,
|
|
180
|
+
// then change it back.
|
|
181
|
+
const enumMarker = "`" + node_crypto_1.default.randomUUID() + "`";
|
|
182
|
+
const schemaMySql = Object.values(artifacts)
|
|
183
|
+
.reduce((acc, x) => {
|
|
184
|
+
let d = x.dump?.schema;
|
|
185
|
+
if (!d) {
|
|
186
|
+
return acc;
|
|
187
|
+
}
|
|
188
|
+
d = d.replace(/`enum`/g, enumMarker);
|
|
189
|
+
d += ";";
|
|
190
|
+
acc.push(d);
|
|
178
191
|
return acc;
|
|
179
|
-
}
|
|
180
|
-
|
|
181
|
-
|
|
182
|
-
|
|
183
|
-
|
|
184
|
-
|
|
185
|
-
.
|
|
186
|
-
|
|
187
|
-
|
|
188
|
-
|
|
189
|
-
|
|
190
|
-
|
|
191
|
-
|
|
192
|
-
|
|
193
|
-
|
|
194
|
-
|
|
195
|
-
|
|
196
|
-
|
|
197
|
-
|
|
198
|
-
|
|
199
|
-
|
|
200
|
-
|
|
201
|
-
|
|
202
|
-
}
|
|
203
|
-
fse.emptyDirSync(sdkOutputPath);
|
|
204
|
-
fse.copySync(tmpBuildOutputPath, sdkOutputPath);
|
|
205
|
-
fse.removeSync(tmpDirPath);
|
|
192
|
+
}, [])
|
|
193
|
+
.join("\n\n");
|
|
194
|
+
const mysql2SqliteSrc = getMysql2sqliteSrc();
|
|
195
|
+
const mysql2SqlitePath = path.join(tmpDirPath, "mysql2sqlite");
|
|
196
|
+
fs.writeFileSync(mysql2SqlitePath, mysql2SqliteSrc);
|
|
197
|
+
fs.chmodSync(mysql2SqlitePath, 0o755);
|
|
198
|
+
const tmpMySqlSchemaFilename = "tmp.sql";
|
|
199
|
+
const tmpMySqlSchemaPath = path.join(tmpDirPath, tmpMySqlSchemaFilename);
|
|
200
|
+
fs.writeFileSync(tmpMySqlSchemaPath, schemaMySql);
|
|
201
|
+
let schemaSqlite = child_process
|
|
202
|
+
.execFileSync(mysql2SqlitePath, [tmpMySqlSchemaFilename], { cwd: tmpDirPath })
|
|
203
|
+
.toString();
|
|
204
|
+
schemaSqlite = schemaSqlite.replace(new RegExp(enumMarker, "g"), "`enum`");
|
|
205
|
+
const src = prettier.format(`module.exports = { schema: \`${schemaSqlite.replace(/`/g, "\\`")}\` }`, { parser: "babel" });
|
|
206
|
+
fs.writeFileSync(path.join(tmpBuildOutputPath, "artifacts.sqlite.js"), src);
|
|
207
|
+
}
|
|
208
|
+
if (!fs.existsSync(outdir)) {
|
|
209
|
+
fse.mkdirpSync(outdir);
|
|
210
|
+
}
|
|
211
|
+
fse.emptyDirSync(sdkOutputPath);
|
|
212
|
+
fse.copySync(tmpBuildOutputPath, sdkOutputPath);
|
|
213
|
+
fse.removeSync(tmpDirPath);
|
|
214
|
+
});
|
|
206
215
|
}
|
|
207
216
|
function init(input) {
|
|
217
|
+
const ctx = getCtx();
|
|
208
218
|
const { database, user, password, host, port, server } = input;
|
|
209
|
-
if (dialect === "mysql") {
|
|
210
|
-
|
|
219
|
+
if (ctx.dialect === "mysql") {
|
|
220
|
+
const mysql = new MySQL_1.MySQL({
|
|
211
221
|
user,
|
|
212
222
|
password,
|
|
213
223
|
host,
|
|
214
224
|
port,
|
|
215
225
|
database
|
|
216
226
|
});
|
|
217
|
-
query = mysql.query.bind(mysql);
|
|
227
|
+
ctx.query = mysql.query.bind(mysql);
|
|
228
|
+
}
|
|
229
|
+
if (ctx.dialect === "postgresql") {
|
|
230
|
+
const pool = new pg_1.Pool({
|
|
231
|
+
host: host ?? "localhost",
|
|
232
|
+
port: port ?? 5432,
|
|
233
|
+
user,
|
|
234
|
+
password,
|
|
235
|
+
database
|
|
236
|
+
});
|
|
237
|
+
ctx.query = (q, values) => pool.query(q, values ?? []).then((r) => r.rows);
|
|
218
238
|
}
|
|
219
239
|
}
|
|
220
240
|
// It's a bit awkward to put __whereNeedsProcessing, __prepareWhere on the class,
|
|
@@ -1068,7 +1088,7 @@ async function getMappedFields(table) {
|
|
|
1068
1088
|
name: "uuid",
|
|
1069
1089
|
// Replace `Id` with `Uuid`
|
|
1070
1090
|
as: x.foreignKey.slice(0, -2) + "Uuid",
|
|
1071
|
-
type: getBaseJSONType(uuidColumn.Type)
|
|
1091
|
+
type: getBaseJSONType(uuidColumn.Type, getCtx().dialect)
|
|
1072
1092
|
});
|
|
1073
1093
|
}
|
|
1074
1094
|
return out;
|
|
@@ -1489,7 +1509,7 @@ async function getArtifacts(tables, includeMappedFields, specialCaseUuidColumn)
|
|
|
1489
1509
|
}
|
|
1490
1510
|
return {
|
|
1491
1511
|
kind: "scalar",
|
|
1492
|
-
type: getBaseJSONType(t.Type),
|
|
1512
|
+
type: getBaseJSONType(t.Type, getCtx().dialect),
|
|
1493
1513
|
name: t.Field,
|
|
1494
1514
|
nullable,
|
|
1495
1515
|
hasDefaultValue: !!t.Default
|
|
@@ -1534,7 +1554,7 @@ async function getArtifacts(tables, includeMappedFields, specialCaseUuidColumn)
|
|
|
1534
1554
|
}, {});
|
|
1535
1555
|
return artifacts;
|
|
1536
1556
|
}
|
|
1537
|
-
const getRelationInfo =
|
|
1557
|
+
const getRelationInfo = (0, memoize_1.default)(async function getRelationInfo(table) {
|
|
1538
1558
|
const relationsManyToOne = await getRelationsManyToOne(table);
|
|
1539
1559
|
const relationsOneToMany = await getRelationsOneToMany(table);
|
|
1540
1560
|
let out = [];
|
|
@@ -1604,7 +1624,7 @@ const getRelationInfo = _.memoize(async function getRelationInfo(table) {
|
|
|
1604
1624
|
out = out.concat(relationsManyToMany);
|
|
1605
1625
|
out = _.sortBy((x) => x.table, out);
|
|
1606
1626
|
return out;
|
|
1607
|
-
});
|
|
1627
|
+
}, (table) => getCtx().runId + ":" + table);
|
|
1608
1628
|
function getRelationManyToOneFieldName(x) {
|
|
1609
1629
|
return changeCase.camelCase(x.foreignKey.replace(new RegExp(x.referencedKey + "$", "i"), ""));
|
|
1610
1630
|
}
|
|
@@ -1627,11 +1647,12 @@ async function getJunctionTables() {
|
|
|
1627
1647
|
}
|
|
1628
1648
|
// `from` relations
|
|
1629
1649
|
// https://stackoverflow.com/a/54732547
|
|
1630
|
-
|
|
1631
|
-
|
|
1632
|
-
|
|
1633
|
-
|
|
1634
|
-
|
|
1650
|
+
const getRelationsManyToOne = (0, memoize_1.default)(async function getRelationsManyToOne(table) {
|
|
1651
|
+
const { dialect, query } = getCtx();
|
|
1652
|
+
const tableMeta = await getTableMeta(table);
|
|
1653
|
+
let rs;
|
|
1654
|
+
if (dialect === "mysql") {
|
|
1655
|
+
const sql = `
|
|
1635
1656
|
SELECT
|
|
1636
1657
|
TABLE_SCHEMA as db,
|
|
1637
1658
|
TABLE_NAME as t1,
|
|
@@ -1646,8 +1667,18 @@ const getRelationsManyToOne = _.memoize(async function getRelationsManyToOne(tab
|
|
|
1646
1667
|
AND REFERENCED_TABLE_NAME IS NOT NULL
|
|
1647
1668
|
AND (TABLE_NAME = ?);
|
|
1648
1669
|
`;
|
|
1649
|
-
|
|
1650
|
-
|
|
1670
|
+
rs = await query(sql, [table]);
|
|
1671
|
+
}
|
|
1672
|
+
else if (dialect === "postgresql") {
|
|
1673
|
+
rs = await query(`SELECT kcu.column_name AS "t1Field", ccu.table_name AS t2, ccu.column_name AS "t2Field"
|
|
1674
|
+
FROM information_schema.key_column_usage kcu
|
|
1675
|
+
JOIN information_schema.referential_constraints rc ON kcu.constraint_name = rc.constraint_name AND kcu.table_schema = rc.constraint_schema
|
|
1676
|
+
JOIN information_schema.constraint_column_usage ccu ON rc.unique_constraint_name = ccu.constraint_name AND rc.unique_constraint_schema = ccu.table_schema
|
|
1677
|
+
WHERE kcu.table_schema = 'public' AND kcu.table_name = $1`, [table]);
|
|
1678
|
+
}
|
|
1679
|
+
else {
|
|
1680
|
+
throw new Error("Unsupported dialect: " + dialect);
|
|
1681
|
+
}
|
|
1651
1682
|
const xs = await Promise.all(_.uniqWith(_.isEqual, rs.map(async (v) => {
|
|
1652
1683
|
return {
|
|
1653
1684
|
table: table,
|
|
@@ -1658,10 +1689,13 @@ const getRelationsManyToOne = _.memoize(async function getRelationsManyToOne(tab
|
|
|
1658
1689
|
};
|
|
1659
1690
|
})));
|
|
1660
1691
|
return _.sortBy((x) => x.referencedTable, xs);
|
|
1661
|
-
});
|
|
1692
|
+
}, (table) => getCtx().runId + ":" + table);
|
|
1662
1693
|
// `to` relations
|
|
1663
|
-
const getRelationsOneToMany =
|
|
1664
|
-
const
|
|
1694
|
+
const getRelationsOneToMany = (0, memoize_1.default)(async function getRelationsOneToMany(table) {
|
|
1695
|
+
const { dialect, query } = getCtx();
|
|
1696
|
+
let rs;
|
|
1697
|
+
if (dialect === "mysql") {
|
|
1698
|
+
const sql = `
|
|
1665
1699
|
SELECT
|
|
1666
1700
|
TABLE_SCHEMA as db,
|
|
1667
1701
|
TABLE_NAME as t1,
|
|
@@ -1676,19 +1710,29 @@ const getRelationsOneToMany = _.memoize(async function getRelationsOneToMany(tab
|
|
|
1676
1710
|
AND REFERENCED_TABLE_NAME IS NOT NULL
|
|
1677
1711
|
AND (REFERENCED_TABLE_NAME = ?);
|
|
1678
1712
|
`;
|
|
1679
|
-
|
|
1713
|
+
rs = await query(sql, [table]);
|
|
1714
|
+
}
|
|
1715
|
+
else if (dialect === "postgresql") {
|
|
1716
|
+
rs = await query(`SELECT kcu.table_name AS t1, kcu.column_name AS "t1Field", ccu.column_name AS "t2Field"
|
|
1717
|
+
FROM information_schema.key_column_usage kcu
|
|
1718
|
+
JOIN information_schema.referential_constraints rc ON kcu.constraint_name = rc.constraint_name AND kcu.table_schema = rc.constraint_schema
|
|
1719
|
+
JOIN information_schema.constraint_column_usage ccu ON rc.unique_constraint_name = ccu.constraint_name AND rc.unique_constraint_schema = ccu.table_schema
|
|
1720
|
+
WHERE kcu.table_schema = 'public' AND ccu.table_name = $1`, [table]);
|
|
1721
|
+
}
|
|
1722
|
+
else {
|
|
1723
|
+
throw new Error("Unsupported dialect: " + dialect);
|
|
1724
|
+
}
|
|
1680
1725
|
const xs = await Promise.all(_.uniqWith(_.isEqual, rs.map(async (v) => {
|
|
1681
1726
|
return {
|
|
1682
1727
|
table: table,
|
|
1683
1728
|
foreignKey: v.t2Field,
|
|
1684
1729
|
referencedTable: v.t1,
|
|
1685
1730
|
referencedKey: v.t1Field,
|
|
1686
|
-
// TODO? I think this is right, since it's one-to-many, so a list
|
|
1687
1731
|
nullable: false
|
|
1688
1732
|
};
|
|
1689
1733
|
})));
|
|
1690
1734
|
return _.sortBy((x) => x.referencedKey, _.sortBy((x) => x.referencedTable, xs));
|
|
1691
|
-
});
|
|
1735
|
+
}, (table) => getCtx().runId + ":" + table);
|
|
1692
1736
|
async function getPrimaryColumn(table) {
|
|
1693
1737
|
const tableMeta = await getTableMeta(table);
|
|
1694
1738
|
const columns = tableMeta.filter((x) => x.Key === "PRI");
|
|
@@ -1698,7 +1742,7 @@ async function getPrimaryColumn(table) {
|
|
|
1698
1742
|
const column = columns[0];
|
|
1699
1743
|
return {
|
|
1700
1744
|
name: column.Field,
|
|
1701
|
-
type: getBaseJSONType(column.Type),
|
|
1745
|
+
type: getBaseJSONType(column.Type, getCtx().dialect),
|
|
1702
1746
|
nullable: column.Null === "YES"
|
|
1703
1747
|
};
|
|
1704
1748
|
}
|
|
@@ -1710,7 +1754,7 @@ async function getUniqueColumns(table, specialCaseUuidColumn) {
|
|
|
1710
1754
|
(specialCaseUuidColumn && x.Field === "uuid"))
|
|
1711
1755
|
.map((x) => ({
|
|
1712
1756
|
name: x.Field,
|
|
1713
|
-
type: getBaseJSONType(x.Type),
|
|
1757
|
+
type: getBaseJSONType(x.Type, getCtx().dialect),
|
|
1714
1758
|
nullable: x.Null === "YES"
|
|
1715
1759
|
}));
|
|
1716
1760
|
}
|
|
@@ -1726,23 +1770,116 @@ async function getUuidColumn(table) {
|
|
|
1726
1770
|
nullable: column.Null === "YES"
|
|
1727
1771
|
};
|
|
1728
1772
|
}
|
|
1729
|
-
const
|
|
1773
|
+
const getPgEnumDefinition = (0, memoize_1.default)(async function getPgEnumDefinition(udtName) {
|
|
1774
|
+
const { dialect, query } = getCtx();
|
|
1775
|
+
if (dialect !== "postgresql")
|
|
1776
|
+
return null;
|
|
1777
|
+
const rows = await query(`SELECT e.enumlabel FROM pg_enum e
|
|
1778
|
+
JOIN pg_type t ON e.enumtypid = t.oid
|
|
1779
|
+
JOIN pg_catalog.pg_namespace n ON t.typnamespace = n.oid
|
|
1780
|
+
WHERE t.typname = $1 AND n.nspname = 'public'
|
|
1781
|
+
ORDER BY e.enumsortorder`, [udtName]);
|
|
1782
|
+
if (rows.length === 0)
|
|
1783
|
+
return null;
|
|
1784
|
+
const labels = rows.map((r) => String(r.enumlabel).replace(/'/g, "''"));
|
|
1785
|
+
return "enum('" + labels.join("', '") + "')";
|
|
1786
|
+
}, (udtName) => getCtx().runId + ":" + udtName);
|
|
1787
|
+
const getTableMeta = (0, memoize_1.default)(async function getTableMeta(table) {
|
|
1788
|
+
const { dialect, query } = getCtx();
|
|
1730
1789
|
if (dialect === "mysql") {
|
|
1731
1790
|
return query("DESCRIBE ??", [table]).then((xs) => _.sortBy((x) => x.Field, xs));
|
|
1732
1791
|
}
|
|
1792
|
+
if (dialect === "postgresql") {
|
|
1793
|
+
const columns = await query(`SELECT column_name AS "Field", data_type, udt_name, character_maximum_length AS char_max, is_nullable, column_default AS "Default"
|
|
1794
|
+
FROM information_schema.columns
|
|
1795
|
+
WHERE table_schema = 'public' AND table_name = $1
|
|
1796
|
+
ORDER BY ordinal_position`, [table]);
|
|
1797
|
+
const keyInfo = await query(`SELECT a.attname AS col, 'PRI' AS key_type
|
|
1798
|
+
FROM pg_index i
|
|
1799
|
+
JOIN pg_attribute a ON a.attrelid = i.indrelid AND a.attnum = ANY(i.indkey) AND NOT a.attisdropped AND a.attnum > 0
|
|
1800
|
+
JOIN pg_class c ON c.oid = i.indrelid
|
|
1801
|
+
JOIN pg_namespace n ON n.oid = c.relnamespace
|
|
1802
|
+
WHERE n.nspname = 'public' AND c.relname = $1 AND i.indisprimary
|
|
1803
|
+
UNION ALL
|
|
1804
|
+
SELECT kcu.column_name AS col, 'UNI' AS key_type
|
|
1805
|
+
FROM information_schema.table_constraints tc
|
|
1806
|
+
JOIN information_schema.key_column_usage kcu ON tc.constraint_name = kcu.constraint_name AND tc.table_schema = kcu.table_schema
|
|
1807
|
+
WHERE tc.table_schema = 'public' AND tc.table_name = $1 AND tc.constraint_type = 'UNIQUE'
|
|
1808
|
+
UNION ALL
|
|
1809
|
+
SELECT kcu.column_name AS col, 'MUL' AS key_type
|
|
1810
|
+
FROM information_schema.table_constraints tc
|
|
1811
|
+
JOIN information_schema.key_column_usage kcu ON tc.constraint_name = kcu.constraint_name AND tc.table_schema = kcu.table_schema
|
|
1812
|
+
WHERE tc.table_schema = 'public' AND tc.table_name = $1 AND tc.constraint_type = 'FOREIGN KEY'`, [table]);
|
|
1813
|
+
const keyMap = new Map();
|
|
1814
|
+
for (const k of keyInfo) {
|
|
1815
|
+
if (!keyMap.has(k.col) || k.key_type === "PRI")
|
|
1816
|
+
keyMap.set(k.col, k.key_type);
|
|
1817
|
+
}
|
|
1818
|
+
const udtNames = [
|
|
1819
|
+
...new Set(columns
|
|
1820
|
+
.filter((c) => c.data_type === "USER-DEFINED" && c.udt_name != null)
|
|
1821
|
+
.map((c) => c.udt_name))
|
|
1822
|
+
];
|
|
1823
|
+
const enumDefs = await Promise.all(udtNames.map((udt) => getPgEnumDefinition(udt)));
|
|
1824
|
+
const enumMap = new Map(udtNames.map((udt, i) => [udt, enumDefs[i] ?? "varchar(255)"]));
|
|
1825
|
+
return columns.map((c) => {
|
|
1826
|
+
let type;
|
|
1827
|
+
if (c.data_type === "USER-DEFINED" && c.udt_name != null) {
|
|
1828
|
+
type = enumMap.get(c.udt_name) ?? "character varying(255)";
|
|
1829
|
+
}
|
|
1830
|
+
else {
|
|
1831
|
+
type = c.data_type;
|
|
1832
|
+
if ((c.data_type === "character varying" || c.data_type === "character") &&
|
|
1833
|
+
c.char_max != null) {
|
|
1834
|
+
type += "(" + c.char_max + ")";
|
|
1835
|
+
}
|
|
1836
|
+
}
|
|
1837
|
+
return {
|
|
1838
|
+
Field: c.Field,
|
|
1839
|
+
Type: type,
|
|
1840
|
+
Null: c.is_nullable === "YES" ? "YES" : "NO",
|
|
1841
|
+
Key: keyMap.get(c.Field) ?? "",
|
|
1842
|
+
Default: c.Default ?? ""
|
|
1843
|
+
};
|
|
1844
|
+
});
|
|
1845
|
+
}
|
|
1733
1846
|
throw new Error("Unsupported dialect: " + dialect);
|
|
1734
|
-
});
|
|
1735
|
-
function getShowCreateTable(table) {
|
|
1847
|
+
}, (table) => getCtx().runId + ":" + table);
|
|
1848
|
+
async function getShowCreateTable(table) {
|
|
1849
|
+
const { dialect, query } = getCtx();
|
|
1736
1850
|
if (dialect === "mysql") {
|
|
1737
1851
|
return query("SHOW CREATE TABLE ??", [table]).then((xs) => xs[0]["Create Table"]
|
|
1738
1852
|
// https://github.com/bradzacher/mysqldump/blob/66839a57e572a07c046b0ba98753f30a7026cbd8/src/getSchemaDump.ts#L65
|
|
1739
1853
|
.replace(/AUTO_INCREMENT\s*=\s*\d+ /g, ""));
|
|
1740
1854
|
}
|
|
1855
|
+
if (dialect === "postgresql") {
|
|
1856
|
+
const [tableMeta, relations] = await Promise.all([
|
|
1857
|
+
getTableMeta(table),
|
|
1858
|
+
getRelationsManyToOne(table)
|
|
1859
|
+
]);
|
|
1860
|
+
const refByFk = new Map(relations.map((r) => [r.foreignKey, r]));
|
|
1861
|
+
const columnDefs = tableMeta.map((c) => {
|
|
1862
|
+
let def = `"${c.Field.replace(/"/g, '""')}" ${c.Type} ${c.Null === "YES" ? "NULL" : "NOT NULL"}`;
|
|
1863
|
+
if (c.Default != null && c.Default !== "") {
|
|
1864
|
+
def += ` DEFAULT ${c.Default}`;
|
|
1865
|
+
}
|
|
1866
|
+
if (c.Key === "PRI")
|
|
1867
|
+
def += " PRIMARY KEY";
|
|
1868
|
+
if (c.Key === "UNI")
|
|
1869
|
+
def += " UNIQUE";
|
|
1870
|
+
const ref = refByFk.get(c.Field);
|
|
1871
|
+
if (ref != null) {
|
|
1872
|
+
def += ` REFERENCES "${ref.referencedTable.replace(/"/g, '""')}" ("${ref.referencedKey.replace(/"/g, '""')}")`;
|
|
1873
|
+
}
|
|
1874
|
+
return def;
|
|
1875
|
+
});
|
|
1876
|
+
return `CREATE TABLE "${table.replace(/"/g, '""')}" (\n ${columnDefs.join(",\n ")}\n)`;
|
|
1877
|
+
}
|
|
1741
1878
|
return Promise.resolve(null);
|
|
1742
1879
|
}
|
|
1743
1880
|
function getJSONSchemaObjProperties(tableMeta) {
|
|
1744
1881
|
return tableMeta.reduce((acc, m) => {
|
|
1745
|
-
const baseType = getBaseJSONType(m.Type);
|
|
1882
|
+
const baseType = getBaseJSONType(m.Type, getCtx().dialect);
|
|
1746
1883
|
const format = getPropertyFormat(m.Type);
|
|
1747
1884
|
const nullable = m.Null === "YES";
|
|
1748
1885
|
const isEnum = m.Type.startsWith("enum");
|
|
@@ -1778,7 +1915,29 @@ function getJSONTypes(baseType, nullable) {
|
|
|
1778
1915
|
return baseType;
|
|
1779
1916
|
}
|
|
1780
1917
|
// https://github.com/mysqljs/mysql#type-casting
|
|
1781
|
-
function getBaseJSONType(sqlType) {
|
|
1918
|
+
function getBaseJSONType(sqlType, dialect) {
|
|
1919
|
+
if (dialect === "postgresql") {
|
|
1920
|
+
if (sqlType === "boolean" || sqlType === "bool")
|
|
1921
|
+
return "boolean";
|
|
1922
|
+
if (["smallint", "int2", "integer", "int4", "bigint", "int8"].includes(sqlType)) {
|
|
1923
|
+
return "integer";
|
|
1924
|
+
}
|
|
1925
|
+
if (["real", "float4", "double precision", "float8"].includes(sqlType) ||
|
|
1926
|
+
sqlType.startsWith("numeric") ||
|
|
1927
|
+
sqlType.startsWith("decimal")) {
|
|
1928
|
+
return "number";
|
|
1929
|
+
}
|
|
1930
|
+
if (["text", "uuid", "json", "jsonb"].includes(sqlType) ||
|
|
1931
|
+
sqlType === "date" ||
|
|
1932
|
+
sqlType === "time" ||
|
|
1933
|
+
sqlType.startsWith("timestamp") ||
|
|
1934
|
+
sqlType.startsWith("character varying") ||
|
|
1935
|
+
sqlType.startsWith("character") ||
|
|
1936
|
+
sqlType.startsWith("enum")) {
|
|
1937
|
+
return "string";
|
|
1938
|
+
}
|
|
1939
|
+
throw new Error("Unable to map to JSON type: " + sqlType);
|
|
1940
|
+
}
|
|
1782
1941
|
if (
|
|
1783
1942
|
// TODO?
|
|
1784
1943
|
sqlType === "tinyint(1)" ||
|
|
@@ -1863,7 +2022,10 @@ function getPropertyEnum(sqlType) {
|
|
|
1863
2022
|
return c;
|
|
1864
2023
|
}
|
|
1865
2024
|
function getPropertyFormat(sqlType) {
|
|
1866
|
-
if (sqlType === "datetime" ||
|
|
2025
|
+
if (sqlType === "datetime" ||
|
|
2026
|
+
sqlType === "datetime2" ||
|
|
2027
|
+
sqlType === "timestamp" ||
|
|
2028
|
+
sqlType.startsWith("timestamp")) {
|
|
1867
2029
|
// TODO: not sure this is correct for `timestamp`
|
|
1868
2030
|
return "date-time";
|
|
1869
2031
|
}
|
|
@@ -1882,9 +2044,15 @@ function getPropertyFormat(sqlType) {
|
|
|
1882
2044
|
return undefined;
|
|
1883
2045
|
}
|
|
1884
2046
|
async function getTableNames() {
|
|
2047
|
+
const { dialect, query } = getCtx();
|
|
1885
2048
|
if (dialect === "mysql") {
|
|
1886
2049
|
return query("SHOW TABLES").then((xs) => xs.flatMap((x) => Object.values(x)).sort());
|
|
1887
2050
|
}
|
|
2051
|
+
if (dialect === "postgresql") {
|
|
2052
|
+
return query(`SELECT table_name FROM information_schema.tables
|
|
2053
|
+
WHERE table_schema = 'public' AND table_type = 'BASE TABLE'
|
|
2054
|
+
ORDER BY table_name`).then((rows) => rows.map((r) => r.table_name));
|
|
2055
|
+
}
|
|
1888
2056
|
throw new Error("Unsupported dialect: " + dialect);
|
|
1889
2057
|
}
|
|
1890
2058
|
function getMysql2sqliteSrc() {
|
|
@@ -45,7 +45,7 @@ export type TContext = {
|
|
|
45
45
|
[k: string]: any;
|
|
46
46
|
};
|
|
47
47
|
export type TMiddleware = (params: TResolveParams, next: (params: TResolveParams) => Promise<any>) => Promise<any>;
|
|
48
|
-
export type IDialect = "mysql" | "sqlite";
|
|
48
|
+
export type IDialect = "mysql" | "sqlite" | "postgresql";
|
|
49
49
|
export type TDbCall = (q: string) => Promise<any>;
|
|
50
50
|
export type TFormatQuery = (q: string, values: any[]) => string;
|
|
51
51
|
export type TBeginTransaction = () => Promise<TBeginTransactionResult>;
|
|
@@ -120,6 +120,7 @@ export type IGetSQLASTInput = {
|
|
|
120
120
|
artifacts: IArtifacts;
|
|
121
121
|
dialect: IDialect;
|
|
122
122
|
firstChild?: IASTChildColumn | IASTChildComposite;
|
|
123
|
+
escapeId: (x: string) => string;
|
|
123
124
|
};
|
|
124
125
|
export type IRelation = {
|
|
125
126
|
table: string;
|
|
@@ -0,0 +1,30 @@
|
|
|
1
|
+
import { RedisConfig } from "./Cache";
import type { IRuntime, TMiddleware, TResolveParams, IArtifacts, ISupplementClientOpts } from "./IRuntime";
import { PostgreSQL } from "./lib/PostgreSQL";
/**
 * PostgreSQL-backed implementation of the generated data-service runtime
 * (IRuntime), parallel to the MySQL and SQLite runtimes.
 */
export declare class RuntimePostgreSQL implements IRuntime {
    #private;
    constructor(clientOpts: ConstructorParameters<typeof PostgreSQL>[0], otherOpts: {
        supplementClientOpts?: ISupplementClientOpts;
        redis?: RedisConfig;
    }, artifacts: IArtifacts);
    /** Resolves a generated query/mutation against the database. */
    resolve(input: TResolveParams): Promise<any>;
    /** Formats `sql` with MySQL-style ??/? placeholders, then executes it. */
    $queryRaw(sql: string, values?: any[]): Promise<any[]>;
    /** Registers a middleware invoked around every resolve call. */
    $use(middleware: TMiddleware): Promise<void>;
    $whereNeedsProcessing(where: any): boolean;
    $prepareWhere(artifacts: IArtifacts, table: string, data: any): Promise<{}>;
    /** Shuts down the optional cache, then drains the connection pool. */
    $shutdown(): Promise<void>;
    /**
     * Begins a transaction on a dedicated pooled client; resolves with
     * commit/rollback and transaction-bound query functions.
     */
    $startTransaction(input?: {
        isolationLevel?: "READ UNCOMMITTED" | "READ COMMITTED" | "REPEATABLE READ" | "SERIALIZABLE";
    }): Promise<{
        commit: () => Promise<void>;
        rollback: () => Promise<void>;
        dbCall: (q: string) => Promise<any[]>;
        queryRaw: (q: string, values: any[]) => Promise<any[]>;
    }>;
    private dbCall;
    /**
     * Converts MySQL-style ?? (identifier) and ? (value) placeholders to
     * pg-format placeholders (%I and %L) and formats the query.
     */
    private formatQuery;
}
|
|
@@ -0,0 +1,73 @@
|
|
|
1
|
+
"use strict";
// Compiled output of RuntimePostgreSQL.ts: TypeScript `#private` class fields
// are downleveled to WeakMaps plus the __classPrivateFieldGet/Set helpers below.
var __classPrivateFieldSet = (this && this.__classPrivateFieldSet) || function (receiver, state, value, kind, f) {
    if (kind === "m") throw new TypeError("Private method is not writable");
    if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter");
    if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it");
    return (kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value;
};
var __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) {
    if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter");
    if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it");
    return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver);
};
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
var _RuntimePostgreSQL_dialect, _RuntimePostgreSQL_pgClient, _RuntimePostgreSQL_clientCache, _RuntimePostgreSQL_middlewareHandler;
Object.defineProperty(exports, "__esModule", { value: true });
exports.RuntimePostgreSQL = void 0;
const pg_format_1 = __importDefault(require("pg-format"));
const Cache_1 = __importDefault(require("./Cache"));
const PostgreSQL_1 = require("./lib/PostgreSQL");
const shared_1 = require("./lib/shared");
// PostgreSQL implementation of the IRuntime interface: wires the shared
// dialect-agnostic resolver to a pg connection pool and pg-format escaping.
class RuntimePostgreSQL {
    constructor(clientOpts, otherOpts, artifacts) {
        _RuntimePostgreSQL_dialect.set(this, "postgresql");
        _RuntimePostgreSQL_pgClient.set(this, void 0);
        _RuntimePostgreSQL_clientCache.set(this, void 0);
        _RuntimePostgreSQL_middlewareHandler.set(this, void 0);
        __classPrivateFieldSet(this, _RuntimePostgreSQL_middlewareHandler, new shared_1.MiddlewareHandler(), "f");
        // The Redis-backed result cache is optional: only built when configured.
        // NOTE(review): otherOpts.supplementClientOpts is accepted (per the .d.ts)
        // but not used in this constructor — confirm whether that is intentional.
        if (otherOpts.redis) {
            __classPrivateFieldSet(this, _RuntimePostgreSQL_clientCache, new Cache_1.default(otherOpts.redis, clientOpts?.debug), "f");
        }
        __classPrivateFieldSet(this, _RuntimePostgreSQL_pgClient, new PostgreSQL_1.PostgreSQL(clientOpts), "f");
    }
    // Entry point for generated resolvers. Delegates to the shared resolve()
    // with this runtime's dbCall/formatQuery/transaction implementations;
    // input.dbCall (a transaction-bound call) takes precedence when present.
    async resolve(input) {
        return (0, shared_1.resolve)(input, input.dbCall ?? this.dbCall.bind(this), this.formatQuery.bind(this), this.$startTransaction.bind(this), __classPrivateFieldGet(this, _RuntimePostgreSQL_dialect, "f"), __classPrivateFieldGet(this, _RuntimePostgreSQL_middlewareHandler, "f"), input.context ?? {}, __classPrivateFieldGet(this, _RuntimePostgreSQL_clientCache, "f"));
    }
    // Formats MySQL-style ??/? placeholders into the query, then executes it
    // outside any transaction.
    async $queryRaw(sql, values) {
        return this.dbCall(this.formatQuery(sql, values ?? []));
    }
    // Registers a middleware to run around every resolve call.
    async $use(middleware) {
        __classPrivateFieldGet(this, _RuntimePostgreSQL_middlewareHandler, "f").register(middleware);
    }
    $whereNeedsProcessing(where) {
        return (0, shared_1.whereNeedsProcessing)(where);
    }
    async $prepareWhere(artifacts, table, data) {
        return (0, shared_1._prepareWhere)(artifacts, table, data, this.dbCall.bind(this), this.formatQuery.bind(this));
    }
    // Shuts down the optional cache first, then drains the pg pool.
    async $shutdown() {
        if (__classPrivateFieldGet(this, _RuntimePostgreSQL_clientCache, "f")) {
            await __classPrivateFieldGet(this, _RuntimePostgreSQL_clientCache, "f").shutdown();
        }
        await __classPrivateFieldGet(this, _RuntimePostgreSQL_pgClient, "f").endPool();
    }
    // Opens a transaction on a dedicated pooled client; see PostgreSQL.beginTransaction.
    async $startTransaction(input) {
        return __classPrivateFieldGet(this, _RuntimePostgreSQL_pgClient, "f").beginTransaction(input?.isolationLevel);
    }
    // NOTE(review): the global backtick→double-quote replacement also rewrites
    // backticks occurring inside string literals within q — confirm generated
    // queries never contain literal backtick characters.
    dbCall(q) {
        return __classPrivateFieldGet(this, _RuntimePostgreSQL_pgClient, "f").query(
        // join-monster doesn't use formatQuery, so this is needed
        q.replace(/`/g, '"'));
    }
    /**
     * Converts MySQL-style ?? (identifier) and ? (value) placeholders to
     * pg-format placeholders (%I and %L) and formats the query.
     */
    formatQuery(q, values) {
        return pg_format_1.default.withArray(q.replace(/`/g, '"').replace(/\?\?/g, "%I").replace(/\?/g, "%L"), values);
    }
}
exports.RuntimePostgreSQL = RuntimePostgreSQL;
_RuntimePostgreSQL_dialect = new WeakMap(), _RuntimePostgreSQL_pgClient = new WeakMap(), _RuntimePostgreSQL_clientCache = new WeakMap(), _RuntimePostgreSQL_middlewareHandler = new WeakMap();
|
|
@@ -0,0 +1,14 @@
|
|
|
1
|
+
import { Pool } from "pg";
// Configuration object accepted by pg's Pool constructor (host, port, user, …).
export type TPoolConfig = ConstructorParameters<typeof Pool>[0];
/** Thin promise-based wrapper around a pg connection pool. */
export declare class PostgreSQL {
    pool: Pool;
    constructor(opts: TPoolConfig);
    /** Runs a single query on the pool and resolves with the result rows. */
    query(q: string): Promise<any[]>;
    /**
     * Checks a dedicated client out of the pool and opens a transaction on it.
     * Resolves with commit/rollback plus transaction-bound query functions;
     * the client is released back to the pool when the transaction ends.
     */
    beginTransaction(isolationLevel?: "READ UNCOMMITTED" | "READ COMMITTED" | "REPEATABLE READ" | "SERIALIZABLE"): Promise<{
        commit: () => Promise<void>;
        rollback: () => Promise<void>;
        dbCall: (q: string) => Promise<any[]>;
        queryRaw: (q: string, values: any[]) => Promise<any[]>;
    }>;
    /** Closes every connection in the pool. */
    endPool(): Promise<void>;
}
|
|
@@ -0,0 +1,110 @@
|
|
|
1
|
+
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.PostgreSQL = void 0;
const pg_1 = require("pg");
// OID 20 = int8 (BIGINT/BIGSERIAL). Parse as number for consistency with MySQL.
// NOTE(review): parseInt silently loses precision above Number.MAX_SAFE_INTEGER.
const BIGINT_OID = 20;
pg_1.types.setTypeParser(BIGINT_OID, (val) => val === null ? null : parseInt(val, 10));
// OID 114 = json, 3802 = jsonb. Return raw string for consistency with MySQL.
pg_1.types.setTypeParser(114, (val) => val);
pg_1.types.setTypeParser(3802, (val) => val);
/**
 * Thin promise-based wrapper around a pg connection pool, providing simple
 * query execution and client-scoped transactions.
 */
class PostgreSQL {
    /**
     * @param opts pg Pool configuration. When `opts.debug` is truthy, pool
     *   lifecycle events are additionally logged to the console.
     */
    constructor(opts) {
        this.pool = new pg_1.Pool(opts);
        if (opts && opts.debug) {
            this.pool.on("acquire", () => {
                console.log("Connection acquired from pool");
            });
            this.pool.on("connect", () => {
                console.log("Client connected to PostgreSQL");
            });
            this.pool.on("remove", () => {
                console.log("Client removed from pool");
            });
        }
    }
    /** Runs a single query on the pool and resolves with the result rows. */
    query(q) {
        return this.pool.query(q).then((result) => result.rows);
    }
    /**
     * Checks a dedicated client out of the pool and opens a transaction on it.
     *
     * @param isolationLevel optional SQL isolation level for the transaction.
     * @returns commit/rollback plus transaction-bound query functions. The
     *   client is released back to the pool exactly once, on commit, rollback,
     *   or query failure.
     * @throws {Error} when isolationLevel is not one of the four SQL levels.
     */
    async beginTransaction(isolationLevel) {
        const client = await this.pool.connect();
        let released = false;
        // The client must be returned to the pool exactly once, whichever of
        // the commit/rollback/error paths runs first.
        const releaseOnce = () => {
            if (!released) {
                released = true;
                client.release();
            }
        };
        // Shared failure path: best-effort ROLLBACK, release the client, and
        // rethrow the original error (which is the one worth surfacing).
        const failTransaction = async (err) => {
            try {
                await client.query("ROLLBACK");
            }
            catch {
                // Ignore
            }
            releaseOnce();
            throw err;
        };
        try {
            if (isolationLevel != null) {
                if (isolationLevel !== "READ UNCOMMITTED" &&
                    isolationLevel !== "READ COMMITTED" &&
                    isolationLevel !== "REPEATABLE READ" &&
                    isolationLevel !== "SERIALIZABLE") {
                    throw new Error(`Invalid isolationLevel: ${isolationLevel}`);
                }
                // PostgreSQL: BEGIN ISOLATION LEVEL <level>
                await client.query(`BEGIN ISOLATION LEVEL ${isolationLevel}`);
            }
            else {
                await client.query("BEGIN");
            }
        }
        catch (err) {
            // Bug fix: a failing BEGIN previously leaked the checked-out client
            // (it was only released on the invalid-isolation-level path).
            releaseOnce();
            throw err;
        }
        return {
            commit: () => {
                if (released)
                    return Promise.resolve();
                return client
                    .query("COMMIT")
                    .then(() => undefined)
                    .catch(failTransaction)
                    .finally(releaseOnce);
            },
            rollback: () => {
                if (released)
                    return Promise.resolve();
                return client
                    .query("ROLLBACK")
                    .then(() => undefined)
                    .finally(releaseOnce);
            },
            // Transaction-bound plain query; any failure aborts the transaction.
            dbCall: (q) => client
                .query(q)
                .then((result) => result.rows)
                .catch(failTransaction),
            // Transaction-bound parameterized query ($1, $2, … placeholders).
            queryRaw: (q, values) => client
                .query(q, values)
                .then((result) => result.rows)
                .catch(failTransaction)
        };
    }
    /** Closes every connection in the pool. */
    async endPool() {
        await this.pool.end();
    }
}
exports.PostgreSQL = PostgreSQL;
|
|
@@ -34,7 +34,7 @@ const _ = __importStar(require("lodash/fp"));
|
|
|
34
34
|
const getOrderBy_1 = require("./getOrderBy");
|
|
35
35
|
const namespace = new alias_namespace_1.default(true);
|
|
36
36
|
function getSqlAst(input) {
|
|
37
|
-
const { table, fieldName, fields, args, grabMany, sqlJoin, sqlBatch, junction, getWhere, artifacts, rowWithMatchingCursor, dialect } = input;
|
|
37
|
+
const { table, fieldName, fields, args, grabMany, sqlJoin, sqlBatch, junction, getWhere, artifacts, rowWithMatchingCursor, dialect, escapeId } = input;
|
|
38
38
|
const tableArtifacts = artifacts[table];
|
|
39
39
|
const primaryKey = tableArtifacts.primaryKey;
|
|
40
40
|
const format = SqlString.format.bind(SqlString);
|
|
@@ -162,9 +162,9 @@ function getSqlAst(input) {
|
|
|
162
162
|
// TODO - where
|
|
163
163
|
// sqlJoins: [
|
|
164
164
|
// (t: string, junctionTable: string, args: any) =>
|
|
165
|
-
// `${t}.${relationField.relations[0].referencedKey} = ${junctionTable}.${relationField.relations[0].foreignKey}`,
|
|
165
|
+
// `${t}.${escapeId(relationField.relations[0].referencedKey)} = ${junctionTable}.${escapeId(relationField.relations[0].foreignKey)}`,
|
|
166
166
|
// (junctionTable: string, t: string, args: any) =>
|
|
167
|
-
// `${junctionTable}.${relationField.relations[1].foreignKey} = ${t}.${relationField.relations[1].referencedKey}`,
|
|
167
|
+
// `${junctionTable}.${escapeId(relationField.relations[1].foreignKey)} = ${t}.${escapeId(relationField.relations[1].referencedKey)}`,
|
|
168
168
|
// ],
|
|
169
169
|
// We have to use sqlBatch instead of sqlJoin because pagination is not
|
|
170
170
|
// supported with `mysql` dialect, and LIMITing the root list means
|
|
@@ -172,12 +172,13 @@ function getSqlAst(input) {
|
|
|
172
172
|
sqlBatch: {
|
|
173
173
|
thisKey: columnToASTChild(relationField.relations[0].foreignKey, namespace, asJunction),
|
|
174
174
|
parentKey: columnToASTChild(relationField.relations[0].referencedKey, namespace),
|
|
175
|
-
sqlJoin: (junctionTable, t, args) => `${junctionTable}.${relationField.relations[1].foreignKey} = ${t}.${relationField.relations[1].referencedKey}`
|
|
175
|
+
sqlJoin: (junctionTable, t, args) => `${junctionTable}.${escapeId(relationField.relations[1].foreignKey)} = ${t}.${escapeId(relationField.relations[1].referencedKey)}`
|
|
176
176
|
}
|
|
177
177
|
},
|
|
178
178
|
getWhere,
|
|
179
179
|
artifacts,
|
|
180
|
-
dialect
|
|
180
|
+
dialect,
|
|
181
|
+
escapeId
|
|
181
182
|
});
|
|
182
183
|
}
|
|
183
184
|
return getSqlAst({
|
|
@@ -207,7 +208,8 @@ function getSqlAst(input) {
|
|
|
207
208
|
},
|
|
208
209
|
getWhere,
|
|
209
210
|
artifacts,
|
|
210
|
-
dialect
|
|
211
|
+
dialect,
|
|
212
|
+
escapeId
|
|
211
213
|
});
|
|
212
214
|
}
|
|
213
215
|
else {
|
|
@@ -230,6 +230,7 @@ async function getData(input, dbCall, formatQuery, dialect) {
|
|
|
230
230
|
action === "findManyPaginated" ||
|
|
231
231
|
action === "updateMany" ||
|
|
232
232
|
action === "deleteMany";
|
|
233
|
+
const escapeId = (0, stringifyWhere_1.getEscapeId)(dialect);
|
|
233
234
|
const sqlAST = (0, getSqlAst_1.getSqlAst)({
|
|
234
235
|
...input,
|
|
235
236
|
table: input.resource,
|
|
@@ -239,9 +240,12 @@ async function getData(input, dbCall, formatQuery, dialect) {
|
|
|
239
240
|
orderBy: orderByListPaginatedRootResult?.orderBy,
|
|
240
241
|
rowWithMatchingCursor,
|
|
241
242
|
dialect,
|
|
242
|
-
grabMany
|
|
243
|
+
grabMany,
|
|
244
|
+
escapeId
|
|
243
245
|
});
|
|
244
|
-
const options = {
|
|
246
|
+
const options = {
|
|
247
|
+
dialect: dialect === "postgresql" ? "pg" : dialect === "sqlite" ? "mysql" : dialect
|
|
248
|
+
};
|
|
245
249
|
let { sql, shapeDefinition } = await (0, util_1.compileSqlAST)(sqlAST, context, options);
|
|
246
250
|
if (!sql) {
|
|
247
251
|
// return {};
|
|
@@ -315,7 +319,8 @@ async function getData(input, dbCall, formatQuery, dialect) {
|
|
|
315
319
|
// We don't want the where clause to include cursor-related stuff
|
|
316
320
|
rowWithMatchingCursor: null,
|
|
317
321
|
dialect,
|
|
318
|
-
grabMany: true
|
|
322
|
+
grabMany: true,
|
|
323
|
+
escapeId
|
|
319
324
|
});
|
|
320
325
|
// Because orderBy doesn't matter for total count.
|
|
321
326
|
// getOrderBy adds an element if paginating, so deleting args.$orderBy
|
|
@@ -464,6 +469,26 @@ const runCreateTreeMySQL = async (table, referencedKey, referencedKeyValue, colu
|
|
|
464
469
|
}
|
|
465
470
|
return Promise.all(allColumns.map((cs, i) => dbCall(formatQuery(`INSERT INTO ?? (??) VALUES (?)`, [table, cs, allValues[i]])).then((x) => x.insertId)));
|
|
466
471
|
};
|
|
472
|
+
// PostgreSQL variant of the per-level tree insert: uses `RETURNING <pk>` to
// report the new row ids instead of MySQL's insertId.
const runCreateTreePostgreSQL = async (table, referencedKey, referencedKeyValue, columns, values, dbCall, formatQuery, artifacts) => {
    if (artifacts == null) {
        throw new Error("artifacts required for PostgreSQL runCreateTree");
    }
    // RETURNING needs the table's primary key column; default to "id".
    const primaryKey = artifacts[table]?.primaryKey ?? "id";
    // When this level hangs off a parent row, append the foreign-key column
    // and the parent's id to every row being inserted.
    const insertColumns = typeof referencedKey === "string"
        ? columns.map((cols) => cols.concat(referencedKey))
        : columns;
    const insertValues = referencedKeyValue != null
        ? values.map((vals) => vals.concat(referencedKeyValue))
        : values;
    const inserts = insertColumns.map((cols, i) => {
        const sql = formatQuery(`INSERT INTO ?? (??) VALUES (?) RETURNING ??`, [
            table,
            cols,
            insertValues[i],
            primaryKey
        ]);
        return dbCall(sql).then((rows) => rows[0]?.[primaryKey]);
    });
    return Promise.all(inserts);
};
|
|
467
492
|
async function create(input, dbCall, formatQuery, beginTransaction, dialect, context) {
|
|
468
493
|
async function _create() {
|
|
469
494
|
// Shallow clone, as we're going to mutate later
|
|
@@ -485,7 +510,8 @@ async function create(input, dbCall, formatQuery, beginTransaction, dialect, con
|
|
|
485
510
|
});
|
|
486
511
|
if (hasChildren) {
|
|
487
512
|
const { dbCall: dbCallTransaction, commit } = await beginTransaction();
|
|
488
|
-
const
|
|
513
|
+
const runCreateTreeSQL = dialect === "postgresql" ? runCreateTreePostgreSQL : runCreateTreeMySQL;
|
|
514
|
+
const id = await runCreateTree(getCreateTree([data], input.resource, null, context?.specialCaseUuidColumn, dialect, input.artifacts), null, runCreateTreeSQL, dbCallTransaction, formatQuery, dialect, input.artifacts).then((xs) => xs[0]);
|
|
489
515
|
await commit();
|
|
490
516
|
return id;
|
|
491
517
|
}
|
|
@@ -493,6 +519,16 @@ async function create(input, dbCall, formatQuery, beginTransaction, dialect, con
|
|
|
493
519
|
data = processCreateData(data, tableArtifacts, dialect, context?.specialCaseUuidColumn);
|
|
494
520
|
const columns = Object.keys(data);
|
|
495
521
|
const values = Object.values(data);
|
|
522
|
+
if (dialect === "postgresql") {
|
|
523
|
+
const primaryKey = tableArtifacts.primaryKey;
|
|
524
|
+
const inserted = await dbCall(formatQuery(`INSERT INTO ?? (??) VALUES (?) RETURNING ??`, [
|
|
525
|
+
input.resource,
|
|
526
|
+
columns,
|
|
527
|
+
values,
|
|
528
|
+
primaryKey
|
|
529
|
+
]));
|
|
530
|
+
return inserted[0]?.[primaryKey];
|
|
531
|
+
}
|
|
496
532
|
const inserted = await dbCall(formatQuery(`INSERT INTO ?? (??) VALUES (?)`, [
|
|
497
533
|
input.resource,
|
|
498
534
|
columns,
|
|
@@ -506,7 +542,8 @@ async function create(input, dbCall, formatQuery, beginTransaction, dialect, con
|
|
|
506
542
|
}
|
|
507
543
|
function processCreateData(data, tableArtifacts, dialect, specialCaseUuidColumn) {
|
|
508
544
|
let out = { ...data };
|
|
509
|
-
if (dialect === "mysql"
|
|
545
|
+
if ((dialect === "mysql" || dialect === "postgresql") &&
|
|
546
|
+
tableArtifacts.dateTimeFieldsCount > 0) {
|
|
510
547
|
for (let k in tableArtifacts.dateTimeFields) {
|
|
511
548
|
if (out[k] != null) {
|
|
512
549
|
out[k] = (0, getDateTimeStringMySQL_1.getDateTimeStringMySQL)(out[k]);
|
|
@@ -527,10 +564,10 @@ function processCreateData(data, tableArtifacts, dialect, specialCaseUuidColumn)
|
|
|
527
564
|
}
|
|
528
565
|
return out;
|
|
529
566
|
}
|
|
530
|
-
async function runCreateTree(tree, referencedKeyValue, runCreateTreeSQL, dbCall, formatQuery, dialect) {
|
|
531
|
-
const ids = await runCreateTreeSQL(tree.table, tree.referencedKey, referencedKeyValue, tree.columns, tree.values, dbCall, formatQuery);
|
|
567
|
+
// Recursively inserts a tree of rows. Each level's rows are inserted first so
// their generated ids can be passed down as foreign keys to the children;
// children of each row are then created concurrently.
async function runCreateTree(tree, referencedKeyValue, runCreateTreeSQL, dbCall, formatQuery, dialect, artifacts) {
    const ids = await runCreateTreeSQL(tree.table, tree.referencedKey, referencedKeyValue, tree.columns, tree.values, dbCall, formatQuery, artifacts);
    const childGroups = tree.children;
    if (childGroups?.length > 0) {
        // childGroups[i] holds the child subtrees belonging to row ids[i].
        const pending = [];
        ids.forEach((id, i) => {
            for (const child of childGroups[i]) {
                pending.push(runCreateTree(child, id, runCreateTreeSQL, dbCall, formatQuery, dialect, artifacts));
            }
        });
        await Promise.all(pending);
    }
    return ids;
}
|
|
@@ -593,7 +630,8 @@ async function update(input, dbCall, formatQuery, dialect, cache) {
|
|
|
593
630
|
if (hasMappedFields(input.artifacts, input.resource, data)) {
|
|
594
631
|
await mapMappedFields(tableArtifacts, data, dbCall, formatQuery);
|
|
595
632
|
}
|
|
596
|
-
if (dialect === "mysql"
|
|
633
|
+
if ((dialect === "mysql" || dialect === "postgresql") &&
|
|
634
|
+
tableArtifacts.dateTimeFieldsCount > 0) {
|
|
597
635
|
for (let k in tableArtifacts.dateTimeFields) {
|
|
598
636
|
if (data[k] != null) {
|
|
599
637
|
data[k] = (0, getDateTimeStringMySQL_1.getDateTimeStringMySQL)(data[k]);
|
|
@@ -670,9 +708,10 @@ function getUpdateQuery(table, data, where, dialect, formatQuery) {
|
|
|
670
708
|
return formatQuery(q, values);
|
|
671
709
|
}
|
|
672
710
|
// Renders a SQL string-concatenation expression for the active dialect:
// MySQL uses the CONCAT() function; sqlite and postgresql both use the
// SQL-standard || operator.
function stringifyConcat(operand1, operand2, dialect) {
    return dialect === "mysql"
        ? `CONCAT(${operand1}, ${operand2})`
        : `(${operand1} || ${operand2})`;
}
|
|
678
717
|
async function updateMany(input, dbCall, formatQuery, dialect, cache) {
|
|
@@ -698,7 +737,8 @@ async function updateMany(input, dbCall, formatQuery, dialect, cache) {
|
|
|
698
737
|
if (hasMappedFields(input.artifacts, input.resource, data)) {
|
|
699
738
|
await mapMappedFields(tableArtifacts, data, dbCall, formatQuery);
|
|
700
739
|
}
|
|
701
|
-
if (dialect === "mysql"
|
|
740
|
+
if ((dialect === "mysql" || dialect === "postgresql") &&
|
|
741
|
+
tableArtifacts.dateTimeFieldsCount > 0) {
|
|
702
742
|
for (let k in tableArtifacts.dateTimeFields) {
|
|
703
743
|
if (data[k] != null) {
|
|
704
744
|
data[k] = (0, getDateTimeStringMySQL_1.getDateTimeStringMySQL)(data[k]);
|
|
@@ -30,6 +30,7 @@ const _ = __importStar(require("lodash/fp"));
|
|
|
30
30
|
const MySqlString = __importStar(require("sqlstring"));
|
|
31
31
|
// @ts-expect-error
|
|
32
32
|
const SqliteString = __importStar(require("sqlstring-sqlite"));
|
|
33
|
+
const pgFormat = __importStar(require("pg-format"));
|
|
33
34
|
function stringifyWhere(input) {
|
|
34
35
|
const { where, table, dialect, args, orderBy, rowWithMatchingCursor } = input;
|
|
35
36
|
const escapeId = getEscapeId(dialect);
|
|
@@ -237,6 +238,9 @@ function getEscapeId(dialect) {
|
|
|
237
238
|
if (dialect === "sqlite") {
|
|
238
239
|
return SqliteString.escapeId.bind(SqliteString);
|
|
239
240
|
}
|
|
241
|
+
if (dialect === "postgresql") {
|
|
242
|
+
return (id) => pgFormat.ident(id);
|
|
243
|
+
}
|
|
240
244
|
throw new Error("Unsupported dialect: " + dialect);
|
|
241
245
|
}
|
|
242
246
|
function getEscape(dialect) {
|
|
@@ -246,5 +250,8 @@ function getEscape(dialect) {
|
|
|
246
250
|
if (dialect === "sqlite") {
|
|
247
251
|
return SqliteString.escape.bind(SqliteString);
|
|
248
252
|
}
|
|
253
|
+
if (dialect === "postgresql") {
|
|
254
|
+
return (value) => pgFormat.literal(value);
|
|
255
|
+
}
|
|
249
256
|
throw new Error("Unsupported dialect: " + dialect);
|
|
250
257
|
}
|
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "@technicity/data-service-generator",
|
|
3
|
-
"version": "0.
|
|
3
|
+
"version": "0.23.0-next.1",
|
|
4
4
|
"main": "./dist/index.js",
|
|
5
5
|
"files": [
|
|
6
6
|
"dist"
|
|
@@ -25,6 +25,8 @@
|
|
|
25
25
|
"lodash": "^4.17.23",
|
|
26
26
|
"loglevel": "^1.8.1",
|
|
27
27
|
"mysql2": "^3.10.1",
|
|
28
|
+
"pg": "^8.13.1",
|
|
29
|
+
"pg-format": "^1.0.4",
|
|
28
30
|
"prettier": "^2.1.2",
|
|
29
31
|
"sqlstring": "^2.3.2",
|
|
30
32
|
"sqlstring-sqlite": "^0.1.1",
|
|
@@ -35,9 +37,11 @@
|
|
|
35
37
|
"@swc/core": "^1.3.36",
|
|
36
38
|
"@swc/jest": "^0.2.24",
|
|
37
39
|
"@testcontainers/mysql": "^10.23.0",
|
|
40
|
+
"@testcontainers/postgresql": "^10.23.0",
|
|
38
41
|
"@types/fs-extra": "9.0.13",
|
|
39
42
|
"@types/lodash": "4.14.177",
|
|
40
43
|
"@types/node": "^18.14.1",
|
|
44
|
+
"@types/pg": "^8.11.10",
|
|
41
45
|
"@types/prettier": "^2.1.5",
|
|
42
46
|
"@types/sqlstring": "^2.2.1",
|
|
43
47
|
"@types/uuid": "^8.3.1",
|