@teamkeel/functions-runtime 0.412.1 → 0.413.2-next.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.cjs +2842 -0
- package/dist/index.cjs.map +1 -0
- package/dist/index.d.cts +739 -0
- package/dist/index.d.ts +739 -0
- package/dist/index.js +2817 -0
- package/dist/index.js.map +1 -0
- package/package.json +30 -4
- package/.env.test +0 -2
- package/compose.yaml +0 -10
- package/src/Duration.js +0 -40
- package/src/Duration.test.js +0 -34
- package/src/File.js +0 -295
- package/src/ModelAPI.js +0 -377
- package/src/ModelAPI.test.js +0 -1428
- package/src/QueryBuilder.js +0 -184
- package/src/QueryContext.js +0 -90
- package/src/RequestHeaders.js +0 -21
- package/src/TimePeriod.js +0 -89
- package/src/TimePeriod.test.js +0 -148
- package/src/applyAdditionalQueryConstraints.js +0 -22
- package/src/applyJoins.js +0 -67
- package/src/applyWhereConditions.js +0 -124
- package/src/auditing.js +0 -110
- package/src/auditing.test.js +0 -330
- package/src/camelCasePlugin.js +0 -52
- package/src/casing.js +0 -54
- package/src/casing.test.js +0 -56
- package/src/consts.js +0 -14
- package/src/database.js +0 -244
- package/src/errors.js +0 -160
- package/src/handleJob.js +0 -110
- package/src/handleJob.test.js +0 -270
- package/src/handleRequest.js +0 -153
- package/src/handleRequest.test.js +0 -463
- package/src/handleRoute.js +0 -112
- package/src/handleSubscriber.js +0 -105
- package/src/index.d.ts +0 -317
- package/src/index.js +0 -38
- package/src/parsing.js +0 -113
- package/src/parsing.test.js +0 -140
- package/src/permissions.js +0 -77
- package/src/permissions.test.js +0 -118
- package/src/tracing.js +0 -184
- package/src/tracing.test.js +0 -147
- package/src/tryExecuteFunction.js +0 -91
- package/src/tryExecuteJob.js +0 -29
- package/src/tryExecuteSubscriber.js +0 -17
- package/src/type-utils.js +0 -18
- package/vite.config.js +0 -7
package/src/Duration.test.js
DELETED
|
@@ -1,34 +0,0 @@
|
|
|
1
|
-
import { test, expect } from "vitest";
const { Duration } = require("./Duration");

test("fromISOString test", async () => {
  // Each entry: [ISO 8601 duration input, expected Postgres interval string].
  // toISOString() must round-trip the input unchanged.
  const cases = [
    ["P1Y2M3DT4H5M6S", "1 years 2 months 3 days 4 hours 5 minutes 6 seconds"],
    ["P2Y3M4D", "2 years 3 months 4 days"],
    ["PT4H5M6S", "4 hours 5 minutes 6 seconds"],
    ["P10Y", "10 years"],
    ["P20M", "20 months"],
    ["P31D", "31 days"],
    ["PT4H", "4 hours"],
    ["PT61M", "61 minutes"],
    ["PT76S", "76 seconds"],
  ];

  for (const [iso, postgres] of cases) {
    const duration = Duration.fromISOString(iso);
    expect(duration.toISOString()).toEqual(iso);
    expect(duration.toPostgres()).toEqual(postgres);
  }
});
|
package/src/File.js
DELETED
|
@@ -1,295 +0,0 @@
|
|
|
1
|
-
const {
|
|
2
|
-
S3Client,
|
|
3
|
-
PutObjectCommand,
|
|
4
|
-
GetObjectCommand,
|
|
5
|
-
} = require("@aws-sdk/client-s3");
|
|
6
|
-
const { fromEnv } = require("@aws-sdk/credential-providers");
|
|
7
|
-
const { getSignedUrl } = require("@aws-sdk/s3-request-presigner");
|
|
8
|
-
const { useDatabase } = require("./database");
|
|
9
|
-
const { DatabaseError } = require("./errors");
|
|
10
|
-
const KSUID = require("ksuid");
|
|
11
|
-
|
|
12
|
-
// Module-level singleton S3 client, or null when file storage is not
// backed by S3 (i.e. KEEL_FILES_BUCKET_NAME is not configured).
const s3Client = (() => {
  const bucketName = process.env.KEEL_FILES_BUCKET_NAME;
  if (!bucketName) {
    return null;
  }

  // Set in integration tests to send all AWS API calls to a test server
  // for mocking
  const testEndpoint = process.env.TEST_AWS_ENDPOINT;

  if (testEndpoint) {
    // If a custom endpoint is set we need to use a custom resolver. Just
    // setting the base endpoint isn't enough for S3 as the default resolver
    // uses the bucket name as a sub-domain, which likely won't work with the
    // custom endpoint. By implementing a full resolver we can force it to be
    // the endpoint we want. Test credentials are used in this mode.
    return new S3Client({
      region: process.env.KEEL_REGION,
      credentials: {
        accessKeyId: "test",
        secretAccessKey: "test",
      },
      endpointProvider: () => ({
        url: new URL(testEndpoint),
      }),
    });
  }

  // No test endpoint provided: use the environment's own configuration.
  return new S3Client({
    region: process.env.KEEL_REGION,
    credentials: fromEnv(),
  });
})();
|
|
48
|
-
|
|
49
|
-
class InlineFile {
  /**
   * An in-memory file held as a Blob. Contents are written with `write`
   * and persisted to storage with `store`.
   *
   * @param {{filename: string, contentType: string}} input
   */
  constructor({ filename, contentType }) {
    this._filename = filename;
    this._contentType = contentType;
    this._contents = null;
  }

  /**
   * Parses a data URL of the form
   * `data:<mime>;name=<filename>;base64,<payload>` into an InlineFile.
   * @param {string} dataURL
   * @returns {InlineFile}
   */
  static fromDataURL(dataURL) {
    const info = dataURL.split(",")[0].split(":")[1];
    const data = dataURL.split(",")[1];
    const mime = info.split(";")[0];
    const name = info.split(";")[1].split("=")[1];
    const buffer = Buffer.from(data, "base64");
    const file = new InlineFile({ filename: name, contentType: mime });
    file.write(buffer);

    return file;
  }

  // Size in bytes of the written contents; 0 when nothing has been written.
  get size() {
    if (this._contents) {
      return this._contents.size;
    }
    return 0;
  }

  get contentType() {
    return this._contentType;
  }

  get filename() {
    return this._filename;
  }

  // Replaces the in-memory contents with the given Buffer.
  write(buffer) {
    this._contents = new Blob([buffer]);
  }

  /**
   * Reads the in-memory contents as a Buffer.
   * NOTE(review): throws a TypeError if nothing has been written yet —
   * callers are expected to have called `write` (or `fromDataURL`) first.
   * @returns {Promise<Buffer>}
   */
  async read() {
    const arrayBuffer = await this._contents.arrayBuffer();
    const buffer = Buffer.from(arrayBuffer);

    return buffer;
  }

  /**
   * Uploads the contents to storage under a freshly generated KSUID key and
   * returns a `File` handle referencing the stored object.
   *
   * Bug fix: previously `this.size` was passed as storeFile's fifth
   * argument, which shifted `expires` out of its parameter slot — the
   * expiry was therefore always dropped (and a warning logged for Date
   * values). storeFile's signature is
   * (contents, key, filename, contentType, expires).
   *
   * @param {Date|null} expires Optional expiry for the stored object.
   * @returns {Promise<File>}
   */
  async store(expires = null) {
    const content = await this.read();
    const key = KSUID.randomSync().string;

    await storeFile(content, key, this._filename, this._contentType, expires);

    return new File({
      key: key,
      size: this.size,
      filename: this.filename,
      contentType: this.contentType,
    });
  }
}
|
|
118
|
-
|
|
119
|
-
class File extends InlineFile {
  /**
   * A file persisted to storage (S3 or the database), identified by its
   * storage key. Extends InlineFile so new contents can still be written
   * in memory and re-stored under the same key.
   */
  constructor(input) {
    super({ filename: input.filename, contentType: input.contentType });
    this._key = input.key;
    this._size = input.size;
  }

  // Builds a File from the database representation of a file column.
  static fromDbRecord({ key, filename, size, contentType }) {
    return new File({ key, filename, size, contentType });
  }

  get size() {
    return this._size;
  }

  get key() {
    return this._key;
  }

  /**
   * Reads the file contents as a Buffer. Precedence: unsaved in-memory
   * contents first, then S3 (when configured), then the keel_storage table.
   * @returns {Promise<Buffer>}
   * @throws {DatabaseError} When the database lookup fails or finds no row.
   */
  async read() {
    // Unsaved in-memory contents take precedence.
    if (this._contents) {
      const arrayBuffer = await this._contents.arrayBuffer();
      return Buffer.from(arrayBuffer);
    }

    if (s3Client) {
      const response = await s3Client.send(
        new GetObjectCommand({
          Bucket: process.env.KEEL_FILES_BUCKET_NAME,
          Key: "files/" + this.key,
        })
      );
      const bytes = await response.Body.transformToByteArray();
      return Buffer.from(bytes);
    }

    // default to db storage
    const db = useDatabase();
    try {
      const row = await db
        .selectFrom("keel_storage")
        .select("data")
        .where("id", "=", this.key)
        .executeTakeFirstOrThrow();
      return row.data;
    } catch (e) {
      throw new DatabaseError(e);
    }
  }

  /**
   * Persists the in-memory contents (if any have been written) under this
   * file's existing key; otherwise a no-op.
   * @param {Date|null} expires Optional expiry passed through to storage.
   * @returns {Promise<File>} This instance.
   */
  async store(expires = null) {
    // Only necessary to store the file if the contents have been changed
    if (this._contents) {
      const contents = await this.read();
      await storeFile(contents, this.key, this.filename, this.contentType, expires);
    }
    return this;
  }

  /**
   * Returns a URL the file can be fetched from: a one-hour presigned S3
   * URL when S3 is configured, otherwise a base64 data URL built from the
   * stored contents.
   * @returns {Promise<URL>}
   */
  async getPresignedUrl() {
    if (!s3Client) {
      const contents = await this.read();
      const dataurl = `data:${this.contentType};name=${
        this.filename
      };base64,${contents.toString("base64")}`;
      return new URL(dataurl);
    }

    const command = new GetObjectCommand({
      Bucket: process.env.KEEL_FILES_BUCKET_NAME,
      Key: "files/" + this.key,
      ResponseContentDisposition: "inline",
    });
    const url = await getSignedUrl(s3Client, command, { expiresIn: 60 * 60 });

    return new URL(url);
  }

  // Database representation of this file's metadata.
  toDbRecord() {
    return {
      key: this.key,
      filename: this.filename,
      contentType: this.contentType,
      size: this.size,
    };
  }

  // JSON representation — same shape as the db record.
  toJSON() {
    return this.toDbRecord();
  }
}
|
|
229
|
-
|
|
230
|
-
/**
 * Persists file contents either to S3 (when configured) or, as a fallback,
 * to the keel_storage table in the database (upserting on the file key).
 *
 * @param {Buffer} contents Raw file bytes.
 * @param {string} key Storage key for the object.
 * @param {string} filename Original file name (stored as metadata).
 * @param {string} contentType MIME type.
 * @param {Date|null} expires Optional S3 object expiry; ignored (with a
 *   warning) when not a Date, and unused by the database fallback.
 * @throws {DatabaseError} When the database upsert fails.
 */
async function storeFile(contents, key, filename, contentType, expires) {
  if (!s3Client) {
    // Database-backed storage: insert or update the row for this key.
    const db = useDatabase();
    try {
      await db
        .insertInto("keel_storage")
        .values({
          id: key,
          filename: filename,
          content_type: contentType,
          data: contents,
        })
        .onConflict((oc) =>
          oc
            .column("id")
            .doUpdateSet(() => ({
              filename: filename,
              content_type: contentType,
              data: contents,
            }))
            .where("keel_storage.id", "=", key)
        )
        .returningAll()
        .execute();
    } catch (e) {
      throw new DatabaseError(e);
    }
    return;
  }

  const params = {
    Bucket: process.env.KEEL_FILES_BUCKET_NAME,
    Key: `files/${key}`,
    Body: contents,
    ContentType: contentType,
    ContentDisposition: `attachment; filename="${encodeURIComponent(filename)}"`,
    Metadata: {
      filename: filename,
    },
    ACL: "private",
  };

  if (expires) {
    if (expires instanceof Date) {
      params.Expires = expires;
    } else {
      console.warn("Invalid expires value. Skipping Expires parameter.");
    }
  }

  try {
    await s3Client.send(new PutObjectCommand(params));
  } catch (error) {
    console.error("Error uploading file:", error);
    throw error;
  }
}
|
|
291
|
-
|
|
292
|
-
module.exports = {
|
|
293
|
-
InlineFile,
|
|
294
|
-
File,
|
|
295
|
-
};
|
package/src/ModelAPI.js
DELETED
|
@@ -1,377 +0,0 @@
|
|
|
1
|
-
const { sql } = require("kysely");
|
|
2
|
-
const { useDatabase } = require("./database");
|
|
3
|
-
const {
|
|
4
|
-
transformRichDataTypes,
|
|
5
|
-
isReferencingExistingRecord,
|
|
6
|
-
} = require("./parsing");
|
|
7
|
-
const { isPlainObject } = require("./type-utils");
|
|
8
|
-
const { QueryBuilder } = require("./QueryBuilder");
|
|
9
|
-
const { QueryContext } = require("./QueryContext");
|
|
10
|
-
const { applyWhereConditions } = require("./applyWhereConditions");
|
|
11
|
-
const { applyJoins } = require("./applyJoins");
|
|
12
|
-
const { InlineFile, File } = require("./File");
|
|
13
|
-
const { Duration } = require("./Duration");
|
|
14
|
-
|
|
15
|
-
const {
|
|
16
|
-
applyLimit,
|
|
17
|
-
applyOffset,
|
|
18
|
-
applyOrderBy,
|
|
19
|
-
} = require("./applyAdditionalQueryConstraints");
|
|
20
|
-
const {
|
|
21
|
-
camelCaseObject,
|
|
22
|
-
snakeCaseObject,
|
|
23
|
-
upperCamelCase,
|
|
24
|
-
} = require("./casing");
|
|
25
|
-
const tracing = require("./tracing");
|
|
26
|
-
const { DatabaseError } = require("./errors");
|
|
27
|
-
|
|
28
|
-
/**
|
|
29
|
-
* RelationshipConfig is a simple representation of a model field that
|
|
30
|
-
* is a relationship. It is used by applyJoins and applyWhereConditions
|
|
31
|
-
* to build the correct query.
|
|
32
|
-
* @typedef {{
|
|
33
|
-
* relationshipType: "belongsTo" | "hasMany",
|
|
34
|
-
* foreignKey: string,
|
|
35
|
-
* referencesTable: string,
|
|
36
|
-
* }} RelationshipConfig
|
|
37
|
-
*
|
|
38
|
-
* TableConfig is an object where the keys are relationship field names
|
|
39
|
-
* (which don't exist in the database) and the values are RelationshipConfig
|
|
40
|
-
* objects describing that relationship.
|
|
41
|
-
* @typedef {Object.<string, RelationshipConfig>} TableConfig
|
|
42
|
-
*
|
|
43
|
-
* TableConfigMap is mapping of database table names to TableConfig objects
|
|
44
|
-
* @typedef {Object.<string, TableConfig>} TableConfigMap
|
|
45
|
-
*/
|
|
46
|
-
|
|
47
|
-
class ModelAPI {
  /**
   * @param {string} tableName The name of the table this API is for
   * @param {Function} _ Used to be a function that returns the default values for a row in this table. No longer used.
   * @param {TableConfigMap} tableConfigMap
   */
  constructor(tableName, _, tableConfigMap = {}) {
    this._tableName = tableName;
    this._tableConfigMap = tableConfigMap;
    this._modelName = upperCamelCase(this._tableName);
  }

  // Serializes one runtime value to its database representation.
  // Order matters: File extends InlineFile, so the InlineFile branch also
  // covers File instances (File.store() is a no-op when contents are
  // unchanged and returns the instance itself).
  async _toDbValue(value) {
    if (value instanceof Duration) {
      return value.toPostgres();
    }
    if (value instanceof InlineFile) {
      const stored = await value.store();
      return stored.toDbRecord();
    }
    if (value instanceof File) {
      return value.toDbRecord();
    }
    return value;
  }

  // Serializes a full input object, handling array-valued columns.
  async _serializeRow(values) {
    const row = {};
    const keys = values ? Object.keys(values) : [];
    for (const key of keys) {
      const value = values[key];
      row[key] = Array.isArray(value)
        ? await Promise.all(value.map((item) => this._toDbValue(item)))
        : await this._toDbValue(value);
    }
    return row;
  }

  /** Inserts a row (plus any nested relationship records) and returns it. */
  async create(values) {
    const spanName = tracing.spanNameForModelAPI(this._modelName, "create");

    return tracing.withSpan(spanName, () => {
      const db = useDatabase();
      return create(
        db,
        this._tableName,
        this._tableConfigMap,
        snakeCaseObject(values)
      );
    });
  }

  /** Returns the first row matching `where`, or null when none match. */
  async findOne(where = {}) {
    const spanName = tracing.spanNameForModelAPI(this._modelName, "findOne");
    const db = useDatabase();

    return tracing.withSpan(spanName, async (span) => {
      const ctx = new QueryContext([this._tableName], this._tableConfigMap);

      let qb = db
        .selectFrom(this._tableName)
        .distinctOn(`${this._tableName}.id`)
        .selectAll(this._tableName);
      qb = applyJoins(ctx, qb, where);
      qb = applyWhereConditions(ctx, qb, where);

      span.setAttribute("sql", qb.compile().sql);

      const record = await qb.executeTakeFirst();
      if (!record) {
        return null;
      }
      return transformRichDataTypes(camelCaseObject(record));
    });
  }

  /**
   * Returns all rows matching `params.where`, honouring optional
   * limit/offset/orderBy constraints.
   */
  async findMany(params) {
    const spanName = tracing.spanNameForModelAPI(this._modelName, "findMany");
    const db = useDatabase();
    const where = params?.where || {};

    return tracing.withSpan(spanName, async (span) => {
      const ctx = new QueryContext([this._tableName], this._tableConfigMap);

      // We need to wrap this query as a sub query in the selectFrom because
      // you cannot apply a different order by column when using distinct(id).
      let qb = db
        .selectFrom((sub) => {
          let inner = sub
            .selectFrom(this._tableName)
            .distinctOn(`${this._tableName}.id`)
            .selectAll(this._tableName);
          inner = applyJoins(ctx, inner, where);
          inner = applyWhereConditions(ctx, inner, where);
          return inner.as(this._tableName);
        })
        .selectAll();

      // Only orderBy, limit and offset are applied to the outer query as
      // they operate on the de-duplicated "outer" set.
      if (params?.limit) {
        qb = applyLimit(ctx, qb, params.limit);
      }
      if (params?.offset) {
        qb = applyOffset(ctx, qb, params.offset);
      }

      const hasOrderBy =
        params?.orderBy !== undefined &&
        Object.keys(params?.orderBy).length > 0;
      qb = hasOrderBy
        ? applyOrderBy(ctx, qb, this._tableName, params.orderBy)
        : qb.orderBy(`${this._tableName}.id`);

      span.setAttribute("sql", qb.compile().sql);

      const records = await qb.execute();
      return records.map((r) => transformRichDataTypes(camelCaseObject(r)));
    });
  }

  /**
   * Updates all rows matching `where` with `values` and returns the first
   * updated row.
   * @throws {DatabaseError} When the update fails or matches no rows.
   */
  async update(where, values) {
    const spanName = tracing.spanNameForModelAPI(this._modelName, "update");
    const db = useDatabase();

    return tracing.withSpan(spanName, async (span) => {
      const row = await this._serializeRow(values);

      let qb = db.updateTable(this._tableName).returningAll();
      qb = qb.set(snakeCaseObject(row));

      const ctx = new QueryContext([this._tableName], this._tableConfigMap);

      // TODO: support joins for update
      qb = applyWhereConditions(ctx, qb, where);

      span.setAttribute("sql", qb.compile().sql);

      try {
        const updated = await qb.executeTakeFirstOrThrow();
        return transformRichDataTypes(camelCaseObject(updated));
      } catch (e) {
        throw new DatabaseError(e);
      }
    });
  }

  /**
   * Deletes rows matching `where` and returns the id of the first deleted
   * row.
   * @throws {DatabaseError} When the delete fails or matches no rows.
   */
  async delete(where) {
    const spanName = tracing.spanNameForModelAPI(this._modelName, "delete");
    const db = useDatabase();

    return tracing.withSpan(spanName, async (span) => {
      let qb = db.deleteFrom(this._tableName).returning(["id"]);

      const ctx = new QueryContext([this._tableName], this._tableConfigMap);

      // TODO: support joins for delete
      qb = applyWhereConditions(ctx, qb, where);

      span.setAttribute("sql", qb.compile().sql);
      try {
        const deleted = await qb.executeTakeFirstOrThrow();
        return deleted.id;
      } catch (e) {
        throw new DatabaseError(e);
      }
    });
  }

  /** Starts a chainable QueryBuilder pre-filtered by `where`. */
  where(where) {
    const db = useDatabase();
    const ctx = new QueryContext([this._tableName], this._tableConfigMap);

    let qb = db
      .selectFrom(this._tableName)
      .distinctOn(`${this._tableName}.id`)
      .selectAll(this._tableName);
    qb = applyJoins(ctx, qb, where);
    qb = applyWhereConditions(ctx, qb, where);

    return new QueryBuilder(this._tableName, ctx, qb);
  }
}
|
|
251
|
-
|
|
252
|
-
/**
 * Inserts a row into `tableName`, recursively creating any nested
 * belongs-to records first (to obtain their ids) and any has-many records
 * after the parent row exists.
 *
 * @param {object} conn Kysely connection or transaction.
 * @param {string} tableName Table to insert into.
 * @param {TableConfigMap} tableConfigs Relationship config for all tables.
 * @param {object} values Snake-cased column values; relationship fields may
 *   hold nested objects (belongsTo) or arrays of objects (hasMany).
 * @returns {Promise<object>} The created row with rich data types restored.
 * @throws {DatabaseError} Wrapping any underlying error (including the
 *   Errors thrown for invalid relationship inputs).
 */
async function create(conn, tableName, tableConfigs, values) {
  // Serializes one runtime value to its database representation.
  // Order matters: File extends InlineFile, so the InlineFile branch also
  // covers File instances (File.store() is a no-op when contents are
  // unchanged and returns the instance itself).
  const toDbValue = async (value) => {
    if (value instanceof Duration) {
      return value.toPostgres();
    }
    if (value instanceof InlineFile) {
      const storedFile = await value.store();
      return storedFile.toDbRecord();
    }
    if (value instanceof File) {
      return value.toDbRecord();
    }
    return value;
  };

  try {
    let query = conn.insertInto(tableName);

    const keys = values ? Object.keys(values) : [];
    const tableConfig = tableConfigs[tableName] || {};
    const hasManyRecords = [];

    if (keys.length === 0) {
      // See https://github.com/kysely-org/kysely/issues/685#issuecomment-1711240534
      query = query.expression(sql`default values`);
    } else {
      const row = {};
      for (const key of keys) {
        const value = values[key];
        const columnConfig = tableConfig[key];

        if (!columnConfig) {
          // Plain column (not a relationship field).
          row[key] = Array.isArray(value)
            ? await Promise.all(value.map((item) => toDbValue(item)))
            : await toDbValue(value);
          continue;
        }

        switch (columnConfig.relationshipType) {
          case "belongsTo": {
            if (!isPlainObject(value)) {
              throw new Error(
                `non-object provided for field ${key} of ${tableName}`
              );
            }

            if (isReferencingExistingRecord(value)) {
              row[columnConfig.foreignKey] = value.id;
              break;
            }

            // Create the referenced record first so its id can be used as
            // this row's foreign key.
            const created = await create(
              conn,
              columnConfig.referencesTable,
              tableConfigs,
              value
            );
            row[columnConfig.foreignKey] = created.id;
            break;
          }

          case "hasMany":
            if (!Array.isArray(value)) {
              throw new Error(
                `non-array provided for has-many field ${key} of ${tableName}`
              );
            }
            // Defer creation until the parent row exists (needs its id).
            for (const v of value) {
              hasManyRecords.push({
                key,
                value: v,
                columnConfig,
              });
            }
            break;

          default:
            throw new Error(
              `unsupported relationship type - ${tableName}.${key} (${columnConfig.relationshipType})`
            );
        }
      }

      query = query.values(row);
    }

    const created = await query.returningAll().executeTakeFirstOrThrow();

    await Promise.all(
      hasManyRecords.map(async ({ key, value, columnConfig }) => {
        if (!isPlainObject(value)) {
          throw new Error(
            `non-object provided for field ${key} of ${tableName}`
          );
        }

        if (isReferencingExistingRecord(value)) {
          // Fix: this message previously interpolated the tableConfig
          // object, which rendered as "[object Object]".
          throw new Error(
            `nested update as part of create not supported for ${key} of ${tableName}`
          );
        }

        return create(conn, columnConfig.referencesTable, tableConfigs, {
          ...value,
          [columnConfig.foreignKey]: created.id,
        });
      })
    );

    return transformRichDataTypes(created);
  } catch (e) {
    throw new DatabaseError(e);
  }
}
|
|
373
|
-
|
|
374
|
-
module.exports = {
|
|
375
|
-
ModelAPI,
|
|
376
|
-
DatabaseError,
|
|
377
|
-
};
|