@teamkeel/functions-runtime 0.412.0-next.3 → 0.412.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
  Files changed (49)
  1. package/.env.test +2 -0
  2. package/compose.yaml +10 -0
  3. package/package.json +5 -23
  4. package/src/Duration.js +40 -0
  5. package/src/Duration.test.js +34 -0
  6. package/src/File.js +295 -0
  7. package/src/ModelAPI.js +377 -0
  8. package/src/ModelAPI.test.js +1428 -0
  9. package/src/QueryBuilder.js +184 -0
  10. package/src/QueryContext.js +90 -0
  11. package/src/RequestHeaders.js +21 -0
  12. package/src/TimePeriod.js +89 -0
  13. package/src/TimePeriod.test.js +148 -0
  14. package/src/applyAdditionalQueryConstraints.js +22 -0
  15. package/src/applyJoins.js +67 -0
  16. package/src/applyWhereConditions.js +124 -0
  17. package/src/auditing.js +110 -0
  18. package/src/auditing.test.js +330 -0
  19. package/src/camelCasePlugin.js +52 -0
  20. package/src/casing.js +54 -0
  21. package/src/casing.test.js +56 -0
  22. package/src/consts.js +14 -0
  23. package/src/database.js +244 -0
  24. package/src/errors.js +160 -0
  25. package/src/handleJob.js +110 -0
  26. package/src/handleJob.test.js +270 -0
  27. package/src/handleRequest.js +153 -0
  28. package/src/handleRequest.test.js +463 -0
  29. package/src/handleRoute.js +112 -0
  30. package/src/handleSubscriber.js +105 -0
  31. package/src/index.d.ts +317 -0
  32. package/src/index.js +38 -0
  33. package/src/parsing.js +113 -0
  34. package/src/parsing.test.js +140 -0
  35. package/src/permissions.js +77 -0
  36. package/src/permissions.test.js +118 -0
  37. package/src/tracing.js +184 -0
  38. package/src/tracing.test.js +147 -0
  39. package/src/tryExecuteFunction.js +91 -0
  40. package/src/tryExecuteJob.js +29 -0
  41. package/src/tryExecuteSubscriber.js +17 -0
  42. package/src/type-utils.js +18 -0
  43. package/vite.config.js +7 -0
  44. package/dist/index.d.mts +0 -604
  45. package/dist/index.d.ts +0 -604
  46. package/dist/index.js +0 -3104
  47. package/dist/index.js.map +0 -1
  48. package/dist/index.mjs +0 -3101
  49. package/dist/index.mjs.map +0 -1
package/.env.test ADDED
@@ -0,0 +1,2 @@
1
+ KEEL_DB_CONN=postgresql://postgres:postgres@localhost:7654/functions-runtime
2
+ KEEL_DB_CONN_TYPE=pg
package/compose.yaml ADDED
@@ -0,0 +1,10 @@
1
+ services:
2
+ postgres:
3
+ image: pgvector/pgvector:pg15
4
+ restart: always
5
+ environment:
6
+ - POSTGRES_USER=postgres
7
+ - POSTGRES_PASSWORD=postgres
8
+ - POSTGRES_DB=functions-runtime
9
+ ports:
10
+ - "7654:5432"
package/package.json CHANGED
@@ -1,40 +1,22 @@
1
1
  {
2
2
  "name": "@teamkeel/functions-runtime",
3
- "version": "0.412.0-next.3",
3
+ "version": "0.412.0",
4
4
  "description": "Internal package used by @teamkeel/sdk",
5
- "main": "./dist/index.js",
6
- "module": "./dist/index.mjs",
7
- "types": "./dist/index.d.ts",
8
- "exports": {
9
- ".": {
10
- "types": "./dist/index.d.ts",
11
- "require": "./dist/index.js",
12
- "import": "./dist/index.mjs"
13
- }
14
- },
5
+ "main": "src/index.js",
15
6
  "scripts": {
16
7
  "test": "vitest run --reporter verbose --pool=threads --poolOptions.threads.singleThread",
17
- "format": "npx prettier --write src/**/*.js",
18
- "build": "tsup",
19
- "type-check": "tsc --noEmit",
20
- "dev": "tsup --watch"
8
+ "format": "npx prettier --write src/**/*.js"
21
9
  },
22
10
  "keywords": [],
23
11
  "author": "Keel (www.keel.so)",
24
12
  "license": "ASL (Apache 2.0)",
13
+ "typings": "src/index.d.ts",
25
14
  "publishConfig": {
26
15
  "access": "public"
27
16
  },
28
- "files": [
29
- "dist",
30
- "README.md"
31
- ],
32
17
  "devDependencies": {
33
18
  "prettier": "3.1.1",
34
- "vitest": "3.0.8",
35
- "typescript": "^5.3.0",
36
- "tsup": "^8.0.0",
37
- "@types/node": "^22.0.0"
19
+ "vitest": "3.0.8"
38
20
  },
39
21
  "dependencies": {
40
22
  "@aws-sdk/client-s3": "~3.722.0",
@@ -0,0 +1,40 @@
1
+ const parseInterval = require("postgres-interval");
2
+
3
+ const isoRegex =
4
+ /^P(?:(\d+)Y)?(?:(\d+)M)?(?:(\d+)D)?(?:T(?:(\d+)H)?(?:(\d+)M)?(?:(\d+)S)?)?$/;
5
+
6
+ class Duration {
7
+ constructor(postgresString) {
8
+ this._typename = "Duration";
9
+ this.pgInterval = postgresString;
10
+ this._interval = parseInterval(postgresString);
11
+ }
12
+
13
+ static fromISOString(isoString) {
14
+ // todo parse iso string to postgres string
15
+ const match = isoString.match(isoRegex);
16
+ if (match) {
17
+ let d = new Duration();
18
+ d._interval.years = match[1];
19
+ d._interval.months = match[2];
20
+ d._interval.days = match[3];
21
+ d._interval.hours = match[4];
22
+ d._interval.minutes = match[5];
23
+ d._interval.seconds = match[6];
24
+ return d;
25
+ }
26
+ return new Duration();
27
+ }
28
+
29
+ toISOString() {
30
+ return this._interval.toISOStringShort();
31
+ }
32
+
33
+ toPostgres() {
34
+ return this._interval.toPostgres();
35
+ }
36
+ }
37
+
38
+ module.exports = {
39
+ Duration,
40
+ };
@@ -0,0 +1,34 @@
// Fix: the original mixed ESM `import` (vitest) with CommonJS `require`
// in the same file; use ESM imports consistently. The repeated
// assert-triples are folded into a case table, and the needless `async`
// test callback is dropped.
import { test, expect } from "vitest";
import { Duration } from "./Duration";

// Each case: [ISO-8601 input (also the expected round-trip ISO output),
// expected Postgres interval string].
const cases = [
  ["P1Y2M3DT4H5M6S", "1 years 2 months 3 days 4 hours 5 minutes 6 seconds"],
  ["P2Y3M4D", "2 years 3 months 4 days"],
  ["PT4H5M6S", "4 hours 5 minutes 6 seconds"],
  ["P10Y", "10 years"],
  ["P20M", "20 months"],
  ["P31D", "31 days"],
  ["PT4H", "4 hours"],
  ["PT61M", "61 minutes"],
  ["PT76S", "76 seconds"],
];

test("fromISOString test", () => {
  for (const [iso, pg] of cases) {
    const d = Duration.fromISOString(iso);
    // Parsing then serialising must reproduce the input exactly.
    expect(d.toISOString()).toEqual(iso);
    expect(d.toPostgres()).toEqual(pg);
  }
});
package/src/File.js ADDED
@@ -0,0 +1,295 @@
1
+ const {
2
+ S3Client,
3
+ PutObjectCommand,
4
+ GetObjectCommand,
5
+ } = require("@aws-sdk/client-s3");
6
+ const { fromEnv } = require("@aws-sdk/credential-providers");
7
+ const { getSignedUrl } = require("@aws-sdk/s3-request-presigner");
8
+ const { useDatabase } = require("./database");
9
+ const { DatabaseError } = require("./errors");
10
+ const KSUID = require("ksuid");
11
+
// S3 client used for file storage, configured once at module load.
// Evaluates to null when KEEL_FILES_BUCKET_NAME is unset, in which case
// file contents are stored in the database instead.
const s3Client = (() => {
  if (!process.env.KEEL_FILES_BUCKET_NAME) {
    return null;
  }

  // Integration tests set TEST_AWS_ENDPOINT to divert all AWS API calls
  // to a local mock server.
  const testEndpoint = process.env.TEST_AWS_ENDPOINT;

  if (!testEndpoint) {
    // No test endpoint: rely on the environment's own AWS configuration.
    return new S3Client({
      region: process.env.KEEL_REGION,
      credentials: fromEnv(),
    });
  }

  // Test endpoint: use dummy credentials and force every request to the
  // mock server. A full endpoint resolver is required because the default
  // S3 resolver prefixes the bucket name as a sub-domain, which would
  // bypass the custom endpoint.
  return new S3Client({
    region: process.env.KEEL_REGION,
    credentials: {
      accessKeyId: "test",
      secretAccessKey: "test",
    },
    endpointProvider: () => ({ url: new URL(testEndpoint) }),
  });
})();
48
+
/**
 * A file held entirely in memory. Contents are only persisted when
 * store() is called, which uploads them and returns a stored File.
 */
class InlineFile {
  /**
   * @param {{filename: string, contentType: string}} input
   */
  constructor({ filename, contentType }) {
    this._filename = filename;
    this._contentType = contentType;
    this._contents = null; // Blob once write() has been called
  }

  /**
   * Creates an InlineFile from a data URL of the form
   * "data:<mime>;name=<filename>;base64,<data>".
   * NOTE(review): assumes the "name=" parameter is always present —
   * confirm against callers.
   * @param {string} dataURL
   * @returns {InlineFile}
   */
  static fromDataURL(dataURL) {
    const [header, data] = dataURL.split(",");
    const info = header.split(":")[1];
    const mime = info.split(";")[0];
    const name = info.split(";")[1].split("=")[1];
    const file = new InlineFile({ filename: name, contentType: mime });
    file.write(Buffer.from(data, "base64"));
    return file;
  }

  // Size in bytes of the in-memory contents (0 before write()).
  get size() {
    return this._contents ? this._contents.size : 0;
  }

  get contentType() {
    return this._contentType;
  }

  get filename() {
    return this._filename;
  }

  // Replaces the file contents with the given Buffer.
  write(buffer) {
    this._contents = new Blob([buffer]);
  }

  /**
   * Reads the in-memory contents. Throws if write() has not been called.
   * @returns {Promise<Buffer>}
   */
  async read() {
    const arrayBuffer = await this._contents.arrayBuffer();
    return Buffer.from(arrayBuffer);
  }

  /**
   * Persists the contents to storage under a freshly generated key.
   * @param {Date|null} expires - optional expiry forwarded to storage.
   * @returns {Promise<File>} the stored file record.
   */
  async store(expires = null) {
    const content = await this.read();
    const key = KSUID.randomSync().string;

    // Fix: storeFile's signature is (contents, key, filename, contentType,
    // expires). The previous call inserted this.size as a fifth argument,
    // so `expires` was silently dropped and storeFile warned
    // "Invalid expires value" for every non-empty file.
    await storeFile(content, key, this._filename, this._contentType, expires);

    return new File({
      key: key,
      size: this.size,
      filename: this.filename,
      contentType: this.contentType,
    });
  }
}
118
+
/**
 * A stored file. Contents live either in S3 (when KEEL_FILES_BUCKET_NAME
 * is configured) or in the keel_storage database table, keyed by a KSUID.
 */
class File extends InlineFile {
  constructor(input) {
    super({ filename: input.filename, contentType: input.contentType });
    this._key = input.key;
    this._size = input.size;
  }

  // Hydrates a File from a database record's metadata columns.
  static fromDbRecord({ key, filename, size, contentType }) {
    return new File({ key, filename, size, contentType });
  }

  get size() {
    return this._size;
  }

  get key() {
    return this._key;
  }

  /**
   * Reads the file contents: unsaved in-memory contents take precedence,
   * then S3 when configured, otherwise the database.
   * @returns {Promise<Buffer>}
   */
  async read() {
    if (this._contents) {
      return Buffer.from(await this._contents.arrayBuffer());
    }

    if (s3Client) {
      const command = new GetObjectCommand({
        Bucket: process.env.KEEL_FILES_BUCKET_NAME,
        Key: "files/" + this.key,
      });
      const response = await s3Client.send(command);
      const bytes = await response.Body.transformToByteArray();
      return Buffer.from(bytes);
    }

    // default to db storage
    const db = useDatabase();
    try {
      const row = await db
        .selectFrom("keel_storage")
        .select("data")
        .where("id", "=", this.key)
        .executeTakeFirstOrThrow();
      return row.data;
    } catch (e) {
      throw new DatabaseError(e);
    }
  }

  /**
   * Persists the contents, but only when they have been changed via
   * write(); otherwise this is a no-op.
   * @param {Date|null} expires
   * @returns {Promise<File>} this
   */
  async store(expires = null) {
    if (!this._contents) {
      return this;
    }
    const contents = await this.read();
    await storeFile(contents, this.key, this.filename, this.contentType, expires);
    return this;
  }

  /**
   * Returns a URL for downloading the file: a one-hour presigned S3 URL
   * when S3 is configured, otherwise a data: URL with the contents inline.
   * @returns {Promise<URL>}
   */
  async getPresignedUrl() {
    if (!s3Client) {
      const contents = await this.read();
      const dataurl = `data:${this.contentType};name=${
        this.filename
      };base64,${contents.toString("base64")}`;
      return new URL(dataurl);
    }

    const command = new GetObjectCommand({
      Bucket: process.env.KEEL_FILES_BUCKET_NAME,
      Key: "files/" + this.key,
      ResponseContentDisposition: "inline",
    });
    const url = await getSignedUrl(s3Client, command, { expiresIn: 60 * 60 });
    return new URL(url);
  }

  // Plain representation used when writing metadata to the database.
  toDbRecord() {
    return {
      key: this.key,
      filename: this.filename,
      contentType: this.contentType,
      size: this.size,
    };
  }

  // JSON serialisation mirrors the database record shape.
  toJSON() {
    return {
      key: this.key,
      filename: this.filename,
      contentType: this.contentType,
      size: this.size,
    };
  }
}
229
+
230
+ async function storeFile(contents, key, filename, contentType, expires) {
231
+ if (s3Client) {
232
+ const params = {
233
+ Bucket: process.env.KEEL_FILES_BUCKET_NAME,
234
+ Key: "files/" + key,
235
+ Body: contents,
236
+ ContentType: contentType,
237
+ ContentDisposition: `attachment; filename="${encodeURIComponent(
238
+ filename
239
+ )}"`,
240
+ Metadata: {
241
+ filename: filename,
242
+ },
243
+ ACL: "private",
244
+ };
245
+
246
+ if (expires) {
247
+ if (expires instanceof Date) {
248
+ params.Expires = expires;
249
+ } else {
250
+ console.warn("Invalid expires value. Skipping Expires parameter.");
251
+ }
252
+ }
253
+
254
+ const command = new PutObjectCommand(params);
255
+ try {
256
+ await s3Client.send(command);
257
+ } catch (error) {
258
+ console.error("Error uploading file:", error);
259
+ throw error;
260
+ }
261
+ } else {
262
+ const db = useDatabase();
263
+
264
+ try {
265
+ let query = db
266
+ .insertInto("keel_storage")
267
+ .values({
268
+ id: key,
269
+ filename: filename,
270
+ content_type: contentType,
271
+ data: contents,
272
+ })
273
+ .onConflict((oc) =>
274
+ oc
275
+ .column("id")
276
+ .doUpdateSet(() => ({
277
+ filename: filename,
278
+ content_type: contentType,
279
+ data: contents,
280
+ }))
281
+ .where("keel_storage.id", "=", key)
282
+ )
283
+ .returningAll();
284
+
285
+ await query.execute();
286
+ } catch (e) {
287
+ throw new DatabaseError(e);
288
+ }
289
+ }
290
+ }
291
+
292
+ module.exports = {
293
+ InlineFile,
294
+ File,
295
+ };