@teamkeel/functions-runtime 0.402.1 → 0.404.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@teamkeel/functions-runtime",
3
- "version": "0.402.1",
3
+ "version": "0.404.0",
4
4
  "description": "Internal package used by @teamkeel/sdk",
5
5
  "main": "src/index.js",
6
6
  "scripts": {
package/src/File.js CHANGED
@@ -9,6 +9,37 @@ const { useDatabase } = require("./database");
9
9
  const { DatabaseError } = require("./errors");
10
10
  const KSUID = require("ksuid");
11
11
 
12
+ const s3Client = (() => {
13
+ if (!process.env.KEEL_FILES_BUCKET_NAME) {
14
+ return null;
15
+ }
16
+
17
+ // Set in integration tests to send all AWS API calls to a test server
18
+ // for mocking
19
+ const endpoint = process.env.TEST_AWS_ENDPOINT;
20
+
21
+ return new S3Client({
22
+ region: process.env.KEEL_REGION,
23
+
24
+ // If a test endpoint is provided then use some test credentials rather than fromEnv()
25
+ credentials: endpoint
26
+ ? {
27
+ accessKeyId: "test",
28
+ secretAccessKey: "test",
29
+ }
30
+ : fromEnv(),
31
+
32
+ // If a custom endpoint is set we need to use a custom resolver. Just setting the base endpoint isn't enough for S3,
33
+ // as the default resolver uses the bucket name as a sub-domain, which likely won't work with the custom endpoint.
34
+ // By implementing a full resolver we can force it to be the endpoint we want.
35
+ endpointProvider: () => {
36
+ return {
37
+ url: URL.parse(endpoint),
38
+ };
39
+ },
40
+ });
41
+ })();
42
+
12
43
  class InlineFile {
13
44
  constructor({ filename, contentType }) {
14
45
  this._filename = filename;
@@ -109,12 +140,7 @@ class File extends InlineFile {
109
140
  return Buffer.from(arrayBuffer);
110
141
  }
111
142
 
112
- if (isS3Storage()) {
113
- const s3Client = new S3Client({
114
- credentials: fromEnv(),
115
- region: process.env.KEEL_REGION,
116
- });
117
-
143
+ if (s3Client) {
118
144
  const params = {
119
145
  Bucket: process.env.KEEL_FILES_BUCKET_NAME,
120
146
  Key: "files/" + this.key,
@@ -157,12 +183,7 @@ class File extends InlineFile {
157
183
  }
158
184
 
159
185
  async getPresignedUrl() {
160
- if (isS3Storage()) {
161
- const s3Client = new S3Client({
162
- credentials: fromEnv(),
163
- region: process.env.KEEL_REGION,
164
- });
165
-
186
+ if (s3Client) {
166
187
  const command = new GetObjectCommand({
167
188
  Bucket: process.env.KEEL_FILES_BUCKET_NAME,
168
189
  Key: "files/" + this.key,
@@ -203,12 +224,7 @@ class File extends InlineFile {
203
224
  }
204
225
 
205
226
  async function storeFile(contents, key, filename, contentType, expires) {
206
- if (isS3Storage()) {
207
- const s3Client = new S3Client({
208
- credentials: fromEnv(),
209
- region: process.env.KEEL_REGION,
210
- });
211
-
227
+ if (s3Client) {
212
228
  const params = {
213
229
  Bucket: process.env.KEEL_FILES_BUCKET_NAME,
214
230
  Key: "files/" + key,
@@ -269,10 +285,6 @@ async function storeFile(contents, key, filename, contentType, expires) {
269
285
  }
270
286
  }
271
287
 
272
- function isS3Storage() {
273
- return "KEEL_FILES_BUCKET_NAME" in process.env;
274
- }
275
-
276
288
  module.exports = {
277
289
  InlineFile,
278
290
  File,
package/src/ModelAPI.js CHANGED
@@ -164,11 +164,25 @@ class ModelAPI {
164
164
 
165
165
  for (const key of keys) {
166
166
  const value = values[key];
167
- if (value instanceof Duration) {
167
+ if (Array.isArray(value)) {
168
+ row[key] = await Promise.all(
169
+ value.map(async (item) => {
170
+ if (item instanceof Duration) {
171
+ return item.toPostgres();
172
+ }
173
+ if (item instanceof InlineFile) {
174
+ const storedFile = await item.store();
175
+ return storedFile.toDbRecord();
176
+ }
177
+ if (item instanceof File) {
178
+ return item.toDbRecord();
179
+ }
180
+ return item;
181
+ })
182
+ );
183
+ } else if (value instanceof Duration) {
168
184
  row[key] = value.toPostgres();
169
- }
170
- // handle files that need uploading
171
- if (value instanceof InlineFile) {
185
+ } else if (value instanceof InlineFile) {
172
186
  const storedFile = await value.store();
173
187
  row[key] = storedFile.toDbRecord();
174
188
  } else if (value instanceof File) {
@@ -253,10 +267,25 @@ async function create(conn, tableName, tableConfigs, values) {
253
267
  const columnConfig = tableConfig[key];
254
268
 
255
269
  if (!columnConfig) {
256
- if (value instanceof Duration) {
270
+ if (Array.isArray(value)) {
271
+ row[key] = await Promise.all(
272
+ value.map(async (item) => {
273
+ if (item instanceof Duration) {
274
+ return item.toPostgres();
275
+ }
276
+ if (item instanceof InlineFile) {
277
+ const storedFile = await item.store();
278
+ return storedFile.toDbRecord();
279
+ }
280
+ if (item instanceof File) {
281
+ return item.toDbRecord();
282
+ }
283
+ return item;
284
+ })
285
+ );
286
+ } else if (value instanceof Duration) {
257
287
  row[key] = value.toPostgres();
258
- }
259
- if (value instanceof InlineFile) {
288
+ } else if (value instanceof InlineFile) {
260
289
  const storedFile = await value.store();
261
290
  row[key] = storedFile.toDbRecord();
262
291
  } else if (value instanceof File) {
package/src/index.d.ts CHANGED
@@ -313,3 +313,5 @@ export type DurationString =
313
313
  | `P${dateDuration}T${timeDuration}`
314
314
  | `P${dateDuration}`
315
315
  | `PT${timeDuration}`;
316
+
317
+ export type FileWriteTypes = InlineFile | File;
package/src/parsing.js CHANGED
@@ -5,22 +5,24 @@ const { isPlainObject } = require("./type-utils");
5
5
  // parseInputs takes a set of inputs and creates objects for the ones that are of a complex type.
6
6
  //
7
7
  // inputs that are objects and contain a "__typename" field are resolved to instances of the complex type
8
- // they represent. At the moment, the only supported type is `InlineFile`
8
+ // they represent.
9
9
  function parseInputs(inputs) {
10
10
  if (inputs != null && typeof inputs === "object") {
11
11
  for (const k of Object.keys(inputs)) {
12
12
  if (inputs[k] !== null && typeof inputs[k] === "object") {
13
- if ("__typename" in inputs[k]) {
14
- switch (inputs[k].__typename) {
15
- case "InlineFile":
16
- inputs[k] = InlineFile.fromDataURL(inputs[k].dataURL);
17
- break;
18
- case "Duration":
19
- inputs[k] = Duration.fromISOString(inputs[k].interval);
20
- break;
21
- default:
22
- break;
23
- }
13
+ if (Array.isArray(inputs[k])) {
14
+ inputs[k] = inputs[k].map((item) => {
15
+ if (item && typeof item === "object") {
16
+ if ("__typename" in item) {
17
+ return parseComplexInputType(item);
18
+ }
19
+ // Recursively parse nested objects in arrays
20
+ return parseInputs(item);
21
+ }
22
+ return item;
23
+ });
24
+ } else if ("__typename" in inputs[k]) {
25
+ inputs[k] = parseComplexInputType(inputs[k]);
24
26
  } else {
25
27
  inputs[k] = parseInputs(inputs[k]);
26
28
  }
@@ -31,26 +33,42 @@ function parseInputs(inputs) {
31
33
  return inputs;
32
34
  }
33
35
 
36
+ // parseComplexInputType will parse out complex types such as InlineFile and Duration
37
+ function parseComplexInputType(value) {
38
+ switch (value.__typename) {
39
+ case "InlineFile":
40
+ return InlineFile.fromDataURL(value.dataURL);
41
+ case "Duration":
42
+ return Duration.fromISOString(value.interval);
43
+ default:
44
+ throw new Error("complex type not handled: " + value.__typename);
45
+ }
46
+ }
47
+
34
48
  // parseOutputs will take a response from the custom function and perform operations on any fields if necessary.
35
49
  //
36
50
  // For example, InlineFiles need to be stored before returning the response.
37
- async function parseOutputs(inputs) {
38
- if (inputs != null && typeof inputs === "object") {
39
- for (const k of Object.keys(inputs)) {
40
- if (inputs[k] !== null && typeof inputs[k] === "object") {
41
- if (inputs[k] instanceof InlineFile) {
42
- const stored = await inputs[k].store();
43
- inputs[k] = stored;
44
- } else if (inputs[k] instanceof Duration) {
45
- inputs[k] = inputs[k].toISOString();
51
+ async function parseOutputs(outputs) {
52
+ if (outputs != null && typeof outputs === "object") {
53
+ for (const k of Object.keys(outputs)) {
54
+ if (outputs[k] !== null && typeof outputs[k] === "object") {
55
+ if (Array.isArray(outputs[k])) {
56
+ outputs[k] = await Promise.all(
57
+ outputs[k].map((item) => parseOutputs(item))
58
+ );
59
+ } else if (outputs[k] instanceof InlineFile) {
60
+ const stored = await outputs[k].store();
61
+ outputs[k] = stored;
62
+ } else if (outputs[k] instanceof Duration) {
63
+ outputs[k] = outputs[k].toISOString();
46
64
  } else {
47
- inputs[k] = await parseOutputs(inputs[k]);
65
+ outputs[k] = await parseOutputs(outputs[k]);
48
66
  }
49
67
  }
50
68
  }
51
69
  }
52
70
 
53
- return inputs;
71
+ return outputs;
54
72
  }
55
73
 
56
74
  // transformRichDataTypes iterates through the given object's keys and if any of the values are a rich data type, instantiate their respective class
@@ -60,7 +78,9 @@ function transformRichDataTypes(data) {
60
78
 
61
79
  for (const key of keys) {
62
80
  const value = data[key];
63
- if (isPlainObject(value)) {
81
+ if (Array.isArray(value)) {
82
+ row[key] = value.map((item) => transformRichDataTypes({ item }).item);
83
+ } else if (isPlainObject(value)) {
64
84
  if (value._typename == "Duration" && value.pgInterval) {
65
85
  row[key] = new Duration(value.pgInterval);
66
86
  } else if (
@@ -73,10 +93,9 @@ function transformRichDataTypes(data) {
73
93
  } else {
74
94
  row[key] = value;
75
95
  }
76
- continue;
96
+ } else {
97
+ row[key] = value;
77
98
  }
78
-
79
- row[key] = value;
80
99
  }
81
100
 
82
101
  return row;