@balena/pinejs 17.0.0-build-large-file-uploads-293e65ee371a69130834c50fef2f7f42cc18133f-1 → 17.0.0-build-17-x-0447fb6e94bafa136206d060acda2f2df4c00ad2-1

Files changed (66)
  1. package/.pinejs-cache.json +1 -1
  2. package/.versionbot/CHANGELOG.yml +2295 -17
  3. package/CHANGELOG.md +860 -5
  4. package/out/bin/abstract-sql-compiler.js +1 -1
  5. package/out/bin/abstract-sql-compiler.js.map +1 -1
  6. package/out/bin/sbvr-compiler.js +1 -1
  7. package/out/bin/sbvr-compiler.js.map +1 -1
  8. package/out/database-layer/db.js +30 -19
  9. package/out/database-layer/db.js.map +1 -1
  10. package/out/http-transactions/transactions.js +2 -2
  11. package/out/http-transactions/transactions.js.map +1 -1
  12. package/out/migrator/async.js +8 -9
  13. package/out/migrator/async.js.map +1 -1
  14. package/out/migrator/sync.js +6 -6
  15. package/out/migrator/sync.js.map +1 -1
  16. package/out/pinejs-session-store/pinejs-session-store.js +103 -108
  17. package/out/pinejs-session-store/pinejs-session-store.js.map +1 -1
  18. package/out/sbvr-api/abstract-sql.js +1 -1
  19. package/out/sbvr-api/abstract-sql.js.map +1 -1
  20. package/out/sbvr-api/errors.js +3 -0
  21. package/out/sbvr-api/errors.js.map +1 -1
  22. package/out/sbvr-api/hooks.js +4 -5
  23. package/out/sbvr-api/hooks.js.map +1 -1
  24. package/out/sbvr-api/permissions.js +3 -3
  25. package/out/sbvr-api/permissions.js.map +1 -1
  26. package/out/sbvr-api/sbvr-utils.d.ts +5 -6
  27. package/out/sbvr-api/sbvr-utils.js +16 -15
  28. package/out/sbvr-api/sbvr-utils.js.map +1 -1
  29. package/out/sbvr-api/uri-parser.js +1 -1
  30. package/out/sbvr-api/uri-parser.js.map +1 -1
  31. package/out/server-glue/module.js +0 -2
  32. package/out/server-glue/module.js.map +1 -1
  33. package/out/webresource-handler/handlers/NoopHandler.d.ts +1 -3
  34. package/out/webresource-handler/handlers/NoopHandler.js +0 -6
  35. package/out/webresource-handler/handlers/NoopHandler.js.map +1 -1
  36. package/out/webresource-handler/handlers/S3Handler.d.ts +28 -0
  37. package/out/webresource-handler/handlers/S3Handler.js +104 -0
  38. package/out/webresource-handler/handlers/S3Handler.js.map +1 -0
  39. package/out/webresource-handler/handlers/index.d.ts +1 -0
  40. package/out/webresource-handler/handlers/index.js +1 -0
  41. package/out/webresource-handler/handlers/index.js.map +1 -1
  42. package/out/webresource-handler/index.d.ts +7 -31
  43. package/out/webresource-handler/index.js +22 -23
  44. package/out/webresource-handler/index.js.map +1 -1
  45. package/package.json +40 -39
  46. package/src/bin/abstract-sql-compiler.ts +1 -1
  47. package/src/bin/sbvr-compiler.ts +1 -1
  48. package/src/http-transactions/transactions.js +2 -2
  49. package/src/migrator/async.ts +10 -11
  50. package/src/migrator/sync.ts +6 -6
  51. package/src/sbvr-api/abstract-sql.ts +1 -1
  52. package/src/sbvr-api/permissions.ts +3 -3
  53. package/src/sbvr-api/sbvr-utils.ts +24 -23
  54. package/src/sbvr-api/uri-parser.ts +1 -1
  55. package/src/server-glue/module.ts +0 -2
  56. package/src/webresource-handler/handlers/NoopHandler.ts +1 -14
  57. package/src/webresource-handler/handlers/S3Handler.ts +143 -0
  58. package/src/webresource-handler/handlers/index.ts +1 -0
  59. package/src/webresource-handler/index.ts +22 -64
  60. package/tsconfig.json +1 -1
  61. package/out/webresource-handler/multipartUpload.d.ts +0 -16
  62. package/out/webresource-handler/multipartUpload.js +0 -182
  63. package/out/webresource-handler/multipartUpload.js.map +0 -1
  64. package/out/webresource-handler/webresource.sbvr +0 -63
  65. package/src/webresource-handler/multipartUpload.ts +0 -275
  66. package/src/webresource-handler/webresource.sbvr +0 -63
package/src/sbvr-api/sbvr-utils.ts CHANGED
@@ -671,20 +671,20 @@ export const executeModels = async (
 		await validateModel(tx, apiRoot);
 	}
 
-	// TODO: Can we do this without the cast?
-	api[apiRoot] = new PinejsClient('/' + apiRoot + '/') as LoggingClient;
-	api[apiRoot].logger = { ...console };
+	api[apiRoot] = new PinejsClient('/' + apiRoot + '/');
+
+	logger[apiRoot] = { ...console };
 	if (model.logging != null) {
 		const defaultSetting = model.logging?.default ?? true;
-		const { logger } = api[apiRoot];
+		const log = logger[apiRoot];
 		for (const k of Object.keys(model.logging)) {
 			const key = k as keyof Console;
 			if (
 				key !== 'Console' &&
-				typeof logger[key] === 'function' &&
+				typeof log[key] === 'function' &&
 				!(model.logging?.[key] ?? defaultSetting)
 			) {
-				logger[key] = _.noop;
+				log[key] = _.noop;
 			}
 		}
 	}
@@ -856,7 +856,7 @@ export const runRule = (() => {
 	translator.addTypes(sbvrTypes);
 	return async (vocab: string, rule: string) => {
 		const seModel = models[vocab].se;
-		const { logger } = api[vocab];
+		const log = logger[vocab];
 		let lfModel: LFModel;
 		let slfModel: LFModel;
 		let abstractSqlModel: AbstractSQLCompiler.AbstractSqlModel;
@@ -867,7 +867,7 @@ export const runRule = (() => {
 				'Process',
 			);
 		} catch (e) {
-			logger.error('Error parsing rule', rule, e);
+			log.error('Error parsing rule', rule, e);
 			throw new Error(`Error parsing rule'${rule}': ${e}`);
 		}
 
@@ -881,7 +881,7 @@ export const runRule = (() => {
 			translator.reset();
 			abstractSqlModel = translator.match(slfModel, 'Process');
 		} catch (e) {
-			logger.error('Error compiling rule', rule, e);
+			log.error('Error compiling rule', rule, e);
 			throw new Error(`Error compiling rule '${rule}': ${e}`);
 		}
 
@@ -992,6 +992,9 @@ export const runRule = (() => {
 	};
 })();
 
+/**
+ * This type shows the passthrough properties that the internal pinejs client instance accepts
+ */
 export type Passthrough = AnyObject & {
 	req?: {
 		user?: User;
@@ -999,9 +1002,7 @@ export type Passthrough = AnyObject & {
 	tx?: Db.Tx;
 };
 
-export class PinejsClient extends PinejsClientCore<PinejsClient> {
-	// @ts-expect-error This is actually assigned by `super` so it is always declared but that isn't detected here
-	public passthrough: Passthrough;
+export class PinejsClient extends PinejsClientCore {
 	public async _request({
 		method,
 		url,
@@ -1021,11 +1022,11 @@ export class PinejsClient extends PinejsClientCore<PinejsClient> {
 	}
 }
 
-export type LoggingClient = PinejsClient & {
-	logger: Console;
-};
 export const api: {
-	[vocab: string]: LoggingClient;
+	[vocab: string]: PinejsClient;
+} = {};
+export const logger: {
+	[vocab: string]: Console;
 } = {};
 
 // We default to guest only permissions if no req object is passed in
@@ -1224,7 +1225,7 @@ export const getModel = (vocabulary: string) => {
 
 const runODataRequest = (req: Express.Request, vocabulary: string) => {
 	if (env.DEBUG) {
-		api[vocabulary].logger.log('Parsing', req.method, req.url);
+		logger[vocabulary].log('Parsing', req.method, req.url);
 	}
 
 	// Get the hooks for the current method/vocabulary as we know it,
@@ -1535,10 +1536,10 @@ const runRequest = async (
 	tx: Db.Tx,
 	request: uriParser.ODataRequest,
 ): Promise<Response> => {
-	const { logger } = api[request.vocabulary];
+	const log = logger[request.vocabulary];
 
 	if (env.DEBUG) {
-		logger.log('Running', req.method, req.url);
+		log.log('Running', req.method, req.url);
 	}
 	let result: Db.Result | number | undefined;
 
@@ -1566,7 +1567,7 @@ const runRequest = async (
 	} catch (err: any) {
 		if (err instanceof db.DatabaseError) {
 			prettifyConstraintError(err, request);
-			logger.error(err);
+			log.error(err);
 			// Override the error message so we don't leak any internal db info
 			err.message = 'Database error';
 			throw err;
@@ -1580,7 +1581,7 @@ const runRequest = async (
 			err instanceof TypeError ||
 			err instanceof URIError
 		) {
-			logger.error(err);
+			log.error(err);
 			throw new InternalRequestError();
 		}
 		throw err;
@@ -1740,7 +1741,7 @@ const runQuery = async (
 	);
 
 	if (env.DEBUG) {
-		api[vocabulary].logger.log(query, values);
+		logger[vocabulary].log(query, values);
 	}
 
 	// We only add the returning clause if it's been requested and `affectedIds` hasn't been populated yet
@@ -1848,7 +1849,7 @@ const respondPost = async (
 		id,
 	);
 	if (env.DEBUG) {
-		api[vocab].logger.log('Insert ID: ', request.resourceName, id);
+		logger[vocab].log('Insert ID: ', request.resourceName, id);
 	}
 
 	let result: AnyObject = { d: [{ id }] };
package/src/sbvr-api/uri-parser.ts CHANGED
@@ -421,7 +421,7 @@ export const translateUri = <
 	request.values = new Proxy(request.values, {
 		set: (obj: ODataRequest['values'], prop: string, value) => {
 			if (!Object.prototype.hasOwnProperty.call(obj, prop)) {
-				sbvrUtils.api[request.vocabulary].logger.warn(
+				sbvrUtils.logger[request.vocabulary].warn(
 					`Assigning a new request.values property '${prop}' however it will be ignored`,
 				);
 			}
package/src/server-glue/module.ts CHANGED
@@ -5,7 +5,6 @@ import './sbvr-loader';
 import * as dbModule from '../database-layer/db';
 import * as configLoader from '../config-loader/config-loader';
 import * as migrator from '../migrator/sync';
-import * as webResourceHandler from '../webresource-handler';
 import type * as migratorUtils from '../migrator/utils';
 
 import * as sbvrUtils from '../sbvr-api/sbvr-utils';
@@ -64,7 +63,6 @@ export const init = async <T extends string>(
 	await sbvrUtils.setup(app, db);
 	const cfgLoader = await configLoader.setup(app);
 	await cfgLoader.loadConfig(migrator.config);
-	await cfgLoader.loadConfig(webResourceHandler.config);
 
 	const promises: Array<Promise<void>> = [];
 	if (process.env.SBVR_SERVER_ENABLED) {
package/src/webresource-handler/handlers/NoopHandler.ts CHANGED
@@ -1,10 +1,5 @@
 import type { WebResourceType as WebResource } from '@balena/sbvr-types';
-import type {
-	BeginMultipartUploadHandlerResponse,
-	IncomingFile,
-	UploadResponse,
-	WebResourceHandler,
-} from '..';
+import type { IncomingFile, UploadResponse, WebResourceHandler } from '..';
 
 export class NoopHandler implements WebResourceHandler {
 	public async handleFile(resource: IncomingFile): Promise<UploadResponse> {
@@ -23,12 +18,4 @@ export class NoopHandler implements WebResourceHandler {
 	public async onPreRespond(webResource: WebResource): Promise<WebResource> {
 		return webResource;
 	}
-
-	public async beginMultipartUpload(): Promise<BeginMultipartUploadHandlerResponse> {
-		return { fileKey: 'noop', uploadId: 'noop', uploadParts: [] };
-	}
-
-	public async commitMultipartUpload(): Promise<WebResource> {
-		return { filename: 'noop', href: 'noop' };
-	}
 }
package/src/webresource-handler/handlers/S3Handler.ts ADDED
@@ -0,0 +1,143 @@
+import {
+	FileSizeExceededError,
+	type IncomingFile,
+	normalizeHref,
+	type UploadResponse,
+	WebResourceError,
+	type WebResourceHandler,
+} from '..';
+import {
+	S3Client,
+	type S3ClientConfig,
+	DeleteObjectCommand,
+	type PutObjectCommandInput,
+	GetObjectCommand,
+} from '@aws-sdk/client-s3';
+import { Upload } from '@aws-sdk/lib-storage';
+import { getSignedUrl } from '@aws-sdk/s3-request-presigner';
+
+import { randomUUID } from 'crypto';
+import type { WebResourceType as WebResource } from '@balena/sbvr-types';
+import memoize from 'memoizee';
+
+export interface S3HandlerProps {
+	region: string;
+	accessKey: string;
+	secretKey: string;
+	endpoint: string;
+	bucket: string;
+	maxSize?: number;
+	signedUrlExpireTimeSeconds?: number;
+	signedUrlCacheExpireTimeSeconds?: number;
+}
+
+export class S3Handler implements WebResourceHandler {
+	private readonly config: S3ClientConfig;
+	private readonly bucket: string;
+	private readonly maxFileSize: number;
+
+	protected readonly signedUrlExpireTimeSeconds: number;
+	protected readonly signedUrlCacheExpireTimeSeconds: number;
+	protected cachedGetSignedUrl: (fileKey: string) => Promise<string>;
+
+	private client: S3Client;
+
+	constructor(config: S3HandlerProps) {
+		this.config = {
+			region: config.region,
+			credentials: {
+				accessKeyId: config.accessKey,
+				secretAccessKey: config.secretKey,
+			},
+			endpoint: config.endpoint,
+			forcePathStyle: true,
+		};
+
+		this.signedUrlExpireTimeSeconds =
+			config.signedUrlExpireTimeSeconds ?? 86400; // 24h
+		this.signedUrlCacheExpireTimeSeconds =
+			config.signedUrlCacheExpireTimeSeconds ?? 82800; // 22h
+
+		this.maxFileSize = config.maxSize ?? 52428800;
+		this.bucket = config.bucket;
+		this.client = new S3Client(this.config);
+
+		// Memoize expects maxAge in MS and s3 signing method in seconds.
+		// Normalization to use only seconds and therefore convert here from seconds to MS
+		this.cachedGetSignedUrl = memoize(this.s3SignUrl, {
+			maxAge: this.signedUrlCacheExpireTimeSeconds * 1000,
+		});
+	}
+
+	public async handleFile(resource: IncomingFile): Promise<UploadResponse> {
+		let size = 0;
+		const key = `${resource.fieldname}_${randomUUID()}_${
+			resource.originalname
+		}`;
+		const params: PutObjectCommandInput = {
+			Bucket: this.bucket,
+			Key: key,
+			Body: resource.stream,
+			ContentType: resource.mimetype,
+		};
+		const upload = new Upload({ client: this.client, params });
+
+		upload.on('httpUploadProgress', async (ev) => {
+			size = ev.total ?? ev.loaded!;
+			if (size > this.maxFileSize) {
+				await upload.abort();
+			}
+		});
+
+		try {
+			await upload.done();
+		} catch (err: any) {
+			resource.stream.resume();
+			if (size > this.maxFileSize) {
+				throw new FileSizeExceededError(this.maxFileSize);
+			}
+			throw new WebResourceError(err);
+		}
+
+		const filename = this.getS3URL(key);
+		return { size, filename };
+	}
+
+	public async removeFile(href: string): Promise<void> {
+		const fileKey = this.getKeyFromHref(href);
+
+		const command = new DeleteObjectCommand({
+			Bucket: this.bucket,
+			Key: fileKey,
+		});
+
+		await this.client.send(command);
+	}
+
+	public async onPreRespond(webResource: WebResource): Promise<WebResource> {
+		if (webResource.href != null) {
+			const fileKey = this.getKeyFromHref(webResource.href);
+			webResource.href = await this.cachedGetSignedUrl(fileKey);
+		}
+		return webResource;
+	}
+
+	private s3SignUrl(fileKey: string): Promise<string> {
+		const command = new GetObjectCommand({
+			Bucket: this.bucket,
+			Key: fileKey,
+		});
+		return getSignedUrl(this.client, command, {
+			expiresIn: this.signedUrlExpireTimeSeconds,
+		});
+	}
+
+	private getS3URL(key: string): string {
+		return `${this.config.endpoint}/${this.bucket}/${key}`;
+	}
+
+	private getKeyFromHref(href: string): string {
+		const hrefWithoutParams = normalizeHref(href);
+		return hrefWithoutParams.substring(hrefWithoutParams.lastIndexOf('/') + 1);
+	}
+}
package/src/webresource-handler/handlers/index.ts CHANGED
@@ -1 +1,2 @@
 export * from './NoopHandler';
+export * from './S3Handler';
package/src/webresource-handler/index.ts CHANGED
@@ -13,8 +13,7 @@ import {
 } from '@balena/odata-to-abstract-sql';
 import { errors, permissions } from '../server-glue/module';
 import type { WebResourceType as WebResource } from '@balena/sbvr-types';
-import type { AnyObject } from 'pinejs-client-core';
-import { multipartUploadHooks } from './multipartUpload';
+import { TypedError } from 'typed-error';
 
 export * from './handlers';
 
@@ -31,44 +30,19 @@ export interface UploadResponse {
 	filename: string;
 }
 
-export interface BeginMultipartUploadPayload {
-	filename: string;
-	content_type: string;
-	size: number;
-	chunk_size: number;
-}
-
-export interface UploadPart {
-	url: string;
-	chunkSize: number;
-	partNumber: number;
-}
-
-export interface BeginMultipartUploadHandlerResponse {
-	uploadParts: UploadPart[];
-	fileKey: string;
-	uploadId: string;
-}
-
-export interface CommitMultipartUploadPayload {
-	fileKey: string;
-	uploadId: string;
-	filename: string;
-	providerCommitData?: AnyObject;
-}
-
 export interface WebResourceHandler {
 	handleFile: (resource: IncomingFile) => Promise<UploadResponse>;
 	removeFile: (fileReference: string) => Promise<void>;
 	onPreRespond: (webResource: WebResource) => Promise<WebResource>;
+}
+
+export class WebResourceError extends TypedError {}
 
-	beginMultipartUpload: (
-		fieldName: string,
-		payload: BeginMultipartUploadPayload,
-	) => Promise<BeginMultipartUploadHandlerResponse>;
-	commitMultipartUpload: (
-		commitInfo: CommitMultipartUploadPayload,
-	) => Promise<WebResource>;
+export class FileSizeExceededError extends WebResourceError {
+	name = 'FileSizeExceededError';
+	constructor(maxSize: number) {
+		super(`File size exceeded the limit of ${maxSize} bytes.`);
+	}
 }
 
 type WebResourcesDbResponse = {
@@ -77,7 +51,7 @@ type WebResourcesDbResponse = {
 
 const getLogger = (vocab?: string): Console => {
 	if (vocab) {
-		return sbvrUtils.api[vocab]?.logger ?? console;
+		return sbvrUtils.logger[vocab] ?? console;
 	}
 	return console;
 };
@@ -219,12 +193,17 @@ export const getUploaderMiddlware = (
 			next();
 		} catch (err: any) {
 			await clearFiles();
-			getLogger(getApiRoot(req)).warn('Error uploading file', err);
-			return sbvrUtils.handleHttpErrors(
-				req,
-				res,
-				new errors.BadRequestError(err),
-			);
+
+			if (err instanceof FileSizeExceededError) {
+				return sbvrUtils.handleHttpErrors(
+					req,
+					res,
+					new errors.BadRequestError(err.message),
+				);
+			}
+
+			getLogger(getApiRoot(req)).error('Error uploading file', err);
+			next(err);
 		}
 	});
 
@@ -237,7 +216,7 @@ export const getUploaderMiddlware = (
 	};
 };
 
-export const getWebResourceFields = (
+const getWebResourceFields = (
 	request: uriParser.ODataRequest,
 	useTranslations = true,
 ): string[] => {
@@ -270,8 +249,6 @@ const throwIfWebresourceNotInMultipart = (
 	{ req, request }: HookArgs,
 ) => {
 	if (
-		request.custom.isAction !== 'beginUpload' &&
-		request.custom.isAction !== 'commitUpload' &&
 		!req.is?.('multipart') &&
 		webResourceFields.some((field) => request.values[field] != null)
 	) {
@@ -470,23 +447,4 @@ export const setupUploadHooks = (
 		resourceName,
 		getCreateWebResourceHooks(handler),
 	);
-
-	sbvrUtils.addPureHook(
-		'POST',
-		apiRoot,
-		resourceName,
-		multipartUploadHooks(handler),
-	);
-};
-
-// eslint-disable-next-line @typescript-eslint/no-var-requires
-const webresourceModel: string = require('./webresource.sbvr');
-export const config = {
-	models: [
-		{
-			apiRoot: 'webresource',
-			modelText: webresourceModel,
-			modelName: 'webresource',
-		},
-	] as sbvrUtils.ExecutableModel[],
 };
package/tsconfig.json CHANGED
@@ -11,7 +11,7 @@
 		"removeComments": true,
 		"rootDir": "src",
 		"sourceMap": true,
-		"target": "es2021",
+		"target": "es2022",
 		"declaration": true,
 		"skipLibCheck": true,
 		"resolveJsonModule": true,
package/out/webresource-handler/multipartUpload.d.ts DELETED
@@ -1,16 +0,0 @@
-import type { BeginMultipartUploadPayload, UploadPart, WebResourceHandler } from '.';
-import type { ODataRequest } from '../sbvr-api/uri-parser';
-import { sbvrUtils } from '../server-glue/module';
-export interface PendingUpload extends BeginMultipartUploadPayload {
-    fieldName: string;
-    fileKey: string;
-    uploadId: string;
-}
-export interface BeginUploadResponse {
-    [fieldName: string]: {
-        uuid: string;
-        uploadParts: UploadPart[];
-    };
-}
-export declare const multipartUploadHooks: (webResourceHandler: WebResourceHandler) => sbvrUtils.Hooks;
-export declare const beginUpload: (webResourceHandler: WebResourceHandler, odataRequest: ODataRequest, actorId?: number) => Promise<BeginUploadResponse>;
package/out/webresource-handler/multipartUpload.js DELETED
@@ -1,182 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.beginUpload = exports.multipartUploadHooks = void 0;
-const node_crypto_1 = require("node:crypto");
-const _1 = require(".");
-const sbvr_utils_1 = require("../sbvr-api/sbvr-utils");
-const module_1 = require("../server-glue/module");
-const MB = 1024 * 1024;
-const multipartUploadHooks = (webResourceHandler) => {
-    return {
-        POSTPARSE: async ({ req, request, tx, api: applicationApi }) => {
-            if (request.odataQuery.property?.resource === 'beginUpload') {
-                const uploadParams = await validateBeginUpload(request, applicationApi);
-                tx = await module_1.sbvrUtils.db.transaction();
-                req.tx = tx;
-                request.tx = tx;
-                request.method = 'PATCH';
-                request.values = uploadParams;
-                request.odataQuery.resource = request.resourceName;
-                delete request.odataQuery.property;
-                request.custom.isAction = 'beginUpload';
-            }
-            else if (request.odataQuery.property?.resource === 'commitUpload') {
-                const commitPayload = await validateCommitUpload(request, applicationApi);
-                const webresource = await webResourceHandler.commitMultipartUpload({
-                    fileKey: commitPayload.metadata.fileKey,
-                    uploadId: commitPayload.metadata.uploadId,
-                    filename: commitPayload.metadata.filename,
-                    providerCommitData: commitPayload.providerCommitData,
-                });
-                await sbvr_utils_1.api.webresource.patch({
-                    resource: 'multipart_upload',
-                    body: {
-                        status: 'completed',
-                    },
-                    options: {
-                        $filter: {
-                            uuid: commitPayload.uuid,
-                        },
-                    },
-                    passthrough: {
-                        tx: tx,
-                    },
-                });
-                request.method = 'PATCH';
-                request.values = {
-                    [commitPayload.metadata.fieldName]: webresource,
-                };
-                request.odataQuery.resource = request.resourceName;
-                delete request.odataQuery.property;
-                request.custom.isAction = 'commitUpload';
-                request.custom.commitUploadPayload = webresource;
-            }
-        },
-        PRERESPOND: async ({ req, request, response, tx }) => {
-            if (request.custom.isAction === 'beginUpload') {
-                await tx.rollback();
-                response.statusCode = 200;
-                response.body = await (0, exports.beginUpload)(webResourceHandler, request, req.user?.actor);
-            }
-            else if (request.custom.isAction === 'commitUpload') {
-                response.body = await webResourceHandler.onPreRespond(request.custom.commitUploadPayload);
-            }
-        },
-    };
-};
-exports.multipartUploadHooks = multipartUploadHooks;
-const beginUpload = async (webResourceHandler, odataRequest, actorId) => {
-    const payload = odataRequest.values;
-    const fieldName = Object.keys(payload)[0];
-    const metadata = payload[fieldName];
-    const { fileKey, uploadId, uploadParts } = await webResourceHandler.beginMultipartUpload(fieldName, metadata);
-    const uuid = (0, node_crypto_1.randomUUID)();
-    try {
-        await sbvr_utils_1.api.webresource.post({
-            resource: 'multipart_upload',
-            body: {
-                uuid,
-                resource_name: odataRequest.resourceName,
-                field_name: fieldName,
-                resource_id: odataRequest.affectedIds?.[0],
-                upload_id: uploadId,
-                file_key: fileKey,
-                status: 'pending',
-                filename: metadata.filename,
-                content_type: metadata.content_type,
-                size: metadata.size,
-                chunk_size: metadata.chunk_size,
-                expiry_date: Date.now() + 7 * 24 * 60 * 60 * 1000,
-                is_created_by__actor: actorId,
-            },
-        });
-    }
-    catch (err) {
-        console.error('failed to start multipart upload', err);
-        throw new module_1.errors.BadRequestError('Failed to start multipart upload');
-    }
-    return { [fieldName]: { uuid, uploadParts } };
-};
-exports.beginUpload = beginUpload;
-const validateBeginUpload = async (request, applicationApi) => {
-    if (request.odataQuery.key == null) {
-        throw new module_1.errors.BadRequestError();
-    }
-    await applicationApi.post({
-        url: request.url.substring(1).replace('beginUpload', 'canAccess'),
-        body: { method: 'PATCH' },
-    });
-    const fieldNames = Object.keys(request.values);
-    if (fieldNames.length !== 1) {
-        throw new module_1.errors.BadRequestError('You can only get upload url for one field at a time');
-    }
-    const [fieldName] = fieldNames;
-    const webResourceFields = (0, _1.getWebResourceFields)(request, false);
-    if (!webResourceFields.includes(fieldName)) {
-        throw new module_1.errors.BadRequestError(`You must provide a valid webresource field from: ${JSON.stringify(webResourceFields)}`);
-    }
-    const beginUploadPayload = parseBeginUploadPayload(request.values[fieldName]);
-    if (beginUploadPayload == null) {
-        throw new module_1.errors.BadRequestError('Invalid file metadata');
-    }
-    const uploadMetadataCheck = {
-        ...beginUploadPayload,
-        href: 'metadata_check',
-    };
-    return { [fieldName]: uploadMetadataCheck };
-};
-const parseBeginUploadPayload = (payload) => {
-    if (typeof payload !== 'object') {
-        return null;
-    }
-    let { filename, content_type, size, chunk_size } = payload;
-    if (typeof filename !== 'string' ||
-        typeof content_type !== 'string' ||
-        typeof size !== 'number' ||
-        (chunk_size != null && typeof chunk_size !== 'number') ||
-        (chunk_size != null && chunk_size < 5 * MB)) {
-        return null;
-    }
-    if (chunk_size == null) {
-        chunk_size = 5 * MB;
-    }
-    return { filename, content_type, size, chunk_size };
-};
-const validateCommitUpload = async (request, applicationApi) => {
-    if (request.odataQuery.key == null) {
-        throw new module_1.errors.BadRequestError();
-    }
-    await applicationApi.post({
-        url: request.url.substring(1).replace('commitUpload', 'canAccess'),
-        body: { method: 'PATCH' },
-    });
-    const { uuid, providerCommitData } = request.values;
-    if (typeof uuid !== 'string') {
-        throw new module_1.errors.BadRequestError('Invalid uuid type');
-    }
-    const [multipartUpload] = (await sbvr_utils_1.api.webresource.get({
-        resource: 'multipart_upload',
-        options: {
-            $select: ['id', 'file_key', 'upload_id', 'field_name', 'filename'],
-            $filter: {
-                uuid,
-                status: 'pending',
-                expiry_date: { $gt: { $now: {} } },
-            },
-        },
-        passthrough: {
-            tx: request.tx,
-        },
-    }));
-    if (multipartUpload == null) {
-        throw new module_1.errors.BadRequestError(`Invalid upload for uuid ${uuid}`);
-    }
-    const metadata = {
-        fileKey: multipartUpload.file_key,
-        uploadId: multipartUpload.upload_id,
-        filename: multipartUpload.filename,
-        fieldName: multipartUpload.field_name,
-    };
-    return { uuid, providerCommitData, metadata };
-};
-//# sourceMappingURL=multipartUpload.js.map