@balena/pinejs 16.2.0-build-joshbwlng-tasks-046f828587ae6d1889a3ae3298b3e44e96c74f90-1 → 17.0.0-build-wip-large-file-uploads-d6522dad962bc0bff6ee7c596df8f43f596b6aaa-1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.husky/pre-commit +0 -2
- package/.pinejs-cache.json +1 -1
- package/.versionbot/CHANGELOG.yml +33 -7
- package/CHANGELOG.md +12 -2
- package/VERSION +1 -1
- package/out/config-loader/env.d.ts +0 -4
- package/out/config-loader/env.js +1 -5
- package/out/config-loader/env.js.map +1 -1
- package/out/database-layer/db.d.ts +0 -3
- package/out/database-layer/db.js +0 -14
- package/out/database-layer/db.js.map +1 -1
- package/out/migrator/utils.js +2 -2
- package/out/migrator/utils.js.map +1 -1
- package/out/sbvr-api/sbvr-utils.d.ts +0 -1
- package/out/sbvr-api/sbvr-utils.js +1 -6
- package/out/sbvr-api/sbvr-utils.js.map +1 -1
- package/out/server-glue/module.d.ts +0 -1
- package/out/server-glue/module.js +1 -2
- package/out/server-glue/module.js.map +1 -1
- package/out/webresource-handler/handlers/NoopHandler.d.ts +3 -0
- package/out/webresource-handler/handlers/NoopHandler.js +6 -0
- package/out/webresource-handler/handlers/NoopHandler.js.map +1 -1
- package/out/webresource-handler/handlers/S3Handler.d.ts +7 -0
- package/out/webresource-handler/handlers/S3Handler.js +68 -2
- package/out/webresource-handler/handlers/S3Handler.js.map +1 -1
- package/out/webresource-handler/index.d.ts +5 -0
- package/out/webresource-handler/index.js +10 -5
- package/out/webresource-handler/index.js.map +1 -1
- package/out/webresource-handler/multipartUpload.d.ts +40 -0
- package/out/webresource-handler/multipartUpload.js +125 -0
- package/out/webresource-handler/multipartUpload.js.map +1 -0
- package/package.json +7 -10
- package/src/config-loader/env.ts +1 -6
- package/src/database-layer/db.ts +0 -24
- package/src/migrator/utils.ts +1 -1
- package/src/sbvr-api/sbvr-utils.ts +1 -5
- package/src/server-glue/module.ts +0 -1
- package/src/webresource-handler/handlers/NoopHandler.ts +21 -0
- package/src/webresource-handler/handlers/S3Handler.ts +130 -4
- package/src/webresource-handler/index.ts +24 -1
- package/src/webresource-handler/multipartUpload.ts +214 -0
- package/out/tasks/common.d.ts +0 -4
- package/out/tasks/common.js +0 -13
- package/out/tasks/common.js.map +0 -1
- package/out/tasks/index.d.ts +0 -10
- package/out/tasks/index.js +0 -139
- package/out/tasks/index.js.map +0 -1
- package/out/tasks/model.sbvr +0 -60
- package/out/tasks/types.d.ts +0 -38
- package/out/tasks/types.js +0 -10
- package/out/tasks/types.js.map +0 -1
- package/out/tasks/worker.d.ts +0 -16
- package/out/tasks/worker.js +0 -191
- package/out/tasks/worker.js.map +0 -1
- package/src/tasks/common.ts +0 -14
- package/src/tasks/index.ts +0 -158
- package/src/tasks/model.sbvr +0 -60
- package/src/tasks/types.ts +0 -58
- package/src/tasks/worker.ts +0 -246
package/src/database-layer/db.ts
CHANGED
@@ -98,13 +98,6 @@ export interface Database extends BaseDatabase {
 	) => Promise<Result>;
 	transaction: TransactionFn;
 	readTransaction: TransactionFn;
-	on?: (
-		name: 'notification',
-		fn: (...args: any[]) => Promise<void>,
-		options?: {
-			channel?: string;
-		},
-	) => void;
 }
 
 interface EngineParams {
@@ -696,23 +689,6 @@ if (maybePg != null) {
 		return {
 			engine: Engines.postgres,
 			executeSql: atomicExecuteSql,
-			on: async (name, fn, options) => {
-				if (name === 'notification' && options?.channel === undefined) {
-					throw new Error('Missing channel option for notification listener');
-				}
-
-				const client = await pool.connect();
-				client.on(name, (msg) => {
-					fn(msg).catch((error) => {
-						console.error('Error handling message:', error);
-					});
-				});
-
-				if (name === 'notification' && options?.channel !== undefined) {
-					await client.query(`LISTEN "${options.channel}";`);
-					// client.release();
-				}
-			},
 			transaction: createTransaction(async (stackTraceErr) => {
 				const client = await pool.connect();
 				const tx = new PostgresTx(client, false, stackTraceErr);
package/src/migrator/utils.ts
CHANGED
@@ -1,7 +1,7 @@
 import type { Result, Tx } from '../database-layer/db';
 import type { Resolvable } from '../sbvr-api/common-types';
 
-import { createHash } from 'crypto';
+import { createHash } from 'node:crypto';
 import { Engines } from '@balena/abstract-sql-compiler';
 import _ from 'lodash';
 import { TypedError } from 'typed-error';
package/src/sbvr-api/sbvr-utils.ts
CHANGED
@@ -42,7 +42,6 @@ import { generateODataMetadata } from '../odata-metadata/odata-metadata-generator';
 
 // eslint-disable-next-line @typescript-eslint/no-var-requires
 const devModel = require('./dev.sbvr');
-import * as tasks from '../tasks';
 import * as permissions from './permissions';
 import {
 	BadRequestError,
@@ -78,7 +77,6 @@ export {
 	addPureHook,
 	addSideEffectHook,
 } from './hooks';
-export { addTaskHandler } from '../tasks';
 
 import memoizeWeak = require('memoizee/weak');
 import * as controlFlow from './control-flow';
@@ -775,7 +773,7 @@ export const postExecuteModels = async (tx: Db.Tx): Promise<void> => {
 	// Hence, skipped migrations from earlier models are not set as executed as the `migration` table is missing
 	// Here the skipped migrations that haven't been set properly are covered
 	// This is mostly an edge case when running on an empty database schema and migrations model hasn't been executed, yet.
-	// One
+	// One specific case is running tests against both migrated and unmigrated database states
 
 	for (const modelKey of Object.keys(models)) {
 		const pendingToSetExecutedMigrations =
@@ -1985,7 +1983,6 @@ export const executeStandardModels = async (tx: Db.Tx): Promise<void> => {
 			},
 		});
 		await executeModels(tx, permissions.config.models);
-		await executeModels(tx, tasks.config.models);
 		console.info('Successfully executed standard models.');
 	} catch (err: any) {
 		console.error('Failed to execute standard models.', err);
@@ -2002,7 +1999,6 @@ export const setup = async (
 		await db.transaction(async (tx) => {
 			await executeStandardModels(tx);
 			await permissions.setup();
-			await tasks.setup($db, tx);
 		});
 	} catch (err: any) {
 		console.error('Could not execute standard models', err);
package/src/server-glue/module.ts
CHANGED
@@ -19,7 +19,6 @@ export * as errors from '../sbvr-api/errors';
 export * as env from '../config-loader/env';
 export * as types from '../sbvr-api/common-types';
 export * as hooks from '../sbvr-api/hooks';
-export * as tasks from '../tasks';
 export * as webResourceHandler from '../webresource-handler';
 export type { configLoader as ConfigLoader };
 export type { migratorUtils as Migrator };
package/src/webresource-handler/handlers/NoopHandler.ts
CHANGED
@@ -1,5 +1,10 @@
 import type { WebResourceType as WebResource } from '@balena/sbvr-types';
 import type { IncomingFile, UploadResponse, WebResourceHandler } from '..';
+import type {
+	BeginUploadHandlerResponse,
+	BeginUploadPayload,
+	CommitUploadHandlerPayload,
+} from '../multipartUpload';
 
 export class NoopHandler implements WebResourceHandler {
 	public async handleFile(resource: IncomingFile): Promise<UploadResponse> {
@@ -18,4 +23,20 @@ export class NoopHandler implements WebResourceHandler {
 	public async onPreRespond(webResource: WebResource): Promise<WebResource> {
 		return webResource;
 	}
+
+	public async beginUpload(
+		// eslint-disable-next-line @typescript-eslint/no-unused-vars
+		_fieldName: string,
+		// eslint-disable-next-line @typescript-eslint/no-unused-vars
+		_payload: BeginUploadPayload,
+	): Promise<BeginUploadHandlerResponse> {
+		return { fileKey: 'noop', uploadId: 'noop', uploadUrls: [] };
+	}
+
+	public async commitUpload(
+		// eslint-disable-next-line @typescript-eslint/no-unused-vars
+		_payload: CommitUploadHandlerPayload,
+	): Promise<WebResource> {
+		return { filename: 'noop', href: 'noop' };
+	}
 }
package/src/webresource-handler/handlers/S3Handler.ts
CHANGED
@@ -6,17 +6,27 @@ import {
 	WebResourceError,
 	type WebResourceHandler,
 } from '..';
+import type {
+	BeginUploadHandlerResponse,
+	BeginUploadPayload,
+	CommitUploadHandlerPayload,
+	UploadUrl,
+} from '../multipartUpload';
 import {
 	S3Client,
 	type S3ClientConfig,
 	DeleteObjectCommand,
 	type PutObjectCommandInput,
 	GetObjectCommand,
+	CreateMultipartUploadCommand,
+	UploadPartCommand,
+	CompleteMultipartUploadCommand,
+	HeadObjectCommand,
 } from '@aws-sdk/client-s3';
 import { Upload } from '@aws-sdk/lib-storage';
 import { getSignedUrl } from '@aws-sdk/s3-request-presigner';
 
-import { randomUUID } from 'crypto';
+import { randomUUID } from 'node:crypto';
 import type { WebResourceType as WebResource } from '@balena/sbvr-types';
 import memoize from 'memoizee';
 
@@ -71,9 +81,7 @@ export class S3Handler implements WebResourceHandler {
 
 	public async handleFile(resource: IncomingFile): Promise<UploadResponse> {
 		let size = 0;
-		const key = `${resource.fieldname}_${randomUUID()}_${
-			resource.originalname
-		}`;
+		const key = this.getFileKey(resource.fieldname, resource.originalname);
 		const params: PutObjectCommandInput = {
 			Bucket: this.bucket,
 			Key: key,
@@ -122,6 +130,62 @@ export class S3Handler implements WebResourceHandler {
 		return webResource;
 	}
 
+	public async beginUpload(
+		fieldName: string,
+		payload: BeginUploadPayload,
+	): Promise<BeginUploadHandlerResponse> {
+		const fileKey = this.getFileKey(fieldName, payload.filename);
+
+		const createMultiPartResponse = await this.client.send(
+			new CreateMultipartUploadCommand({
+				Bucket: this.bucket,
+				Key: fileKey,
+				ContentType: payload.content_type,
+			}),
+		);
+
+		if (createMultiPartResponse.UploadId == null) {
+			throw new WebResourceError('Failed to create multipart upload.');
+		}
+
+		const uploadUrls = await this.getPartUploadUrls(
+			fileKey,
+			createMultiPartResponse.UploadId,
+			payload,
+		);
+		return { fileKey, uploadId: createMultiPartResponse.UploadId, uploadUrls };
+	}
+
+	public async commitUpload({
+		fileKey,
+		uploadId,
+		filename,
+		multipartUploadChecksums,
+	}: CommitUploadHandlerPayload): Promise<WebResource> {
+		await this.client.send(
+			new CompleteMultipartUploadCommand({
+				Bucket: this.bucket,
+				Key: fileKey,
+				UploadId: uploadId,
+				MultipartUpload: multipartUploadChecksums,
+			}),
+		);
+
+		const headResult = await this.client.send(
+			new HeadObjectCommand({
+				Bucket: this.bucket,
+				Key: fileKey,
+			}),
+		);
+
+		return {
+			href: this.getS3URL(fileKey),
+			filename: filename,
+			size: headResult.ContentLength,
+			content_type: headResult.ContentType,
+		};
+	}
+
 	private s3SignUrl(fileKey: string): Promise<string> {
 		const command = new GetObjectCommand({
 			Bucket: this.bucket,
@@ -136,8 +200,70 @@ export class S3Handler implements WebResourceHandler {
 		return `${this.config.endpoint}/${this.bucket}/${key}`;
 	}
 
+	private getFileKey(fieldName: string, fileName: string) {
+		return `${fieldName}_${randomUUID()}_${fileName}`;
+	}
+
 	private getKeyFromHref(href: string): string {
 		const hrefWithoutParams = normalizeHref(href);
 		return hrefWithoutParams.substring(hrefWithoutParams.lastIndexOf('/') + 1);
 	}
+
+	private async getPartUploadUrls(
+		fileKey: string,
+		uploadId: string,
+		payload: BeginUploadPayload,
+	): Promise<UploadUrl[]> {
+		const chunkSizesWithParts = await this.getChunkSizesWithParts(
+			payload.size,
+			payload.chunk_size,
+		);
+		return Promise.all(
+			chunkSizesWithParts.map(async ({ chunkSize, partNumber }) => ({
+				chunkSize,
+				partNumber,
+				url: await this.getPartUploadUrl(
+					fileKey,
+					uploadId,
+					partNumber,
+					chunkSize,
+				),
+			})),
+		);
+	}
+
+	private async getPartUploadUrl(
+		fileKey: string,
+		uploadId: string,
+		partNumber: number,
+		partSize: number,
+	): Promise<string> {
+		const command = new UploadPartCommand({
+			Bucket: this.bucket,
+			Key: fileKey,
+			UploadId: uploadId,
+			PartNumber: partNumber,
+			ContentLength: partSize,
+		});
+
+		return getSignedUrl(this.client, command, {
+			expiresIn: this.signedUrlExpireTimeSeconds,
+		});
+	}
+
+	private async getChunkSizesWithParts(
+		size: number,
+		chunkSize: number,
+	): Promise<Array<Pick<UploadUrl, 'chunkSize' | 'partNumber'>>> {
+		const chunkSizesWithParts = [];
+		let partNumber = 1;
+		let remainingSize = size;
+		while (remainingSize > 0) {
+			const currentChunkSize = Math.min(remainingSize, chunkSize);
+			chunkSizesWithParts.push({ chunkSize: currentChunkSize, partNumber });
+			remainingSize -= currentChunkSize;
+			partNumber += 1;
+		}
+		return chunkSizesWithParts;
+	}
 }
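The part-splitting arithmetic in getChunkSizesWithParts above is easiest to check with a worked example. The following is a minimal standalone sketch of the same logic, extracted from the class purely for illustration:

// Mirrors S3Handler.getChunkSizesWithParts from the diff above:
// split `size` bytes into parts of at most `chunkSize` bytes each.
function chunkSizesWithParts(
	size: number,
	chunkSize: number,
): Array<{ chunkSize: number; partNumber: number }> {
	const parts: Array<{ chunkSize: number; partNumber: number }> = [];
	let partNumber = 1;
	let remainingSize = size;
	while (remainingSize > 0) {
		// The final part is whatever is left over, so it may be smaller than chunkSize.
		const currentChunkSize = Math.min(remainingSize, chunkSize);
		parts.push({ chunkSize: currentChunkSize, partNumber });
		remainingSize -= currentChunkSize;
		partNumber += 1;
	}
	return parts;
}

const MB = 1024 * 1024;
// A 12 MB upload with a 5 MB chunk size yields three parts:
// [ { chunkSize: 5 MB, partNumber: 1 },
//   { chunkSize: 5 MB, partNumber: 2 },
//   { chunkSize: 2 MB, partNumber: 3 } ]
console.log(chunkSizesWithParts(12 * MB, 5 * MB));

Only the last part may fall below the chunk size, which lines up with S3's requirement that every part except the final one be at least 5 MiB; the same bound is enforced by parseBeginUploadPayload in multipartUpload.ts below.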
package/src/webresource-handler/index.ts
CHANGED
@@ -7,6 +7,12 @@ import type { HookArgs } from '../sbvr-api/hooks';
 import { getApiRoot, getModel } from '../sbvr-api/sbvr-utils';
 import { checkPermissions } from '../sbvr-api/permissions';
 import { NoopHandler } from './handlers/NoopHandler';
+import type {
+	BeginUploadHandlerResponse,
+	BeginUploadPayload,
+	CommitUploadHandlerPayload,
+} from './multipartUpload';
+import { multipartUploadHooks } from './multipartUpload';
 import {
 	odataNameToSqlName,
 	sqlNameToODataName,
@@ -34,6 +40,14 @@ export interface WebResourceHandler {
 	handleFile: (resource: IncomingFile) => Promise<UploadResponse>;
 	removeFile: (fileReference: string) => Promise<void>;
 	onPreRespond: (webResource: WebResource) => Promise<WebResource>;
+
+	beginUpload: (
+		fieldName: string,
+		payload: BeginUploadPayload,
+	) => Promise<BeginUploadHandlerResponse>;
+	commitUpload: (
+		commitInfo: CommitUploadHandlerPayload,
+	) => Promise<WebResource>;
 }
 
 export class WebResourceError extends TypedError {}
@@ -216,7 +230,7 @@ export const getUploaderMiddlware = (
 	};
 };
 
-const getWebResourceFields = (
+export const getWebResourceFields = (
 	request: uriParser.ODataRequest,
 	useTranslations = true,
 ): string[] => {
@@ -249,6 +263,8 @@ const throwIfWebresourceNotInMultipart = (
 	{ req, request }: HookArgs,
 ) => {
 	if (
+		request.custom.isAction !== 'beginUpload' &&
+		request.custom.isAction !== 'commitUpload' &&
 		!req.is?.('multipart') &&
 		webResourceFields.some((field) => request.values[field] != null)
 	) {
@@ -447,4 +463,11 @@ export const setupUploadHooks = (
 		resourceName,
 		getCreateWebResourceHooks(handler),
 	);
+
+	sbvrUtils.addPureHook(
+		'POST',
+		apiRoot,
+		resourceName,
+		multipartUploadHooks(handler),
+	);
 };
package/src/webresource-handler/multipartUpload.ts
ADDED
@@ -0,0 +1,214 @@
+import type { AnyObject } from 'pinejs-client-core';
+import type { WebResourceHandler } from '.';
+import { getWebResourceFields } from '.';
+import type { ODataRequest } from '../sbvr-api/uri-parser';
+import { errors, sbvrUtils } from '../server-glue/module';
+import type { WebResource } from '@balena/sbvr-types/out/types/web-resource';
+import { randomUUID } from 'node:crypto';
+
+export interface BeginUploadPayload {
+	filename: string;
+	content_type: string;
+	size: number;
+	chunk_size: number;
+}
+
+type BeginUploadDbCheck = BeginUploadPayload & WebResource;
+
+export interface UploadUrl {
+	url: string;
+	chunkSize: number;
+	partNumber: number;
+}
+
+export interface BeginUploadHandlerResponse {
+	uploadUrls: UploadUrl[];
+	fileKey: string;
+	uploadId: string;
+}
+
+export interface PendingUpload extends BeginUploadPayload {
+	fieldName: string;
+	fileKey: string;
+	uploadId: string;
+}
+
+export interface BeginUploadResponse {
+	[fieldName: string]: {
+		key: string;
+		uploadUrls: UploadUrl[];
+	};
+}
+export interface CommitUploadHandlerPayload {
+	fileKey: string;
+	uploadId: string;
+	filename: string;
+	multipartUploadChecksums?: AnyObject;
+}
+
+const MB = 1024 * 1024;
+
+export const multipartUploadHooks = (
+	webResourceHandler: WebResourceHandler,
+): sbvrUtils.Hooks => {
+	return {
+		POSTPARSE: async ({ req, request, tx }) => {
+			if (request.odataQuery.property?.resource === 'beginUpload') {
+				const uploadParams = parseBeginUpload(request);
+
+				await sbvrUtils.api[request.vocabulary].post({
+					url: request.url.substring(1).replace('beginUpload', 'canAccess'),
+					body: { method: 'PATCH' },
+				});
+
+				// This transaction is necessary because beginUpload requests
+				// roll back the transaction (in order to first validate
+				// the requested metadata). If we don't pass any transaction,
+				// it will use the default transaction handler, which will
+				// error out on any rollback.
+				tx = await sbvrUtils.db.transaction();
+				req.tx = tx;
+				request.tx = tx;
+
+				request.method = 'PATCH';
+				request.values = uploadParams;
+				request.odataQuery.resource = request.resourceName;
+				delete request.odataQuery.property;
+				request.custom.isAction = 'beginUpload';
+			} else if (request.odataQuery.property?.resource === 'commitUpload') {
+				const commitPayload = parseCommitUpload(request);
+
+				await sbvrUtils.api[request.vocabulary].post({
+					url: request.url.substring(1).replace('commitUpload', 'canAccess'),
+					body: { method: 'PATCH' },
+				});
+
+				const webresource = await webResourceHandler.commitUpload({
+					fileKey: commitPayload.metadata.fileKey,
+					uploadId: commitPayload.metadata.uploadId,
+					filename: commitPayload.metadata.filename,
+					multipartUploadChecksums: commitPayload.additionalCommitInfo,
+				});
+				delete someKvStoreDbOrRedisIdc[commitPayload.key];
+
+				request.method = 'PATCH';
+				request.values = {
+					[commitPayload.metadata.fieldName]: webresource,
+				};
+				request.odataQuery.resource = request.resourceName;
+				delete request.odataQuery.property;
+				request.custom.isAction = 'commitUpload';
+				request.custom.commitUploadPayload = webresource;
+			}
+		},
+		PRERESPOND: async ({ request, response, tx }) => {
+			if (request.custom.isAction === 'beginUpload') {
+				await tx.rollback();
+				response.statusCode = 200;
+				response.body = await beginUpload(
+					webResourceHandler,
+					request.values as { [x: string]: BeginUploadPayload },
+				);
+			} else if (request.custom.isAction === 'commitUpload') {
+				response.body = await webResourceHandler.onPreRespond(
+					request.custom.commitUploadPayload,
+				);
+			}
+		},
+	};
+};
+
+// Really any storage will do.
+// I just honestly believe redis better suits this use case but I am fine if we agree on storing on DB
+// This is not a cache, it is a persistent KV storage.
+const someKvStoreDbOrRedisIdc: Record<string, PendingUpload> = {};
+
+export const beginUpload = async (
+	webResourceHandler: WebResourceHandler,
+	payload: { [fieldName: string]: BeginUploadPayload },
+): Promise<BeginUploadResponse> => {
+	const fieldName = Object.keys(payload)[0];
+	const metadata = payload[fieldName];
+
+	const { fileKey, uploadId, uploadUrls } =
+		await webResourceHandler.beginUpload(fieldName, metadata);
+	const key = randomUUID();
+
+	someKvStoreDbOrRedisIdc[key] = { ...metadata, fieldName, fileKey, uploadId };
+
+	return { [fieldName]: { key, uploadUrls } };
+};
+
+const parseBeginUpload = (request: ODataRequest) => {
+	if (request.odataQuery.key == null) {
+		throw new errors.BadRequestError();
+	}
+
+	const fieldNames = Object.keys(request.values);
+	if (fieldNames.length !== 1) {
+		throw new errors.BadRequestError(
+			'You can only get upload url for one field at a time',
+		);
+	}
+
+	const [fieldName] = fieldNames;
+	const webResourceFields = getWebResourceFields(request, false);
+	if (!webResourceFields.includes(fieldName)) {
+		throw new errors.BadRequestError(
+			`You must provide a valid webresource field from: ${JSON.stringify(webResourceFields)}`,
+		);
+	}
+
+	const beginUploadPayload = parseBeginUploadPayload(request.values[fieldName]);
+	if (beginUploadPayload == null) {
+		throw new errors.BadRequestError('Invalid file metadata');
+	}
+
+	const uploadMetadataCheck: BeginUploadDbCheck = {
+		...beginUploadPayload,
+		href: 'metadata_check',
+	};
+
+	return { [fieldName]: uploadMetadataCheck };
+};
+
+const parseBeginUploadPayload = (
+	payload: AnyObject,
+): BeginUploadPayload | null => {
+	if (typeof payload !== 'object') {
+		return null;
+	}
+
+	let { filename, content_type, size, chunk_size } = payload;
+	if (
+		typeof filename !== 'string' ||
+		typeof content_type !== 'string' ||
+		typeof size !== 'number' ||
+		(chunk_size != null && typeof chunk_size !== 'number') ||
+		(chunk_size != null && chunk_size < 5 * MB)
+	) {
+		return null;
+	}
+
+	if (chunk_size == null) {
+		chunk_size = 5 * MB;
+	}
+	return { filename, content_type, size, chunk_size };
+};
+
+const parseCommitUpload = (request: ODataRequest) => {
+	if (request.odataQuery.key == null) {
+		throw new errors.BadRequestError();
+	}
+
+	const { key, additionalCommitInfo } = request.values;
+	if (typeof key !== 'string') {
+		throw new errors.BadRequestError('Invalid key type');
+	}
+
+	if (someKvStoreDbOrRedisIdc[key] == null) {
+		throw new errors.BadRequestError('Invalid key');
+	}
+
+	return { key, additionalCommitInfo, metadata: someKvStoreDbOrRedisIdc[key] };
+};
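Taken together, the hooks above turn beginUpload and commitUpload into POST-able actions on any resource that has a web resource field. The following is a rough client-side sketch of the resulting flow, inferred from the hooks and not part of the package: the `widget` resource, `attachment` field, and base URL are invented for illustration, and the `{ Parts: [...] }` shape passed as additionalCommitInfo assumes the S3 handler's CompleteMultipartUploadCommand semantics.

// Hypothetical client flow for the new multipart upload actions.
// Resource name, field name, and base URL are illustrative only.
declare const file: Blob; // the large file being uploaded

const base = 'https://api.example.com/v1';

// 1. Begin the upload, describing the file for the `attachment` field.
const beginRes = await fetch(`${base}/widget(1)/beginUpload`, {
	method: 'POST',
	headers: { 'Content-Type': 'application/json' },
	body: JSON.stringify({
		attachment: {
			filename: 'video.mp4',
			content_type: 'video/mp4',
			size: file.size,
			chunk_size: 5 * 1024 * 1024,
		},
	}),
});
const { attachment } = (await beginRes.json()) as {
	attachment: {
		key: string;
		uploadUrls: Array<{ url: string; chunkSize: number; partNumber: number }>;
	};
};

// 2. PUT each slice of the file to its pre-signed part URL,
//    collecting the ETag S3 returns for every part.
const parts: Array<{ ETag: string; PartNumber: number }> = [];
let offset = 0;
for (const { url, chunkSize, partNumber } of attachment.uploadUrls) {
	const res = await fetch(url, {
		method: 'PUT',
		body: file.slice(offset, offset + chunkSize),
	});
	parts.push({ ETag: res.headers.get('etag') ?? '', PartNumber: partNumber });
	offset += chunkSize;
}

// 3. Commit, handing back the key from step 1 plus the part checksums.
await fetch(`${base}/widget(1)/commitUpload`, {
	method: 'POST',
	headers: { 'Content-Type': 'application/json' },
	body: JSON.stringify({
		key: attachment.key,
		additionalCommitInfo: { Parts: parts },
	}),
});

Note that the pending-upload state between the two calls lives in the in-memory someKvStoreDbOrRedisIdc map, which the comments above explicitly flag as a placeholder for a persistent key-value store.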
package/out/tasks/common.d.ts
DELETED
package/out/tasks/common.js
DELETED
@@ -1,13 +0,0 @@
-"use strict";
-var __importDefault = (this && this.__importDefault) || function (mod) {
-    return (mod && mod.__esModule) ? mod : { "default": mod };
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.ajv = exports.channel = exports.apiRoot = void 0;
-const ajv_1 = __importDefault(require("ajv"));
-exports.apiRoot = 'tasks';
-exports.channel = 'task_insert';
-exports.ajv = new ajv_1.default({
-    inlineRefs: false,
-});
-//# sourceMappingURL=common.js.map
package/out/tasks/common.js.map
DELETED
@@ -1 +0,0 @@
-{"version":3,"file":"common.js","sourceRoot":"","sources":["../../src/tasks/common.ts"],"names":[],"mappings":";;;;;;AAAA,8CAAsB;AAGT,QAAA,OAAO,GAAG,OAAO,CAAC;AAGlB,QAAA,OAAO,GAAG,aAAa,CAAC;AAKxB,QAAA,GAAG,GAAG,IAAI,aAAG,CAAC;IAC1B,UAAU,EAAE,KAAK;CACjB,CAAC,CAAC"}
package/out/tasks/index.d.ts
DELETED
@@ -1,10 +0,0 @@
-import type { Schema } from 'ajv';
-import type * as Db from '../database-layer/db';
-import type { sbvrUtils } from '../server-glue/module';
-import type { TaskHandler } from './types';
-export * from './types';
-export declare const config: {
-    models: sbvrUtils.ExecutableModel[];
-};
-export declare function setup(db: Db.Database, tx: Db.Tx): Promise<void>;
-export declare function addTaskHandler(name: string, fn: TaskHandler['fn'], schema?: Schema): void;