@balena/pinejs 17.0.0-build-wip-large-file-uploads-d6522dad962bc0bff6ee7c596df8f43f596b6aaa-1 → 17.0.0-build-wip-large-file-uploads-b2029f1b1df6c60eca71dd6d8921b4a3a94b2821-1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.pinejs-cache.json +1 -1
- package/.versionbot/CHANGELOG.yml +2 -2
- package/out/server-glue/module.js +2 -0
- package/out/server-glue/module.js.map +1 -1
- package/out/webresource-handler/index.d.ts +6 -2
- package/out/webresource-handler/index.js +16 -6
- package/out/webresource-handler/index.js.map +1 -1
- package/out/webresource-handler/multipartUpload.d.ts +2 -3
- package/out/webresource-handler/multipartUpload.js +79 -17
- package/out/webresource-handler/multipartUpload.js.map +1 -1
- package/out/webresource-handler/webresource.sbvr +62 -0
- package/package.json +2 -2
- package/src/server-glue/module.ts +2 -0
- package/src/webresource-handler/index.ts +23 -11
- package/src/webresource-handler/multipartUpload.ts +96 -24
- package/src/webresource-handler/webresource.sbvr +62 -0
package/src/webresource-handler/multipartUpload.ts

@@ -1,10 +1,11 @@
+import type { WebResourceType as WebResource } from '@balena/sbvr-types';
+import { randomUUID } from 'node:crypto';
 import type { AnyObject } from 'pinejs-client-core';
 import type { WebResourceHandler } from '.';
 import { getWebResourceFields } from '.';
+import { api } from '../sbvr-api/sbvr-utils';
 import type { ODataRequest } from '../sbvr-api/uri-parser';
-import { errors, sbvrUtils } from '../server-glue/module';
-import type { WebResource } from '@balena/sbvr-types/out/types/web-resource';
-import { randomUUID } from 'node:crypto';
+import { errors, permissions, sbvrUtils } from '../server-glue/module';

 export interface BeginUploadPayload {
 	filename: string;
@@ -52,11 +53,11 @@ export const multipartUploadHooks = (
 	webResourceHandler: WebResourceHandler,
 ): sbvrUtils.Hooks => {
 	return {
-		POSTPARSE: async ({ req, request, tx }) => {
+		POSTPARSE: async ({ req, request, tx, api: vocabularyApi }) => {
 			if (request.odataQuery.property?.resource === 'beginUpload') {
 				const uploadParams = parseBeginUpload(request);

-				await
+				await vocabularyApi.post({
 					url: request.url.substring(1).replace('beginUpload', 'canAccess'),
 					body: { method: 'PATCH' },
 				});
@@ -76,9 +77,9 @@ export const multipartUploadHooks = (
 				delete request.odataQuery.property;
 				request.custom.isAction = 'beginUpload';
 			} else if (request.odataQuery.property?.resource === 'commitUpload') {
-				const commitPayload = parseCommitUpload(request);
+				const commitPayload = await parseCommitUpload(request);

-				await
+				await vocabularyApi.post({
 					url: request.url.substring(1).replace('commitUpload', 'canAccess'),
 					body: { method: 'PATCH' },
 				});
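Both action branches above reuse the caller's own permissions: before any upload state is touched, the action URL is rewritten into a `canAccess` probe and POSTed with `{ method: 'PATCH' }`. A minimal sketch of that rewrite, assuming a hypothetical `example/note(42)` target (the resource name and id are illustrative, not from this diff):

// Hypothetical incoming action URL; only the beginUpload/canAccess names come
// from the hooks above, the rest is illustrative.
const requestUrl = '/example/note(42)/beginUpload';
const canAccessUrl = requestUrl.substring(1).replace('beginUpload', 'canAccess');
// canAccessUrl === 'example/note(42)/canAccess'
// If the caller could not PATCH the record, the canAccess POST rejects and the
// action never reaches the storage handler.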
@@ -89,7 +90,22 @@ export const multipartUploadHooks = (
 					filename: commitPayload.metadata.filename,
 					multipartUploadChecksums: commitPayload.additionalCommitInfo,
 				});
-
+
+				await api.webresource.patch({
+					resource: 'multipart_upload',
+					body: {
+						status: 'completed',
+					},
+					options: {
+						$filter: {
+							uuid: commitPayload.key,
+						},
+					},
+					passthrough: {
+						req: permissions.root,
+						tx: tx,
+					},
+				});

 				request.method = 'PATCH';
 				request.values = {
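After patching the target record, the hook flips the tracked row to `completed` as root, scoped to the same transaction so the status change is rolled back if the surrounding request fails. A small sketch of the status values this implies; the `pending` to `cancelled` transition is an assumption drawn from the `Definition` in the new webresource.sbvr further below, not something this hunk exercises:

// Status values come from the webresource.sbvr Definition added in this diff;
// which transitions are enforced (if any) is not shown here.
type MultipartUploadStatus = 'pending' | 'completed' | 'cancelled';

const assumedTransitions: Record<MultipartUploadStatus, MultipartUploadStatus[]> = {
	pending: ['completed', 'cancelled'], // commitUpload drives pending -> completed
	completed: [],
	cancelled: [],
};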
@@ -101,13 +117,15 @@ export const multipartUploadHooks = (
 				request.custom.commitUploadPayload = webresource;
 			}
 		},
-		PRERESPOND: async ({ request, response, tx }) => {
+		PRERESPOND: async ({ req, request, response, tx }) => {
 			if (request.custom.isAction === 'beginUpload') {
 				await tx.rollback();
+
 				response.statusCode = 200;
 				response.body = await beginUpload(
 					webResourceHandler,
-					request
+					request,
+					req.user?.actor,
 				);
 			} else if (request.custom.isAction === 'commitUpload') {
 				response.body = await webResourceHandler.onPreRespond(
@@ -118,25 +136,47 @@ export const multipartUploadHooks = (
 	};
 };

-// Really any storage will do.
-// I just honestly believe redis better suits this user case but I am fine if we agree on storing on DB
-// This is not a cache, it is a persistent KV storage.
-const someKvStoreDbOrRedisIdc: Record<string, PendingUpload> = {};
-
 export const beginUpload = async (
 	webResourceHandler: WebResourceHandler,
-
+	odataRequest: ODataRequest,
+	actorId?: number,
 ): Promise<BeginUploadResponse> => {
+	const payload = odataRequest.values as { [x: string]: BeginUploadPayload };
 	const fieldName = Object.keys(payload)[0];
 	const metadata = payload[fieldName];

 	const { fileKey, uploadId, uploadUrls } =
 		await webResourceHandler.beginUpload(fieldName, metadata);
-	const
-
-
+	const uuid = randomUUID();
+
+	try {
+		await api.webresource.post({
+			resource: 'multipart_upload',
+			body: {
+				uuid,
+				resource_name: odataRequest.resourceName,
+				field_name: fieldName,
+				resource_id: odataRequest.affectedIds?.[0],
+				upload_id: uploadId,
+				file_key: fileKey,
+				status: 'pending',
+				filename: metadata.filename,
+				content_type: metadata.content_type,
+				size: metadata.size,
+				chunk_size: metadata.chunk_size,
+				expiry_date: Date.now() + 7 * 24 * 60 * 60 * 1000, // 7 days in ms
+				is_created_by__actor: actorId,
+			},
+			passthrough: {
+				req: permissions.root,
+			},
+		});
+	} catch (err) {
+		console.error('failed to start multipart upload', err);
+		throw new errors.BadRequestError('Failed to start multipart upload');
+	}

-	return { [fieldName]: { key, uploadUrls } };
+	return { [fieldName]: { key: uuid, uploadUrls } };
 };

 const parseBeginUpload = (request: ODataRequest) => {
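Putting the two actions together, the intended client flow looks roughly like the sketch below. The endpoint prefix, the `note`/`document` names, the shape of `uploadUrls` (assumed here to be plain URL strings, one per chunk), and the commit body are illustrative assumptions; only the action names, the begin-upload payload fields, and the `{ key, uploadUrls }` response shape come from this diff.

// Hypothetical end-to-end usage of beginUpload/commitUpload (sketch only).
const CHUNK = 16 * 1024 * 1024; // must match the chunk_size sent to beginUpload

async function uploadLargeFile(baseUrl: string, file: Blob) {
	// 1. Start the multipart upload for the web-resource field.
	const beginRes = await fetch(`${baseUrl}/note(42)/beginUpload`, {
		method: 'POST',
		headers: { 'Content-Type': 'application/json' },
		body: JSON.stringify({
			document: {
				filename: 'backup.tar.gz',
				content_type: 'application/gzip',
				size: file.size,
				chunk_size: CHUNK,
			},
		}),
	});
	// The server responds with { [fieldName]: { key, uploadUrls } }, where key
	// is the uuid of the multipart_upload row created above.
	const { document } = await beginRes.json();
	const { key, uploadUrls } = document;

	// 2. Upload each chunk to its pre-signed URL (handler-specific).
	await Promise.all(
		uploadUrls.map((url: string, i: number) =>
			fetch(url, { method: 'PUT', body: file.slice(i * CHUNK, (i + 1) * CHUNK) }),
		),
	);

	// 3. Commit by key; the body shape is assumed from the fields that
	// parseCommitUpload returns ({ key, additionalCommitInfo, ... }).
	const commitRes = await fetch(`${baseUrl}/note(42)/commitUpload`, {
		method: 'POST',
		headers: { 'Content-Type': 'application/json' },
		body: JSON.stringify({ key, additionalCommitInfo: {} }),
	});
	return commitRes.json();
}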
@@ -196,7 +236,7 @@ const parseBeginUploadPayload = (
 	return { filename, content_type, size, chunk_size };
 };

-const parseCommitUpload = (request: ODataRequest) => {
+const parseCommitUpload = async (request: ODataRequest) => {
 	if (request.odataQuery.key == null) {
 		throw new errors.BadRequestError();
 	}
@@ -206,9 +246,41 @@ const parseCommitUpload = (request: ODataRequest) => {
 		throw new errors.BadRequestError('Invalid key type');
 	}

-
-
+	// TODO: actor permissions
+	const [multipartUpload] = (await api.webresource.get({
+		resource: 'multipart_upload',
+		options: {
+			$select: ['id', 'file_key', 'upload_id', 'field_name', 'filename'],
+			$filter: {
+				uuid: key,
+				status: 'pending',
+				expiry_date: { $gt: { $now: {} } },
+			},
+		},
+		passthrough: {
+			req: permissions.root,
+			tx: request.tx,
+		},
+	})) as [
+		{
+			id: number;
+			file_key: string;
+			upload_id: string;
+			field_name: string;
+			filename: string;
+		}?,
+	];
+
+	if (multipartUpload == null) {
+		throw new errors.BadRequestError(`Invalid upload for key ${key}`);
 	}

-
+	const metadata = {
+		fileKey: multipartUpload.file_key,
+		uploadId: multipartUpload.upload_id,
+		filename: multipartUpload.filename,
+		fieldName: multipartUpload.field_name,
+	};
+
+	return { key, additionalCommitInfo, metadata };
 };
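The lookup above only honours a row that matches the key, is still `pending`, and has not expired (the `$gt` / `$now` comparison runs in the database). For illustration only, the same predicate written out in TypeScript:

// Mirror of the $filter used in parseCommitUpload; the real check happens in
// the database, this is just the condition restated.
interface TrackedUpload {
	uuid: string;
	status: 'pending' | 'completed' | 'cancelled';
	expiry_date: Date;
}

const isCommittable = (upload: TrackedUpload, key: string, now = new Date()) =>
	upload.uuid === key && upload.status === 'pending' && upload.expiry_date > now;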
package/src/webresource-handler/webresource.sbvr

@@ -0,0 +1,62 @@
+Vocabulary: Auth
+
+Term: actor
+Term: expiry date
+	Concept Type: Date Time (Type)
+
+Vocabulary: webresource
+
+Term: uuid
+	Concept Type: Short Text (Type)
+Term: resource name
+	Concept Type: Short Text (Type)
+Term: field name
+	Concept Type: Short Text (Type)
+Term: resource id
+	Concept Type: Integer (Type)
+Term: upload id
+	Concept Type: Short Text (Type)
+Term: file key
+	Concept Type: Short Text (Type)
+Term: status
+	Concept Type: Short Text (Type)
+Term: filename
+	Concept Type: Short Text (Type)
+Term: content type
+	Concept Type: Short Text (Type)
+Term: size
+	Concept Type: Integer (Type)
+Term: chunk size
+	Concept Type: Integer (Type)
+Term: valid until date
+	Concept Type: Date Time (Type)
+
+Term: multipart upload
+Fact type: multipart upload has uuid
+	Necessity: each multipart upload has exactly one uuid
+	Necessity: each uuid is of exactly one multipart upload
+Fact type: multipart upload has resource name
+	Necessity: each multipart upload has exactly one resource name
+Fact type: multipart upload has field name
+	Necessity: each multipart upload has exactly one field name
+Fact type: multipart upload has resource id
+	Necessity: each multipart upload has exactly one resource id
+Fact type: multipart upload has upload id
+	Necessity: each multipart upload has exactly one upload id
+Fact type: multipart upload has file key
+	Necessity: each multipart upload has exactly one file key
+Fact type: multipart upload has status
+	Necessity: each multipart upload has exactly one status
+	Definition: "pending" or "completed" or "cancelled"
+Fact type: multipart upload has filename
+	Necessity: each multipart upload has exactly one filename
+Fact type: multipart upload has content type
+	Necessity: each multipart upload has exactly one content type
+Fact type: multipart upload has size
+	Necessity: each multipart upload has exactly one size
+Fact type: multipart upload has chunk size
+	Necessity: each multipart upload has exactly one chunk size
+Fact type: multipart upload has expiry date (Auth)
+	Necessity: each multipart upload has exactly one expiry date (Auth)
+Fact type: multipart upload is created by actor (Auth)
+	Necessity: each multipart upload is created by at most one actor (Auth)