@editframe/api 0.11.0-beta.9 → 0.12.0-beta.10
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/ProgressIterator.d.ts +25 -0
- package/dist/ProgressIterator.test.d.ts +1 -0
- package/dist/StreamEventSource.d.ts +50 -0
- package/dist/StreamEventSource.test.d.ts +1 -0
- package/dist/{CHUNK_SIZE_BYTES.js → api/src/CHUNK_SIZE_BYTES.js} +1 -1
- package/dist/api/src/ProgressIterator.js +99 -0
- package/dist/api/src/StreamEventSource.js +130 -0
- package/dist/api/src/client.js +49 -0
- package/dist/api/src/index.js +49 -0
- package/dist/{resources → api/src/resources}/caption-file.js +22 -2
- package/dist/{resources → api/src/resources}/image-file.js +24 -4
- package/dist/api/src/resources/isobmff-file.js +102 -0
- package/dist/{resources → api/src/resources}/isobmff-track.js +44 -1
- package/dist/api/src/resources/process-isobmff.js +22 -0
- package/dist/{resources → api/src/resources}/renders.js +21 -2
- package/dist/api/src/resources/transcriptions.js +45 -0
- package/dist/api/src/resources/unprocessed-file.js +114 -0
- package/dist/api/src/streamChunker.js +28 -0
- package/dist/{uploadChunks.js → api/src/uploadChunks.js} +1 -4
- package/dist/cli/src/utils/createReadableStreamFromReadable.js +82 -0
- package/dist/client.d.ts +6 -3
- package/dist/index.d.ts +7 -5
- package/dist/readableFromBuffers.d.ts +1 -2
- package/dist/resources/caption-file.d.ts +7 -3
- package/dist/resources/image-file.d.ts +15 -5
- package/dist/resources/isobmff-file.d.ts +29 -3
- package/dist/resources/isobmff-track.d.ts +13 -15
- package/dist/resources/process-isobmff.d.ts +12 -0
- package/dist/resources/process-isobmff.test.d.ts +1 -0
- package/dist/resources/renders.d.ts +10 -5
- package/dist/resources/transcriptions.d.ts +24 -0
- package/dist/resources/transcriptions.test.d.ts +1 -0
- package/dist/resources/unprocessed-file.d.ts +15 -53
- package/dist/streamChunker.d.ts +1 -2
- package/dist/uploadChunks.d.ts +1 -2
- package/package.json +3 -2
- package/src/resources/caption-file.test.ts +57 -6
- package/src/resources/caption-file.ts +34 -5
- package/src/resources/image-file.test.ts +59 -11
- package/src/resources/image-file.ts +75 -9
- package/src/resources/isobmff-file.test.ts +57 -6
- package/src/resources/isobmff-file.ts +96 -5
- package/src/resources/isobmff-track.test.ts +3 -3
- package/src/resources/isobmff-track.ts +50 -5
- package/src/resources/process-isobmff.test.ts +62 -0
- package/src/resources/process-isobmff.ts +33 -0
- package/src/resources/renders.test.ts +51 -6
- package/src/resources/renders.ts +34 -5
- package/src/resources/transcriptions.test.ts +49 -0
- package/src/resources/transcriptions.ts +64 -0
- package/src/resources/unprocessed-file.test.ts +24 -437
- package/src/resources/unprocessed-file.ts +90 -160
- package/dist/client.js +0 -35
- package/dist/index.js +0 -36
- package/dist/resources/isobmff-file.js +0 -47
- package/dist/resources/unprocessed-file.js +0 -180
- package/dist/streamChunker.js +0 -17
- package/dist/{resources → api/src/resources}/url-token.js +0 -0
package/src/resources/unprocessed-file.ts

@@ -1,71 +1,70 @@
-import { createReadStream } from "node:fs";
-import { stat } from "node:fs/promises";
-import { basename } from "node:path";
-import { Readable } from "node:stream";
-
 import debug from "debug";
 import { z } from "zod";
 
-import { md5Buffer, md5FilePath } from "@editframe/assets";
-
 import type { Client } from "../client.ts";
-import {
-  type IteratorWithPromise,
-  type UploadChunkEvent,
-  uploadChunks,
-} from "../uploadChunks.ts";
+import { uploadChunks } from "../uploadChunks.ts";
 
 const log = debug("ef:api:unprocessed-file");
 
-const FileProcessor = z.union([
-  z.literal("isobmff"),
-  z.literal("image"),
-  z.literal("captions"),
-  z.string(),
-]);
-
-const FileProcessors = z.array(FileProcessor).refine(
-  (value) => {
-    return new Set(value).size === value.length;
-  },
-  {
-    message: "Processors list must not include duplicates",
-  },
-);
-
 const MAX_FILE_SIZE = 1024 * 1024 * 1024; // 1GiB
 
 export const CreateUnprocessedFilePayload = z.object({
   md5: z.string(),
   filename: z.string(),
-  processes: FileProcessors.optional(),
   byte_size: z.number().int().max(MAX_FILE_SIZE),
 });
 
-export const UpdateUnprocessedFilePayload = z.object({
-  processes: FileProcessors.optional(),
-});
+export const UpdateUnprocessedFilePayload = z.object({});
 
-export interface UnprocessedFile {
+interface UnprocessedFile {
   byte_size: number;
   next_byte: number;
   complete: boolean;
   id: string;
   md5: string;
-  processes: z.infer<typeof FileProcessors> & string[];
-  asset_id: string;
 }
 
-
-
-
-
+type UnprocessedFileUploadDetails = Pick<UnprocessedFile, "id" | "byte_size">;
+
+export interface CreateUnprocessedFileResult extends UnprocessedFile {}
+
+export interface LookupUnprocessedFileByMd5Result extends UnprocessedFile {}
+
+export interface UpdateUnprocessedFileResult extends UnprocessedFile {}
+
+export interface ProcessIsobmffFileResult {
   id: string;
-  md5: string;
-  processes: z.infer<typeof FileProcessors>;
-  asset_id: string;
 }
 
+export const createUnprocessedFileFromPath = async (
+  client: Client,
+  path: string,
+) => {
+  const [{ stat }, { basename }, { md5FilePath }] = await Promise.all([
+    import("node:fs/promises"),
+    import("node:path"),
+    import("@editframe/assets"),
+  ]).catch((error) => {
+    console.error("Error importing modules", error);
+    console.error(
+      "This is likely because you are bundling for the browser. createUnprocessedFileFromPath can only be run in environments that support importing `node:path`",
+    );
+    throw error;
+  });
+
+  const fileInfo = await stat(path);
+
+  const byte_size = fileInfo.size;
+
+  const md5 = await md5FilePath(path);
+
+  return createUnprocessedFile(client, {
+    md5,
+    filename: basename(path),
+    byte_size,
+  });
+};
+
 export const createUnprocessedFile = async (
   client: Client,
   payload: z.infer<typeof CreateUnprocessedFilePayload>,
@@ -96,146 +95,77 @@ export const createUnprocessedFile = async (
   );
 };
 
-export const updateUnprocessedFile = async (
+export const uploadUnprocessedFile = async (
   client: Client,
-  fileId: string,
-  payload: z.infer<typeof UpdateUnprocessedFilePayload>,
+  uploadDetails: UnprocessedFileUploadDetails,
+  path: string,
 ) => {
-  log("Updating unprocessed file", fileId, payload);
-  UpdateUnprocessedFilePayload.parse(payload);
-  const response = await client.authenticatedFetch(
-    `/api/v1/unprocessed_files/${fileId}`,
-    {
-      method: "POST",
-      body: JSON.stringify(payload),
-    },
-  );
-
-  log("Unprocessed file updated", response);
+  const { createReadStream } = await import("node:fs");
+  const readStream = createReadStream(path);
 
-  if (response.ok) {
-    return await response.json();
-  }
+  const { createReadableStreamFromReadable } = await import(
+    "packages/cli/src/utils/createReadableStreamFromReadable.ts"
+  );
 
-  throw new Error(
-    `Failed to update unprocessed file ${response.status} ${response.statusText}`,
+  return uploadUnprocessedReadableStream(
+    client,
+    uploadDetails,
+    createReadableStreamFromReadable(readStream),
   );
 };
 
-export const uploadUnprocessedFile = (
+export const uploadUnprocessedReadableStream = (
   client: Client,
-  fileId: string,
-  fileStream: Readable,
-  fileSize: number,
+  uploadDetails: UnprocessedFileUploadDetails,
+  fileStream: ReadableStream,
 ) => {
-  log("Uploading unprocessed file", fileId);
+  log("Uploading unprocessed file", uploadDetails.id);
 
   return uploadChunks(client, {
-    url: `/api/v1/unprocessed_files/${fileId}/upload`,
-    fileSize,
+    url: `/api/v1/unprocessed_files/${uploadDetails.id}/upload`,
+    fileSize: uploadDetails.byte_size,
     fileStream,
     maxSize: MAX_FILE_SIZE,
   });
 };
 
-const processResource = (
+export const lookupUnprocessedFileByMd5 = async (
   client: Client,
-  filename: string,
   md5: string,
-  byteSize: number,
-  processor: z.infer<typeof FileProcessor>,
-  doUpload: (id: string) => IteratorWithPromise<UploadChunkEvent>,
-) => {
-  log("Processing", { filename, md5, byteSize, processor });
+): Promise<LookupUnprocessedFileByMd5Result | null> => {
+  const response = await client.authenticatedFetch(
+    `/api/v1/unprocessed_files/md5/${md5}`,
+    {
+      method: "GET",
+    },
+  );
 
-  const createFilePromise = createUnprocessedFile(client, {
-    md5,
-    processes: [],
-    filename,
-    byte_size: byteSize,
-  });
+  if (response.ok) {
+    return (await response.json()) as LookupUnprocessedFileByMd5Result;
+  }
 
-  return {
-    async *progress() {
-      const unprocessedFile = await createFilePromise;
-      if (unprocessedFile.complete) {
-        yield { type: "progress", progress: 0 };
-        yield { type: "progress", progress: 1 };
-      }
-      yield* doUpload(unprocessedFile.id);
-    },
-    file: async () => {
-      const unprocessedFile = await createFilePromise;
-      if (unprocessedFile.complete) {
-        return unprocessedFile;
-      }
-      await doUpload(unprocessedFile.id).whenUploaded();
-      const fileInformation = await updateUnprocessedFile(
-        client,
-        unprocessedFile.id,
-        {
-          processes: [processor],
-        },
-      );
-      log("File processed", fileInformation);
-      return fileInformation;
-    },
-  };
-};
+  if (response.status === 404) {
+    return null;
+  }
 
-const buildBufferProcessor = (processor: z.infer<typeof FileProcessor>) => {
-  return (client: Client, buffer: Buffer, filename = "buffer") => {
-    log(`Processing file buffer: ${processor}`, filename);
-    const md5 = md5Buffer(buffer);
-
-    return processResource(
-      client,
-      filename,
-      md5,
-      buffer.byteLength,
-      processor,
-      (id: string) => {
-        const readStream = new Readable({
-          read() {
-            readStream.push(buffer);
-            readStream.push(null);
-          },
-        });
-
-        return uploadUnprocessedFile(client, id, readStream, buffer.byteLength);
-      },
-    );
-  };
+  throw new Error(
+    `Failed to lookup unprocessed file by md5 ${md5} ${response.status} ${response.statusText}`,
+  );
 };
 
-const buildFileProcessor = (processor: z.infer<typeof FileProcessor>) => {
-  return (client: Client, filePath: string) => {
-    const processPromise = async () => {
-      const [md5, { size: byteSize }] = await Promise.all([
-        md5FilePath(filePath),
-        stat(filePath),
-      ]);
-      return processResource(
-        client,
-        basename(filePath),
-        md5,
-        byteSize,
-        processor,
-        (id: string) => {
-          const readStream = createReadStream(filePath);
-          return uploadUnprocessedFile(client, id, readStream, byteSize);
-        },
-      );
-    };
-    return {
-      progress: async () => (await processPromise()).progress(),
-      file: async () => (await processPromise()).file(),
-    };
-  };
-};
+export const processIsobmffFile = async (client: Client, id: string) => {
+  const response = await client.authenticatedFetch(
+    `/api/v1/unprocessed_files/${id}/isobmff`,
+    {
+      method: "POST",
+    },
+  );
 
-export const processAVFileBuffer = buildBufferProcessor("isobmff");
-export const processAVFile = buildFileProcessor("isobmff");
+  if (response.ok) {
+    return (await response.json()) as ProcessIsobmffFileResult;
+  }
 
-export const processImageFileBuffer = buildBufferProcessor("image");
-export const processImageFile = buildFileProcessor("image");
+  throw new Error(
+    `Failed to process isobmff file ${id} ${response.status} ${response.statusText}`,
+  );
+};
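Taken together, the rewrite above replaces the implicit processAVFile/processImageFile helpers with an explicit lookup, create, upload, and process pipeline. A minimal sketch of the new flow, assuming the functions shown in this diff are re-exported from the package root (the new dist/api/src/index.js is not expanded here) and that the chunked upload returned by uploadChunks can be awaited to completion:

import {
  Client,
  createUnprocessedFileFromPath,
  lookupUnprocessedFileByMd5,
  processIsobmffFile,
  uploadUnprocessedFile,
} from "@editframe/api";
import { md5FilePath } from "@editframe/assets";

const client = new Client("ef_secret_key"); // token format: ef_{secret}_{key}
const path = "./input.mp4"; // hypothetical input file

// Deduplicate by md5 before creating a new server-side record.
const existing = await lookupUnprocessedFileByMd5(client, await md5FilePath(path));
const file = existing ?? (await createUnprocessedFileFromPath(client, path));

// Upload the bytes in chunks unless the server already has all of them.
if (!file.complete) {
  // Assumption: the uploadChunks result is awaitable; the new API may also
  // expose progress events, which this sketch ignores.
  await uploadUnprocessedFile(client, { id: file.id, byte_size: file.byte_size }, path);
}

// Queue server-side ISOBMFF processing for the uploaded file.
const { id } = await processIsobmffFile(client, file.id);
console.log("isobmff file id:", id);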
package/dist/client.js
DELETED
@@ -1,35 +0,0 @@
-import debug from "debug";
-import fetch from "node-fetch";
-const log = debug("ef:api:client");
-class Client {
-  constructor(token, efHost = "https://editframe.dev") {
-    this.authenticatedFetch = async (path, init = {}) => {
-      init.headers ||= {};
-      const url = new URL(path, this.#efHost);
-      log(
-        "Authenticated fetch",
-        { url: url.toString(), init },
-        "(Token will be added as Bearer token)"
-      );
-      Object.assign(init.headers, {
-        Authorization: `Bearer ${this.#token}`,
-        "Content-Type": "application/json"
-      });
-      const response = await fetch(url, init);
-      log("Authenticated fetch response", response.status, response.statusText);
-      return response;
-    };
-    log("Creating client with efHost", { efHost, tokenIsSet: !!token });
-    this.#token = token;
-    this.#efHost = efHost;
-    const { apiKey, apiSecret } = token.match(/^(?<apiSecret>ef_[^_]+)_(?<apiKey>.+)$/)?.groups ?? {};
-    if (!apiKey || !apiSecret) {
-      throw new Error("Invalid token format. Must look like: ef_{}_{}");
-    }
-  }
-  #token;
-  #efHost;
-}
-export {
-  Client
-};
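The deleted bundle above also documents the client's token handling, which per the file list now ships from dist/api/src/client.js. A hedged construction sketch, assuming the constructor signature is unchanged in its new location:

import { Client } from "@editframe/api";

// The constructor validates tokens against
// /^(?<apiSecret>ef_[^_]+)_(?<apiKey>.+)$/ and throws on mismatch,
// so the secret segment must not contain underscores.
const client = new Client("ef_mysecret_mykey");

// efHost defaults to "https://editframe.dev"; the second argument overrides it.
const selfHosted = new Client("ef_mysecret_mykey", "https://editframe.example.com"); // hypothetical host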
package/dist/index.js
DELETED
@@ -1,36 +0,0 @@
-import { CreateCaptionFilePayload, createCaptionFile, uploadCaptionFile } from "./resources/caption-file.js";
-import { CreateImageFilePayload, createImageFile, uploadImageFile } from "./resources/image-file.js";
-import { CreateISOBMFFFilePayload, createISOBMFFFile, uploadFragmentIndex } from "./resources/isobmff-file.js";
-import { CreateISOBMFFTrackPayload, createISOBMFFTrack, uploadISOBMFFTrack } from "./resources/isobmff-track.js";
-import { CreateRenderPayload, createRender, uploadRender } from "./resources/renders.js";
-import { createURLToken } from "./resources/url-token.js";
-import { CreateUnprocessedFilePayload, UpdateUnprocessedFilePayload, createUnprocessedFile, processAVFile, processAVFileBuffer, processImageFile, processImageFileBuffer, updateUnprocessedFile, uploadUnprocessedFile } from "./resources/unprocessed-file.js";
-import { Client } from "./client.js";
-export {
-  Client,
-  CreateCaptionFilePayload,
-  CreateISOBMFFFilePayload,
-  CreateISOBMFFTrackPayload,
-  CreateImageFilePayload,
-  CreateRenderPayload,
-  CreateUnprocessedFilePayload,
-  UpdateUnprocessedFilePayload,
-  createCaptionFile,
-  createISOBMFFFile,
-  createISOBMFFTrack,
-  createImageFile,
-  createRender,
-  createURLToken,
-  createUnprocessedFile,
-  processAVFile,
-  processAVFileBuffer,
-  processImageFile,
-  processImageFileBuffer,
-  updateUnprocessedFile,
-  uploadCaptionFile,
-  uploadFragmentIndex,
-  uploadISOBMFFTrack,
-  uploadImageFile,
-  uploadRender,
-  uploadUnprocessedFile
-};
package/dist/resources/isobmff-file.js
DELETED

@@ -1,47 +0,0 @@
-import { z } from "zod";
-import debug from "debug";
-const log = debug("ef:api:isobmff-file");
-const FILE_SIZE_LIMIT = 1024 * 1024 * 2;
-const CreateISOBMFFFilePayload = z.object({
-  md5: z.string(),
-  filename: z.string()
-});
-const createISOBMFFFile = async (client, payload) => {
-  log("Creating isobmff file", payload);
-  const response = await client.authenticatedFetch("/api/v1/isobmff_files", {
-    method: "POST",
-    body: JSON.stringify(payload)
-  });
-  log("ISOBMFF file created", response);
-  if (response.ok) {
-    return await response.json();
-  }
-  throw new Error(
-    `Failed to create isobmff file ${response.status} ${response.statusText}`
-  );
-};
-const uploadFragmentIndex = async (client, fileId, fileStream, fileSize) => {
-  log("Uploading fragment index", fileId);
-  if (fileSize > FILE_SIZE_LIMIT) {
-    throw new Error(`File size exceeds limit of ${FILE_SIZE_LIMIT} bytes`);
-  }
-  const response = await client.authenticatedFetch(
-    `/api/v1/isobmff_files/${fileId}/index/upload`,
-    {
-      method: "POST",
-      body: fileStream
-    }
-  );
-  log("Fragment index uploaded", response);
-  if (response.ok) {
-    return response.json();
-  }
-  throw new Error(
-    `Failed to create fragment index ${response.status} ${response.statusText}`
-  );
-};
-export {
-  CreateISOBMFFFilePayload,
-  createISOBMFFFile,
-  uploadFragmentIndex
-};
package/dist/resources/unprocessed-file.js
DELETED

@@ -1,180 +0,0 @@
-import { createReadStream } from "node:fs";
-import { stat } from "node:fs/promises";
-import { basename } from "node:path";
-import { Readable } from "node:stream";
-import debug from "debug";
-import { z } from "zod";
-import { md5Buffer, md5FilePath } from "@editframe/assets";
-import { uploadChunks } from "../uploadChunks.js";
-const log = debug("ef:api:unprocessed-file");
-const FileProcessor = z.union([
-  z.literal("isobmff"),
-  z.literal("image"),
-  z.literal("captions"),
-  z.string()
-]);
-const FileProcessors = z.array(FileProcessor).refine(
-  (value) => {
-    return new Set(value).size === value.length;
-  },
-  {
-    message: "Processors list must not include duplicates"
-  }
-);
-const MAX_FILE_SIZE = 1024 * 1024 * 1024;
-const CreateUnprocessedFilePayload = z.object({
-  md5: z.string(),
-  filename: z.string(),
-  processes: FileProcessors.optional(),
-  byte_size: z.number().int().max(MAX_FILE_SIZE)
-});
-const UpdateUnprocessedFilePayload = z.object({
-  processes: FileProcessors.optional()
-});
-const createUnprocessedFile = async (client, payload) => {
-  log("Creating an unprocessed file", payload);
-  CreateUnprocessedFilePayload.parse(payload);
-  const response = await client.authenticatedFetch(
-    "/api/v1/unprocessed_files",
-    {
-      method: "POST",
-      body: JSON.stringify(payload)
-    }
-  );
-  log(
-    "Unprocessed file created",
-    response.status,
-    response.statusText,
-    response.headers
-  );
-  if (response.ok) {
-    return await response.json();
-  }
-  throw new Error(
-    `Failed to create unprocessed file ${response.status} ${response.statusText}`
-  );
-};
-const updateUnprocessedFile = async (client, fileId, payload) => {
-  log("Updating unprocessed file", fileId, payload);
-  UpdateUnprocessedFilePayload.parse(payload);
-  const response = await client.authenticatedFetch(
-    `/api/v1/unprocessed_files/${fileId}`,
-    {
-      method: "POST",
-      body: JSON.stringify(payload)
-    }
-  );
-  log("Unprocessed file updated", response);
-  if (response.ok) {
-    return await response.json();
-  }
-  throw new Error(
-    `Failed to update unprocessed file ${response.status} ${response.statusText}`
-  );
-};
-const uploadUnprocessedFile = (client, fileId, fileStream, fileSize) => {
-  log("Uploading unprocessed file", fileId);
-  return uploadChunks(client, {
-    url: `/api/v1/unprocessed_files/${fileId}/upload`,
-    fileSize,
-    fileStream,
-    maxSize: MAX_FILE_SIZE
-  });
-};
-const processResource = (client, filename, md5, byteSize, processor, doUpload) => {
-  log("Processing", { filename, md5, byteSize, processor });
-  const createFilePromise = createUnprocessedFile(client, {
-    md5,
-    processes: [],
-    filename,
-    byte_size: byteSize
-  });
-  return {
-    async *progress() {
-      const unprocessedFile = await createFilePromise;
-      if (unprocessedFile.complete) {
-        yield { type: "progress", progress: 0 };
-        yield { type: "progress", progress: 1 };
-      }
-      yield* doUpload(unprocessedFile.id);
-    },
-    file: async () => {
-      const unprocessedFile = await createFilePromise;
-      if (unprocessedFile.complete) {
-        return unprocessedFile;
-      }
-      await doUpload(unprocessedFile.id).whenUploaded();
-      const fileInformation = await updateUnprocessedFile(
-        client,
-        unprocessedFile.id,
-        {
-          processes: [processor]
-        }
-      );
-      log("File processed", fileInformation);
-      return fileInformation;
-    }
-  };
-};
-const buildBufferProcessor = (processor) => {
-  return (client, buffer, filename = "buffer") => {
-    log(`Processing file buffer: ${processor}`, filename);
-    const md5 = md5Buffer(buffer);
-    return processResource(
-      client,
-      filename,
-      md5,
-      buffer.byteLength,
-      processor,
-      (id) => {
-        const readStream = new Readable({
-          read() {
-            readStream.push(buffer);
-            readStream.push(null);
-          }
-        });
-        return uploadUnprocessedFile(client, id, readStream, buffer.byteLength);
-      }
-    );
-  };
-};
-const buildFileProcessor = (processor) => {
-  return (client, filePath) => {
-    const processPromise = async () => {
-      const [md5, { size: byteSize }] = await Promise.all([
-        md5FilePath(filePath),
-        stat(filePath)
-      ]);
-      return processResource(
-        client,
-        basename(filePath),
-        md5,
-        byteSize,
-        processor,
-        (id) => {
-          const readStream = createReadStream(filePath);
-          return uploadUnprocessedFile(client, id, readStream, byteSize);
-        }
-      );
-    };
-    return {
-      progress: async () => (await processPromise()).progress(),
-      file: async () => (await processPromise()).file()
-    };
-  };
-};
-const processAVFileBuffer = buildBufferProcessor("isobmff");
-const processAVFile = buildFileProcessor("isobmff");
-const processImageFileBuffer = buildBufferProcessor("image");
-const processImageFile = buildFileProcessor("image");
-export {
-  CreateUnprocessedFilePayload,
-  UpdateUnprocessedFilePayload,
-  createUnprocessedFile,
-  processAVFile,
-  processAVFileBuffer,
-  processImageFile,
-  processImageFileBuffer,
-  updateUnprocessedFile,
-  uploadUnprocessedFile
-};
package/dist/streamChunker.js
DELETED
@@ -1,17 +0,0 @@
-import { CHUNK_SIZE_BYTES } from "./CHUNK_SIZE_BYTES.js";
-async function* streamChunker(readableStream, chunkSize = CHUNK_SIZE_BYTES) {
-  let buffer = Buffer.alloc(0);
-  for await (const chunk of readableStream) {
-    buffer = Buffer.concat([buffer, chunk]);
-    while (buffer.length >= chunkSize) {
-      yield buffer.slice(0, chunkSize);
-      buffer = buffer.slice(chunkSize);
-    }
-  }
-  if (buffer.length > 0) {
-    yield buffer;
-  }
-}
-export {
-  streamChunker
-};
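The chunker above, which the file list shows moving to dist/api/src/streamChunker.js, buffers an incoming byte stream and re-emits fixed-size chunks. A self-contained sketch of that behavior, with the generator copied inline (typed for illustration) since the new import path is not shown in this diff:

import { Readable } from "node:stream";

// Inline copy of the deleted generator above, for illustration only.
async function* streamChunker(source: AsyncIterable<Buffer>, chunkSize: number) {
  let buffer = Buffer.alloc(0);
  for await (const chunk of source) {
    buffer = Buffer.concat([buffer, chunk]);
    while (buffer.length >= chunkSize) {
      yield buffer.slice(0, chunkSize); // emit a full chunk
      buffer = buffer.slice(chunkSize);
    }
  }
  if (buffer.length > 0) {
    yield buffer; // trailing partial chunk
  }
}

// Ten bytes through a 4-byte chunker yield chunks of 4, 4, and 2 bytes.
const source = Readable.from([Buffer.from("abcdefgh"), Buffer.from("ij")]);
for await (const chunk of streamChunker(source, 4)) {
  console.log(chunk.length, chunk.toString());
}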
package/dist/{resources → api/src/resources}/url-token.js
File without changes