@editframe/api 0.11.0-beta.9 → 0.12.0-beta.10
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/ProgressIterator.d.ts +25 -0
- package/dist/ProgressIterator.test.d.ts +1 -0
- package/dist/StreamEventSource.d.ts +50 -0
- package/dist/StreamEventSource.test.d.ts +1 -0
- package/dist/{CHUNK_SIZE_BYTES.js → api/src/CHUNK_SIZE_BYTES.js} +1 -1
- package/dist/api/src/ProgressIterator.js +99 -0
- package/dist/api/src/StreamEventSource.js +130 -0
- package/dist/api/src/client.js +49 -0
- package/dist/api/src/index.js +49 -0
- package/dist/{resources → api/src/resources}/caption-file.js +22 -2
- package/dist/{resources → api/src/resources}/image-file.js +24 -4
- package/dist/api/src/resources/isobmff-file.js +102 -0
- package/dist/{resources → api/src/resources}/isobmff-track.js +44 -1
- package/dist/api/src/resources/process-isobmff.js +22 -0
- package/dist/{resources → api/src/resources}/renders.js +21 -2
- package/dist/api/src/resources/transcriptions.js +45 -0
- package/dist/api/src/resources/unprocessed-file.js +114 -0
- package/dist/api/src/streamChunker.js +28 -0
- package/dist/{uploadChunks.js → api/src/uploadChunks.js} +1 -4
- package/dist/cli/src/utils/createReadableStreamFromReadable.js +82 -0
- package/dist/client.d.ts +6 -3
- package/dist/index.d.ts +7 -5
- package/dist/readableFromBuffers.d.ts +1 -2
- package/dist/resources/caption-file.d.ts +7 -3
- package/dist/resources/image-file.d.ts +15 -5
- package/dist/resources/isobmff-file.d.ts +29 -3
- package/dist/resources/isobmff-track.d.ts +13 -15
- package/dist/resources/process-isobmff.d.ts +12 -0
- package/dist/resources/process-isobmff.test.d.ts +1 -0
- package/dist/resources/renders.d.ts +10 -5
- package/dist/resources/transcriptions.d.ts +24 -0
- package/dist/resources/transcriptions.test.d.ts +1 -0
- package/dist/resources/unprocessed-file.d.ts +15 -53
- package/dist/streamChunker.d.ts +1 -2
- package/dist/uploadChunks.d.ts +1 -2
- package/package.json +3 -2
- package/src/resources/caption-file.test.ts +57 -6
- package/src/resources/caption-file.ts +34 -5
- package/src/resources/image-file.test.ts +59 -11
- package/src/resources/image-file.ts +75 -9
- package/src/resources/isobmff-file.test.ts +57 -6
- package/src/resources/isobmff-file.ts +96 -5
- package/src/resources/isobmff-track.test.ts +3 -3
- package/src/resources/isobmff-track.ts +50 -5
- package/src/resources/process-isobmff.test.ts +62 -0
- package/src/resources/process-isobmff.ts +33 -0
- package/src/resources/renders.test.ts +51 -6
- package/src/resources/renders.ts +34 -5
- package/src/resources/transcriptions.test.ts +49 -0
- package/src/resources/transcriptions.ts +64 -0
- package/src/resources/unprocessed-file.test.ts +24 -437
- package/src/resources/unprocessed-file.ts +90 -160
- package/dist/client.js +0 -35
- package/dist/index.js +0 -36
- package/dist/resources/isobmff-file.js +0 -47
- package/dist/resources/unprocessed-file.js +0 -180
- package/dist/streamChunker.js +0 -17
- /package/dist/{resources → api/src/resources}/url-token.js +0 -0
package/dist/{resources → api/src/resources}/renders.js
@@ -1,5 +1,5 @@
-import { z } from "zod";
 import debug from "debug";
+import { z } from "zod";
 const log = debug("ef:api:renders");
 const FILE_SIZE_LIMIT = 1024 * 1024 * 16;
 const CreateRenderPayload = z.object({
@@ -35,7 +35,8 @@ const uploadRender = async (client, fileId, fileStream, folderSize) => {
     `/api/v1/renders/${fileId}/upload`,
     {
       method: "POST",
-      body: fileStream
+      body: fileStream,
+      duplex: "half"
     }
   );
   if (response.ok) {
@@ -45,8 +46,26 @@ const uploadRender = async (client, fileId, fileStream, folderSize) => {
     `Failed to upload render ${response.status} ${response.statusText}`
   );
 };
+const lookupRenderByMd5 = async (client, md5) => {
+  const response = await client.authenticatedFetch(
+    `/api/v1/renders/md5/${md5}`,
+    {
+      method: "GET"
+    }
+  );
+  if (response.ok) {
+    return await response.json();
+  }
+  if (response.status === 404) {
+    return null;
+  }
+  throw new Error(
+    `Failed to lookup render by md5 ${md5} ${response.status} ${response.statusText}`
+  );
+};
 export {
   CreateRenderPayload,
   createRender,
+  lookupRenderByMd5,
   uploadRender
 };
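A rough sketch of how the new md5 lookup pairs with the existing create and upload helpers. The ensureRenderUploaded wrapper and the idea of skipping a re-upload on an md5 hit are illustrative assumptions, not part of the package:

import { Client, CreateRenderPayload, createRender, lookupRenderByMd5, uploadRender } from "@editframe/api";
import { z } from "zod";

// Hypothetical helper: reuse a render that already exists server-side for this md5,
// otherwise create it and stream the upload (which now sets duplex: "half").
async function ensureRenderUploaded(
  client: Client,
  payload: z.infer<typeof CreateRenderPayload>,
  fileStream: ReadableStream,
  folderSize: number,
) {
  const existing = await lookupRenderByMd5(client, payload.md5);
  if (existing) return existing;
  const created = await createRender(client, payload);
  await uploadRender(client, created.id, fileStream, folderSize);
  return created;
}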
package/dist/api/src/resources/transcriptions.js
@@ -0,0 +1,45 @@
+import debug from "debug";
+import { z } from "zod";
+import { CompletionIterator } from "../ProgressIterator.js";
+const log = debug("ef:api:transcriptions");
+const CreateTranscriptionPayload = z.object({
+  file_id: z.string(),
+  track_id: z.number().int()
+});
+const createTranscription = async (client, payload) => {
+  log("Creating transcription", payload);
+  const response = await client.authenticatedFetch("/api/v1/transcriptions", {
+    method: "POST",
+    body: JSON.stringify(payload)
+  });
+  log("Transcription created", response);
+  if (response.ok) {
+    return await response.json();
+  }
+  throw new Error(
+    `Failed to create transcription ${response.status} ${response.statusText}`
+  );
+};
+const getTranscriptionProgress = async (client, id) => {
+  const eventSource = await client.authenticatedEventSource(
+    `/api/v1/transcriptions/${id}/progress`
+  );
+  return new CompletionIterator(eventSource);
+};
+const getTranscriptionInfo = async (client, id) => {
+  const response = await client.authenticatedFetch(
+    `/api/v1/transcriptions/${id}`
+  );
+  if (response.ok) {
+    return await response.json();
+  }
+  throw new Error(
+    `Failed to get transcription info ${response.status} ${response.statusText}`
+  );
+};
+export {
+  CreateTranscriptionPayload,
+  createTranscription,
+  getTranscriptionInfo,
+  getTranscriptionProgress
+};
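A hedged sketch of driving the new transcription endpoints end to end. It assumes the create response carries an id and that the CompletionIterator returned by getTranscriptionProgress is async-iterable until the job settles; neither detail is spelled out in this diff:

import { Client, createTranscription, getTranscriptionInfo, getTranscriptionProgress } from "@editframe/api";

async function transcribeTrack(client: Client, fileId: string, trackId: number) {
  // Kick off the job for one track of an ISOBMFF file.
  const transcription = await createTranscription(client, { file_id: fileId, track_id: trackId });
  // Follow the server-sent progress events until the iterator completes (assumed behaviour).
  const progress = await getTranscriptionProgress(client, transcription.id);
  for await (const event of progress) {
    console.log("transcription progress", event);
  }
  // Fetch the finished transcription record.
  return getTranscriptionInfo(client, transcription.id);
}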
package/dist/api/src/resources/unprocessed-file.js
@@ -0,0 +1,114 @@
+import debug from "debug";
+import { z } from "zod";
+import { uploadChunks } from "../uploadChunks.js";
+const log = debug("ef:api:unprocessed-file");
+const MAX_FILE_SIZE = 1024 * 1024 * 1024;
+const CreateUnprocessedFilePayload = z.object({
+  md5: z.string(),
+  filename: z.string(),
+  byte_size: z.number().int().max(MAX_FILE_SIZE)
+});
+z.object({});
+const createUnprocessedFileFromPath = async (client, path) => {
+  const [{ stat }, { basename }, { md5FilePath }] = await Promise.all([
+    import("node:fs/promises"),
+    import("node:path"),
+    import("@editframe/assets")
+  ]).catch((error) => {
+    console.error("Error importing modules", error);
+    console.error(
+      "This is likely because you are bundling for the browser. createUnprocessedFileFromPath can only be run in environments that support importing `node:path`"
+    );
+    throw error;
+  });
+  const fileInfo = await stat(path);
+  const byte_size = fileInfo.size;
+  const md5 = await md5FilePath(path);
+  return createUnprocessedFile(client, {
+    md5,
+    filename: basename(path),
+    byte_size
+  });
+};
+const createUnprocessedFile = async (client, payload) => {
+  log("Creating an unprocessed file", payload);
+  CreateUnprocessedFilePayload.parse(payload);
+  const response = await client.authenticatedFetch(
+    "/api/v1/unprocessed_files",
+    {
+      method: "POST",
+      body: JSON.stringify(payload)
+    }
+  );
+  log(
+    "Unprocessed file created",
+    response.status,
+    response.statusText,
+    response.headers
+  );
+  if (response.ok) {
+    return await response.json();
+  }
+  throw new Error(
+    `Failed to create unprocessed file ${response.status} ${response.statusText}`
+  );
+};
+const uploadUnprocessedFile = async (client, uploadDetails, path) => {
+  const { createReadStream } = await import("node:fs");
+  const readStream = createReadStream(path);
+  const { createReadableStreamFromReadable } = await import("../../../cli/src/utils/createReadableStreamFromReadable.js");
+  return uploadUnprocessedReadableStream(
+    client,
+    uploadDetails,
+    createReadableStreamFromReadable(readStream)
+  );
+};
+const uploadUnprocessedReadableStream = (client, uploadDetails, fileStream) => {
+  log("Uploading unprocessed file", uploadDetails.id);
+  return uploadChunks(client, {
+    url: `/api/v1/unprocessed_files/${uploadDetails.id}/upload`,
+    fileSize: uploadDetails.byte_size,
+    fileStream,
+    maxSize: MAX_FILE_SIZE
+  });
+};
+const lookupUnprocessedFileByMd5 = async (client, md5) => {
+  const response = await client.authenticatedFetch(
+    `/api/v1/unprocessed_files/md5/${md5}`,
+    {
+      method: "GET"
+    }
+  );
+  if (response.ok) {
+    return await response.json();
+  }
+  if (response.status === 404) {
+    return null;
+  }
+  throw new Error(
+    `Failed to lookup unprocessed file by md5 ${md5} ${response.status} ${response.statusText}`
+  );
+};
+const processIsobmffFile = async (client, id) => {
+  const response = await client.authenticatedFetch(
+    `/api/v1/unprocessed_files/${id}/isobmff`,
+    {
+      method: "POST"
+    }
+  );
+  if (response.ok) {
+    return await response.json();
+  }
+  throw new Error(
+    `Failed to process isobmff file ${id} ${response.status} ${response.statusText}`
+  );
+};
+export {
+  CreateUnprocessedFilePayload,
+  createUnprocessedFile,
+  createUnprocessedFileFromPath,
+  lookupUnprocessedFileByMd5,
+  processIsobmffFile,
+  uploadUnprocessedFile,
+  uploadUnprocessedReadableStream
+};
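A sketch of the Node-side flow these helpers enable: register a local file by md5, stream it up in chunks, then ask the API to process it into an ISOBMFF file. It assumes the create response echoes the id and byte_size that uploadUnprocessedFile expects and that the returned chunk iterator is async-iterable; the importLocalVideo wrapper is illustrative only:

import { Client, createUnprocessedFileFromPath, uploadUnprocessedFile, processIsobmffFile } from "@editframe/api";

async function importLocalVideo(client: Client, path: string) {
  // Stats the file, computes its md5 and registers it (Node-only code path).
  const file = await createUnprocessedFileFromPath(client, path);
  // Stream the bytes up in chunks, logging progress events as they arrive.
  const upload = await uploadUnprocessedFile(client, file, path);
  for await (const event of upload) {
    if (event.type === "progress") {
      console.log(`upload ${Math.round(event.progress * 100)}%`);
    }
  }
  // Ask the server to turn the raw upload into an ISOBMFF file.
  return processIsobmffFile(client, file.id);
}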
package/dist/api/src/streamChunker.js
@@ -0,0 +1,28 @@
+import { CHUNK_SIZE_BYTES } from "./CHUNK_SIZE_BYTES.js";
+async function* streamChunker(readableStream, chunkSize = CHUNK_SIZE_BYTES) {
+  const reader = readableStream.getReader();
+  let buffer = new Uint8Array(0);
+  try {
+    while (true) {
+      const { done, value } = await reader.read();
+      if (done) break;
+      const chunk = value;
+      const newBuffer = new Uint8Array(buffer.length + chunk.length);
+      newBuffer.set(buffer);
+      newBuffer.set(chunk, buffer.length);
+      buffer = newBuffer;
+      while (buffer.length >= chunkSize) {
+        yield buffer.slice(0, chunkSize);
+        buffer = buffer.slice(chunkSize);
+      }
+    }
+    if (buffer.length > 0) {
+      yield buffer;
+    }
+  } finally {
+    reader.releaseLock();
+  }
+}
+export {
+  streamChunker
+};
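A quick illustration of the chunk boundaries the generator above produces. streamChunker is an internal dist helper rather than a documented export, so the import path below is the dist location itself and the snippet is a behavioural sketch only:

import { streamChunker } from "./api/src/streamChunker.js"; // internal dist path, not a public export

// Ten zero bytes chunked into 4-byte pieces: yields 4, 4, then the 2-byte tail.
const stream = new Blob([new Uint8Array(10)]).stream();
for await (const chunk of streamChunker(stream, 4)) {
  console.log(chunk.byteLength); // 4, 4, 2
}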
package/dist/{uploadChunks.js → api/src/uploadChunks.js}
@@ -51,7 +51,7 @@ function uploadChunks(client, {
     const uploadStatus = await client.authenticatedFetch(url);
     yield { type: "progress", progress: 0 };
     if (uploadStatus.status === 200) {
-      log("
+      log("Chunk already uploaded");
       yield { type: "progress", progress: 1 };
       return;
     }
@@ -72,9 +72,6 @@ function uploadChunks(client, {
       progress: Math.min(1, chunkNumber / (fileSize / chunkSizeBytes))
     };
   }
-  if (!fileStream.readableEnded) {
-    throw new Error("Did not read entire file stream");
-  }
  if (!complete) {
    throw new Error("Did not complete upload");
  }
package/dist/cli/src/utils/createReadableStreamFromReadable.js
@@ -0,0 +1,82 @@
+import { Stream } from "node:stream";
+const createReadableStreamFromReadable = (source) => {
+  const pump = new StreamPump(source);
+  const stream = new ReadableStream(pump, pump);
+  return stream;
+};
+class StreamPump {
+  constructor(stream) {
+    this.highWaterMark = stream.readableHighWaterMark || new Stream.Readable().readableHighWaterMark;
+    this.accumalatedSize = 0;
+    this.stream = stream;
+    this.enqueue = this.enqueue.bind(this);
+    this.error = this.error.bind(this);
+    this.close = this.close.bind(this);
+  }
+  size(chunk) {
+    return chunk?.byteLength || 0;
+  }
+  start(controller) {
+    this.controller = controller;
+    this.stream.on("data", this.enqueue);
+    this.stream.once("error", this.error);
+    this.stream.once("end", this.close);
+    this.stream.once("close", this.close);
+  }
+  pull() {
+    this.resume();
+  }
+  cancel(reason) {
+    if (this.stream.destroy) {
+      this.stream.destroy(reason);
+    }
+    this.stream.off("data", this.enqueue);
+    this.stream.off("error", this.error);
+    this.stream.off("end", this.close);
+    this.stream.off("close", this.close);
+  }
+  enqueue(chunk) {
+    if (this.controller) {
+      try {
+        const bytes = chunk instanceof Uint8Array ? chunk : Buffer.from(chunk);
+        const available = (this.controller.desiredSize || 0) - bytes.byteLength;
+        this.controller.enqueue(bytes);
+        if (available <= 0) {
+          this.pause();
+        }
+      } catch (error) {
+        this.controller.error(
+          new Error(
+            "Could not create Buffer, chunk must be of type string or an instance of Buffer, ArrayBuffer, or Array or an Array-like Object"
+          )
+        );
+        this.cancel();
+      }
+    }
+  }
+  pause() {
+    if (this.stream.pause) {
+      this.stream.pause();
+    }
+  }
+  resume() {
+    if (this.stream.readable && this.stream.resume) {
+      this.stream.resume();
+    }
+  }
+  close() {
+    if (this.controller) {
+      this.controller.close();
+      delete this.controller;
+    }
+  }
+  error(error) {
+    if (this.controller) {
+      this.controller.error(error);
+      delete this.controller;
+    }
+  }
+}
+export {
+  createReadableStreamFromReadable
+};
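A small sketch of what this utility is for: adapting a Node Readable (for example from fs.createReadStream) into a web ReadableStream so it can feed fetch or the chunked-upload helpers. The relative import path is the dist location above; how the file is actually meant to be imported is not shown in this diff:

import { createReadStream } from "node:fs";
import { createReadableStreamFromReadable } from "./cli/src/utils/createReadableStreamFromReadable.js";

const nodeStream = createReadStream("movie.mp4");
// StreamPump wires Node backpressure (pause/resume) to the web stream's pull/desiredSize.
const webStream = createReadableStreamFromReadable(nodeStream);
// webStream can now be used as a fetch body (with duplex: "half") or handed to streamChunker.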
package/dist/client.d.ts
CHANGED
@@ -1,6 +1,9 @@
-import {
+import { StreamEventSource } from './StreamEventSource.ts';
 export declare class Client {
     #private;
-    constructor(token
-
+    constructor(token?: string, efHost?: string);
+    authenticatedEventSource: (path: string, init?: RequestInit) => Promise<StreamEventSource>;
+    authenticatedFetch: (path: string, init?: RequestInit & {
+        duplex?: "half";
+    }) => Promise<Response>;
 }
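A hedged sketch of the new Client surface. The environment-variable name is illustrative, and the defaults applied when token or efHost are omitted are not described in this diff:

import { Client } from "@editframe/api";

async function demo(body: ReadableStream) {
  const client = new Client(process.env.EDITFRAME_TOKEN); // efHost is optional
  // Plain authenticated request.
  const info = await client.authenticatedFetch("/api/v1/renders/some-id");
  console.log(info.status);
  // Streaming request body: the widened init type now accepts duplex: "half",
  // which fetch requires whenever the body is a ReadableStream.
  await client.authenticatedFetch("/api/v1/renders/some-id/upload", {
    method: "POST",
    body,
    duplex: "half",
  });
  // Server-sent events wrapped in a StreamEventSource.
  return client.authenticatedEventSource("/api/v1/transcriptions/some-id/progress");
}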
package/dist/index.d.ts
CHANGED
@@ -1,8 +1,10 @@
-export { createCaptionFile, CreateCaptionFilePayload, type CreateCaptionFileResult, uploadCaptionFile, } from './resources/caption-file.ts';
-export { createImageFile, CreateImageFilePayload, type CreateImageFileResult, uploadImageFile, } from './resources/image-file.ts';
-export { createISOBMFFFile, CreateISOBMFFFilePayload, type CreateISOBMFFFileResult, uploadFragmentIndex, } from './resources/isobmff-file.ts';
+export { createCaptionFile, CreateCaptionFilePayload, type CreateCaptionFileResult, uploadCaptionFile, type LookupCaptionFileByMd5Result, lookupCaptionFileByMd5, } from './resources/caption-file.ts';
+export { createImageFile, CreateImageFilePayload, type CreateImageFileResult, uploadImageFile, type LookupImageFileByMd5Result, lookupImageFileByMd5, } from './resources/image-file.ts';
+export { createISOBMFFFile, CreateISOBMFFFilePayload, type CreateISOBMFFFileResult, uploadFragmentIndex, type LookupISOBMFFFileByMd5Result, lookupISOBMFFFileByMd5, type GetISOBMFFFileTranscriptionResult, getISOBMFFFileTranscription, type TranscribeISOBMFFFileResult, transcribeISOBMFFFile, TranscribeISOBMFFFilePayload, } from './resources/isobmff-file.ts';
 export { createISOBMFFTrack, CreateISOBMFFTrackPayload, type CreateISOBMFFTrackResult, uploadISOBMFFTrack, } from './resources/isobmff-track.ts';
-export { createRender, CreateRenderPayload, type CreateRenderResult, uploadRender, } from './resources/renders.ts';
+export { createRender, CreateRenderPayload, type CreateRenderResult, uploadRender, type LookupRenderByMd5Result, lookupRenderByMd5, } from './resources/renders.ts';
+export { createTranscription, CreateTranscriptionPayload, type CreateTranscriptionResult, getTranscriptionInfo, getTranscriptionProgress, type TranscriptionInfoResult, } from './resources/transcriptions.ts';
 export { createURLToken, type URLTokenResult, } from './resources/url-token.ts';
-export { createUnprocessedFile, CreateUnprocessedFilePayload, type CreateUnprocessedFileResult,
+export { createUnprocessedFile, CreateUnprocessedFilePayload, type CreateUnprocessedFileResult, uploadUnprocessedReadableStream, type LookupUnprocessedFileByMd5Result, lookupUnprocessedFileByMd5, processIsobmffFile, type ProcessIsobmffFileResult, createUnprocessedFileFromPath, uploadUnprocessedFile, } from './resources/unprocessed-file.ts';
+export { getIsobmffProcessInfo, getIsobmffProcessProgress, type IsobmffProcessInfoResult, } from './resources/process-isobmff.ts';
 export { Client } from './client.ts';
package/dist/readableFromBuffers.d.ts
@@ -1,2 +1 @@
-
-export declare const readableFromBuffers: (...buffers: Buffer[]) => Readable;
+export declare const webReadableFromBuffers: (...buffers: Buffer[]) => ReadableStream<any>;
package/dist/resources/caption-file.d.ts
@@ -1,4 +1,3 @@
-import { Readable } from 'node:stream';
 import { z } from 'zod';
 import { Client } from '../client.ts';
 export declare const CreateCaptionFilePayload: z.ZodObject<{
@@ -18,7 +17,11 @@ export interface CreateCaptionFileResult {
     complete: boolean | null;
     id: string;
     md5: string;
-
+}
+export interface LookupCaptionFileByMd5Result {
+    complete: boolean | null;
+    id: string;
+    md5: string;
 }
 /**
  * Create a caption file
@@ -38,4 +41,5 @@ export interface CreateCaptionFileResult {
  * @beta
  */
 export declare const createCaptionFile: (client: Client, payload: z.infer<typeof CreateCaptionFilePayload>) => Promise<CreateCaptionFileResult>;
-export declare const uploadCaptionFile: (client: Client, fileId: string, fileStream:
+export declare const uploadCaptionFile: (client: Client, fileId: string, fileStream: ReadableStream, fileSize: number) => Promise<any>;
+export declare const lookupCaptionFileByMd5: (client: Client, md5: string) => Promise<LookupCaptionFileByMd5Result | null>;
package/dist/resources/image-file.d.ts
@@ -1,4 +1,3 @@
-import { Readable } from 'node:stream';
 import { z } from 'zod';
 import { Client } from '../client.ts';
 export declare const CreateImageFilePayload: z.ZodObject<{
@@ -12,22 +11,33 @@ export declare const CreateImageFilePayload: z.ZodObject<{
     md5: string;
     filename: string;
     byte_size: number;
-    height: number;
     width: number;
+    height: number;
     mime_type: "image/jpeg" | "image/png" | "image/jpg" | "image/webp";
 }, {
     md5: string;
     filename: string;
     byte_size: number;
-    height: number;
     width: number;
+    height: number;
     mime_type: "image/jpeg" | "image/png" | "image/jpg" | "image/webp";
 }>;
 export interface CreateImageFileResult {
     complete: boolean | null;
+    byte_size: number;
+    id: string;
+    md5: string;
+}
+export interface LookupImageFileByMd5Result {
+    complete: boolean | null;
+    byte_size: number;
     id: string;
     md5: string;
-    asset_id: string;
 }
+export declare const createImageFileFromPath: (client: Client, path: string) => Promise<CreateImageFileResult>;
 export declare const createImageFile: (client: Client, payload: z.infer<typeof CreateImageFilePayload>) => Promise<CreateImageFileResult>;
-export declare const uploadImageFile: (client: Client,
+export declare const uploadImageFile: (client: Client, uploadDetails: {
+    id: string;
+    byte_size: number;
+}, fileStream: ReadableStream) => import('../uploadChunks.ts').IteratorWithPromise<import('../uploadChunks.ts').UploadChunkEvent>;
+export declare const lookupImageFileByMd5: (client: Client, md5: string) => Promise<LookupImageFileByMd5Result | null>;
package/dist/resources/isobmff-file.d.ts
@@ -1,4 +1,3 @@
-import { Readable } from 'node:stream';
 import { z } from 'zod';
 import { Client } from '../client.ts';
 export declare const CreateISOBMFFFilePayload: z.ZodObject<{
@@ -16,7 +15,34 @@ export interface CreateISOBMFFFileResult {
     filename: string;
     id: string;
     md5: string;
-
+}
+export interface LookupISOBMFFFileByMd5Result {
+    fragment_index_complete: boolean;
+    filename: string;
+    id: string;
+    md5: string;
+}
+export interface GetISOBMFFFileTranscriptionResult {
+    id: string;
+    work_slice_ms: number;
+    isobmff_track: {
+        duration_ms: number;
+    };
 }
 export declare const createISOBMFFFile: (client: Client, payload: z.infer<typeof CreateISOBMFFFilePayload>) => Promise<CreateISOBMFFFileResult>;
-export declare const uploadFragmentIndex: (client: Client, fileId: string, fileStream:
+export declare const uploadFragmentIndex: (client: Client, fileId: string, fileStream: ReadableStream, fileSize: number) => Promise<any>;
+export declare const lookupISOBMFFFileByMd5: (client: Client, md5: string) => Promise<LookupISOBMFFFileByMd5Result | null>;
+export declare const getISOBMFFFileTranscription: (client: Client, id: string) => Promise<GetISOBMFFFileTranscriptionResult | null>;
+export declare const TranscribeISOBMFFFilePayload: z.ZodObject<{
+    trackId: z.ZodOptional<z.ZodString>;
+}, "strip", z.ZodTypeAny, {
+    trackId?: string | undefined;
+}, {
+    trackId?: string | undefined;
+}>;
+export interface TranscribeISOBMFFFileResult {
+    id: string;
+    file_id: string;
+    track_id: number;
+}
+export declare const transcribeISOBMFFFile: (client: Client, id: string, payload?: z.infer<typeof TranscribeISOBMFFFilePayload>) => Promise<TranscribeISOBMFFFileResult>;
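A sketch of how the new ISOBMFF transcription declarations might be combined. The transcribeByMd5 wrapper is illustrative, and the "no transcription yet" case is inferred only from the nullable return types above:

import { Client, lookupISOBMFFFileByMd5, getISOBMFFFileTranscription, transcribeISOBMFFFile } from "@editframe/api";

async function transcribeByMd5(client: Client, md5: string) {
  const file = await lookupISOBMFFFileByMd5(client, md5);
  if (!file) throw new Error(`No ISOBMFF file uploaded with md5 ${md5}`);
  // Reuse an existing transcription when the server already has one.
  const existing = await getISOBMFFFileTranscription(client, file.id);
  if (existing) return existing;
  // Otherwise start one; the trackId payload is optional per TranscribeISOBMFFFilePayload.
  return transcribeISOBMFFFile(client, file.id);
}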
package/dist/resources/isobmff-track.d.ts
@@ -1,4 +1,3 @@
-import { Readable } from 'node:stream';
 import { z } from 'zod';
 import { Client } from '../client.ts';
 export declare const CreateISOBMFFTrackPayload: z.ZodDiscriminatedUnion<"type", [z.ZodObject<{
@@ -76,8 +75,8 @@ export declare const CreateISOBMFFTrackPayload: z.ZodDiscriminatedUnion<"type",
     codec_name: z.ZodString;
     byte_size: z.ZodNumber;
 }, "strip", z.ZodTypeAny, {
-    byte_size: number;
     type: "audio";
+    byte_size: number;
     codec_name: string;
     file_id: string;
     track_id: number;
@@ -106,8 +105,8 @@ export declare const CreateISOBMFFTrackPayload: z.ZodDiscriminatedUnion<"type",
     };
     duration_ms: number;
 }, {
-    byte_size: number;
     type: "audio";
+    byte_size: number;
     codec_name: string;
     file_id: string;
     track_id: number;
@@ -160,8 +159,6 @@ export declare const CreateISOBMFFTrackPayload: z.ZodDiscriminatedUnion<"type",
     bit_rate: z.ZodOptional<z.ZodString>;
     disposition: z.ZodRecord<z.ZodString, z.ZodUnknown>;
 }, "strip", z.ZodTypeAny, {
-    height: number;
-    width: number;
     index: number;
     codec_name: string;
     codec_long_name: string;
@@ -172,6 +169,8 @@ export declare const CreateISOBMFFTrackPayload: z.ZodDiscriminatedUnion<"type",
     avg_frame_rate: string;
     time_base: string;
     disposition: Record<string, unknown>;
+    width: number;
+    height: number;
     coded_width: number;
     coded_height: number;
     start_pts?: number | undefined;
@@ -180,8 +179,6 @@ export declare const CreateISOBMFFTrackPayload: z.ZodDiscriminatedUnion<"type",
     duration?: number | undefined;
     bit_rate?: string | undefined;
 }, {
-    height: number;
-    width: number;
     index: number;
     codec_name: string;
     codec_long_name: string;
@@ -192,6 +189,8 @@ export declare const CreateISOBMFFTrackPayload: z.ZodDiscriminatedUnion<"type",
     avg_frame_rate: string;
     time_base: string;
     disposition: Record<string, unknown>;
+    width: number;
+    height: number;
     coded_width: number;
     coded_height: number;
     start_pts?: number | undefined;
@@ -204,14 +203,12 @@ export declare const CreateISOBMFFTrackPayload: z.ZodDiscriminatedUnion<"type",
     codec_name: z.ZodString;
     byte_size: z.ZodNumber;
 }, "strip", z.ZodTypeAny, {
-    byte_size: number;
     type: "video";
+    byte_size: number;
     codec_name: string;
     file_id: string;
     track_id: number;
     probe_info: {
-    height: number;
-    width: number;
     index: number;
     codec_name: string;
     codec_long_name: string;
@@ -222,6 +219,8 @@ export declare const CreateISOBMFFTrackPayload: z.ZodDiscriminatedUnion<"type",
     avg_frame_rate: string;
     time_base: string;
     disposition: Record<string, unknown>;
+    width: number;
+    height: number;
     coded_width: number;
     coded_height: number;
     start_pts?: number | undefined;
@@ -232,14 +231,12 @@ export declare const CreateISOBMFFTrackPayload: z.ZodDiscriminatedUnion<"type",
     };
     duration_ms: number;
 }, {
-    byte_size: number;
     type: "video";
+    byte_size: number;
     codec_name: string;
     file_id: string;
     track_id: number;
     probe_info: {
-    height: number;
-    width: number;
     index: number;
     codec_name: string;
     codec_long_name: string;
@@ -250,6 +247,8 @@ export declare const CreateISOBMFFTrackPayload: z.ZodDiscriminatedUnion<"type",
     avg_frame_rate: string;
     time_base: string;
     disposition: Record<string, unknown>;
+    width: number;
+    height: number;
     coded_width: number;
     coded_height: number;
     start_pts?: number | undefined;
@@ -265,8 +264,7 @@ export interface CreateISOBMFFTrackResult {
     byte_size: number;
     track_id: number;
     file_id: string;
-    asset_id: string;
     complete: boolean;
 }
 export declare const createISOBMFFTrack: (client: Client, payload: z.infer<typeof CreateISOBMFFTrackPayload>) => Promise<CreateISOBMFFTrackResult>;
-export declare const uploadISOBMFFTrack: (client: Client, fileId: string, trackId: number, fileStream:
+export declare const uploadISOBMFFTrack: (client: Client, fileId: string, trackId: number, fileStream: ReadableStream, trackSize: number) => import('../uploadChunks.ts').IteratorWithPromise<import('../uploadChunks.ts').UploadChunkEvent>;
package/dist/resources/process-isobmff.d.ts
@@ -0,0 +1,12 @@
+import { ProgressIterator } from '../ProgressIterator.ts';
+import { Client } from '../client.ts';
+export interface IsobmffProcessInfoResult {
+    id: string;
+    created_at: string;
+    completed_at: string | null;
+    failed_at: string | null;
+    isobmff_file_id: string | null;
+    unprocessed_file_id: string;
+}
+export declare const getIsobmffProcessProgress: (client: Client, id: string) => Promise<ProgressIterator>;
+export declare const getIsobmffProcessInfo: (client: Client, id: string) => Promise<IsobmffProcessInfoResult>;
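A hedged sketch of watching an ISOBMFF processing job through these declarations. It assumes the result of processIsobmffFile carries the job id and that ProgressIterator is async-iterable until the job settles; neither is defined in this diff:

import { Client, processIsobmffFile, getIsobmffProcessProgress, getIsobmffProcessInfo } from "@editframe/api";

async function waitForIsobmff(client: Client, unprocessedFileId: string) {
  const job = await processIsobmffFile(client, unprocessedFileId);
  const progress = await getIsobmffProcessProgress(client, job.id);
  for await (const event of progress) {
    console.log("processing", event);
  }
  const info = await getIsobmffProcessInfo(client, job.id);
  return info.isobmff_file_id; // stays null if failed_at is set
}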
package/dist/resources/process-isobmff.test.d.ts
@@ -0,0 +1 @@
+export {};
package/dist/resources/renders.d.ts
@@ -1,4 +1,3 @@
-import { Readable } from 'node:stream';
 import { z } from 'zod';
 import { Client } from '../client.ts';
 export declare const CreateRenderPayload: z.ZodObject<{
@@ -11,16 +10,16 @@ export declare const CreateRenderPayload: z.ZodObject<{
     strategy: z.ZodEnum<["v1", "v2"]>;
 }, "strip", z.ZodTypeAny, {
     md5: string;
-    height: number;
     width: number;
+    height: number;
     strategy: "v1" | "v2";
     duration_ms: number;
     fps: number;
     work_slice_ms: number;
 }, {
     md5: string;
-    height: number;
     width: number;
+    height: number;
     strategy: "v1" | "v2";
     duration_ms: number;
     fps: number;
@@ -29,7 +28,13 @@ export declare const CreateRenderPayload: z.ZodObject<{
 export interface CreateRenderResult {
     id: string;
     md5: string;
-    status: "complete" | "created" | "failed" | "pending" | "rendering";
+    status: "complete" | "created" | "failed" | "pending" | "rendering" | string;
+}
+export interface LookupRenderByMd5Result {
+    id: string;
+    md5: string;
+    status: "complete" | "created" | "failed" | "pending" | "rendering" | string;
 }
 export declare const createRender: (client: Client, payload: z.infer<typeof CreateRenderPayload>) => Promise<CreateRenderResult>;
-export declare const uploadRender: (client: Client, fileId: string, fileStream:
+export declare const uploadRender: (client: Client, fileId: string, fileStream: ReadableStream, folderSize: number) => Promise<any>;
+export declare const lookupRenderByMd5: (client: Client, md5: string) => Promise<LookupRenderByMd5Result | null>;