@editframe/api 0.11.0-beta.9 → 0.12.0-beta.10
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/ProgressIterator.d.ts +25 -0
- package/dist/ProgressIterator.test.d.ts +1 -0
- package/dist/StreamEventSource.d.ts +50 -0
- package/dist/StreamEventSource.test.d.ts +1 -0
- package/dist/{CHUNK_SIZE_BYTES.js → api/src/CHUNK_SIZE_BYTES.js} +1 -1
- package/dist/api/src/ProgressIterator.js +99 -0
- package/dist/api/src/StreamEventSource.js +130 -0
- package/dist/api/src/client.js +49 -0
- package/dist/api/src/index.js +49 -0
- package/dist/{resources → api/src/resources}/caption-file.js +22 -2
- package/dist/{resources → api/src/resources}/image-file.js +24 -4
- package/dist/api/src/resources/isobmff-file.js +102 -0
- package/dist/{resources → api/src/resources}/isobmff-track.js +44 -1
- package/dist/api/src/resources/process-isobmff.js +22 -0
- package/dist/{resources → api/src/resources}/renders.js +21 -2
- package/dist/api/src/resources/transcriptions.js +45 -0
- package/dist/api/src/resources/unprocessed-file.js +114 -0
- package/dist/api/src/streamChunker.js +28 -0
- package/dist/{uploadChunks.js → api/src/uploadChunks.js} +1 -4
- package/dist/cli/src/utils/createReadableStreamFromReadable.js +82 -0
- package/dist/client.d.ts +6 -3
- package/dist/index.d.ts +7 -5
- package/dist/readableFromBuffers.d.ts +1 -2
- package/dist/resources/caption-file.d.ts +7 -3
- package/dist/resources/image-file.d.ts +15 -5
- package/dist/resources/isobmff-file.d.ts +29 -3
- package/dist/resources/isobmff-track.d.ts +13 -15
- package/dist/resources/process-isobmff.d.ts +12 -0
- package/dist/resources/process-isobmff.test.d.ts +1 -0
- package/dist/resources/renders.d.ts +10 -5
- package/dist/resources/transcriptions.d.ts +24 -0
- package/dist/resources/transcriptions.test.d.ts +1 -0
- package/dist/resources/unprocessed-file.d.ts +15 -53
- package/dist/streamChunker.d.ts +1 -2
- package/dist/uploadChunks.d.ts +1 -2
- package/package.json +3 -2
- package/src/resources/caption-file.test.ts +57 -6
- package/src/resources/caption-file.ts +34 -5
- package/src/resources/image-file.test.ts +59 -11
- package/src/resources/image-file.ts +75 -9
- package/src/resources/isobmff-file.test.ts +57 -6
- package/src/resources/isobmff-file.ts +96 -5
- package/src/resources/isobmff-track.test.ts +3 -3
- package/src/resources/isobmff-track.ts +50 -5
- package/src/resources/process-isobmff.test.ts +62 -0
- package/src/resources/process-isobmff.ts +33 -0
- package/src/resources/renders.test.ts +51 -6
- package/src/resources/renders.ts +34 -5
- package/src/resources/transcriptions.test.ts +49 -0
- package/src/resources/transcriptions.ts +64 -0
- package/src/resources/unprocessed-file.test.ts +24 -437
- package/src/resources/unprocessed-file.ts +90 -160
- package/dist/client.js +0 -35
- package/dist/index.js +0 -36
- package/dist/resources/isobmff-file.js +0 -47
- package/dist/resources/unprocessed-file.js +0 -180
- package/dist/streamChunker.js +0 -17
- /package/dist/{resources → api/src/resources}/url-token.js +0 -0
|
@@ -0,0 +1,25 @@
|
|
|
1
|
+
/**
 * Type declarations for async event iterators layered over a
 * StreamEventSource (server-sent-event stream).
 */
import { CompleteEvent, ProgressEvent, StreamEventSource } from './StreamEventSource.ts';
/**
 * Buffers events from a StreamEventSource into a queue and replays them as
 * an async iterable; subclasses register the listeners that feed the queue.
 *
 * NOTE(review): `T extends CompleteEvent | any` collapses to `extends any`,
 * so the constraint is a no-op — possibly `CompleteEvent | ProgressEvent`
 * was intended; confirm before tightening.
 */
declare abstract class BaseEventIterator<T extends CompleteEvent | any> implements AsyncIterable<T> {
    protected eventSource: StreamEventSource;
    // Every event received so far; also the resolved value of whenComplete().
    protected queue: T[];
    // Read cursor into `queue` for the async iterator.
    protected index: number;
    // Set once the terminal "complete" event has been received.
    protected isComplete: boolean;
    // Deferred the iterator awaits while the queue is drained.
    protected resolversNext: PromiseWithResolvers<void>;
    constructor(eventSource: StreamEventSource);
    /** Drains the iterator to completion and resolves with all buffered events. */
    whenComplete(): Promise<T[]>;
    /**
     * Registers a progress callback; returns `this` for chaining.
     * NOTE(review): no implementation of `on` is visible in the emitted
     * ProgressIterator.js in this diff — confirm it exists at runtime.
     */
    on: (event: "progress", callback: (event: ProgressEvent) => void) => this;
    protected push(event: T): void;
    protected get queueLength(): number;
    [Symbol.asyncIterator](): AsyncIterator<T>;
}
/** Iterates raw "progress" events, ending with the terminal "complete" event. */
export declare class ProgressIterator extends BaseEventIterator<ProgressEvent | CompleteEvent> {
    constructor(eventSource: StreamEventSource);
    private initializeListeners;
}
/**
 * Derives synthetic progress events from "size" / "completion" accounting
 * events, ending with the terminal "complete" event.
 */
export declare class CompletionIterator extends BaseEventIterator<CompleteEvent | ProgressEvent> {
    private totalSize;
    private currentProgress;
    constructor(eventSource: StreamEventSource);
    private initializeListeners;
}
export {};
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
export {};
|
|
@@ -0,0 +1,50 @@
|
|
|
1
|
+
/** Callback signature for a single event type. */
type EventCallback<T> = (event: T) => void;
/** Maps event names to payload types for on()/off()/emit(). */
export type StreamEventSourceEventMap = {
    progress: ProgressEvent;
    complete: CompleteEvent;
    size: SizeEvent;
    completion: CompletionEvent;
    // Fallback for SSE messages that carry no explicit event name.
    message: {
        id: string | undefined;
        data: string;
    };
    // Emitted once the underlying stream has been fully read.
    end: [];
    error: Error;
};
/** Fractional progress report (presumably 0..1 — TODO confirm with server). */
export type ProgressEvent = {
    type: "progress";
    data: {
        progress: number;
    };
};
/** Terminal event carrying the server's final payload. */
export type CompleteEvent = {
    type: "complete";
    data: object;
};
/** Announces the total size that completion counts are measured against. */
export type SizeEvent = {
    type: "size";
    data: {
        size: number;
    };
};
/** Incremental completion count, accumulated against SizeEvent.size. */
export type CompletionEvent = {
    type: "completion";
    data: {
        count: number;
    };
};
/**
 * Typed event emitter over a ReadableStream of server-sent-event bytes.
 * Reading starts immediately on construction.
 */
export declare class StreamEventSource {
    private stream;
    private activeReader;
    private decoder;
    private parser;
    private listeners;
    constructor(stream: ReadableStream);
    /** Registers a listener; returns `this` for chaining. */
    on<K extends keyof StreamEventSourceEventMap>(event: K, callback: EventCallback<StreamEventSourceEventMap[K]>): this;
    /** Removes a previously registered listener (no-op if absent). */
    off<K extends keyof StreamEventSourceEventMap>(event: K, callback: EventCallback<StreamEventSourceEventMap[K]>): this;
    protected emit<K extends keyof StreamEventSourceEventMap>(event: K, data: StreamEventSourceEventMap[K]): void;
    /** Resolves after the "end" event (stream fully consumed). */
    whenClosed(): Promise<void>;
    private startReading;
    /** Releases the stream reader and resets the SSE parser. */
    close(): void;
}
export {};
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
export {};
|
|
@@ -0,0 +1,99 @@
|
|
|
1
|
+
class BaseEventIterator {
|
|
2
|
+
constructor(eventSource) {
|
|
3
|
+
this.queue = [];
|
|
4
|
+
this.index = 0;
|
|
5
|
+
this.isComplete = false;
|
|
6
|
+
this.resolversNext = Promise.withResolvers();
|
|
7
|
+
this.eventSource = eventSource;
|
|
8
|
+
}
|
|
9
|
+
async whenComplete() {
|
|
10
|
+
for await (const _ of this) {
|
|
11
|
+
}
|
|
12
|
+
return this.queue;
|
|
13
|
+
}
|
|
14
|
+
push(event) {
|
|
15
|
+
this.queue.push(event);
|
|
16
|
+
this.resolversNext.resolve();
|
|
17
|
+
this.resolversNext = Promise.withResolvers();
|
|
18
|
+
}
|
|
19
|
+
get queueLength() {
|
|
20
|
+
return this.queue.length - this.index;
|
|
21
|
+
}
|
|
22
|
+
async *[Symbol.asyncIterator]() {
|
|
23
|
+
try {
|
|
24
|
+
while (!this.isComplete || this.queueLength > 0) {
|
|
25
|
+
if (this.queueLength === 0) {
|
|
26
|
+
await this.resolversNext.promise;
|
|
27
|
+
} else {
|
|
28
|
+
const item = this.queue[this.index];
|
|
29
|
+
if (!item) {
|
|
30
|
+
throw new Error("Queue is corrupted");
|
|
31
|
+
}
|
|
32
|
+
yield item;
|
|
33
|
+
this.index++;
|
|
34
|
+
}
|
|
35
|
+
}
|
|
36
|
+
} finally {
|
|
37
|
+
this.eventSource.close();
|
|
38
|
+
}
|
|
39
|
+
}
|
|
40
|
+
}
|
|
41
|
+
class ProgressIterator extends BaseEventIterator {
|
|
42
|
+
constructor(eventSource) {
|
|
43
|
+
super(eventSource);
|
|
44
|
+
this.initializeListeners();
|
|
45
|
+
}
|
|
46
|
+
initializeListeners() {
|
|
47
|
+
this.eventSource.on("progress", (event) => {
|
|
48
|
+
this.push(event);
|
|
49
|
+
});
|
|
50
|
+
this.eventSource.on("complete", (event) => {
|
|
51
|
+
this.isComplete = true;
|
|
52
|
+
this.push(event);
|
|
53
|
+
});
|
|
54
|
+
this.eventSource.on("error", (error) => {
|
|
55
|
+
this.eventSource.close();
|
|
56
|
+
this.resolversNext.reject(error);
|
|
57
|
+
});
|
|
58
|
+
}
|
|
59
|
+
}
|
|
60
|
+
class CompletionIterator extends BaseEventIterator {
|
|
61
|
+
constructor(eventSource) {
|
|
62
|
+
super(eventSource);
|
|
63
|
+
this.totalSize = 0;
|
|
64
|
+
this.currentProgress = 0;
|
|
65
|
+
this.initializeListeners();
|
|
66
|
+
}
|
|
67
|
+
initializeListeners() {
|
|
68
|
+
this.eventSource.on("size", (event) => {
|
|
69
|
+
this.totalSize = event.data.size;
|
|
70
|
+
this.push({
|
|
71
|
+
type: "progress",
|
|
72
|
+
data: {
|
|
73
|
+
progress: 0
|
|
74
|
+
}
|
|
75
|
+
});
|
|
76
|
+
});
|
|
77
|
+
this.eventSource.on("completion", (event) => {
|
|
78
|
+
this.currentProgress += Number(event.data.count);
|
|
79
|
+
this.push({
|
|
80
|
+
type: "progress",
|
|
81
|
+
data: {
|
|
82
|
+
progress: this.currentProgress / this.totalSize
|
|
83
|
+
}
|
|
84
|
+
});
|
|
85
|
+
});
|
|
86
|
+
this.eventSource.on("complete", (event) => {
|
|
87
|
+
this.isComplete = true;
|
|
88
|
+
this.push(event);
|
|
89
|
+
});
|
|
90
|
+
this.eventSource.on("error", (error) => {
|
|
91
|
+
this.eventSource.close();
|
|
92
|
+
this.resolversNext.reject(error);
|
|
93
|
+
});
|
|
94
|
+
}
|
|
95
|
+
}
|
|
96
|
+
export {
|
|
97
|
+
CompletionIterator,
|
|
98
|
+
ProgressIterator
|
|
99
|
+
};
|
|
@@ -0,0 +1,130 @@
|
|
|
1
|
+
import debug from "debug";
import { createParser } from "eventsource-parser";
const log = debug("ef:StreamEventSource");
// Adapts a ReadableStream of server-sent-event bytes into typed events
// ("size" | "completion" | "progress" | "complete" | "error" | "message"),
// plus "end" once the stream is exhausted. Reading starts on construction.
class StreamEventSource {
  constructor(stream) {
    this.activeReader = null;
    this.decoder = new TextDecoder();
    this.listeners = {};
    if (!stream) {
      console.error("StreamEventSource: Stream is null or undefined");
      throw new Error("Stream is required");
    }
    this.stream = stream;
    this.parser = createParser({
      onError: (err) => {
        console.error("StreamEventSource: Parser error:", err);
        this.emit("error", err);
      },
      onEvent: (event) => {
        if (event.event) {
          // Named SSE events carry a JSON payload keyed by the event name.
          switch (event.event) {
            case "size":
              this.emit("size", {
                type: "size",
                data: JSON.parse(event.data)
              });
              break;
            case "completion":
              this.emit("completion", {
                type: "completion",
                data: JSON.parse(event.data)
              });
              break;
            case "progress":
              this.emit("progress", {
                type: "progress",
                data: JSON.parse(event.data)
              });
              break;
            case "complete":
              this.emit("complete", {
                type: "complete",
                data: JSON.parse(event.data)
              });
              break;
            case "error":
              log("StreamEventSource: Error event", event.data);
              this.emit("error", new Error(event.data));
              break;
            default:
              this.emit(
                "error",
                new Error(`Unknown event: ${event.event} data: ${event.data}`)
              );
          }
        } else {
          // Unnamed SSE messages are forwarded verbatim.
          this.emit("message", {
            id: event.id,
            data: event.data
          });
        }
      }
    });
    this.startReading().catch((error) => {
      console.error("StreamEventSource: Error in startReading:", error);
      this.emit("error", error);
    });
  }
  // Registers a listener; returns this for chaining.
  on(event, callback) {
    if (!this.listeners[event]) {
      this.listeners[event] = [];
    }
    this.listeners[event]?.push(callback);
    return this;
  }
  // Removes a previously registered listener (no-op if absent).
  off(event, callback) {
    const listeners = this.listeners[event];
    if (listeners) {
      const index = listeners.indexOf(callback);
      if (index !== -1) {
        listeners.splice(index, 1);
      }
    }
    return this;
  }
  emit(event, data) {
    for (const callback of this.listeners[event] ?? []) {
      callback(data);
    }
  }
  // Resolves once the "end" event fires (stream fully consumed).
  whenClosed() {
    return new Promise((resolve) => {
      this.on("end", () => resolve());
    });
  }
  async startReading() {
    try {
      this.activeReader = this.stream.getReader();
      while (true) {
        const { done, value } = await this.activeReader.read();
        if (done) break;
        // Validate the chunk BEFORE decoding. The original decoded first,
        // which made this null check unreachable in any useful sense.
        if (!value) {
          throw new Error("Chunk is null");
        }
        // { stream: true } keeps multi-byte UTF-8 sequences that straddle
        // chunk boundaries intact across decode() calls; without it such
        // sequences were corrupted into replacement characters.
        const chunk = this.decoder.decode(value, { stream: true });
        this.parser.feed(chunk);
      }
      // Flush any bytes the streaming decoder buffered at end-of-stream.
      const tail = this.decoder.decode();
      if (tail) {
        this.parser.feed(tail);
      }
      this.activeReader.releaseLock();
      this.activeReader = null;
      this.emit("end", []);
    } catch (error) {
      console.error("StreamEventSource: Error reading stream:", error);
      if (error instanceof Error) {
        this.emit("error", error);
      } else {
        this.emit("error", new Error(String(error)));
      }
    }
  }
  // Releases the reader lock and resets the SSE parser.
  // NOTE(review): this does not cancel the underlying stream, so a producer
  // may keep buffering — confirm whether reader.cancel() is intended here.
  close() {
    if (this.activeReader) {
      this.activeReader.releaseLock();
      this.activeReader = null;
    }
    this.parser.reset();
  }
}
export {
  StreamEventSource
};
|
|
@@ -0,0 +1,49 @@
|
|
|
1
|
+
import debug from "debug";
import { StreamEventSource } from "./StreamEventSource.js";
const log = debug("ef:api:client");
// HTTP client for the Editframe API. Authenticates with a bearer token of
// the form "ef_{}_{}" or, when no token is given, with the session cookie
// (requests always use credentials: "include").
class Client {
  constructor(token, efHost = "https://editframe.dev") {
    // Performs an authenticated request and wraps the streaming response
    // body in a StreamEventSource (server-sent events).
    this.authenticatedEventSource = async (path, init = {}) => {
      const response = await this.authenticatedFetch(path, init);
      if (response.body === null) {
        throw new Error("Could not create event source. Response body is null.");
      }
      return new StreamEventSource(response.body);
    };
    // fetch() against efHost with auth + JSON headers applied.
    // Fix: the caller's `init` object is no longer mutated — the original
    // wrote headers/credentials directly onto the argument, a visible side
    // effect on the caller's object. Headers now merge into a fresh object.
    // NOTE(review): assumes init.headers is a plain object; a Headers
    // instance would not merge via spread (same limitation as the original
    // Object.assign) — confirm callers never pass one.
    this.authenticatedFetch = async (path, init = {}) => {
      const headers = { ...(init.headers ?? {}) };
      const url = new URL(path, this.#efHost);
      log(
        "Authenticated fetch",
        { url: url.toString(), init },
        this.#token ? "(Token will be added as Bearer token)" : "(Using session cookie)"
      );
      if (this.#token) {
        headers.Authorization = `Bearer ${this.#token}`;
      }
      // Always JSON, deliberately overriding any caller-supplied value
      // (matches the original Object.assign ordering).
      headers["Content-Type"] = "application/json";
      const response = await fetch(url, { ...init, headers, credentials: "include" });
      log("Authenticated fetch response", response.status, response.statusText);
      return response;
    };
    log("Creating client with efHost", { efHost, tokenIsSet: !!token });
    this.#token = token;
    this.#efHost = efHost;
    if (token) {
      // Shape validation only; the captured groups are not otherwise used.
      const { apiKey, apiSecret } = token.match(/^(?<apiSecret>ef_[^_]+)_(?<apiKey>.+)$/)?.groups ?? {};
      if (!apiKey || !apiSecret) {
        throw new Error("Invalid token format. Must look like: ef_{}_{}");
      }
    }
  }
  #token;
  #efHost;
}
export {
  Client
};
|
|
@@ -0,0 +1,49 @@
|
|
|
1
|
+
// Public entry point for @editframe/api: re-exports the client and every
// resource helper directly from their defining modules, grouped by resource.
export {
  CreateCaptionFilePayload,
  createCaptionFile,
  lookupCaptionFileByMd5,
  uploadCaptionFile
} from "./resources/caption-file.js";
export {
  CreateImageFilePayload,
  createImageFile,
  lookupImageFileByMd5,
  uploadImageFile
} from "./resources/image-file.js";
export {
  CreateISOBMFFFilePayload,
  TranscribeISOBMFFFilePayload,
  createISOBMFFFile,
  getISOBMFFFileTranscription,
  lookupISOBMFFFileByMd5,
  transcribeISOBMFFFile,
  uploadFragmentIndex
} from "./resources/isobmff-file.js";
export {
  CreateISOBMFFTrackPayload,
  createISOBMFFTrack,
  uploadISOBMFFTrack
} from "./resources/isobmff-track.js";
export {
  CreateRenderPayload,
  createRender,
  lookupRenderByMd5,
  uploadRender
} from "./resources/renders.js";
export {
  CreateTranscriptionPayload,
  createTranscription,
  getTranscriptionInfo,
  getTranscriptionProgress
} from "./resources/transcriptions.js";
export { createURLToken } from "./resources/url-token.js";
export {
  CreateUnprocessedFilePayload,
  createUnprocessedFile,
  createUnprocessedFileFromPath,
  lookupUnprocessedFileByMd5,
  processIsobmffFile,
  uploadUnprocessedFile,
  uploadUnprocessedReadableStream
} from "./resources/unprocessed-file.js";
export {
  getIsobmffProcessInfo,
  getIsobmffProcessProgress
} from "./resources/process-isobmff.js";
export { Client } from "./client.js";
|
|
@@ -1,5 +1,5 @@
|
|
|
1
|
-
import { z } from "zod";
|
|
2
1
|
import debug from "debug";
|
|
2
|
+
import { z } from "zod";
|
|
3
3
|
const log = debug("ef:api:caption-file");
|
|
4
4
|
const MAX_CAPTION_SIZE = 1024 * 1024 * 2;
|
|
5
5
|
const CreateCaptionFilePayload = z.object({
|
|
@@ -37,7 +37,8 @@ const uploadCaptionFile = async (client, fileId, fileStream, fileSize) => {
|
|
|
37
37
|
`/api/v1/caption_files/${fileId}/upload`,
|
|
38
38
|
{
|
|
39
39
|
method: "POST",
|
|
40
|
-
body: fileStream
|
|
40
|
+
body: fileStream,
|
|
41
|
+
duplex: "half"
|
|
41
42
|
}
|
|
42
43
|
);
|
|
43
44
|
log("Caption file uploaded", response);
|
|
@@ -48,8 +49,27 @@ const uploadCaptionFile = async (client, fileId, fileStream, fileSize) => {
|
|
|
48
49
|
`Failed to upload caption ${response.status} ${response.statusText}`
|
|
49
50
|
);
|
|
50
51
|
};
|
|
52
|
+
// Looks up an existing caption file by its MD5 digest.
// Resolves with the file record, null when no match exists (HTTP 404),
// and throws on any other non-OK response.
const lookupCaptionFileByMd5 = async (client, md5) => {
  const res = await client.authenticatedFetch(`/api/v1/caption_files/md5/${md5}`, {
    method: "GET"
  });
  log("Caption file lookup", res);
  if (res.ok) return await res.json();
  if (res.status === 404) return null;
  throw new Error(
    `Failed to lookup caption by md5 ${md5} ${res.status} ${res.statusText}`
  );
};
|
|
51
70
|
export {
|
|
52
71
|
CreateCaptionFilePayload,
|
|
53
72
|
createCaptionFile,
|
|
73
|
+
lookupCaptionFileByMd5,
|
|
54
74
|
uploadCaptionFile
|
|
55
75
|
};
|
|
@@ -1,4 +1,5 @@
|
|
|
1
1
|
import debug from "debug";
|
|
2
|
+
import "mime";
|
|
2
3
|
import { z } from "zod";
|
|
3
4
|
import { uploadChunks } from "../uploadChunks.js";
|
|
4
5
|
const log = debug("ef:api:image-file");
|
|
@@ -26,17 +27,36 @@ const createImageFile = async (client, payload) => {
|
|
|
26
27
|
`Failed to create file ${response.status} ${response.statusText}`
|
|
27
28
|
);
|
|
28
29
|
};
|
|
29
|
-
// Streams image bytes in chunks to the upload endpoint for an image file
// record. `uploadDetails` (from createImageFile / lookupImageFileByMd5)
// supplies the record id and expected byte_size.
const uploadImageFile = (client, uploadDetails, fileStream) => {
  log("Uploading image file", uploadDetails.id);
  const { id, byte_size } = uploadDetails;
  return uploadChunks(client, {
    url: `/api/v1/image_files/${id}/upload`,
    fileSize: byte_size,
    fileStream,
    maxSize: MAX_IMAGE_SIZE
  });
};
|
|
39
|
+
// Looks up an existing image file by its MD5 digest.
// Resolves with the file record, null when no match exists (HTTP 404),
// and throws on any other non-OK response.
const lookupImageFileByMd5 = async (client, md5) => {
  const res = await client.authenticatedFetch(`/api/v1/image_files/md5/${md5}`, {
    method: "GET"
  });
  log("Image file lookup", res);
  if (res.ok) return await res.json();
  if (res.status === 404) return null;
  throw new Error(
    `Failed to lookup image by md5 ${md5} ${res.status} ${res.statusText}`
  );
};
|
|
38
57
|
export {
|
|
39
58
|
CreateImageFilePayload,
|
|
40
59
|
createImageFile,
|
|
60
|
+
lookupImageFileByMd5,
|
|
41
61
|
uploadImageFile
|
|
42
62
|
};
|
|
@@ -0,0 +1,102 @@
|
|
|
1
|
+
import debug from "debug";
import { z } from "zod";
const log = debug("ef:api:isobmff-file");
// Upper bound for a fragment-index upload (2 MiB).
const FILE_SIZE_LIMIT = 1024 * 1024 * 2;
const CreateISOBMFFFilePayload = z.object({
  md5: z.string(),
  filename: z.string()
});
// Creates an ISOBMFF file record; resolves with the server's JSON record,
// throws on any non-OK response.
const createISOBMFFFile = async (client, payload) => {
  log("Creating isobmff file", payload);
  const response = await client.authenticatedFetch("/api/v1/isobmff_files", {
    method: "POST",
    body: JSON.stringify(payload)
  });
  log("ISOBMFF file created", response);
  if (response.ok) {
    return await response.json();
  }
  throw new Error(
    `Failed to create isobmff file ${response.status} ${response.statusText}`
  );
};
// Uploads the fragment index for a file record as a half-duplex streamed
// POST. Rejects up-front when the declared size exceeds FILE_SIZE_LIMIT.
const uploadFragmentIndex = async (client, fileId, fileStream, fileSize) => {
  log("Uploading fragment index", fileId);
  if (fileSize > FILE_SIZE_LIMIT) {
    throw new Error(`File size exceeds limit of ${FILE_SIZE_LIMIT} bytes`);
  }
  const response = await client.authenticatedFetch(
    `/api/v1/isobmff_files/${fileId}/index/upload`,
    {
      method: "POST",
      body: fileStream,
      duplex: "half"
    }
  );
  log("Fragment index uploaded", response);
  if (response.ok) {
    // `await` added for consistency with the rest of the module (was a
    // bare `return response.json()`; behavior is identical).
    return await response.json();
  }
  throw new Error(
    `Failed to create fragment index ${response.status} ${response.statusText}`
  );
};
// Looks up an ISOBMFF file record by MD5; resolves null on HTTP 404,
// throws on any other non-OK response.
const lookupISOBMFFFileByMd5 = async (client, md5) => {
  const response = await client.authenticatedFetch(
    `/api/v1/isobmff_files/md5/${md5}`,
    {
      method: "GET"
    }
  );
  log("ISOBMFF file lookup", response);
  if (response.ok) {
    return await response.json();
  }
  if (response.status === 404) {
    return null;
  }
  throw new Error(
    `Failed to lookup isobmff file by md5 ${md5} ${response.status} ${response.statusText}`
  );
};
// Fetches the transcription attached to a file; null when none exists (404).
const getISOBMFFFileTranscription = async (client, id) => {
  const response = await client.authenticatedFetch(
    `/api/v1/isobmff_files/${id}/transcription`
  );
  if (response.ok) {
    return await response.json();
  }
  if (response.status === 404) {
    return null;
  }
  throw new Error(
    `Failed to get isobmff file transcription ${id} ${response.status} ${response.statusText}`
  );
};
const TranscribeISOBMFFFilePayload = z.object({
  trackId: z.string().optional()
});
// Requests transcription of a file (optionally a single track via trackId).
const transcribeISOBMFFFile = async (client, id, payload = {}) => {
  const response = await client.authenticatedFetch(
    `/api/v1/isobmff_files/${id}/transcribe`,
    {
      method: "POST",
      body: JSON.stringify(payload)
    }
  );
  if (response.ok) {
    return await response.json();
  }
  throw new Error(
    `Failed to transcribe isobmff file ${id} ${response.status} ${response.statusText}`
  );
};
export {
  CreateISOBMFFFilePayload,
  TranscribeISOBMFFFilePayload,
  createISOBMFFFile,
  getISOBMFFFileTranscription,
  lookupISOBMFFFileByMd5,
  transcribeISOBMFFFile,
  uploadFragmentIndex
};
|
|
@@ -1,7 +1,50 @@
|
|
|
1
1
|
import debug from "debug";
|
|
2
2
|
import { z } from "zod";
|
|
3
|
-
import { AudioStreamSchema, VideoStreamSchema } from "@editframe/assets";
|
|
4
3
|
import { uploadChunks } from "../uploadChunks.js";
|
|
4
|
+
// Descriptor fields shared by audio and video streams (ffprobe-style keys —
// presumably probe output; confirm against the producer).
const commonStreamFields = {
  index: z.number(),
  codec_name: z.string(),
  codec_long_name: z.string(),
  codec_tag_string: z.string(),
  codec_tag: z.string(),
  r_frame_rate: z.string(),
  avg_frame_rate: z.string(),
  time_base: z.string(),
  disposition: z.record(z.unknown())
};
// Audio stream descriptor: timing fields are required here.
const AudioStreamSchema = z.object({
  ...commonStreamFields,
  codec_type: z.literal("audio"),
  sample_fmt: z.string(),
  sample_rate: z.string(),
  channels: z.number(),
  channel_layout: z.string(),
  bits_per_sample: z.number(),
  initial_padding: z.number().optional(),
  start_pts: z.number(),
  start_time: z.coerce.number(),
  duration_ts: z.number(),
  duration: z.coerce.number(),
  bit_rate: z.string()
});
// Video stream descriptor: timing/bitrate fields are optional here.
const VideoStreamSchema = z.object({
  ...commonStreamFields,
  codec_type: z.literal("video"),
  width: z.number(),
  height: z.number(),
  coded_width: z.number(),
  coded_height: z.number(),
  start_pts: z.number().optional(),
  start_time: z.coerce.number().optional(),
  duration_ts: z.number().optional(),
  duration: z.coerce.number().optional(),
  bit_rate: z.string().optional()
});
|
|
5
48
|
const log = debug("ef:api:isobmff-track");
|
|
6
49
|
const MAX_TRACK_SIZE = 1024 * 1024 * 1024;
|
|
7
50
|
const CreateISOBMFFTrackPayload = z.discriminatedUnion("type", [
|
|
@@ -0,0 +1,22 @@
|
|
|
1
|
+
import { ProgressIterator } from "../ProgressIterator.js";
// Opens a server-sent-events stream that reports progress for an ISOBMFF
// processing job and wraps it in an async-iterable ProgressIterator.
const getIsobmffProcessProgress = async (client, id) => {
  const source = await client.authenticatedEventSource(
    `/api/v1/process_isobmff/${id}/progress`
  );
  return new ProgressIterator(source);
};
// Fetches the current state of an ISOBMFF processing job; throws on any
// non-OK response.
const getIsobmffProcessInfo = async (client, id) => {
  const response = await client.authenticatedFetch(
    `/api/v1/process_isobmff/${id}`
  );
  if (!response.ok) {
    throw new Error(
      `Failed to get isobmff process info ${response.status} ${response.statusText}`
    );
  }
  return await response.json();
};
export {
  getIsobmffProcessInfo,
  getIsobmffProcessProgress
};
|