@editframe/api 0.11.0-beta.9 → 0.12.0-beta.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (51)
  1. package/dist/CHUNK_SIZE_BYTES.js +1 -1
  2. package/dist/ProgressIterator.d.ts +25 -0
  3. package/dist/ProgressIterator.js +99 -0
  4. package/dist/ProgressIterator.test.d.ts +1 -0
  5. package/dist/StreamEventSource.d.ts +50 -0
  6. package/dist/StreamEventSource.js +130 -0
  7. package/dist/StreamEventSource.test.d.ts +1 -0
  8. package/dist/client.d.ts +6 -3
  9. package/dist/client.js +20 -6
  10. package/dist/index.d.ts +7 -5
  11. package/dist/index.js +20 -11
  12. package/dist/readableFromBuffers.d.ts +1 -2
  13. package/dist/resources/caption-file.d.ts +7 -3
  14. package/dist/resources/caption-file.js +22 -2
  15. package/dist/resources/image-file.d.ts +7 -3
  16. package/dist/resources/image-file.js +19 -0
  17. package/dist/resources/isobmff-file.d.ts +16 -3
  18. package/dist/resources/isobmff-file.js +37 -2
  19. package/dist/resources/isobmff-track.d.ts +5 -7
  20. package/dist/resources/isobmff-track.js +44 -1
  21. package/dist/resources/process-isobmff.d.ts +12 -0
  22. package/dist/resources/process-isobmff.js +22 -0
  23. package/dist/resources/process-isobmff.test.d.ts +1 -0
  24. package/dist/resources/renders.d.ts +10 -5
  25. package/dist/resources/renders.js +21 -2
  26. package/dist/resources/transcriptions.d.ts +24 -0
  27. package/dist/resources/transcriptions.js +45 -0
  28. package/dist/resources/transcriptions.test.d.ts +1 -0
  29. package/dist/resources/unprocessed-file.d.ts +12 -53
  30. package/dist/resources/unprocessed-file.js +31 -130
  31. package/dist/streamChunker.d.ts +1 -2
  32. package/dist/streamChunker.js +20 -9
  33. package/dist/uploadChunks.d.ts +1 -2
  34. package/dist/uploadChunks.js +1 -4
  35. package/package.json +3 -2
  36. package/src/resources/caption-file.test.ts +57 -6
  37. package/src/resources/caption-file.ts +34 -5
  38. package/src/resources/image-file.test.ts +56 -5
  39. package/src/resources/image-file.ts +32 -4
  40. package/src/resources/isobmff-file.test.ts +57 -6
  41. package/src/resources/isobmff-file.ts +64 -5
  42. package/src/resources/isobmff-track.test.ts +3 -3
  43. package/src/resources/isobmff-track.ts +50 -5
  44. package/src/resources/process-isobmff.test.ts +62 -0
  45. package/src/resources/process-isobmff.ts +33 -0
  46. package/src/resources/renders.test.ts +51 -6
  47. package/src/resources/renders.ts +34 -5
  48. package/src/resources/transcriptions.test.ts +49 -0
  49. package/src/resources/transcriptions.ts +64 -0
  50. package/src/resources/unprocessed-file.test.ts +19 -430
  51. package/src/resources/unprocessed-file.ts +45 -161
package/dist/resources/isobmff-track.d.ts

@@ -1,4 +1,3 @@
- import { Readable } from 'node:stream';
  import { z } from 'zod';
  import { Client } from '../client.ts';
  export declare const CreateISOBMFFTrackPayload: z.ZodDiscriminatedUnion<"type", [z.ZodObject<{
@@ -76,8 +75,8 @@ export declare const CreateISOBMFFTrackPayload: z.ZodDiscriminatedUnion<"type",
  codec_name: z.ZodString;
  byte_size: z.ZodNumber;
  }, "strip", z.ZodTypeAny, {
- byte_size: number;
  type: "audio";
+ byte_size: number;
  codec_name: string;
  file_id: string;
  track_id: number;
@@ -106,8 +105,8 @@ export declare const CreateISOBMFFTrackPayload: z.ZodDiscriminatedUnion<"type",
  };
  duration_ms: number;
  }, {
- byte_size: number;
  type: "audio";
+ byte_size: number;
  codec_name: string;
  file_id: string;
  track_id: number;
@@ -204,8 +203,8 @@ export declare const CreateISOBMFFTrackPayload: z.ZodDiscriminatedUnion<"type",
  codec_name: z.ZodString;
  byte_size: z.ZodNumber;
  }, "strip", z.ZodTypeAny, {
- byte_size: number;
  type: "video";
+ byte_size: number;
  codec_name: string;
  file_id: string;
  track_id: number;
@@ -232,8 +231,8 @@ export declare const CreateISOBMFFTrackPayload: z.ZodDiscriminatedUnion<"type",
  };
  duration_ms: number;
  }, {
- byte_size: number;
  type: "video";
+ byte_size: number;
  codec_name: string;
  file_id: string;
  track_id: number;
@@ -265,8 +264,7 @@ export interface CreateISOBMFFTrackResult {
  byte_size: number;
  track_id: number;
  file_id: string;
- asset_id: string;
  complete: boolean;
  }
  export declare const createISOBMFFTrack: (client: Client, payload: z.infer<typeof CreateISOBMFFTrackPayload>) => Promise<CreateISOBMFFTrackResult>;
- export declare const uploadISOBMFFTrack: (client: Client, fileId: string, trackId: number, fileStream: Readable, trackSize: number) => import('../uploadChunks.ts').IteratorWithPromise<import('../uploadChunks.ts').UploadChunkEvent>;
+ export declare const uploadISOBMFFTrack: (client: Client, fileId: string, trackId: number, fileStream: ReadableStream, trackSize: number) => import('../uploadChunks.ts').IteratorWithPromise<import('../uploadChunks.ts').UploadChunkEvent>;
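Note on the signature change above: the upload helpers now take a WHATWG ReadableStream instead of a Node Readable. A minimal sketch of calling the new uploadISOBMFFTrack from a Blob follows; it assumes the helper is re-exported from the package entry point, that a configured Client instance is already available (neither is shown in this diff), and that progress events keep the { type: "progress", progress } shape emitted by uploadChunks.

// Sketch only: the "@editframe/api" re-export and the `client` value are assumptions.
import { uploadISOBMFFTrack } from "@editframe/api";

async function uploadTrackFromBlob(client: any, fileId: string, trackId: number, blob: Blob) {
  // Blob#stream() yields the web ReadableStream<Uint8Array> the 0.12 signature expects.
  const upload = uploadISOBMFFTrack(client, fileId, trackId, blob.stream(), blob.size);
  for await (const event of upload) {
    console.log(`upload progress: ${Math.round(event.progress * 100)}%`);
  }
}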
package/dist/resources/isobmff-track.js

@@ -1,7 +1,50 @@
  import debug from "debug";
  import { z } from "zod";
- import { AudioStreamSchema, VideoStreamSchema } from "@editframe/assets";
  import { uploadChunks } from "../uploadChunks.js";
+ const AudioStreamSchema = z.object({
+ index: z.number(),
+ codec_name: z.string(),
+ codec_long_name: z.string(),
+ codec_type: z.literal("audio"),
+ codec_tag_string: z.string(),
+ codec_tag: z.string(),
+ sample_fmt: z.string(),
+ sample_rate: z.string(),
+ channels: z.number(),
+ channel_layout: z.string(),
+ bits_per_sample: z.number(),
+ initial_padding: z.number().optional(),
+ r_frame_rate: z.string(),
+ avg_frame_rate: z.string(),
+ time_base: z.string(),
+ start_pts: z.number(),
+ start_time: z.coerce.number(),
+ duration_ts: z.number(),
+ duration: z.coerce.number(),
+ bit_rate: z.string(),
+ disposition: z.record(z.unknown())
+ });
+ const VideoStreamSchema = z.object({
+ index: z.number(),
+ codec_name: z.string(),
+ codec_long_name: z.string(),
+ codec_type: z.literal("video"),
+ codec_tag_string: z.string(),
+ codec_tag: z.string(),
+ width: z.number(),
+ height: z.number(),
+ coded_width: z.number(),
+ coded_height: z.number(),
+ r_frame_rate: z.string(),
+ avg_frame_rate: z.string(),
+ time_base: z.string(),
+ start_pts: z.number().optional(),
+ start_time: z.coerce.number().optional(),
+ duration_ts: z.number().optional(),
+ duration: z.coerce.number().optional(),
+ bit_rate: z.string().optional(),
+ disposition: z.record(z.unknown())
+ });
  const log = debug("ef:api:isobmff-track");
  const MAX_TRACK_SIZE = 1024 * 1024 * 1024;
  const CreateISOBMFFTrackPayload = z.discriminatedUnion("type", [
package/dist/resources/process-isobmff.d.ts

@@ -0,0 +1,12 @@
+ import { ProgressIterator } from '../ProgressIterator.ts';
+ import { Client } from '../client.ts';
+ export interface IsobmffProcessInfoResult {
+ id: string;
+ created_at: string;
+ completed_at: string | null;
+ failed_at: string | null;
+ isobmff_file_id: string | null;
+ unprocessed_file_id: string;
+ }
+ export declare const getIsobmffProcessProgress: (client: Client, id: string) => Promise<ProgressIterator>;
+ export declare const getIsobmffProcessInfo: (client: Client, id: string) => Promise<IsobmffProcessInfoResult>;
package/dist/resources/process-isobmff.js

@@ -0,0 +1,22 @@
+ import { ProgressIterator } from "../ProgressIterator.js";
+ const getIsobmffProcessProgress = async (client, id) => {
+ const eventSource = await client.authenticatedEventSource(
+ `/api/v1/process_isobmff/${id}/progress`
+ );
+ return new ProgressIterator(eventSource);
+ };
+ const getIsobmffProcessInfo = async (client, id) => {
+ const response = await client.authenticatedFetch(
+ `/api/v1/process_isobmff/${id}`
+ );
+ if (response.ok) {
+ return await response.json();
+ }
+ throw new Error(
+ `Failed to get isobmff process info ${response.status} ${response.statusText}`
+ );
+ };
+ export {
+ getIsobmffProcessInfo,
+ getIsobmffProcessProgress
+ };
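For consumers of the new process_isobmff endpoints, here is a hedged sketch of waiting for processing to finish by polling getIsobmffProcessInfo. It sidesteps getIsobmffProcessProgress because ProgressIterator's iteration API is not shown in this diff, and it assumes the helper is re-exported from the package entry point.

// Sketch: polling loop built only on the IsobmffProcessInfoResult fields declared above.
import { getIsobmffProcessInfo } from "@editframe/api";

async function waitForIsobmffProcess(client: any, processId: string, pollMs = 2000) {
  for (;;) {
    const info = await getIsobmffProcessInfo(client, processId);
    if (info.failed_at) throw new Error(`ISOBMFF processing failed at ${info.failed_at}`);
    if (info.completed_at) return info.isobmff_file_id;
    await new Promise((resolve) => setTimeout(resolve, pollMs));
  }
}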
package/dist/resources/process-isobmff.test.d.ts

@@ -0,0 +1 @@
+ export {};
package/dist/resources/renders.d.ts

@@ -1,4 +1,3 @@
- import { Readable } from 'node:stream';
  import { z } from 'zod';
  import { Client } from '../client.ts';
  export declare const CreateRenderPayload: z.ZodObject<{
@@ -13,23 +12,29 @@ export declare const CreateRenderPayload: z.ZodObject<{
  md5: string;
  height: number;
  width: number;
- strategy: "v1" | "v2";
  duration_ms: number;
  fps: number;
  work_slice_ms: number;
+ strategy: "v1" | "v2";
  }, {
  md5: string;
  height: number;
  width: number;
- strategy: "v1" | "v2";
  duration_ms: number;
  fps: number;
  work_slice_ms: number;
+ strategy: "v1" | "v2";
  }>;
  export interface CreateRenderResult {
  id: string;
  md5: string;
- status: "complete" | "created" | "failed" | "pending" | "rendering";
+ status: "complete" | "created" | "failed" | "pending" | "rendering" | string;
+ }
+ export interface LookupRenderByMd5Result {
+ id: string;
+ md5: string;
+ status: "complete" | "created" | "failed" | "pending" | "rendering" | string;
  }
  export declare const createRender: (client: Client, payload: z.infer<typeof CreateRenderPayload>) => Promise<CreateRenderResult>;
- export declare const uploadRender: (client: Client, fileId: string, fileStream: Readable, folderSize: number) => Promise<unknown>;
+ export declare const uploadRender: (client: Client, fileId: string, fileStream: ReadableStream, folderSize: number) => Promise<any>;
+ export declare const lookupRenderByMd5: (client: Client, md5: string) => Promise<LookupRenderByMd5Result | null>;
package/dist/resources/renders.js

@@ -1,5 +1,5 @@
- import { z } from "zod";
  import debug from "debug";
+ import { z } from "zod";
  const log = debug("ef:api:renders");
  const FILE_SIZE_LIMIT = 1024 * 1024 * 16;
  const CreateRenderPayload = z.object({
@@ -35,7 +35,8 @@ const uploadRender = async (client, fileId, fileStream, folderSize) => {
  `/api/v1/renders/${fileId}/upload`,
  {
  method: "POST",
- body: fileStream
+ body: fileStream,
+ duplex: "half"
  }
  );
  if (response.ok) {
@@ -45,8 +46,26 @@ const uploadRender = async (client, fileId, fileStream, folderSize) => {
  `Failed to upload render ${response.status} ${response.statusText}`
  );
  };
+ const lookupRenderByMd5 = async (client, md5) => {
+ const response = await client.authenticatedFetch(
+ `/api/v1/renders/md5/${md5}`,
+ {
+ method: "GET"
+ }
+ );
+ if (response.ok) {
+ return await response.json();
+ }
+ if (response.status === 404) {
+ return null;
+ }
+ throw new Error(
+ `Failed to lookup render by md5 ${md5} ${response.status} ${response.statusText}`
+ );
+ };
  export {
  CreateRenderPayload,
  createRender,
+ lookupRenderByMd5,
  uploadRender
  };
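The new lookupRenderByMd5 resolves to null on a 404, which makes a lookup-before-create pattern straightforward. A sketch follows, under the assumption that both helpers are re-exported from the package entry point; the payload fields mirror CreateRenderPayload as declared above.

// Sketch of de-duplicating renders by md5 before creating a new one.
import { createRender, lookupRenderByMd5 } from "@editframe/api";

interface RenderPayload {
  md5: string;
  height: number;
  width: number;
  duration_ms: number;
  fps: number;
  work_slice_ms: number;
  strategy: "v1" | "v2";
}

async function findOrCreateRender(client: any, payload: RenderPayload) {
  const existing = await lookupRenderByMd5(client, payload.md5);
  if (existing) return existing; // 404 resolves to null; other failures throw
  return createRender(client, payload);
}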
package/dist/resources/transcriptions.d.ts

@@ -0,0 +1,24 @@
+ import { z } from 'zod';
+ import { CompletionIterator } from '../ProgressIterator.ts';
+ import { Client } from '../client.ts';
+ export declare const CreateTranscriptionPayload: z.ZodObject<{
+ file_id: z.ZodString;
+ track_id: z.ZodNumber;
+ }, "strip", z.ZodTypeAny, {
+ file_id: string;
+ track_id: number;
+ }, {
+ file_id: string;
+ track_id: number;
+ }>;
+ export interface CreateTranscriptionResult {
+ id: string;
+ status: "complete" | "created" | "failed" | "pending" | "transcribing";
+ }
+ export interface TranscriptionInfoResult {
+ id: string;
+ status: "complete" | "created" | "failed" | "pending" | "transcribing";
+ }
+ export declare const createTranscription: (client: Client, payload: z.infer<typeof CreateTranscriptionPayload>) => Promise<CreateTranscriptionResult>;
+ export declare const getTranscriptionProgress: (client: Client, id: string) => Promise<CompletionIterator>;
+ export declare const getTranscriptionInfo: (client: Client, id: string) => Promise<TranscriptionInfoResult>;
package/dist/resources/transcriptions.js

@@ -0,0 +1,45 @@
+ import debug from "debug";
+ import { z } from "zod";
+ import { CompletionIterator } from "../ProgressIterator.js";
+ const log = debug("ef:api:transcriptions");
+ const CreateTranscriptionPayload = z.object({
+ file_id: z.string(),
+ track_id: z.number().int()
+ });
+ const createTranscription = async (client, payload) => {
+ log("Creating transcription", payload);
+ const response = await client.authenticatedFetch("/api/v1/transcriptions", {
+ method: "POST",
+ body: JSON.stringify(payload)
+ });
+ log("Transcription created", response);
+ if (response.ok) {
+ return await response.json();
+ }
+ throw new Error(
+ `Failed to create transcription ${response.status} ${response.statusText}`
+ );
+ };
+ const getTranscriptionProgress = async (client, id) => {
+ const eventSource = await client.authenticatedEventSource(
+ `/api/v1/transcriptions/${id}/progress`
+ );
+ return new CompletionIterator(eventSource);
+ };
+ const getTranscriptionInfo = async (client, id) => {
+ const response = await client.authenticatedFetch(
+ `/api/v1/transcriptions/${id}`
+ );
+ if (response.ok) {
+ return await response.json();
+ }
+ throw new Error(
+ `Failed to get transcription info ${response.status} ${response.statusText}`
+ );
+ };
+ export {
+ CreateTranscriptionPayload,
+ createTranscription,
+ getTranscriptionInfo,
+ getTranscriptionProgress
+ };
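A hedged sketch of the new transcription flow: create a transcription for an uploaded track, then poll getTranscriptionInfo until a terminal status. The SSE-backed getTranscriptionProgress (CompletionIterator) is the streaming alternative, but its iteration API is not shown in this diff; the entry-point re-export is assumed.

// Sketch built on CreateTranscriptionPayload and the status union declared above.
import { createTranscription, getTranscriptionInfo } from "@editframe/api";

async function transcribeTrack(client: any, fileId: string, trackId: number) {
  const { id } = await createTranscription(client, { file_id: fileId, track_id: trackId });
  for (;;) {
    const info = await getTranscriptionInfo(client, id);
    if (info.status === "failed") throw new Error(`Transcription ${id} failed`);
    if (info.status === "complete") return info;
    await new Promise((resolve) => setTimeout(resolve, 2000));
  }
}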
package/dist/resources/transcriptions.test.d.ts

@@ -0,0 +1 @@
+ export {};
package/dist/resources/unprocessed-file.d.ts

@@ -1,78 +1,37 @@
- import { Readable } from 'node:stream';
  import { z } from 'zod';
  import { Client } from '../client.ts';
- import { IteratorWithPromise, UploadChunkEvent } from '../uploadChunks.ts';
- declare const FileProcessors: z.ZodEffects<z.ZodArray<z.ZodUnion<[z.ZodLiteral<"isobmff">, z.ZodLiteral<"image">, z.ZodLiteral<"captions">, z.ZodString]>, "many">, string[], string[]>;
  export declare const CreateUnprocessedFilePayload: z.ZodObject<{
  md5: z.ZodString;
  filename: z.ZodString;
- processes: z.ZodOptional<z.ZodEffects<z.ZodArray<z.ZodUnion<[z.ZodLiteral<"isobmff">, z.ZodLiteral<"image">, z.ZodLiteral<"captions">, z.ZodString]>, "many">, string[], string[]>>;
  byte_size: z.ZodNumber;
  }, "strip", z.ZodTypeAny, {
  md5: string;
  filename: string;
  byte_size: number;
- processes?: string[] | undefined;
  }, {
  md5: string;
  filename: string;
  byte_size: number;
- processes?: string[] | undefined;
  }>;
- export declare const UpdateUnprocessedFilePayload: z.ZodObject<{
- processes: z.ZodOptional<z.ZodEffects<z.ZodArray<z.ZodUnion<[z.ZodLiteral<"isobmff">, z.ZodLiteral<"image">, z.ZodLiteral<"captions">, z.ZodString]>, "many">, string[], string[]>>;
- }, "strip", z.ZodTypeAny, {
- processes?: string[] | undefined;
- }, {
- processes?: string[] | undefined;
- }>;
- export interface CreateUnprocessedFileResult {
+ export declare const UpdateUnprocessedFilePayload: z.ZodObject<{}, "strip", z.ZodTypeAny, {}, {}>;
+ interface UnprocessedFile {
  byte_size: number;
  next_byte: number;
  complete: boolean;
  id: string;
  md5: string;
- processes: z.infer<typeof FileProcessors> & string[];
- asset_id: string;
  }
- export interface UpdateUnprocessedFileResult {
- byte_size?: number;
- next_byte: number;
- complete: boolean;
+ export interface CreateUnprocessedFileResult extends UnprocessedFile {
+ }
+ export interface LookupUnprocessedFileByMd5Result extends UnprocessedFile {
+ }
+ export interface UpdateUnprocessedFileResult extends UnprocessedFile {
+ }
+ export interface ProcessIsobmffFileResult {
  id: string;
- md5: string;
- processes: z.infer<typeof FileProcessors>;
- asset_id: string;
  }
  export declare const createUnprocessedFile: (client: Client, payload: z.infer<typeof CreateUnprocessedFilePayload>) => Promise<CreateUnprocessedFileResult>;
- export declare const updateUnprocessedFile: (client: Client, fileId: string, payload: Partial<z.infer<typeof UpdateUnprocessedFilePayload>>) => Promise<UpdateUnprocessedFileResult>;
- export declare const uploadUnprocessedFile: (client: Client, fileId: string, fileStream: Readable, fileSize: number) => IteratorWithPromise<UploadChunkEvent>;
- export declare const processAVFileBuffer: (client: Client, buffer: Buffer, filename?: string) => {
- progress(): AsyncGenerator<{
- type: string;
- progress: number;
- }, void, unknown>;
- file: () => Promise<UpdateUnprocessedFileResult>;
- };
- export declare const processAVFile: (client: Client, filePath: string) => {
- progress: () => Promise<AsyncGenerator<{
- type: string;
- progress: number;
- }, void, unknown>>;
- file: () => Promise<UpdateUnprocessedFileResult>;
- };
- export declare const processImageFileBuffer: (client: Client, buffer: Buffer, filename?: string) => {
- progress(): AsyncGenerator<{
- type: string;
- progress: number;
- }, void, unknown>;
- file: () => Promise<UpdateUnprocessedFileResult>;
- };
- export declare const processImageFile: (client: Client, filePath: string) => {
- progress: () => Promise<AsyncGenerator<{
- type: string;
- progress: number;
- }, void, unknown>>;
- file: () => Promise<UpdateUnprocessedFileResult>;
- };
+ export declare const uploadUnprocessedFile: (client: Client, fileId: string, fileStream: ReadableStream, fileSize: number) => import('../uploadChunks.ts').IteratorWithPromise<import('../uploadChunks.ts').UploadChunkEvent>;
+ export declare const lookupUnprocessedFileByMd5: (client: Client, md5: string) => Promise<LookupUnprocessedFileByMd5Result | null>;
+ export declare const processIsobmffFile: (client: Client, unprocessedFileId: LookupUnprocessedFileByMd5Result["id"]) => Promise<ProcessIsobmffFileResult>;
  export {};
package/dist/resources/unprocessed-file.js

@@ -1,36 +1,14 @@
- import { createReadStream } from "node:fs";
- import { stat } from "node:fs/promises";
- import { basename } from "node:path";
- import { Readable } from "node:stream";
  import debug from "debug";
  import { z } from "zod";
- import { md5Buffer, md5FilePath } from "@editframe/assets";
  import { uploadChunks } from "../uploadChunks.js";
  const log = debug("ef:api:unprocessed-file");
- const FileProcessor = z.union([
- z.literal("isobmff"),
- z.literal("image"),
- z.literal("captions"),
- z.string()
- ]);
- const FileProcessors = z.array(FileProcessor).refine(
- (value) => {
- return new Set(value).size === value.length;
- },
- {
- message: "Processors list must not include duplicates"
- }
- );
  const MAX_FILE_SIZE = 1024 * 1024 * 1024;
  const CreateUnprocessedFilePayload = z.object({
  md5: z.string(),
  filename: z.string(),
- processes: FileProcessors.optional(),
  byte_size: z.number().int().max(MAX_FILE_SIZE)
  });
- const UpdateUnprocessedFilePayload = z.object({
- processes: FileProcessors.optional()
- });
+ z.object({});
  const createUnprocessedFile = async (client, payload) => {
  log("Creating an unprocessed file", payload);
  CreateUnprocessedFilePayload.parse(payload);
@@ -54,24 +32,6 @@ const createUnprocessedFile = async (client, payload) => {
  `Failed to create unprocessed file ${response.status} ${response.statusText}`
  );
  };
- const updateUnprocessedFile = async (client, fileId, payload) => {
- log("Updating unprocessed file", fileId, payload);
- UpdateUnprocessedFilePayload.parse(payload);
- const response = await client.authenticatedFetch(
- `/api/v1/unprocessed_files/${fileId}`,
- {
- method: "POST",
- body: JSON.stringify(payload)
- }
- );
- log("Unprocessed file updated", response);
- if (response.ok) {
- return await response.json();
- }
- throw new Error(
- `Failed to update unprocessed file ${response.status} ${response.statusText}`
- );
- };
  const uploadUnprocessedFile = (client, fileId, fileStream, fileSize) => {
  log("Uploading unprocessed file", fileId);
  return uploadChunks(client, {
@@ -81,100 +41,41 @@ const uploadUnprocessedFile = (client, fileId, fileStream, fileSize) => {
  maxSize: MAX_FILE_SIZE
  });
  };
- const processResource = (client, filename, md5, byteSize, processor, doUpload) => {
- log("Processing", { filename, md5, byteSize, processor });
- const createFilePromise = createUnprocessedFile(client, {
- md5,
- processes: [],
- filename,
- byte_size: byteSize
- });
- return {
- async *progress() {
- const unprocessedFile = await createFilePromise;
- if (unprocessedFile.complete) {
- yield { type: "progress", progress: 0 };
- yield { type: "progress", progress: 1 };
- }
- yield* doUpload(unprocessedFile.id);
- },
- file: async () => {
- const unprocessedFile = await createFilePromise;
- if (unprocessedFile.complete) {
- return unprocessedFile;
- }
- await doUpload(unprocessedFile.id).whenUploaded();
- const fileInformation = await updateUnprocessedFile(
- client,
- unprocessedFile.id,
- {
- processes: [processor]
- }
- );
- log("File processed", fileInformation);
- return fileInformation;
+ const lookupUnprocessedFileByMd5 = async (client, md5) => {
+ const response = await client.authenticatedFetch(
+ `/api/v1/unprocessed_files/md5/${md5}`,
+ {
+ method: "GET"
  }
- };
- };
- const buildBufferProcessor = (processor) => {
- return (client, buffer, filename = "buffer") => {
- log(`Processing file buffer: ${processor}`, filename);
- const md5 = md5Buffer(buffer);
- return processResource(
- client,
- filename,
- md5,
- buffer.byteLength,
- processor,
- (id) => {
- const readStream = new Readable({
- read() {
- readStream.push(buffer);
- readStream.push(null);
- }
- });
- return uploadUnprocessedFile(client, id, readStream, buffer.byteLength);
- }
- );
- };
+ );
+ if (response.ok) {
+ return await response.json();
+ }
+ if (response.status === 404) {
+ return null;
+ }
+ throw new Error(
+ `Failed to lookup unprocessed file by md5 ${md5} ${response.status} ${response.statusText}`
+ );
  };
- const buildFileProcessor = (processor) => {
- return (client, filePath) => {
- const processPromise = async () => {
- const [md5, { size: byteSize }] = await Promise.all([
- md5FilePath(filePath),
- stat(filePath)
- ]);
- return processResource(
- client,
- basename(filePath),
- md5,
- byteSize,
- processor,
- (id) => {
- const readStream = createReadStream(filePath);
- return uploadUnprocessedFile(client, id, readStream, byteSize);
- }
- );
- };
- return {
- progress: async () => (await processPromise()).progress(),
- file: async () => (await processPromise()).file()
- };
- };
+ const processIsobmffFile = async (client, unprocessedFileId) => {
+ const response = await client.authenticatedFetch(
+ `/api/v1/unprocessed_files/${unprocessedFileId}/isobmff`,
+ {
+ method: "POST"
+ }
+ );
+ if (response.ok) {
+ return await response.json();
+ }
+ throw new Error(
+ `Failed to process isobmff file ${response.status} ${response.statusText}`
+ );
  };
- const processAVFileBuffer = buildBufferProcessor("isobmff");
- const processAVFile = buildFileProcessor("isobmff");
- const processImageFileBuffer = buildBufferProcessor("image");
- const processImageFile = buildFileProcessor("image");
  export {
  CreateUnprocessedFilePayload,
- UpdateUnprocessedFilePayload,
  createUnprocessedFile,
- processAVFile,
- processAVFileBuffer,
- processImageFile,
- processImageFileBuffer,
- updateUnprocessedFile,
+ lookupUnprocessedFileByMd5,
+ processIsobmffFile,
  uploadUnprocessedFile
  };
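With processAVFile/processAVFileBuffer (and updateUnprocessedFile) gone, the equivalent flow is recomposed from the lower-level helpers above: look the file up by md5, create it if missing, upload the bytes, then request ISOBMFF processing. A sketch follows, under the assumptions that the caller computes the md5 itself (the md5Buffer/md5FilePath imports were removed here), supplies a web ReadableStream, and that these helpers are re-exported from the package entry point.

// Sketch of the replacement flow; only the helper names come from this diff.
import {
  createUnprocessedFile,
  lookupUnprocessedFileByMd5,
  processIsobmffFile,
  uploadUnprocessedFile,
} from "@editframe/api";

async function uploadAndProcessIsobmff(
  client: any,
  opts: { md5: string; filename: string; blob: Blob },
) {
  // Reuse an existing upload when the server already knows this md5.
  let file = await lookupUnprocessedFileByMd5(client, opts.md5);
  if (!file) {
    file = await createUnprocessedFile(client, {
      md5: opts.md5,
      filename: opts.filename,
      byte_size: opts.blob.size,
    });
  }
  if (!file.complete) {
    // whenUploaded() drains the chunked upload and resolves once all chunks are sent.
    await uploadUnprocessedFile(client, file.id, opts.blob.stream(), opts.blob.size).whenUploaded();
  }
  // Kick off server-side ISOBMFF processing; progress can then be tracked via
  // getIsobmffProcessProgress / getIsobmffProcessInfo from process-isobmff above.
  return processIsobmffFile(client, file.id);
}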
package/dist/streamChunker.d.ts

@@ -1,2 +1 @@
- import { Readable } from 'node:stream';
- export declare function streamChunker(readableStream: Readable, chunkSize?: number): AsyncGenerator<Buffer, void, unknown>;
+ export declare function streamChunker(readableStream: ReadableStream, chunkSize?: number): AsyncGenerator<Uint8Array, void, unknown>;
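streamChunker now accepts only a web ReadableStream and yields Uint8Array chunks, so Node callers that used to pass an fs.ReadStream need an adapter. A sketch using Readable.toWeb (Node 17+) follows; the file path and the assumption that streamChunker is re-exported from the package entry point are illustrative.

// Sketch for Node callers migrating off the old Readable-based signature.
import { createReadStream } from "node:fs";
import { Readable } from "node:stream";
import { streamChunker } from "@editframe/api";

async function chunkLocalFile(path: string) {
  // Readable.toWeb() bridges a Node stream to the web ReadableStream the new API expects.
  const webStream = Readable.toWeb(createReadStream(path)) as ReadableStream<Uint8Array>;
  for await (const chunk of streamChunker(webStream)) {
    console.log(`chunk of ${chunk.byteLength} bytes`); // final chunk may be smaller than CHUNK_SIZE_BYTES
  }
}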
package/dist/streamChunker.js

@@ -1,15 +1,26 @@
  import { CHUNK_SIZE_BYTES } from "./CHUNK_SIZE_BYTES.js";
  async function* streamChunker(readableStream, chunkSize = CHUNK_SIZE_BYTES) {
- let buffer = Buffer.alloc(0);
- for await (const chunk of readableStream) {
- buffer = Buffer.concat([buffer, chunk]);
- while (buffer.length >= chunkSize) {
- yield buffer.slice(0, chunkSize);
- buffer = buffer.slice(chunkSize);
+ const reader = readableStream.getReader();
+ let buffer = new Uint8Array(0);
+ try {
+ while (true) {
+ const { done, value } = await reader.read();
+ if (done) break;
+ const chunk = value;
+ const newBuffer = new Uint8Array(buffer.length + chunk.length);
+ newBuffer.set(buffer);
+ newBuffer.set(chunk, buffer.length);
+ buffer = newBuffer;
+ while (buffer.length >= chunkSize) {
+ yield buffer.slice(0, chunkSize);
+ buffer = buffer.slice(chunkSize);
+ }
  }
- }
- if (buffer.length > 0) {
- yield buffer;
+ if (buffer.length > 0) {
+ yield buffer;
+ }
+ } finally {
+ reader.releaseLock();
  }
  }
  export {
package/dist/uploadChunks.d.ts

@@ -1,4 +1,3 @@
- import { Readable } from 'node:stream';
  import { Client } from './client.ts';
  export interface IteratorWithPromise<T> extends AsyncGenerator<T, void, unknown> {
  whenUploaded: () => Promise<T[]>;
@@ -6,7 +5,7 @@ export interface IteratorWithPromise<T> extends AsyncGenerator<T, void, unknown>
  export declare const fakeCompleteUpload: () => IteratorWithPromise<UploadChunkEvent>;
  interface UploadChunksOptions {
  url: string;
- fileStream: Readable;
+ fileStream: ReadableStream;
  fileSize: number;
  maxSize: number;
  chunkSizeBytes?: number;
package/dist/uploadChunks.js

@@ -51,7 +51,7 @@ function uploadChunks(client, {
  const uploadStatus = await client.authenticatedFetch(url);
  yield { type: "progress", progress: 0 };
  if (uploadStatus.status === 200) {
- log("Fragment track already uploaded");
+ log("Chunk already uploaded");
  yield { type: "progress", progress: 1 };
  return;
  }
@@ -72,9 +72,6 @@ function uploadChunks(client, {
  progress: Math.min(1, chunkNumber / (fileSize / chunkSizeBytes))
  };
  }
- if (!fileStream.readableEnded) {
- throw new Error("Did not read entire file stream");
- }
  if (!complete) {
  throw new Error("Did not complete upload");
  }