@editframe/api 0.8.0-beta.4 → 0.8.0-beta.7

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (47)
  1. package/dist/CHUNK_SIZE_BYTES.d.ts +1 -0
  2. package/dist/CHUNK_SIZE_BYTES.js +7 -0
  3. package/dist/client.d.ts +0 -2
  4. package/dist/client.js +2 -14
  5. package/dist/client.test.d.ts +1 -0
  6. package/dist/readableFromBuffers.d.ts +2 -0
  7. package/dist/resources/caption-file.d.ts +22 -3
  8. package/dist/resources/caption-file.js +28 -26
  9. package/dist/resources/caption-file.test.d.ts +1 -0
  10. package/dist/resources/image-file.d.ts +5 -3
  11. package/dist/resources/image-file.js +23 -27
  12. package/dist/resources/image-file.test.d.ts +1 -0
  13. package/dist/resources/isobmff-file.d.ts +2 -3
  14. package/dist/resources/isobmff-file.js +17 -23
  15. package/dist/resources/isobmff-file.test.d.ts +1 -0
  16. package/dist/resources/isobmff-track.d.ts +27 -28
  17. package/dist/resources/isobmff-track.js +18 -31
  18. package/dist/resources/isobmff-track.test.d.ts +1 -0
  19. package/dist/resources/renders.d.ts +4 -5
  20. package/dist/resources/renders.js +17 -21
  21. package/dist/resources/renders.test.d.ts +1 -0
  22. package/dist/resources/unprocessed-file.d.ts +14 -12
  23. package/dist/resources/unprocessed-file.js +36 -45
  24. package/dist/resources/unprocessed-file.test.d.ts +1 -0
  25. package/dist/resources/url-token.d.ts +0 -1
  26. package/dist/resources/url-token.test.d.ts +1 -0
  27. package/dist/streamChunker.d.ts +2 -0
  28. package/dist/streamChunker.js +17 -0
  29. package/dist/streamChunker.test.d.ts +1 -0
  30. package/dist/uploadChunks.d.ts +10 -0
  31. package/dist/uploadChunks.js +65 -0
  32. package/dist/uploadChunks.test.d.ts +1 -0
  33. package/package.json +4 -3
  34. package/src/resources/caption-file.test.ts +124 -0
  35. package/src/resources/caption-file.ts +49 -24
  36. package/src/resources/image-file.test.ts +138 -0
  37. package/src/resources/image-file.ts +28 -25
  38. package/src/resources/isobmff-file.test.ts +108 -0
  39. package/src/resources/isobmff-file.ts +19 -22
  40. package/src/resources/isobmff-track.test.ts +152 -0
  41. package/src/resources/isobmff-track.ts +22 -31
  42. package/src/resources/renders.test.ts +112 -0
  43. package/src/resources/renders.ts +19 -20
  44. package/src/resources/test-av-file.txt +1 -0
  45. package/src/resources/unprocessed-file.test.ts +312 -0
  46. package/src/resources/unprocessed-file.ts +41 -44
  47. package/src/resources/url-token.test.ts +46 -0
package/dist/resources/isobmff-track.test.d.ts ADDED
@@ -0,0 +1 @@
+ export {};
package/dist/resources/renders.d.ts CHANGED
@@ -1,7 +1,6 @@
  import { Readable } from 'node:stream';
  import { z } from 'zod';
  import { Client } from '../client.ts';
-
  export declare const CreateRenderPayload: z.ZodObject<{
  id: z.ZodString;
  fps: z.ZodNumber;
@@ -14,22 +13,22 @@ export declare const CreateRenderPayload: z.ZodObject<{
  id: string;
  height: number;
  width: number;
+ strategy: "v1" | "v2";
  duration_ms: number;
  fps: number;
  work_slice_ms: number;
- strategy: "v1" | "v2";
  }, {
  id: string;
  height: number;
  width: number;
+ strategy: "v1" | "v2";
  duration_ms: number;
  fps: number;
  work_slice_ms: number;
- strategy: "v1" | "v2";
  }>;
  export interface CreateRenderResult {
  status: "complete" | "created" | "failed" | "pending" | "rendering";
  id: string;
  }
- export declare const createRender: (client: Client, payload: z.infer<typeof CreateRenderPayload>) => Promise<CreateRenderResult | undefined>;
- export declare const uploadRender: (client: Client, fileId: string, fileStream: Readable) => Promise<unknown>;
+ export declare const createRender: (client: Client, payload: z.infer<typeof CreateRenderPayload>) => Promise<CreateRenderResult>;
+ export declare const uploadRender: (client: Client, fileId: string, fileStream: Readable, folderSize: number) => Promise<unknown>;
package/dist/resources/renders.js CHANGED
@@ -1,6 +1,7 @@
  import { z } from "zod";
  import debug from "debug";
  const log = debug("ef:api:renders");
+ const FILE_SIZE_LIMIT = 1024 * 1024 * 16;
  const CreateRenderPayload = z.object({
  id: z.string().uuid(),
  fps: z.number(),
@@ -17,37 +18,32 @@ const createRender = async (client, payload) => {
  body: JSON.stringify(payload)
  });
  log("Render created", response);
- switch (response.status) {
- case 200: {
- return await response.json();
- }
- default: {
- console.error("Failed to create render");
- console.error(await response.json());
- return;
- }
+ if (response.ok) {
+ return await response.json();
  }
+ throw new Error(
+ `Failed to create render ${response.status} ${response.statusText}`
+ );
  };
- const uploadRender = async (client, fileId, fileStream) => {
+ const uploadRender = async (client, fileId, fileStream, folderSize) => {
  log("Uploading render", fileId);
- const fileIndex = await client.authenticatedFetch(
+ log("Folder size", folderSize, "bytes");
+ if (folderSize > FILE_SIZE_LIMIT) {
+ throw new Error(`File size exceeds limit of ${FILE_SIZE_LIMIT} bytes`);
+ }
+ const response = await client.authenticatedFetch(
  `/api/video2/renders/${fileId}/upload`,
  {
  method: "POST",
  body: fileStream
  }
  );
- log("Render uploaded", fileIndex);
- switch (fileIndex.status) {
- case 200: {
- return fileIndex.json();
- }
- default: {
- console.error("Failed to upload render");
- console.error(fileIndex.status, fileIndex.statusText);
- return;
- }
+ if (response.ok) {
+ return response.json();
  }
+ throw new Error(
+ `Failed to upload render ${response.status} ${response.statusText}`
+ );
  };
  export {
  CreateRenderPayload,
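
With these changes, createRender rejects with an Error on any non-OK response instead of resolving to undefined, and uploadRender now requires the size of the rendered output up front, throwing when it exceeds FILE_SIZE_LIMIT (16 MiB). A minimal caller sketch; the import paths mirror the package's internal layout, and the token, host, and file path are placeholders rather than documented values:

import { createReadStream } from "node:fs";
import { stat } from "node:fs/promises";
import { randomUUID } from "node:crypto";
import { Client } from "./client.ts";
import { createRender, uploadRender } from "./resources/renders.ts";

const client = new Client("ef_YOUR_TOKEN", "https://api.example.com");

// createRender now resolves with CreateRenderResult or throws, so no undefined check is needed.
const render = await createRender(client, {
  id: randomUUID(),
  fps: 30,
  width: 1920,
  height: 1080,
  duration_ms: 5000,
  work_slice_ms: 1000,
  strategy: "v2",
});

// uploadRender's new fourth argument is the byte size of the upload,
// checked against the 16 MiB limit before any bytes are sent.
const outputPath = "./render-output.bin"; // hypothetical rendered output
const { size } = await stat(outputPath);
await uploadRender(client, render.id, createReadStream(outputPath), size);
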
package/dist/resources/renders.test.d.ts ADDED
@@ -0,0 +1 @@
+ export {};
package/dist/resources/unprocessed-file.d.ts CHANGED
@@ -1,43 +1,45 @@
  import { Readable } from 'node:stream';
  import { z } from 'zod';
  import { Client } from '../client.ts';
-
- declare const FileProcessors: z.ZodEffects<z.ZodArray<z.ZodUnion<[z.ZodLiteral<"isobmff">, z.ZodLiteral<"captions">]>, "many">, ("isobmff" | "captions")[], ("isobmff" | "captions")[]>;
+ declare const FileProcessors: z.ZodEffects<z.ZodArray<z.ZodUnion<[z.ZodLiteral<"isobmff">, z.ZodLiteral<"captions">]>, "many">, ("captions" | "isobmff")[], ("captions" | "isobmff")[]>;
  export declare const CreateUnprocessedFilePayload: z.ZodObject<{
  id: z.ZodString;
  filename: z.ZodString;
- processes: z.ZodOptional<z.ZodEffects<z.ZodArray<z.ZodUnion<[z.ZodLiteral<"isobmff">, z.ZodLiteral<"captions">]>, "many">, ("isobmff" | "captions")[], ("isobmff" | "captions")[]>>;
+ processes: z.ZodOptional<z.ZodEffects<z.ZodArray<z.ZodUnion<[z.ZodLiteral<"isobmff">, z.ZodLiteral<"captions">]>, "many">, ("captions" | "isobmff")[], ("captions" | "isobmff")[]>>;
+ byte_size: z.ZodNumber;
  }, "strip", z.ZodTypeAny, {
  id: string;
  filename: string;
- processes?: ("isobmff" | "captions")[] | undefined;
+ byte_size: number;
+ processes?: ("captions" | "isobmff")[] | undefined;
  }, {
  id: string;
  filename: string;
- processes?: ("isobmff" | "captions")[] | undefined;
+ byte_size: number;
+ processes?: ("captions" | "isobmff")[] | undefined;
  }>;
  export declare const UpdateUnprocessedFilePayload: z.ZodObject<{
- processes: z.ZodOptional<z.ZodEffects<z.ZodArray<z.ZodUnion<[z.ZodLiteral<"isobmff">, z.ZodLiteral<"captions">]>, "many">, ("isobmff" | "captions")[], ("isobmff" | "captions")[]>>;
+ processes: z.ZodOptional<z.ZodEffects<z.ZodArray<z.ZodUnion<[z.ZodLiteral<"isobmff">, z.ZodLiteral<"captions">]>, "many">, ("captions" | "isobmff")[], ("captions" | "isobmff")[]>>;
  }, "strip", z.ZodTypeAny, {
- processes?: ("isobmff" | "captions")[] | undefined;
+ processes?: ("captions" | "isobmff")[] | undefined;
  }, {
- processes?: ("isobmff" | "captions")[] | undefined;
+ processes?: ("captions" | "isobmff")[] | undefined;
  }>;
  export interface CreateUnprocessedFileResult {
  byte_size: number;
- last_received_byte: number;
+ next_byte: number;
  id: string;
  processes: z.infer<typeof FileProcessors>;
  }
  export interface UpdateUnprocessedFileResult {
- byte_size: number;
- last_received_byte: number;
+ byte_size?: number;
+ next_byte: number;
  id: string;
  processes: z.infer<typeof FileProcessors>;
  }
  export declare const createUnprocessedFile: (client: Client, payload: z.infer<typeof CreateUnprocessedFilePayload>) => Promise<CreateUnprocessedFileResult>;
  export declare const updateUnprocessedFile: (client: Client, fileId: string, payload: Partial<z.infer<typeof UpdateUnprocessedFilePayload>>) => Promise<UpdateUnprocessedFileResult>;
- export declare const uploadUnprocessedFile: (client: Client, fileId: string, fileStream: Readable) => Promise<unknown>;
+ export declare const uploadUnprocessedFile: (client: Client, fileId: string, fileStream: Readable, fileSize: number) => Promise<void>;
  export declare const processAVFileBuffer: (client: Client, buffer: Buffer, filename?: string) => Promise<UpdateUnprocessedFileResult>;
  export declare const processAVFile: (client: Client, filePath: string) => Promise<UpdateUnprocessedFileResult>;
  export {};
package/dist/resources/unprocessed-file.js CHANGED
@@ -1,9 +1,11 @@
  import { Readable } from "node:stream";
  import { basename } from "node:path";
  import { createReadStream } from "node:fs";
+ import { stat } from "node:fs/promises";
  import { z } from "zod";
  import debug from "debug";
  import { md5Buffer, md5FilePath } from "@editframe/assets";
+ import { uploadChunks } from "../uploadChunks.js";
  const log = debug("ef:api:unprocessed-file");
  const FileProcessors = z.array(z.union([z.literal("isobmff"), z.literal("captions")])).refine(
  (value) => {
@@ -13,16 +15,19 @@ const FileProcessors = z.array(z.union([z.literal("isobmff"), z.literal("caption
  message: "Processors list must not include duplicates"
  }
  );
+ const MAX_FILE_SIZE = 1024 * 1024 * 1024;
  const CreateUnprocessedFilePayload = z.object({
  id: z.string(),
  filename: z.string(),
- processes: FileProcessors.optional()
+ processes: FileProcessors.optional(),
+ byte_size: z.number().int().max(MAX_FILE_SIZE)
  });
  const UpdateUnprocessedFilePayload = z.object({
  processes: FileProcessors.optional()
  });
  const createUnprocessedFile = async (client, payload) => {
  log("Creating an unprocessed file", payload);
+ CreateUnprocessedFilePayload.parse(payload);
  const response = await client.authenticatedFetch(
  "/api/v1/unprocessed_files",
  {
@@ -36,21 +41,16 @@ const createUnprocessedFile = async (client, payload) => {
  response.statusText,
  response.headers
  );
- switch (response.status) {
- case 200: {
- return await response.json();
- }
- default: {
- console.error(
- `Failed to create file ${response.status} ${response.statusText}`
- );
- console.error(await response.text());
- throw new Error("Failed to create unprocessed file");
- }
+ if (response.ok) {
+ return await response.json();
  }
+ throw new Error(
+ `Failed to create unprocessed file ${response.status} ${response.statusText}`
+ );
  };
  const updateUnprocessedFile = async (client, fileId, payload) => {
  log("Updating unprocessed file", fileId, payload);
+ UpdateUnprocessedFilePayload.parse(payload);
  const response = await client.authenticatedFetch(
  `/api/v1/unprocessed_files/${fileId}`,
  {
@@ -59,47 +59,32 @@ const updateUnprocessedFile = async (client, fileId, payload) => {
  }
  );
  log("Unprocessed file updated", response);
- switch (response.status) {
- case 200: {
- return await response.json();
- }
- default: {
- console.error(
- `Failed to update file ${response.status} ${response.statusText}`
- );
- throw new Error("Failed to update unprocessed file");
- }
+ if (response.ok) {
+ return await response.json();
  }
+ throw new Error(
+ `Failed to update unprocessed file ${response.status} ${response.statusText}`
+ );
  };
- const uploadUnprocessedFile = async (client, fileId, fileStream) => {
+ const uploadUnprocessedFile = async (client, fileId, fileStream, fileSize) => {
  log("Uploading unprocessed file", fileId);
- const unprocessedFile = await client.authenticatedFetch(
- `/api/v1/unprocessed_files/${fileId}/upload`,
- {
- method: "POST",
- body: fileStream
- }
- );
- log("Unprocessed file track uploaded", unprocessedFile);
- switch (unprocessedFile.status) {
- case 200: {
- return unprocessedFile.json();
- }
- default: {
- console.error("Failed to upload unprocessed file");
- console.error(unprocessedFile.status, unprocessedFile.statusText);
- throw new Error("Failed to upload unprocessed file");
- }
- }
+ await uploadChunks(client, {
+ url: `/api/v1/unprocessed_files/${fileId}/upload`,
+ fileSize,
+ fileStream
+ });
+ log("Unprocessed file upload complete");
  };
  const processAVFileBuffer = async (client, buffer, filename = "buffer") => {
  log("Processing AV file buffer");
  const fileId = md5Buffer(buffer);
  log("File ID", fileId);
+ log(`File size: ${buffer.byteLength} bytes`);
  await createUnprocessedFile(client, {
  id: fileId,
  processes: [],
- filename
+ filename,
+ byte_size: buffer.byteLength
  });
  const readStream = new Readable({
  read() {
@@ -107,7 +92,7 @@ const processAVFileBuffer = async (client, buffer, filename = "buffer") => {
  readStream.push(null);
  }
  });
- await uploadUnprocessedFile(client, fileId, readStream);
+ await uploadUnprocessedFile(client, fileId, readStream, buffer.byteLength);
  const fileInformation = await updateUnprocessedFile(client, fileId, {
  processes: ["isobmff"]
  });
@@ -121,10 +106,16 @@ const processAVFile = async (client, filePath) => {
  await createUnprocessedFile(client, {
  id: fileId,
  processes: [],
- filename: basename(filePath)
+ filename: basename(filePath),
+ byte_size: (await stat(filePath)).size
  });
  const readStream = createReadStream(filePath);
- await uploadUnprocessedFile(client, fileId, readStream);
+ await uploadUnprocessedFile(
+ client,
+ fileId,
+ readStream,
+ (await stat(filePath)).size
+ );
  const fileInformation = await updateUnprocessedFile(client, fileId, {
  processes: ["isobmff"]
  });
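
The upload path for unprocessed files now goes through uploadChunks instead of a single POST: createUnprocessedFile requires a byte_size (validated against the 1 GiB MAX_FILE_SIZE), uploadUnprocessedFile takes the file size and resolves to void, and results report next_byte rather than last_received_byte. The processAVFile helpers wrap all of this; a usage sketch, with import paths mirroring the package layout and placeholder credentials and file names:

import { readFile } from "node:fs/promises";
import { Client } from "./client.ts";
import { processAVFile, processAVFileBuffer } from "./resources/unprocessed-file.ts";

const client = new Client("ef_YOUR_TOKEN", "https://api.example.com");

// processAVFile stats the file itself, passes byte_size to createUnprocessedFile,
// streams the bytes through uploadChunks, then requests isobmff processing.
const info = await processAVFile(client, "./input.mp4"); // hypothetical local file
console.log(info.id, info.next_byte, info.processes);

// The buffer variant does the same, using buffer.byteLength as the size.
const buffer = await readFile("./input.mp4");
const bufferInfo = await processAVFileBuffer(client, buffer, "input.mp4");
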
package/dist/resources/unprocessed-file.test.d.ts ADDED
@@ -0,0 +1 @@
+ export {};
package/dist/resources/url-token.d.ts CHANGED
@@ -1,5 +1,4 @@
  import { Client } from '../client.ts';
-
  export interface URLTokenResult {
  token: string;
  }
package/dist/resources/url-token.test.d.ts ADDED
@@ -0,0 +1 @@
+ export {};
package/dist/streamChunker.d.ts ADDED
@@ -0,0 +1,2 @@
+ import { Readable } from 'node:stream';
+ export declare function streamChunker(readableStream: Readable, chunkSize?: number): AsyncGenerator<Buffer, void, unknown>;
package/dist/streamChunker.js ADDED
@@ -0,0 +1,17 @@
+ import { CHUNK_SIZE_BYTES } from "./CHUNK_SIZE_BYTES.js";
+ async function* streamChunker(readableStream, chunkSize = CHUNK_SIZE_BYTES) {
+ let buffer = Buffer.alloc(0);
+ for await (const chunk of readableStream) {
+ buffer = Buffer.concat([buffer, chunk]);
+ while (buffer.length >= chunkSize) {
+ yield buffer.slice(0, chunkSize);
+ buffer = buffer.slice(chunkSize);
+ }
+ }
+ if (buffer.length > 0) {
+ yield buffer;
+ }
+ }
+ export {
+ streamChunker
+ };
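
streamChunker is a small async generator that re-slices any Readable into fixed-size Buffers: it accumulates incoming data, yields full chunkSize slices as soon as they are available, and emits one final shorter chunk for whatever remains. A usage sketch; the file path and the 1024-byte chunk size are illustrative, the package default being CHUNK_SIZE_BYTES:

import { createReadStream } from "node:fs";
import { streamChunker } from "./streamChunker.js";

const stream = createReadStream("./input.mp4"); // hypothetical local file
let index = 0;
for await (const chunk of streamChunker(stream, 1024)) {
  // Every chunk except possibly the last is exactly 1024 bytes.
  console.log(`chunk ${index++}: ${chunk.length} bytes`);
}
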
package/dist/streamChunker.test.d.ts ADDED
@@ -0,0 +1 @@
+ export {};
package/dist/uploadChunks.d.ts ADDED
@@ -0,0 +1,10 @@
+ import { Readable } from 'node:stream';
+ import { Client } from './client.ts';
+ interface UploadChunksOptions {
+ url: string;
+ fileStream: Readable;
+ fileSize: number;
+ chunkSizeBytes?: number;
+ }
+ export declare function uploadChunks(client: Client, { url, fileSize, fileStream, chunkSizeBytes, }: UploadChunksOptions): Promise<void>;
+ export {};
package/dist/uploadChunks.js ADDED
@@ -0,0 +1,65 @@
+ import debug from "debug";
+ import { streamChunker } from "./streamChunker.js";
+ import { CHUNK_SIZE_BYTES } from "./CHUNK_SIZE_BYTES.js";
+ const log = debug("ef:api:uploadChunk");
+ const uploadChunk = async (client, {
+ url,
+ chunkBuffer,
+ chunkNumber,
+ fileSize,
+ chunkSizeBytes = CHUNK_SIZE_BYTES
+ }) => {
+ const startByte = chunkNumber * chunkSizeBytes;
+ const endByte = startByte + chunkBuffer.length - 1;
+ log(`Uploading chunk ${chunkNumber} for ${url}`);
+ const response = await client.authenticatedFetch(url, {
+ method: "POST",
+ headers: {
+ "Content-Range": `bytes=${startByte}-${endByte}/${fileSize}`,
+ "Content-Type": "application/octet-stream"
+ },
+ body: chunkBuffer
+ });
+ if (response.ok) {
+ if (response.status === 201) {
+ log(`File ${url} fully uploaded`);
+ return { complete: true, body: await response.json() };
+ }
+ if (response.status === 202) {
+ log(`File ${url} chunk ${chunkNumber} uploaded`);
+ return { complete: false, body: await response.json() };
+ }
+ }
+ throw new Error(
+ `Failed to upload chunk ${chunkNumber} for ${url} ${response.status} ${response.statusText}`
+ );
+ };
+ async function uploadChunks(client, {
+ url,
+ fileSize,
+ fileStream,
+ chunkSizeBytes = CHUNK_SIZE_BYTES
+ }) {
+ let chunkNumber = 0;
+ let complete = false;
+ for await (const chunkBuffer of streamChunker(fileStream, chunkSizeBytes)) {
+ log(`Uploading chunk ${chunkNumber}`);
+ ({ complete } = await uploadChunk(client, {
+ url,
+ chunkBuffer,
+ chunkNumber,
+ fileSize,
+ chunkSizeBytes
+ }));
+ chunkNumber++;
+ }
+ if (!fileStream.readableEnded) {
+ throw new Error("Did not read entire file stream");
+ }
+ if (!complete) {
+ throw new Error("Did not complete upload");
+ }
+ }
+ export {
+ uploadChunks
+ };
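
uploadChunks drives streamChunker and POSTs each slice with a Content-Range header of the form bytes=<start>-<end>/<fileSize>; a 202 response means the server expects more chunks, a 201 marks the upload complete, and anything else throws. Resource helpers such as uploadUnprocessedFile call it internally, but a direct call looks like this sketch, with a placeholder file id and the unprocessed-files route used elsewhere in this diff:

import { createReadStream } from "node:fs";
import { stat } from "node:fs/promises";
import { Client } from "./client.ts";
import { uploadChunks } from "./uploadChunks.js";

const client = new Client("ef_YOUR_TOKEN", "https://api.example.com");
const filePath = "./input.mp4"; // hypothetical local file
const { size } = await stat(filePath);

// Resolves only after the final chunk is acknowledged with a 201;
// throws if the stream ends early or the server never reports completion.
await uploadChunks(client, {
  url: "/api/v1/unprocessed_files/FILE_ID/upload", // FILE_ID is a placeholder
  fileSize: size,
  fileStream: createReadStream(filePath),
});
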
package/dist/uploadChunks.test.d.ts ADDED
@@ -0,0 +1 @@
+ export {};
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@editframe/api",
- "version": "0.8.0-beta.4",
+ "version": "0.8.0-beta.7",
  "description": "API functions for EditFrame",
  "exports": {
  ".": {
@@ -21,13 +21,14 @@
  "devDependencies": {
  "@types/jsonwebtoken": "^9.0.6",
  "@types/node": "^20.14.13",
+ "typedoc": "^0.26.5",
  "typescript": "^5.5.4",
  "vite": "^5.2.11",
- "vite-plugin-dts": "^3.9.1",
+ "vite-plugin-dts": "^4.0.3",
  "vite-tsconfig-paths": "^4.3.2"
  },
  "dependencies": {
- "@editframe/assets": "0.8.0-beta.4",
+ "@editframe/assets": "0.8.0-beta.7",
  "debug": "^4.3.5",
  "jsonwebtoken": "^9.0.2",
  "node-fetch": "^3.3.2",
package/src/resources/caption-file.test.ts ADDED
@@ -0,0 +1,124 @@
+ import { test, expect, beforeAll, afterEach, afterAll, describe } from "vitest";
+ import { http, HttpResponse } from "msw";
+ import { setupServer } from "msw/node";
+
+ import { Client } from "../client.ts";
+ import { createCaptionFile, uploadCaptionFile } from "./caption-file.ts";
+ import { readableFromBuffers } from "../readableFromBuffers.ts";
+
+ const server = setupServer();
+ const client = new Client("ef_TEST_TOKEN", "http://localhost");
+
+ describe("CaptionFile", () => {
+ beforeAll(() => server.listen());
+ afterEach(() => server.resetHandlers());
+ afterAll(() => server.close());
+
+ describe("createCaptionFile", () => {
+ test("Throws when file is too large", async () => {
+ await expect(
+ createCaptionFile(client, {
+ id: "test-id",
+ filename: "test",
+ byte_size: 1024 * 1024 * 3,
+ }),
+ ).rejects.toThrowError(
+ "File size 3145728 bytes exceeds limit 2097152 bytes",
+ );
+ });
+
+ test("Throws when server returns an error", async () => {
+ server.use(
+ http.post("http://localhost/api/video2/caption_files", () =>
+ HttpResponse.text("Internal Server Error", { status: 500 }),
+ ),
+ );
+
+ await expect(
+ createCaptionFile(client, {
+ id: "test-id",
+ filename: "test",
+ byte_size: 4,
+ }),
+ ).rejects.toThrowError(
+ "Failed to create caption 500 Internal Server Error",
+ );
+ });
+
+ test("Returns json data from the http response", async () => {
+ server.use(
+ http.post("http://localhost/api/video2/caption_files", () =>
+ HttpResponse.json(
+ { id: "test-id" },
+ { status: 200, statusText: "OK" },
+ ),
+ ),
+ );
+
+ const response = await createCaptionFile(client, {
+ id: "test-id",
+ filename: "test",
+ byte_size: 4,
+ });
+
+ expect(response).toEqual({ id: "test-id" });
+ });
+ });
+
+ describe("uploadCaptionFile", () => {
+ test("Throws when file is too large", async () => {
+ await expect(
+ uploadCaptionFile(
+ client,
+ "test-id",
+ readableFromBuffers(Buffer.from("test")),
+ 1024 * 1024 * 3,
+ ),
+ ).rejects.toThrowError(
+ "File size 3145728 bytes exceeds limit 2097152 bytes",
+ );
+ });
+
+ test("Throws when server returns an error", async () => {
+ server.use(
+ http.post(
+ "http://localhost/api/video2/caption_files/test-id/upload",
+ () => HttpResponse.text("Internal Server Error", { status: 500 }),
+ ),
+ );
+
+ await expect(
+ uploadCaptionFile(
+ client,
+ "test-id",
+ readableFromBuffers(Buffer.from("nice")),
+ 4,
+ ),
+ ).rejects.toThrowError(
+ "Failed to upload caption 500 Internal Server Error",
+ );
+ });
+
+ test("Returns json data from the http response", async () => {
+ server.use(
+ http.post(
+ "http://localhost/api/video2/caption_files/test-id/upload",
+ () =>
+ HttpResponse.json(
+ { id: "test-id" },
+ { status: 200, statusText: "OK" },
+ ),
+ ),
+ ),
+ );
+
+ const response = await uploadCaptionFile(
+ client,
+ "test-id",
+ readableFromBuffers(Buffer.from("nice")),
+ 4,
+ );
+
+ expect(response).toEqual({ id: "test-id" });
+ });
+ });
+ });
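
The new caption-file tests pin down the beta.7 behavior: createCaptionFile and uploadCaptionFile both take an explicit byte size, reject anything above 2 MiB (2097152 bytes), throw on non-OK responses, and otherwise return the parsed JSON body. A sketch of the happy path against a live client; the import paths mirror the test file's, and the token, host, id, and caption content are placeholders:

import { Client } from "../client.ts";
import { createCaptionFile, uploadCaptionFile } from "./caption-file.ts";
import { readableFromBuffers } from "../readableFromBuffers.ts";

const client = new Client("ef_YOUR_TOKEN", "https://api.example.com");
const captions = Buffer.from("WEBVTT\n\n00:00.000 --> 00:01.000\nHello"); // hypothetical caption data

// Both calls receive the byte size up front so the 2 MiB limit is enforced client-side.
const created = await createCaptionFile(client, {
  id: "caption-id", // placeholder id
  filename: "captions.vtt",
  byte_size: captions.byteLength,
});
await uploadCaptionFile(client, created.id, readableFromBuffers(captions), captions.byteLength);
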