@editframe/api 0.16.8-beta.0 → 0.17.6-beta.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,81 +1,45 @@
+ import { uploadChunks } from "../uploadChunks.js";
+ import { assertTypesMatch } from "../utils/assertTypesMatch.js";
  import debug from "debug";
  import { z } from "zod";
- import { uploadChunks } from "../uploadChunks.js";
  const log = debug("ef:api:unprocessed-file");
  const MAX_FILE_SIZE = 1024 * 1024 * 1024;
  const CreateUnprocessedFilePayload = z.object({
-   md5: z.string(),
-   filename: z.string(),
-   byte_size: z.number().int().max(MAX_FILE_SIZE)
+   md5: z.string(),
+   filename: z.string(),
+   byte_size: z.number().int().max(MAX_FILE_SIZE)
  });
- z.object({});
+ const UpdateUnprocessedFilePayload = z.object({});
+ assertTypesMatch(true);
  const createUnprocessedFile = async (client, payload) => {
-   log("Creating an unprocessed file", payload);
-   CreateUnprocessedFilePayload.parse(payload);
-   const response = await client.authenticatedFetch(
-     "/api/v1/unprocessed_files",
-     {
-       method: "POST",
-       body: JSON.stringify(payload)
-     }
-   );
-   log(
-     "Unprocessed file created",
-     response.status,
-     response.statusText,
-     response.headers
-   );
-   if (response.ok) {
-     return await response.json();
-   }
-   throw new Error(
-     `Failed to create unprocessed file ${response.status} ${response.statusText}`
-   );
+   log("Creating an unprocessed file", payload);
+   CreateUnprocessedFilePayload.parse(payload);
+   const response = await client.authenticatedFetch("/api/v1/unprocessed_files", {
+     method: "POST",
+     body: JSON.stringify(payload)
+   });
+   log("Unprocessed file created", response.status, response.statusText, response.headers);
+   if (response.ok) return await response.json();
+   throw new Error(`Failed to create unprocessed file ${response.status} ${response.statusText}`);
  };
  const uploadUnprocessedReadableStream = (client, uploadDetails, fileStream) => {
-   log("Uploading unprocessed file", uploadDetails.id);
-   return uploadChunks(client, {
-     url: `/api/v1/unprocessed_files/${uploadDetails.id}/upload`,
-     fileSize: uploadDetails.byte_size,
-     fileStream,
-     maxSize: MAX_FILE_SIZE
-   });
+   log("Uploading unprocessed file", uploadDetails.id);
+   return uploadChunks(client, {
+     url: `/api/v1/unprocessed_files/${uploadDetails.id}/upload`,
+     fileSize: uploadDetails.byte_size,
+     fileStream,
+     maxSize: MAX_FILE_SIZE
+   });
  };
  const lookupUnprocessedFileByMd5 = async (client, md5) => {
-   const response = await client.authenticatedFetch(
-     `/api/v1/unprocessed_files/md5/${md5}`,
-     {
-       method: "GET"
-     }
-   );
-   if (response.ok) {
-     return await response.json();
-   }
-   if (response.status === 404) {
-     return null;
-   }
-   throw new Error(
-     `Failed to lookup unprocessed file by md5 ${md5} ${response.status} ${response.statusText}`
-   );
+   const response = await client.authenticatedFetch(`/api/v1/unprocessed_files/md5/${md5}`, { method: "GET" });
+   if (response.ok) return await response.json();
+   if (response.status === 404) return null;
+   throw new Error(`Failed to lookup unprocessed file by md5 ${md5} ${response.status} ${response.statusText}`);
  };
  const processIsobmffFile = async (client, id) => {
-   const response = await client.authenticatedFetch(
-     `/api/v1/unprocessed_files/${id}/isobmff`,
-     {
-       method: "POST"
-     }
-   );
-   if (response.ok) {
-     return await response.json();
-   }
-   throw new Error(
-     `Failed to process isobmff file ${id} ${response.status} ${response.statusText}`
-   );
- };
- export {
-   CreateUnprocessedFilePayload,
-   createUnprocessedFile,
-   lookupUnprocessedFileByMd5,
-   processIsobmffFile,
-   uploadUnprocessedReadableStream
+   const response = await client.authenticatedFetch(`/api/v1/unprocessed_files/${id}/isobmff`, { method: "POST" });
+   if (response.ok) return await response.json();
+   throw new Error(`Failed to process isobmff file ${id} ${response.status} ${response.statusText}`);
  };
+ export { CreateUnprocessedFilePayload, createUnprocessedFile, lookupUnprocessedFileByMd5, processIsobmffFile, uploadUnprocessedReadableStream };
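
The module's public surface is unchanged aside from the new assertTypesMatch import. As a minimal sketch, the four exports compose into one dedupe-then-upload flow like the one below; the Client type, the import path, uploadOnce, and the response shapes are illustrative assumptions, not part of this diff.

    import {
      createUnprocessedFile,
      lookupUnprocessedFileByMd5,
      processIsobmffFile,
      uploadUnprocessedReadableStream,
    } from "@editframe/api"; // assumed re-export path

    // The only contract these functions rely on is authenticatedFetch.
    type Client = { authenticatedFetch: (path: string, init?: RequestInit) => Promise<Response> };
    type Payload = { md5: string; filename: string; byte_size: number };

    async function uploadOnce(client: Client, payload: Payload, fileStream: ReadableStream<Uint8Array>) {
      // Dedupe by md5: reuse the server-side record when the file was seen before.
      const details =
        (await lookupUnprocessedFileByMd5(client, payload.md5)) ??
        (await createUnprocessedFile(client, payload)); // assumed to return { id, byte_size, ... }
      // whenUploaded drains the chunked-upload generator and collects its events.
      await uploadUnprocessedReadableStream(client, details, fileStream).whenUploaded();
      return processIsobmffFile(client, details.id);
    }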
@@ -1,20 +1,12 @@
  import debug from "debug";
  const log = debug("ef:api:url-token");
  const createURLToken = async (client, url) => {
-   log("Creating signed url for", url);
-   const response = await client.authenticatedFetch("/api/v1/url-token", {
-     method: "POST",
-     body: JSON.stringify({
-       url
-     })
-   });
-   if (!response.ok) {
-     throw new Error(
-       `Failed to create signed url: ${response.status} ${response.statusText} ${await response.text()}`
-     );
-   }
-   return (await response.json()).token;
- };
- export {
-   createURLToken
+   log("Creating signed url for", url);
+   const response = await client.authenticatedFetch("/api/v1/url-token", {
+     method: "POST",
+     body: JSON.stringify({ url })
+   });
+   if (!response.ok) throw new Error(`Failed to create signed url: ${response.status} ${response.statusText} ${await response.text()}`);
+   return (await response.json()).token;
  };
+ export { createURLToken };
@@ -1,28 +1,24 @@
  import { CHUNK_SIZE_BYTES } from "./CHUNK_SIZE_BYTES.js";
  async function* streamChunker(readableStream, chunkSize = CHUNK_SIZE_BYTES) {
-   const reader = readableStream.getReader();
-   let buffer = new Uint8Array(0);
-   try {
-     while (true) {
-       const { done, value } = await reader.read();
-       if (done) break;
-       const chunk = value;
-       const newBuffer = new Uint8Array(buffer.length + chunk.length);
-       newBuffer.set(buffer);
-       newBuffer.set(chunk, buffer.length);
-       buffer = newBuffer;
-       while (buffer.length >= chunkSize) {
-         yield buffer.slice(0, chunkSize);
-         buffer = buffer.slice(chunkSize);
-       }
-     }
-     if (buffer.length > 0) {
-       yield buffer;
-     }
-   } finally {
-     reader.releaseLock();
-   }
+   const reader = readableStream.getReader();
+   let buffer = new Uint8Array(0);
+   try {
+     while (true) {
+       const { done, value } = await reader.read();
+       if (done) break;
+       const chunk = value;
+       const newBuffer = new Uint8Array(buffer.length + chunk.length);
+       newBuffer.set(buffer);
+       newBuffer.set(chunk, buffer.length);
+       buffer = newBuffer;
+       while (buffer.length >= chunkSize) {
+         yield buffer.slice(0, chunkSize);
+         buffer = buffer.slice(chunkSize);
+       }
+     }
+     if (buffer.length > 0) yield buffer;
+   } finally {
+     reader.releaseLock();
+   }
  }
- export {
-   streamChunker
- };
+ export { streamChunker };
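
streamChunker rebuffers arbitrary reads into fixed-size chunks and flushes the remainder at the end. A quick behavioral sketch, assuming a runtime with the WHATWG ReadableStream global (Node 18+ or a browser):

    // Three uneven reads come out as fixed 4-byte chunks plus a 1-byte tail.
    const stream = new ReadableStream<Uint8Array>({
      start(controller) {
        controller.enqueue(new Uint8Array([1, 2]));
        controller.enqueue(new Uint8Array([3, 4, 5, 6, 7]));
        controller.enqueue(new Uint8Array([8, 9]));
        controller.close();
      },
    });
    for await (const chunk of streamChunker(stream, 4)) {
      console.log(chunk); // [1,2,3,4], then [5,6,7,8], then [9]
    }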
@@ -1,96 +1,80 @@
- import debug from "debug";
  import { CHUNK_SIZE_BYTES } from "./CHUNK_SIZE_BYTES.js";
  import { streamChunker } from "./streamChunker.js";
+ import debug from "debug";
  const log = debug("ef:api:uploadChunk");
- const uploadChunk = async (client, {
-   url,
-   chunkBuffer,
-   chunkNumber,
-   fileSize,
-   chunkSizeBytes = CHUNK_SIZE_BYTES
- }) => {
-   const startByte = chunkNumber * chunkSizeBytes;
-   const endByte = startByte + chunkBuffer.length - 1;
-   log(`Uploading chunk ${chunkNumber} for ${url}`);
-   const response = await client.authenticatedFetch(url, {
-     method: "POST",
-     headers: {
-       "Content-Range": `bytes=${startByte}-${endByte}/${fileSize}`,
-       "Content-Type": "application/octet-stream"
-     },
-     body: chunkBuffer
-   });
-   if (response.ok) {
-     if (response.status === 201) {
-       log(`File ${url} fully uploaded`);
-       return { complete: true, body: await response.json() };
-     }
-     if (response.status === 202) {
-       log(`File ${url} chunk ${chunkNumber} uploaded`);
-       return { complete: false, body: await response.json() };
-     }
-   }
-   throw new Error(
-     `Failed to upload chunk ${chunkNumber} for ${url} ${response.status} ${response.statusText}`
-   );
+ const uploadChunk = async (client, { url, chunkBuffer, chunkNumber, fileSize, chunkSizeBytes = CHUNK_SIZE_BYTES }) => {
+   const startByte = chunkNumber * chunkSizeBytes;
+   const endByte = startByte + chunkBuffer.length - 1;
+   log(`Uploading chunk ${chunkNumber} for ${url}`);
+   const response = await client.authenticatedFetch(url, {
+     method: "POST",
+     headers: {
+       "Content-Range": `bytes=${startByte}-${endByte}/${fileSize}`,
+       "Content-Type": "application/octet-stream"
+     },
+     body: chunkBuffer
+   });
+   if (response.ok) {
+     if (response.status === 201) {
+       log(`File ${url} fully uploaded`);
+       return {
+         complete: true,
+         body: await response.json()
+       };
+     }
+     if (response.status === 202) {
+       log(`File ${url} chunk ${chunkNumber} uploaded`);
+       return {
+         complete: false,
+         body: await response.json()
+       };
+     }
+   }
+   throw new Error(`Failed to upload chunk ${chunkNumber} for ${url} ${response.status} ${response.statusText}`);
  };
- function uploadChunks(client, {
-   url,
-   fileSize,
-   fileStream,
-   maxSize,
-   chunkSizeBytes = CHUNK_SIZE_BYTES
- }) {
-   const makeGenerator = async function* () {
-     if (fileSize > maxSize) {
-       throw new Error(
-         `File size ${fileSize} bytes exceeds limit ${maxSize} bytes`
-       );
-     }
-     log("Checking upload status", url);
-     const uploadStatus = await client.authenticatedFetch(url);
-     yield { type: "progress", progress: 0 };
-     if (uploadStatus.status === 200) {
-       log("Chunk already uploaded");
-       yield { type: "progress", progress: 1 };
-       return;
-     }
-     let chunkNumber = 0;
-     let complete = false;
-     for await (const chunkBuffer of streamChunker(fileStream, chunkSizeBytes)) {
-       log(`Uploading chunk ${chunkNumber}`);
-       ({ complete } = await uploadChunk(client, {
-         url,
-         chunkBuffer,
-         chunkNumber,
-         fileSize,
-         chunkSizeBytes
-       }));
-       chunkNumber++;
-       yield {
-         type: "progress",
-         progress: Math.min(1, chunkNumber / (fileSize / chunkSizeBytes))
-       };
-     }
-     if (!complete) {
-       throw new Error("Did not complete upload");
-     }
-   };
-   const generator = makeGenerator();
-   generator.whenUploaded = async () => {
-     if (fileSize > maxSize) {
-       throw new Error(
-         `File size ${fileSize} bytes exceeds limit ${maxSize} bytes`
-       );
-     }
-     const events = [];
-     for await (const event of generator) {
-       events.push(event);
-     }
-     return events;
-   };
-   return generator;
+ function uploadChunks(client, { url, fileSize, fileStream, maxSize, chunkSizeBytes = CHUNK_SIZE_BYTES }) {
+   const makeGenerator = async function* () {
+     if (fileSize > maxSize) throw new Error(`File size ${fileSize} bytes exceeds limit ${maxSize} bytes`);
+     log("Checking upload status", url);
+     const uploadStatus = await client.authenticatedFetch(url);
+     yield {
+       type: "progress",
+       progress: 0
+     };
+     if (uploadStatus.status === 200) {
+       log("Chunk already uploaded");
+       yield {
+         type: "progress",
+         progress: 1
+       };
+       return;
+     }
+     let chunkNumber = 0;
+     let complete = false;
+     for await (const chunkBuffer of streamChunker(fileStream, chunkSizeBytes)) {
+       log(`Uploading chunk ${chunkNumber}`);
+       ({complete} = await uploadChunk(client, {
+         url,
+         chunkBuffer,
+         chunkNumber,
+         fileSize,
+         chunkSizeBytes
+       }));
+       chunkNumber++;
+       yield {
+         type: "progress",
+         progress: Math.min(1, chunkNumber / (fileSize / chunkSizeBytes))
+       };
+     }
+     if (!complete) throw new Error("Did not complete upload");
+   };
+   const generator = makeGenerator();
+   generator.whenUploaded = async () => {
+     if (fileSize > maxSize) throw new Error(`File size ${fileSize} bytes exceeds limit ${maxSize} bytes`);
+     const events = [];
+     for await (const event of generator) events.push(event);
+     return events;
+   };
+   return generator;
  }
- export {
-   uploadChunks
- };
+ export { uploadChunks };
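
uploadChunks returns the async generator itself, with whenUploaded attached for callers that only want completion. A self-contained sketch of consuming it; the stub client, URL, and sizes are assumptions chosen so the initial status probe returns 200 and the generator takes the already-uploaded short-circuit:

    // The only contract uploadChunks needs from the client is authenticatedFetch.
    const stubClient = {
      authenticatedFetch: async () => new Response(null, { status: 200 }),
    };
    const events = uploadChunks(stubClient, {
      url: "/api/v1/unprocessed_files/123/upload", // hypothetical id
      fileSize: 0,
      fileStream: new ReadableStream<Uint8Array>({ start: (c) => c.close() }),
      maxSize: 1024,
    });
    for await (const event of events) {
      console.log(event); // { type: "progress", progress: 0 }, then { type: "progress", progress: 1 }
    }
    // Callers that only care about completion can instead await
    // uploadChunks(...).whenUploaded(), which drains the generator for them.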
@@ -0,0 +1,2 @@
+ const assertTypesMatch = (value) => value;
+ export { assertTypesMatch };
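
The compiled helper is a runtime identity function; its value lives in the type system. A guess at the TypeScript pattern behind it (the TypesMatch name and shape are assumptions, not taken from this diff): the generic constraint makes assertTypesMatch(true) compile only when a type-level comparison resolves to true.

    // Hypothetical type-level equality check; erased entirely at runtime.
    type TypesMatch<A, B> = [A] extends [B] ? ([B] extends [A] ? true : false) : false;
    const assertTypesMatch = <T extends true>(value: T) => value;

    // Compiles only when the two types are mutually assignable.
    assertTypesMatch<TypesMatch<{ a: number }, { a: number }>>(true);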
@@ -1,82 +1,65 @@
  import { Stream } from "node:stream";
  const createReadableStreamFromReadable = (source) => {
-   const pump = new StreamPump(source);
-   const stream = new ReadableStream(pump, pump);
-   return stream;
+   const pump = new StreamPump(source);
+   const stream = new ReadableStream(pump, pump);
+   return stream;
  };
- class StreamPump {
-   constructor(stream) {
-     this.highWaterMark = stream.readableHighWaterMark || new Stream.Readable().readableHighWaterMark;
-     this.accumalatedSize = 0;
-     this.stream = stream;
-     this.enqueue = this.enqueue.bind(this);
-     this.error = this.error.bind(this);
-     this.close = this.close.bind(this);
-   }
-   size(chunk) {
-     return chunk?.byteLength || 0;
-   }
-   start(controller) {
-     this.controller = controller;
-     this.stream.on("data", this.enqueue);
-     this.stream.once("error", this.error);
-     this.stream.once("end", this.close);
-     this.stream.once("close", this.close);
-   }
-   pull() {
-     this.resume();
-   }
-   cancel(reason) {
-     if (this.stream.destroy) {
-       this.stream.destroy(reason);
-     }
-     this.stream.off("data", this.enqueue);
-     this.stream.off("error", this.error);
-     this.stream.off("end", this.close);
-     this.stream.off("close", this.close);
-   }
-   enqueue(chunk) {
-     if (this.controller) {
-       try {
-         const bytes = chunk instanceof Uint8Array ? chunk : Buffer.from(chunk);
-         const available = (this.controller.desiredSize || 0) - bytes.byteLength;
-         this.controller.enqueue(bytes);
-         if (available <= 0) {
-           this.pause();
-         }
-       } catch (error) {
-         this.controller.error(
-           new Error(
-             "Could not create Buffer, chunk must be of type string or an instance of Buffer, ArrayBuffer, or Array or an Array-like Object"
-           )
-         );
-         this.cancel();
-       }
-     }
-   }
-   pause() {
-     if (this.stream.pause) {
-       this.stream.pause();
-     }
-   }
-   resume() {
-     if (this.stream.readable && this.stream.resume) {
-       this.stream.resume();
-     }
-   }
-   close() {
-     if (this.controller) {
-       this.controller.close();
-       delete this.controller;
-     }
-   }
-   error(error) {
-     if (this.controller) {
-       this.controller.error(error);
-       delete this.controller;
-     }
-   }
- }
- export {
-   createReadableStreamFromReadable
+ var StreamPump = class {
+   constructor(stream) {
+     this.highWaterMark = stream.readableHighWaterMark || new Stream.Readable().readableHighWaterMark;
+     this.accumalatedSize = 0;
+     this.stream = stream;
+     this.enqueue = this.enqueue.bind(this);
+     this.error = this.error.bind(this);
+     this.close = this.close.bind(this);
+   }
+   size(chunk) {
+     return chunk?.byteLength || 0;
+   }
+   start(controller) {
+     this.controller = controller;
+     this.stream.on("data", this.enqueue);
+     this.stream.once("error", this.error);
+     this.stream.once("end", this.close);
+     this.stream.once("close", this.close);
+   }
+   pull() {
+     this.resume();
+   }
+   cancel(reason) {
+     if (this.stream.destroy) this.stream.destroy(reason);
+     this.stream.off("data", this.enqueue);
+     this.stream.off("error", this.error);
+     this.stream.off("end", this.close);
+     this.stream.off("close", this.close);
+   }
+   enqueue(chunk) {
+     if (this.controller) try {
+       const available = (this.controller.desiredSize || 0) - chunk.length;
+       this.controller.enqueue(chunk);
+       if (available <= 0) this.pause();
+     } catch (_error) {
+       this.controller.error(/* @__PURE__ */ new Error("Could not create Buffer, chunk must be of type string or an instance of Buffer, ArrayBuffer, or Array or an Array-like Object"));
+       this.cancel();
+     }
+   }
+   pause() {
+     if (this.stream.pause) this.stream.pause();
+   }
+   resume() {
+     if (this.stream.readable && this.stream.resume) this.stream.resume();
+   }
+   close() {
+     if (this.controller) {
+       this.controller.close();
+       delete this.controller;
+     }
+   }
+   error(error) {
+     if (this.controller) {
+       this.controller.error(error);
+       delete this.controller;
+     }
+   }
  };
+ export { createReadableStreamFromReadable };
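
The rewritten enqueue forwards chunks without coercing them to Buffer (the coercion line is commented out in the source hunk further below), so wrapped streams are expected to emit Uint8Array chunks. A usage sketch with a hypothetical file path; recent Node versions also ship Readable.toWeb for the same Node-to-web conversion:

    import { createReadStream } from "node:fs";

    // Adapt a Node Readable to a WHATWG ReadableStream via the StreamPump adapter,
    // then feed it through the chunker from earlier in this diff.
    const webStream = createReadableStreamFromReadable(createReadStream("input.mp4"));
    for await (const chunk of streamChunker(webStream)) {
      console.log(chunk.byteLength);
    }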
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
    "name": "@editframe/api",
-   "version": "0.16.8-beta.0",
+   "version": "0.17.6-beta.0",
    "description": "API functions for EditFrame",
    "exports": {
      ".": {
@@ -42,10 +42,10 @@
      "@types/node": "^20.14.13",
      "typedoc": "^0.26.5",
      "typescript": "^5.5.4",
-     "vite-plugin-dts": "^4.0.3"
+     "vite-plugin-dts": "^4.5.4"
    },
    "dependencies": {
-     "@editframe/assets": "0.16.8-beta.0",
+     "@editframe/assets": "0.17.6-beta.0",
      "@vitejs/plugin-react": "^4.3.4",
      "debug": "^4.3.5",
      "eventsource-parser": "^3.0.0",
@@ -53,8 +53,8 @@
      "mime": "^4.0.4",
      "mime-types": "^2.1.35",
      "node-fetch": "^3.3.2",
+     "rolldown-vite": "^6.3.21",
      "tar": "^7.4.3",
-     "vite": "^6.3.5",
      "vite-plugin-singlefile": "^2.1.0",
      "vite-tsconfig-paths": "^4.3.2",
      "zod": "^3.23.8"
@@ -2,8 +2,8 @@ import path from "node:path";
  import { PassThrough } from "node:stream";

  import react from "@vitejs/plugin-react";
+ import { build } from "rolldown-vite";
  import * as tar from "tar";
- import { build } from "vite";
  import { viteSingleFile } from "vite-plugin-singlefile";
  import tsconfigPathsPlugin from "vite-tsconfig-paths";

@@ -69,14 +69,14 @@ class StreamPump {
  enqueue(chunk: Uint8Array | string) {
    if (this.controller) {
      try {
-       const bytes = chunk instanceof Uint8Array ? chunk : Buffer.from(chunk);
+       // const bytes = chunk instanceof Uint8Array ? chunk : Buffer.from(chunk);

-       const available = (this.controller.desiredSize || 0) - bytes.byteLength;
-       this.controller.enqueue(bytes);
+       const available = (this.controller.desiredSize || 0) - chunk.length;
+       this.controller.enqueue(chunk as Uint8Array);
        if (available <= 0) {
          this.pause();
        }
-     } catch (error: any) {
+     } catch (_error: any) {
        this.controller.error(
          new Error(
            "Could not create Buffer, chunk must be of type string or an instance of Buffer, ArrayBuffer, or Array or an Array-like Object",
@@ -102,7 +102,6 @@ class StreamPump {
  close() {
    if (this.controller) {
      this.controller.close();
-     // biome-ignore lint/performance/noDelete: infrequent use
      delete this.controller;
    }
  }
@@ -110,7 +109,6 @@ class StreamPump {
  error(error: Error) {
    if (this.controller) {
      this.controller.error(error);
-     // biome-ignore lint/performance/noDelete: infrequent use
      delete this.controller;
    }
  }