@editframe/assets 0.16.7-beta.0 → 0.17.6-beta.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/DecoderManager.d.ts +62 -0
- package/dist/DecoderManager.js +114 -0
- package/dist/EncodedAsset.d.ts +58 -16
- package/dist/EncodedAsset.js +436 -565
- package/dist/FrameBuffer.d.ts +62 -0
- package/dist/FrameBuffer.js +89 -0
- package/dist/MP4File.d.ts +9 -1
- package/dist/MP4File.js +205 -219
- package/dist/MP4SampleAnalyzer.d.ts +59 -0
- package/dist/MP4SampleAnalyzer.js +119 -0
- package/dist/Probe.d.ts +1 -0
- package/dist/Probe.js +273 -301
- package/dist/SeekStrategy.d.ts +82 -0
- package/dist/SeekStrategy.js +101 -0
- package/dist/VideoRenderOptions.js +31 -33
- package/dist/idempotentTask.js +78 -78
- package/dist/index.js +1 -15
- package/dist/md5.js +35 -51
- package/dist/memoize.js +9 -12
- package/dist/mp4FileWritable.js +16 -18
- package/dist/tasks/cacheImage.js +13 -15
- package/dist/tasks/findOrCreateCaptions.js +18 -21
- package/dist/tasks/generateTrack.js +45 -63
- package/dist/tasks/generateTrackFragmentIndex.js +88 -101
- package/package.json +4 -4
- package/types.json +1 -1

package/dist/SeekStrategy.d.ts
ADDED
@@ -0,0 +1,82 @@
+import type * as MP4Box from "mp4box";
+/**
+ * Interface for the minimal FrameBuffer API needed by SeekStrategy
+ */
+interface FrameBufferLike {
+  findByTimestamp(timestamp: number): VideoFrame | undefined;
+}
+/**
+ * State information needed for seeking decisions
+ */
+export interface SeekState {
+  /** Current position in the samples array for decoding */
+  sampleCursor: number;
+  /** Position of the last successfully decoded frame output */
+  outCursor: number;
+  /** Optional frame buffer for checking cached frames */
+  frameBuffer?: FrameBufferLike;
+}
+/**
+ * SeekStrategy encapsulates the critical seeking decision logic extracted from VideoAsset.
+ *
+ * **CRITICAL**: This class preserves the exact seeking behavior and decoder flush conditions
+ * from the original VideoAsset implementation to maintain "warm" decoder state performance.
+ *
+ * The logic here directly controls when the decoder must be flushed, which is the most
+ * performance-critical aspect of frame-accurate seeking.
+ */
+export declare class SeekStrategy {
+  /**
+   * Determines if seeking will skip picture groups (GOPs), requiring a decoder flush.
+   *
+   * This is an EXACT extraction of the original `seekingWillSkipPictureGroup` logic.
+   *
+   * @param state Current seek state with cursor positions
+   * @param targetSample The sample we want to seek to
+   * @param allSamples All samples in display order
+   * @returns true if seeking will cross more than one sync frame (GOP boundary)
+   */
+  seekingWillSkipPictureGroup(state: SeekState, targetSample: MP4Box.Sample, allSamples: MP4Box.Sample[]): boolean;
+  /**
+   * Determines if seeking will go backwards in time, requiring a decoder flush.
+   *
+   * This is an EXACT extraction of the original `seekingWillGoBackwards` logic.
+   *
+   * @param state Current seek state with cursor positions and frame buffer
+   * @param targetSample The sample we want to seek to
+   * @param displayOrderedSamples Samples sorted by composition timestamp
+   * @returns true if seeking backwards and target frame is not cached
+   */
+  seekingWillGoBackwards(state: SeekState, targetSample: MP4Box.Sample, displayOrderedSamples: MP4Box.Sample[]): boolean;
+  /**
+   * Finds the sync sample at or before the target sample number.
+   *
+   * This is an EXACT extraction of the sync sample finding logic from the original
+   * `seekToTime` method. Used when decoder flush is required to find optimal restart point.
+   *
+   * @param targetSample The sample we want to seek to
+   * @param allSamples All samples in the video
+   * @returns The sample number of the sync frame to start decoding from
+   * @throws Error if no sync sample found when traversing backwards
+   */
+  findSyncSampleBefore(targetSample: MP4Box.Sample, allSamples: MP4Box.Sample[]): number;
+  /**
+   * The master decision function that determines if the decoder should be flushed.
+   *
+   * This consolidates the EXACT flush decision logic from the original VideoAsset.seekToTime().
+   * The decoder is flushed ONLY when:
+   * 1. Seeking will skip picture groups (crosses multiple GOP boundaries), OR
+   * 2. Seeking backwards and target frame is not in cache
+   *
+   * **CRITICAL**: This preserves the sophisticated flush minimization that keeps
+   * the decoder "warm" for optimal performance.
+   *
+   * @param state Current seek state
+   * @param targetSample The sample we want to seek to
+   * @param allSamples All samples in the video
+   * @param displayOrderedSamples Samples sorted by composition timestamp
+   * @returns true if decoder should be flushed before seeking
+   */
+  shouldFlushDecoder(state: SeekState, targetSample: MP4Box.Sample, allSamples: MP4Box.Sample[], displayOrderedSamples?: MP4Box.Sample[]): boolean;
+}
+export {};
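
Note (not part of the published diff): the declarations above imply a call pattern like the sketch below. The planSeek helper, its arguments, and the deep import path are assumptions for illustration; SeekStrategy is not re-exported from dist/index.js in this version.

import type * as MP4Box from "mp4box";
// Deep import assumed; adjust to however the module is resolved in your build.
import { SeekStrategy, type SeekState } from "@editframe/assets/dist/SeekStrategy.js";

// Hypothetical helper: report whether the decoder needs a flush, and where decoding
// should restart (a sync sample) or simply continue (the current cursor).
function planSeek(
  state: SeekState,
  target: MP4Box.Sample,
  allSamples: MP4Box.Sample[],
  displayOrdered: MP4Box.Sample[],
): { flush: boolean; restartAt: number } {
  const strategy = new SeekStrategy();
  const flush = strategy.shouldFlushDecoder(state, target, allSamples, displayOrdered);
  return {
    flush,
    restartAt: flush ? strategy.findSyncSampleBefore(target, allSamples) : state.sampleCursor,
  };
}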

package/dist/SeekStrategy.js
ADDED
@@ -0,0 +1,101 @@
+/**
+ * SeekStrategy encapsulates the critical seeking decision logic extracted from VideoAsset.
+ *
+ * **CRITICAL**: This class preserves the exact seeking behavior and decoder flush conditions
+ * from the original VideoAsset implementation to maintain "warm" decoder state performance.
+ *
+ * The logic here directly controls when the decoder must be flushed, which is the most
+ * performance-critical aspect of frame-accurate seeking.
+ */
+var SeekStrategy = class {
+  /**
+   * Determines if seeking will skip picture groups (GOPs), requiring a decoder flush.
+   *
+   * This is an EXACT extraction of the original `seekingWillSkipPictureGroup` logic.
+   *
+   * @param state Current seek state with cursor positions
+   * @param targetSample The sample we want to seek to
+   * @param allSamples All samples in display order
+   * @returns true if seeking will cross more than one sync frame (GOP boundary)
+   */
+  seekingWillSkipPictureGroup(state, targetSample, allSamples) {
+    let start = state.sampleCursor;
+    const end = targetSample.number;
+    let syncFrameCrossings = 0;
+    while (start <= end) {
+      const sample = allSamples[start];
+      if (!sample) break;
+      if (sample.is_sync) {
+        if (syncFrameCrossings > 1) return true;
+        syncFrameCrossings++;
+      }
+      start++;
+    }
+    return false;
+  }
+  /**
+   * Determines if seeking will go backwards in time, requiring a decoder flush.
+   *
+   * This is an EXACT extraction of the original `seekingWillGoBackwards` logic.
+   *
+   * @param state Current seek state with cursor positions and frame buffer
+   * @param targetSample The sample we want to seek to
+   * @param displayOrderedSamples Samples sorted by composition timestamp
+   * @returns true if seeking backwards and target frame is not cached
+   */
+  seekingWillGoBackwards(state, targetSample, displayOrderedSamples) {
+    const targetIndex = displayOrderedSamples.indexOf(targetSample);
+    const targetInCache = state.frameBuffer?.findByTimestamp(targetSample.cts);
+    const atEnd = state.sampleCursor === displayOrderedSamples.length - 1;
+    if (atEnd) return false;
+    if (targetInCache) return false;
+    return state.outCursor > targetIndex;
+  }
+  /**
+   * Finds the sync sample at or before the target sample number.
+   *
+   * This is an EXACT extraction of the sync sample finding logic from the original
+   * `seekToTime` method. Used when decoder flush is required to find optimal restart point.
+   *
+   * @param targetSample The sample we want to seek to
+   * @param allSamples All samples in the video
+   * @returns The sample number of the sync frame to start decoding from
+   * @throws Error if no sync sample found when traversing backwards
+   */
+  findSyncSampleBefore(targetSample, allSamples) {
+    let syncSampleNumber = targetSample.number;
+    while (syncSampleNumber >= 0) {
+      const sample = allSamples[syncSampleNumber];
+      if (!sample) break;
+      if (sample.is_sync) return syncSampleNumber;
+      syncSampleNumber--;
+    }
+    throw new Error("No sync sample found when traversing backwards");
+  }
+  /**
+   * The master decision function that determines if the decoder should be flushed.
+   *
+   * This consolidates the EXACT flush decision logic from the original VideoAsset.seekToTime().
+   * The decoder is flushed ONLY when:
+   * 1. Seeking will skip picture groups (crosses multiple GOP boundaries), OR
+   * 2. Seeking backwards and target frame is not in cache
+   *
+   * **CRITICAL**: This preserves the sophisticated flush minimization that keeps
+   * the decoder "warm" for optimal performance.
+   *
+   * @param state Current seek state
+   * @param targetSample The sample we want to seek to
+   * @param allSamples All samples in the video
+   * @param displayOrderedSamples Samples sorted by composition timestamp
+   * @returns true if decoder should be flushed before seeking
+   */
+  shouldFlushDecoder(state, targetSample, allSamples, displayOrderedSamples) {
+    const targetInCache = state.frameBuffer?.findByTimestamp(targetSample.cts);
+    if (targetInCache) return false;
+    const orderedSamples = displayOrderedSamples || allSamples;
+    if (this.seekingWillSkipPictureGroup(state, targetSample, allSamples)) return true;
+    if (this.seekingWillGoBackwards(state, targetSample, orderedSamples)) return true;
+    return false;
+  }
+};
+export { SeekStrategy };
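
Illustration only, with fabricated sample data (a sync frame every three samples, cursor at sample 0): the GOP check tolerates up to two sync frames in the scanned range and reports a flush once a third would be crossed.

import type * as MP4Box from "mp4box";
import { SeekStrategy } from "@editframe/assets/dist/SeekStrategy.js"; // deep import assumed

// Minimal stand-ins for MP4Box samples: only the fields the strategy reads.
const sample = (number: number, is_sync: boolean) =>
  ({ number, is_sync, cts: number * 512 }) as unknown as MP4Box.Sample;

const samples = Array.from({ length: 9 }, (_, i) => sample(i, i % 3 === 0)); // sync at 0, 3, 6
const strategy = new SeekStrategy();
const state = { sampleCursor: 0, outCursor: 0 };

strategy.seekingWillSkipPictureGroup(state, samples[4], samples); // false: only the sync frames at 0 and 3 are in range
strategy.seekingWillSkipPictureGroup(state, samples[7], samples); // true: a third sync frame (at 6) is also in range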

package/dist/VideoRenderOptions.js
CHANGED
@@ -1,36 +1,34 @@
 import { z } from "zod";
 const VideoRenderOptions = z.object({
[old lines 3-32 removed; contents not shown in this diff view]
+  mode: z.enum(["canvas", "screenshot"]),
+  strategy: z.enum(["v1", "v2"]),
+  showFrameBox: z.boolean().optional(),
+  encoderOptions: z.object({
+    sequenceNumber: z.number(),
+    keyframeIntervalMs: z.number(),
+    toMs: z.number(),
+    fromMs: z.number(),
+    shouldPadStart: z.boolean(),
+    shouldPadEnd: z.boolean(),
+    alignedFromUs: z.number(),
+    alignedToUs: z.number(),
+    isInitSegment: z.boolean(),
+    noVideo: z.boolean().optional(),
+    noAudio: z.boolean().optional(),
+    video: z.object({
+      width: z.number(),
+      height: z.number(),
+      framerate: z.number(),
+      codec: z.string(),
+      bitrate: z.number()
+    }),
+    audio: z.object({
+      sampleRate: z.number(),
+      codec: z.string(),
+      numberOfChannels: z.number(),
+      bitrate: z.number()
+    })
+  }),
+  fetchHost: z.string()
 });
-export {
-  VideoRenderOptions
-};
+export { VideoRenderOptions };
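
As a reading aid, here is an options object (all values invented) that satisfies the reshaped schema; parse throws a ZodError if a required field is missing or mistyped. The bare-specifier import assumes the package entry re-exports dist/index.js, which does export VideoRenderOptions.

import { VideoRenderOptions } from "@editframe/assets";

const options = VideoRenderOptions.parse({
  mode: "canvas",
  strategy: "v2",
  encoderOptions: {
    sequenceNumber: 0,
    keyframeIntervalMs: 2000,
    fromMs: 0,
    toMs: 4000,
    shouldPadStart: false,
    shouldPadEnd: false,
    alignedFromUs: 0,
    alignedToUs: 4_000_000,
    isInitSegment: true,
    video: { width: 1920, height: 1080, framerate: 30, codec: "avc1.640028", bitrate: 5_000_000 },
    audio: { sampleRate: 48000, codec: "mp4a.40.2", numberOfChannels: 2, bitrate: 128_000 },
  },
  fetchHost: "http://localhost:3000", // illustrative host
});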

package/dist/idempotentTask.js
CHANGED
@@ -1,83 +1,83 @@
-import { existsSync, createWriteStream } from "node:fs";
-import path from "node:path";
 import { md5FilePath } from "./md5.js";
 import debug from "debug";
+import { createWriteStream, existsSync } from "node:fs";
 import { mkdir, writeFile } from "node:fs/promises";
+import path from "node:path";
 import { Readable } from "node:stream";
-const idempotentTask = ({
[old lines 8-79 removed; contents not shown in this diff view]
-};
[old line 81 removed; contents not shown in this diff view]
-  idempotentTask
+const idempotentTask = ({ label, filename, runner }) => {
+  const tasks = {};
+  return async (rootDir, absolutePath, ...args) => {
+    const log = debug(`ef:${label}`);
+    const cacheDirRoot = path.join(rootDir, ".cache");
+    await mkdir(cacheDirRoot, { recursive: true });
+    log(`Running ef:${label} task for ${absolutePath} in ${rootDir}`);
+    if (absolutePath.includes("http")) {
+      const safePath = absolutePath.replace(/[^a-zA-Z0-9]/g, "_");
+      const cachePath$1 = path.join(rootDir, ".cache", `${safePath}.file`);
+      if (existsSync(absolutePath)) log(`Already cached ${absolutePath}`);
+      else {
+        const response = await fetch(absolutePath);
+        const stream = response.body;
+        if (response.ok && stream) {
+          const writeStream = createWriteStream(cachePath$1);
+          const readable = Readable.fromWeb(stream);
+          readable.pipe(writeStream);
+          await new Promise((resolve, reject) => {
+            readable.on("error", reject);
+            writeStream.on("error", reject);
+            writeStream.on("finish", resolve);
+          });
+          absolutePath = cachePath$1;
+        } else throw new Error(`Failed to fetch file from URL ${absolutePath}`);
+      }
+    }
+    const md5 = await md5FilePath(absolutePath);
+    const cacheDir = path.join(cacheDirRoot, md5);
+    log(`Cache dir: ${cacheDir}`);
+    await mkdir(cacheDir, { recursive: true });
+    const cachePath = path.join(cacheDir, filename(absolutePath, ...args));
+    const key = cachePath;
+    if (existsSync(cachePath)) {
+      log(`Returning cached ef:${label} task for ${key}`);
+      return {
+        cachePath,
+        md5Sum: md5
+      };
+    }
+    const maybeTask = tasks[key];
+    if (maybeTask) {
+      log(`Returning existing ef:${label} task for ${key}`);
+      await maybeTask;
+      return {
+        cachePath,
+        md5Sum: md5
+      };
+    }
+    log(`Creating new ef:${label} task for ${key}`);
+    const task = runner(absolutePath, ...args);
+    tasks[key] = task;
+    log(`Awaiting task for ${key}`);
+    const result = await task;
+    if (result instanceof Readable) {
+      log(`Piping task for ${key} to cache`);
+      const writeStream = createWriteStream(cachePath);
+      result.pipe(writeStream);
+      await new Promise((resolve, reject) => {
+        result.on("error", reject);
+        writeStream.on("error", reject);
+        writeStream.on("finish", resolve);
+      });
+      return {
+        cachePath,
+        md5Sum: md5
+      };
+    }
+    log(`Writing to ${cachePath}`);
+    await writeFile(cachePath, result);
+    return {
+      md5Sum: md5,
+      cachePath
+    };
+  };
 };
+export { idempotentTask };
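
A usage sketch for the rewritten helper (the thumbnail task, its paths, and its extra argument are invented): results land in <rootDir>/.cache/<md5-of-source>/<filename> and later calls with the same source file return the cached entry. The deep import is an assumption; idempotentTask is not on the package index.

import { idempotentTask } from "@editframe/assets/dist/idempotentTask.js"; // path assumed

const extractThumbnail = idempotentTask({
  label: "thumbnail",
  filename: (_absolutePath: string, timeMs: number) => `thumb-${timeMs}.png`,
  runner: async (absolutePath: string, timeMs: number) => {
    // Return a Buffer (written to the cache path) or a Readable (piped into it).
    return Buffer.from(`placeholder bytes for ${absolutePath} at ${timeMs}ms`);
  },
});

// First call runs the runner; repeat calls hit the on-disk cache keyed by the file's MD5.
const { cachePath, md5Sum } = await extractThumbnail("/tmp/project", "/videos/input.mp4", 1500);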

package/dist/index.js
CHANGED
@@ -5,18 +5,4 @@ import { generateTrack, generateTrackFromPath } from "./tasks/generateTrack.js";
 import { findOrCreateCaptions, generateCaptionDataFromPath } from "./tasks/findOrCreateCaptions.js";
 import { cacheImage } from "./tasks/cacheImage.js";
 import { VideoRenderOptions } from "./VideoRenderOptions.js";
-export {
-  Probe,
-  VideoRenderOptions,
-  cacheImage,
-  findOrCreateCaptions,
-  generateCaptionDataFromPath,
-  generateTrack,
-  generateTrackFragmentIndex,
-  generateTrackFragmentIndexFromPath,
-  generateTrackFromPath,
-  md5Buffer,
-  md5Directory,
-  md5FilePath,
-  md5ReadStream
-};
+export { Probe, VideoRenderOptions, cacheImage, findOrCreateCaptions, generateCaptionDataFromPath, generateTrack, generateTrackFragmentIndex, generateTrackFragmentIndexFromPath, generateTrackFromPath, md5Buffer, md5Directory, md5FilePath, md5ReadStream };

package/dist/md5.js
CHANGED
@@ -4,63 +4,47 @@ import { join } from "node:path";
 import crypto from "node:crypto";
 import ora from "ora";
 async function md5Directory(directory, spinner) {
[old lines 7-23 removed; contents not shown in this diff view]
-  }
-  if (shouldEndSpinner) {
-    spinner.succeed("MD5 calculated");
-    spinner.suffixText = directory;
-  }
-  return addDashesToUUID(hash.digest("hex"));
+  const shouldEndSpinner = !spinner;
+  spinner ||= ora("⚡️ Calculating MD5").start();
+  spinner.suffixText = directory;
+  const files = await readdir(directory, { withFileTypes: true });
+  const hashes = await Promise.all(files.map(async (file) => {
+    const filePath = join(directory, file.name);
+    if (file.isDirectory()) return md5Directory(filePath, spinner);
+    spinner.suffixText = filePath;
+    return md5FilePath(filePath);
+  }));
+  const hash = crypto.createHash("md5");
+  for (const fileHash of hashes) hash.update(fileHash);
+  if (shouldEndSpinner) {
+    spinner.succeed("MD5 calculated");
+    spinner.suffixText = directory;
+  }
+  return addDashesToUUID(hash.digest("hex"));
 }
 async function md5FilePath(filePath) {
[old lines 32-33 removed; contents not shown in this diff view]
+  const readStream = createReadStream(filePath);
+  return md5ReadStream(readStream);
 }
 function md5ReadStream(readStream) {
[old lines 36-45 removed; contents not shown in this diff view]
+  return new Promise((resolve, reject) => {
+    const hash = crypto.createHash("md5");
+    readStream.on("data", (data) => {
+      hash.update(data);
+    });
+    readStream.on("error", reject);
+    readStream.on("end", () => {
+      resolve(addDashesToUUID(hash.digest("hex")));
+    });
+  });
 }
 function md5Buffer(buffer) {
[old lines 48-50 removed; contents not shown in this diff view]
+  const hash = crypto.createHash("md5");
+  hash.update(buffer);
+  return addDashesToUUID(hash.digest("hex"));
 }
 function addDashesToUUID(uuidWithoutDashes) {
[old lines 53-54 removed; contents not shown in this diff view]
-  }
-  return (
-    // biome-ignore lint/style/useTemplate: using a template makes a long line
-    uuidWithoutDashes.slice(0, 8) + "-" + uuidWithoutDashes.slice(8, 12) + "-" + uuidWithoutDashes.slice(12, 16) + "-" + uuidWithoutDashes.slice(16, 20) + "-" + uuidWithoutDashes.slice(20, 32)
-  );
+  if (uuidWithoutDashes.length !== 32) throw new Error("Invalid UUID without dashes. Expected 32 characters.");
+  return uuidWithoutDashes.slice(0, 8) + "-" + uuidWithoutDashes.slice(8, 12) + "-" + uuidWithoutDashes.slice(12, 16) + "-" + uuidWithoutDashes.slice(16, 20) + "-" + uuidWithoutDashes.slice(20, 32);
 }
-export {
-  md5Buffer,
-  md5Directory,
-  md5FilePath,
-  md5ReadStream
-};
+export { md5Buffer, md5Directory, md5FilePath, md5ReadStream };
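
For reference (assuming the package entry resolves to dist/index.js, which exports these helpers): digests are reshaped into the 8-4-4-4-12 layout enforced by addDashesToUUID, so the classic MD5 test vector reads like a UUID.

import { md5Buffer, md5FilePath } from "@editframe/assets";

// MD5("The quick brown fox jumps over the lazy dog") = 9e107d9d372bb6826bd81d3542a419d6
const sum = md5Buffer(Buffer.from("The quick brown fox jumps over the lazy dog"));
console.log(sum); // "9e107d9d-372b-b682-6bd8-1d3542a419d6"

// Files hash the same way; the path is illustrative.
console.log(await md5FilePath("/videos/input.mp4"));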

package/dist/memoize.js
CHANGED
@@ -1,14 +1,11 @@
+/** method decorator to memoize the value of a getter */
 const memoize = (_target, _propertyKey, descriptor) => {
[old lines 2-8 removed; contents not shown in this diff view]
-    return memoized.get(this);
-  };
-};
-export {
-  memoize
+  const get = descriptor.get;
+  if (!get) return;
+  const memoized = /* @__PURE__ */ new WeakMap();
+  descriptor.get = function() {
+    if (!memoized.has(this)) memoized.set(this, get.call(this));
+    return memoized.get(this);
+  };
 };
+export { memoize };
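
The decorator takes the legacy (target, propertyKey, descriptor) form, so a sketch like the one below assumes TypeScript's experimentalDecorators (or an equivalent legacy-decorator transform); the class and getter are invented for illustration.

import { memoize } from "@editframe/assets/dist/memoize.js"; // path assumed; not on the package index

class MediaInfo {
  // The getter body runs once per instance; later reads return the WeakMap-cached value.
  @memoize
  get report(): string {
    console.log("computing report");
    return JSON.stringify({ computedAt: Date.now() });
  }
}

const info = new MediaInfo();
info.report; // logs "computing report"
info.report; // cached: no log, same string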

package/dist/mp4FileWritable.js
CHANGED
@@ -1,21 +1,19 @@
 import { Writable } from "node:stream";
 const mp4FileWritable = (mp4File) => {
[old lines 3-17 removed; contents not shown in this diff view]
-};
-export {
-  mp4FileWritable
+  let arrayBufferStart = 0;
+  return new Writable({
+    write: (chunk, _encoding, callback) => {
+      const mp4BoxBuffer = chunk.buffer;
+      mp4BoxBuffer.fileStart = arrayBufferStart;
+      arrayBufferStart += chunk.length;
+      mp4File.appendBuffer(mp4BoxBuffer, false);
+      callback();
+    },
+    final: (callback) => {
+      mp4File.flush();
+      mp4File.processSamples(true);
+      callback();
+    }
+  });
 };
+export { mp4FileWritable };
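
A sketch of feeding an MP4 from disk into mp4box.js through this Writable (file path and logging are illustrative): each chunk's ArrayBuffer is stamped with its fileStart offset before being appended, and ending the stream flushes the parser.

import * as MP4Box from "mp4box";
import { createReadStream } from "node:fs";
import { pipeline } from "node:stream/promises";
import { mp4FileWritable } from "@editframe/assets/dist/mp4FileWritable.js"; // path assumed

const mp4File = MP4Box.createFile();
mp4File.onReady = (info) => console.log("tracks:", info.tracks.length);

await pipeline(createReadStream("/videos/input.mp4"), mp4FileWritable(mp4File));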

package/dist/tasks/cacheImage.js
CHANGED
@@ -2,21 +2,19 @@ import { idempotentTask } from "../idempotentTask.js";
 import { createReadStream } from "node:fs";
 import path from "node:path";
 const cacheImageTask = idempotentTask({
[old lines 5-9 removed; contents not shown in this diff view]
+  label: "image",
+  filename: (absolutePath) => path.basename(absolutePath),
+  runner: async (absolutePath) => {
+    return createReadStream(absolutePath);
+  }
 });
 const cacheImage = async (cacheRoot, absolutePath) => {
[old lines 12-18 removed; contents not shown in this diff view]
-};
-export {
-  cacheImage
+  try {
+    return await cacheImageTask(cacheRoot, absolutePath);
+  } catch (error) {
+    console.error(error);
+    console.trace("Error caching image", error);
+    throw error;
+  }
 };
+export { cacheImage };
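
End to end, the exported helper then reads like this (paths invented): the image is copied once into <cacheRoot>/.cache/<md5>/<basename>, and later calls return the same entry. The bare-specifier import assumes the package entry re-exports dist/index.js.

import { cacheImage } from "@editframe/assets";

const { cachePath, md5Sum } = await cacheImage("/tmp/project", "/assets/logo.png");
console.log(cachePath, md5Sum); // e.g. /tmp/project/.cache/<md5>/logo.png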