@editframe/assets 0.20.4-beta.0 → 0.21.0-beta.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/Probe.d.ts +1 -1
- package/dist/Probe.js +12 -16
- package/dist/VideoRenderOptions.d.ts +3 -0
- package/dist/VideoRenderOptions.js +1 -0
- package/dist/generateFragmentIndex.js +17 -35
- package/dist/generateSingleTrack.js +3 -4
- package/dist/idempotentTask.js +2 -2
- package/dist/tasks/cacheImage.js +1 -1
- package/dist/tasks/findOrCreateCaptions.js +3 -3
- package/dist/tasks/generateTrack.js +2 -4
- package/dist/tasks/generateTrackFragmentIndex.js +2 -3
- package/package.json +1 -1
- package/types.json +1 -1
package/dist/Probe.d.ts
CHANGED
@@ -1149,7 +1149,7 @@ declare abstract class ProbeBase {
 get ffmpegAudioOutputOptions(): string[];
 get ffmpegVideoOutputOptions(): string[];
 protected constructor(absolutePath: string);
-createConformingReadstream(): Readable;
+createConformingReadstream(): Readable | import('fs').ReadStream;
 createTrackReadstream(trackIndex: number): Readable;
 }
 export declare class Probe extends ProbeBase {
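The widened return type mostly documents that the conforming stream may now be a plain fs.ReadStream; since fs.ReadStream extends stream.Readable, callers can keep treating the result as a Readable. A minimal sketch (the import path and file paths are assumptions, not taken from the package):

  import { createWriteStream } from "node:fs";
  import type { Readable } from "node:stream";
  // Assumed deep import; Probe.probePath(...) is the factory used elsewhere in this diff.
  import { Probe } from "@editframe/assets/dist/Probe.js";

  const probe = await Probe.probePath("/media/input.mp4");        // hypothetical input file
  const conformed: Readable = probe.createConformingReadstream(); // fs.ReadStream is assignable to Readable
  conformed.pipe(createWriteStream("/tmp/conformed.mp4"));        // illustrative destination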
package/dist/Probe.js
CHANGED
@@ -4,8 +4,8 @@ import { promisify } from "node:util";
 import { createReadStream } from "node:fs";
 import * as z$1 from "zod";
 import debug from "debug";
-
-
+var execPromise = promisify(exec);
+var log = debug("ef:assets:probe");
 const AudioStreamSchema = z$1.object({
 index: z$1.number(),
 codec_name: z$1.string(),
@@ -52,7 +52,7 @@ const VideoStreamSchema = z$1.object({
 bit_rate: z$1.string().optional(),
 disposition: z$1.record(z$1.unknown())
 });
-
+var ProbeFormatSchema = z$1.object({
 filename: z$1.string(),
 nb_streams: z$1.number(),
 nb_programs: z$1.number(),
@@ -71,12 +71,12 @@ const DataStreamSchema = z$1.object({
 duration_ts: z$1.number().optional(),
 start_pts: z$1.number().optional()
 });
-
+var StreamSchema = z$1.discriminatedUnion("codec_type", [
 AudioStreamSchema,
 VideoStreamSchema,
 DataStreamSchema
 ]);
-
+var PacketSchema = z$1.object({
 stream_index: z$1.number(),
 pts: z$1.number(),
 pts_time: z$1.coerce.number(),
@@ -86,18 +86,17 @@ const PacketSchema = z$1.object({
 pos: z$1.coerce.number().optional(),
 flags: z$1.string().optional()
 });
-
+var ProbeSchema = z$1.object({
 streams: z$1.array(StreamSchema),
 format: ProbeFormatSchema
 });
-
+var PacketProbeSchema = z$1.object({
 packets: z$1.array(PacketSchema),
 format: ProbeFormatSchema,
 streams: z$1.array(StreamSchema)
 });
-
+var buildProbeArgs = (options) => {
 const streamEntries = "stream=index,codec_name,codec_long_name,codec_type,codec_tag_string,codec_tag,profile,level,width,height,coded_width,coded_height,r_frame_rate,avg_frame_rate,time_base,start_pts,start_time,duration_ts,duration,bit_rate,sample_fmt,sample_rate,channels,channel_layout,bits_per_sample,initial_padding,disposition";
-const packetEntries = "packet=stream_index,pts,pts_time,dts,dts_time,duration,pos,flags";
 return [
 "-v",
 "error",
@@ -105,7 +104,7 @@ const buildProbeArgs = (options) => {
 "-show_streams",
 "-of",
 "json",
-...options.showPackets ? ["-show_entries", `${streamEntries}
+...options.showPackets ? ["-show_entries", `${streamEntries}:packet=stream_index,pts,pts_time,dts,dts_time,duration,pos,flags`] : ["-show_entries", streamEntries]
 ];
 };
 var FFProbeRunner = class {
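The old code broke the single -show_entries value across a template-literal line break; the rewrite joins the stream and packet sections with ":", which is the separator ffprobe uses between section entries. A rough sketch of how the fixed value is assembled (variable names and the shortened entry list are illustrative, not the module's exports):

  const streamEntries = "stream=index,codec_name,codec_type,r_frame_rate"; // abbreviated for the example
  const showEntries = `${streamEntries}:packet=stream_index,pts,pts_time,dts,dts_time,duration,pos,flags`;
  const args = ["-v", "error", "-show_streams", "-of", "json", "-show_entries", showEntries];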
@@ -149,7 +148,7 @@ var FFProbeRunner = class {
 });
 stream.pipe(probe.stdin);
 try {
-
+return await Promise.race([new Promise((resolve, reject) => {
 probe.stdout.on("end", () => {
 try {
 const buffer = Buffer.concat(chunks).toString("utf8");
@@ -159,7 +158,6 @@ var FFProbeRunner = class {
 }
 });
 }), processExit]);
-return json;
 } finally {
 stream.unpipe(probe.stdin);
 probe.stdin.end();
@@ -412,15 +410,13 @@ var PacketProbe = class PacketProbe extends ProbeBase {
 const videoStream = this.videoStreams[0];
 if (!videoStream) return [];
 const videoPackets = this.packets.filter((packet) => packet.stream_index === videoStream.index);
-const
-const [num, den] = frameRate.split("/").map(Number);
+const [num, den] = videoStream.r_frame_rate.split("/").map(Number);
 if (!num || !den) return [];
 const packetDuration = den / num;
 if (videoPackets.length === 0) return [];
 const ptsTimes = videoPackets.map((p) => p.pts_time);
 const minPts = Math.min(...ptsTimes);
-const
-const totalDuration = maxPts - minPts + packetDuration;
+const totalDuration = Math.max(...ptsTimes) - minPts + packetDuration;
 return truncateDecimal(Math.round(totalDuration * 1e4) / 1e4, 5);
 }
 };
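A quick worked example of the rewritten duration math above, with illustrative numbers and a 30000/1001 frame rate:

  // r_frame_rate "30000/1001" => num = 30000, den = 1001
  const packetDuration = 1001 / 30000;                 // ≈ 0.0334 s per packet
  const ptsTimes = [0, 0.0334, 0.0667];                // pts_time values for three video packets
  const totalDuration = Math.max(...ptsTimes) - Math.min(...ptsTimes) + packetDuration; // ≈ 0.1001 s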
package/dist/VideoRenderOptions.d.ts
CHANGED
@@ -3,6 +3,7 @@ export declare const VideoRenderOptions: z.ZodObject<{
 mode: z.ZodEnum<["canvas", "screenshot"]>;
 strategy: z.ZodEnum<["v1", "v2"]>;
 showFrameBox: z.ZodOptional<z.ZodBoolean>;
+enableTracing: z.ZodOptional<z.ZodDefault<z.ZodBoolean>>;
 encoderOptions: z.ZodObject<{
 sequenceNumber: z.ZodNumber;
 keyframeIntervalMs: z.ZodNumber;
@@ -155,6 +156,7 @@ export declare const VideoRenderOptions: z.ZodObject<{
 };
 fetchHost: string;
 showFrameBox?: boolean | undefined;
+enableTracing?: boolean | undefined;
 }, {
 mode: "canvas" | "screenshot";
 strategy: "v1" | "v2";
@@ -186,5 +188,6 @@ export declare const VideoRenderOptions: z.ZodObject<{
 };
 fetchHost: string;
 showFrameBox?: boolean | undefined;
+enableTracing?: boolean | undefined;
 }>;
 export type VideoRenderOptions = z.infer<typeof VideoRenderOptions>;
package/dist/VideoRenderOptions.js
CHANGED
@@ -3,6 +3,7 @@ const VideoRenderOptions = z.object({
 mode: z.enum(["canvas", "screenshot"]),
 strategy: z.enum(["v1", "v2"]),
 showFrameBox: z.boolean().optional(),
+enableTracing: z.boolean().default(false).optional(),
 encoderOptions: z.object({
 sequenceNumber: z.number(),
 keyframeIntervalMs: z.number(),
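The new enableTracing flag is declared as ZodOptional<ZodDefault<ZodBoolean>>, so the parsed type is boolean | undefined and the field can simply be omitted. A minimal sketch of supplying it (other values are placeholders; the schema also requires encoderOptions, fetchHost, and the remaining properties shown in the .d.ts):

  // Assuming VideoRenderOptions is imported from the package's dist/VideoRenderOptions module.
  const options = VideoRenderOptions.parse({
    mode: "canvas",
    strategy: "v2",
    enableTracing: true, // omit to leave it undefined
    // ...encoderOptions, fetchHost, and the rest of the required fields
  });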
package/dist/generateFragmentIndex.js
CHANGED
@@ -2,27 +2,23 @@ import { PacketProbe } from "./Probe.js";
 import debug from "debug";
 import { Readable, Transform, Writable } from "node:stream";
 import { pipeline } from "node:stream/promises";
-
+var log = debug("ef:generateFragmentIndex");
 function constructH264CodecString(codecTagString, profile, level) {
 if (codecTagString !== "avc1" || !profile || level === void 0) return codecTagString;
-const
+const profileIdc = {
 "Baseline": 66,
 "Main": 77,
 "High": 100,
 "High 10": 110,
 "High 422": 122,
 "High 444": 244
-};
-const profileIdc = profileMap[profile];
+}[profile];
 if (!profileIdc) return codecTagString;
 const profileHex = profileIdc.toString(16).padStart(2, "0");
 const constraintFlags = "00";
 const levelHex = level.toString(16).padStart(2, "0");
 return `${codecTagString}.${profileHex}${constraintFlags}${levelHex}`;
 }
-/**
- * Streaming MP4 box parser that detects box boundaries without loading entire file into memory
- */
 var StreamingBoxParser = class extends Transform {
 constructor() {
 super({ objectMode: false });
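For reference, a worked example of what the helper returns (calling it directly here purely for illustration):

  // "High" => profileIdc 100 => hex "64"; level 31 => hex "1f"; constraint flags are fixed at "00"
  constructH264CodecString("avc1", "High", 31); // => "avc1.64001f"
  // Non-avc1 tags or unknown profiles fall through unchanged:
  constructH264CodecString("hvc1", "Main", 93); // => "hvc1"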
@@ -140,8 +136,7 @@ const generateFragmentIndex = async (inputStream, startTimeOffsetMs, trackIdMapp
 const fragmentStart = fragment.offset;
 const fragmentEnd = fragment.offset + fragment.size;
 const videoPackets = probe.packets.filter((packet) => {
-
-return stream?.codec_type === "video" && packet.pos !== void 0 && packet.pos >= fragmentStart && packet.pos < fragmentEnd;
+return videoStreams.find((s) => s.index === packet.stream_index)?.codec_type === "video" && packet.pos !== void 0 && packet.pos >= fragmentStart && packet.pos < fragmentEnd;
 }).map((packet) => ({
 pts: packet.pts,
 dts: packet.dts,
@@ -149,8 +144,7 @@ const generateFragmentIndex = async (inputStream, startTimeOffsetMs, trackIdMapp
 isKeyframe: packet.flags?.includes("K") ?? false
 }));
 const audioPackets = probe.packets.filter((packet) => {
-
-return stream?.codec_type === "audio" && packet.pos !== void 0 && packet.pos >= fragmentStart && packet.pos < fragmentEnd;
+return audioStreams.find((s) => s.index === packet.stream_index)?.codec_type === "audio" && packet.pos !== void 0 && packet.pos >= fragmentStart && packet.pos < fragmentEnd;
 }).map((packet) => ({
 pts: packet.pts,
 dts: packet.dts,
@@ -196,20 +190,15 @@ const generateFragmentIndex = async (inputStream, startTimeOffsetMs, trackIdMapp
 const keyframe = videoPackets.find((p) => p.isKeyframe) || firstPacket;
 const segmentCts = Math.round(keyframe.pts * timescale / timebase.den);
 const segmentDts = Math.round(keyframe.dts * timescale / timebase.den);
-const
-const nextKeyframe = nextFragmentData?.videoPackets.find((p) => p.isKeyframe);
+const nextKeyframe = fragmentTimingData[fragmentData.fragmentIndex + 1]?.videoPackets.find((p) => p.isKeyframe);
 let segmentDuration;
-if (nextKeyframe)
-
-segmentDuration = nextSegmentCts - segmentCts;
-} else {
+if (nextKeyframe) segmentDuration = Math.round(nextKeyframe.pts * timescale / timebase.den) - segmentCts;
+else {
 const allVideoPackets$1 = probe.packets.filter((p) => {
-
-return stream?.codec_type === "video";
+return videoStreams.find((s) => s.index === p.stream_index)?.codec_type === "video";
 }).sort((a, b) => a.pts - b.pts);
 const lastPacket = allVideoPackets$1[allVideoPackets$1.length - 1];
-
-segmentDuration = streamEnd - segmentCts;
+segmentDuration = Math.round((lastPacket.pts + (lastPacket.duration || 0)) * timescale / timebase.den) - segmentCts;
 }
 segments.push({
 cts: segmentCts,
@@ -224,8 +213,7 @@ const generateFragmentIndex = async (inputStream, startTimeOffsetMs, trackIdMapp
 const firstPacket = totalVideoPackets[0];
 const lastPacket = totalVideoPackets[totalVideoPackets.length - 1];
 const firstPts = Math.round(firstPacket.pts * timescale / timebase.den);
-
-totalDuration = lastPts - firstPts;
+totalDuration = Math.round(lastPacket.pts * timescale / timebase.den) - firstPts;
 }
 const finalTrackId = trackIdMapping?.[videoStream.index] ?? videoStream.index + 1;
 trackIndexes[finalTrackId] = {
@@ -247,8 +235,7 @@ const generateFragmentIndex = async (inputStream, startTimeOffsetMs, trackIdMapp
 }
 for (const audioStream of audioStreams) {
 const segments = [];
-const
-const totalSampleCount = totalAudioPackets.length;
+const totalSampleCount = probe.packets.filter((p) => p.stream_index === audioStream.index).length;
 const timebase = probe.audioTimebase;
 if (!timebase) {
 console.warn("No timebase found for audio stream");
@@ -274,20 +261,15 @@ const generateFragmentIndex = async (inputStream, startTimeOffsetMs, trackIdMapp
 const firstPacket = audioPackets[0];
 const segmentCts = Math.round(firstPacket.pts * timescale / timebase.den);
 const segmentDts = Math.round(firstPacket.dts * timescale / timebase.den);
-const
-const nextFirstPacket = nextFragmentData?.audioPackets[0];
+const nextFirstPacket = fragmentTimingData[fragmentData.fragmentIndex + 1]?.audioPackets[0];
 let segmentDuration;
-if (nextFirstPacket)
-
-segmentDuration = nextSegmentCts - segmentCts;
-} else {
+if (nextFirstPacket) segmentDuration = Math.round(nextFirstPacket.pts * timescale / timebase.den) - segmentCts;
+else {
 const allAudioPackets$1 = probe.packets.filter((p) => {
-
-return stream?.codec_type === "audio";
+return audioStreams.find((s) => s.index === p.stream_index)?.codec_type === "audio";
 }).sort((a, b) => a.pts - b.pts);
 const lastPacket = allAudioPackets$1[allAudioPackets$1.length - 1];
-
-segmentDuration = streamEnd - segmentCts;
+segmentDuration = Math.round((lastPacket.pts + (lastPacket.duration || 0)) * timescale / timebase.den) - segmentCts;
 }
 segments.push({
 cts: segmentCts,
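In both the video and audio branches the intermediate variables are gone and the duration is computed inline: the CTS of the next fragment's first keyframe (or first audio packet) minus this segment's CTS, falling back to the last packet's end time for the final segment. With illustrative numbers only (timescale 90000, timebase.den 15360):

  const segmentCts = Math.round(15360 * 90000 / 15360);                   // keyframe.pts = 15360 => 90000 ticks
  const segmentDuration = Math.round(46080 * 90000 / 15360) - segmentCts; // next keyframe pts = 46080 => 180000 ticks (2 s at 90 kHz)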
package/dist/generateSingleTrack.js
CHANGED
@@ -4,7 +4,7 @@ import { idempotentTask } from "./idempotentTask.js";
 import debug from "debug";
 import { PassThrough } from "node:stream";
 import { basename } from "node:path";
-
+var log = debug("ef:generateSingleTrack");
 const generateSingleTrackFromPath = async (absolutePath, trackId) => {
 log(`Generating track ${trackId} for ${absolutePath}`);
 const probe = await Probe.probePath(absolutePath);
@@ -23,8 +23,7 @@ const generateSingleTrackFromPath = async (absolutePath, trackId) => {
 outputStream.destroy(error);
 indexStream.destroy(error);
 });
-const
-const fragmentIndexPromise = generateFragmentIndex(indexStream, void 0, trackIdMapping);
+const fragmentIndexPromise = generateFragmentIndex(indexStream, void 0, { 1: trackId });
 fragmentIndexPromise.then(() => {
 if (sourceStreamEnded) outputStream.end();
 else trackStream.once("end", () => {
@@ -38,7 +37,7 @@ const generateSingleTrackFromPath = async (absolutePath, trackId) => {
 fragmentIndex: fragmentIndexPromise
 };
 };
-
+idempotentTask({
 label: "track-single",
 filename: (absolutePath, trackId) => `${basename(absolutePath)}.track-${trackId}.mp4`,
 runner: async (absolutePath, trackId) => {
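Both call-site changes in this release replace a locally built mapping with an inline literal; generateFragmentIndex keys the mapping by source stream index and falls back to index + 1 (see finalTrackId in the hunk above). A small sketch of that lookup with made-up values:

  const trackIdMapping: Record<number, number> = { 1: 7 }; // map stream index 1 to output track id 7
  const finalTrackId = trackIdMapping[1] ?? 1 + 1;          // => 7; an unmapped stream at index 0 would get 1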
package/dist/idempotentTask.js
CHANGED
@@ -43,7 +43,7 @@ const idempotentTask = ({ label, filename, runner }) => {
 await new Promise((resolve, reject) => {
 readable.on("error", reject);
 writeStream.on("error", reject);
-writeStream.on("finish", resolve);
+writeStream.on("finish", () => resolve());
 });
 const { rename } = await import("node:fs/promises");
 await rename(tempPath, downloadCachePath);
@@ -91,7 +91,7 @@ const idempotentTask = ({ label, filename, runner }) => {
 await new Promise((resolve, reject) => {
 result.on("error", reject);
 writeStream.on("error", reject);
-writeStream.on("finish", resolve);
+writeStream.on("finish", () => resolve());
 });
 const { rename } = await import("node:fs/promises");
 await rename(tempPath, cachePath);
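Node's "finish" event carries no payload, so resolve and () => resolve() behave identically at runtime; the wrapper mainly keeps the executor assignable to a Promise<void> under stricter typings. A self-contained sketch (the path is illustrative):

  import { createWriteStream } from "node:fs";

  const writeStream = createWriteStream("/tmp/example.tmp");
  writeStream.end("hello");
  await new Promise<void>((resolve, reject) => {
    writeStream.on("error", reject);
    writeStream.on("finish", () => resolve()); // ignores listener args, resolves with undefined
  });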
package/dist/tasks/cacheImage.js
CHANGED
@@ -1,7 +1,7 @@
 import { idempotentTask } from "../idempotentTask.js";
 import { createReadStream } from "node:fs";
 import path from "node:path";
-
+var cacheImageTask = idempotentTask({
 label: "image",
 filename: (absolutePath) => path.basename(absolutePath),
 runner: async (absolutePath) => {
package/dist/tasks/findOrCreateCaptions.js
CHANGED
@@ -3,15 +3,15 @@ import { exec } from "node:child_process";
 import { promisify } from "node:util";
 import debug from "debug";
 import { basename } from "node:path";
-
-
+var execPromise = promisify(exec);
+var log = debug("ef:generateCaptions");
 const generateCaptionDataFromPath = async (absolutePath) => {
 const command = `whisper_timestamped --language en --efficient --output_format vtt ${absolutePath}`;
 log(`Running command: ${command}`);
 const { stdout } = await execPromise(command);
 return stdout;
 };
-
+var generateCaptionDataTask = idempotentTask({
 label: "captions",
 filename: (absolutePath) => `${basename(absolutePath)}.captions.json`,
 runner: generateCaptionDataFromPath
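For context, the command the caption task shells out to expands roughly as below, and its stdout is cached under the basename-derived filename from the task definition (the media path is hypothetical):

  const absolutePath = "/media/episode-01.mp4";
  const command = `whisper_timestamped --language en --efficient --output_format vtt ${absolutePath}`;
  // => "whisper_timestamped --language en --efficient --output_format vtt /media/episode-01.mp4"
  // cached as "episode-01.mp4.captions.json" per the task's filename: `${basename(absolutePath)}.captions.json`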
package/dist/tasks/generateTrack.js
CHANGED
@@ -3,10 +3,8 @@ import { generateSingleTrackFromPath } from "../generateSingleTrack.js";
 import debug from "debug";
 import { basename } from "node:path";
 const generateTrackFromPath = async (absolutePath, trackId) => {
-
-
-const result = await generateSingleTrackFromPath(absolutePath, trackId);
-return result.stream;
+debug("ef:generateTrackFragment")(`Generating track ${trackId} for ${absolutePath}`);
+return (await generateSingleTrackFromPath(absolutePath, trackId)).stream;
 };
 const generateTrackTask = idempotentTask({
 label: "track",
package/dist/tasks/generateTrackFragmentIndex.js
CHANGED
@@ -25,13 +25,12 @@ const generateTrackFragmentIndexFromPath = async (absolutePath) => {
 const trackId = streamIndex + 1;
 log(`Processing track ${trackId} (${stream.codec_type})`);
 const trackStream = probe.createTrackReadstream(streamIndex);
-const
-const singleTrackIndexes = await generateFragmentIndex(trackStream, startTimeOffsetMs, trackIdMapping);
+const singleTrackIndexes = await generateFragmentIndex(trackStream, startTimeOffsetMs, { 0: trackId });
 Object.assign(trackFragmentIndexes, singleTrackIndexes);
 }
 return trackFragmentIndexes;
 };
-
+var generateTrackFragmentIndexTask = idempotentTask({
 label: "trackFragmentIndex",
 filename: (absolutePath) => `${basename(absolutePath)}.tracks.json`,
 runner: async (absolutePath) => {