@twick/ffmpeg 0.14.22 → 0.15.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.cjs +888 -0
- package/dist/index.cjs.map +1 -0
- package/dist/index.d.cts +138 -0
- package/dist/index.d.ts +138 -6
- package/dist/index.js +840 -22
- package/dist/index.js.map +1 -0
- package/package.json +29 -11
- package/dist/ffmpeg-exporter-server.d.ts +0 -26
- package/dist/ffmpeg-exporter-server.d.ts.map +0 -1
- package/dist/ffmpeg-exporter-server.js +0 -90
- package/dist/generate-audio.d.ts +0 -13
- package/dist/generate-audio.d.ts.map +0 -1
- package/dist/generate-audio.js +0 -195
- package/dist/image-stream.d.ts +0 -8
- package/dist/image-stream.d.ts.map +0 -1
- package/dist/image-stream.js +0 -25
- package/dist/index.d.ts.map +0 -1
- package/dist/settings.d.ts +0 -22
- package/dist/settings.d.ts.map +0 -1
- package/dist/settings.js +0 -56
- package/dist/tsconfig.tsbuildinfo +0 -1
- package/dist/utils.d.ts +0 -21
- package/dist/utils.d.ts.map +0 -1
- package/dist/utils.js +0 -232
- package/dist/video-frame-extractor.d.ts +0 -58
- package/dist/video-frame-extractor.d.ts.map +0 -1
- package/dist/video-frame-extractor.js +0 -265
package/dist/index.cjs
ADDED
|
@@ -0,0 +1,888 @@
|
|
|
1
|
+
"use strict";
// esbuild-generated CommonJS interop helpers (do not edit by hand).
var __create = Object.create;
var __defProp = Object.defineProperty;
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
var __getOwnPropNames = Object.getOwnPropertyNames;
var __getProtoOf = Object.getPrototypeOf;
var __hasOwnProp = Object.prototype.hasOwnProperty;
// Defines a lazy, enumerable getter on `target` for every key in `all`.
var __export = (target, all) => {
  for (var name in all)
    __defProp(target, name, { get: all[name], enumerable: true });
};
// Copies own properties of `from` onto `to` as getters, skipping `except`
// and any key already present on `to`; preserves enumerability.
var __copyProps = (to, from, except, desc) => {
  if (from && typeof from === "object" || typeof from === "function") {
    for (let key of __getOwnPropNames(from))
      if (!__hasOwnProp.call(to, key) && key !== except)
        __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
  }
  return to;
};
// Wraps a CommonJS module so it can be consumed like an ES module namespace.
var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
  // If the importer is in node compatibility mode or this is not an ESM
  // file that has been converted to a CommonJS file using a Babel-
  // compatible transform (i.e. "__esModule" has not been set), then set
  // "default" to the CommonJS "module.exports" for node compatibility.
  isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
  mod
));
// Marks a namespace object as an ES module and exposes it for CommonJS export.
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
|
|
29
|
+
|
|
30
|
+
// src/index.ts
// Public surface of the bundle; each entry becomes a lazy getter on
// module.exports via __export/__toCommonJS.
var index_exports = {};
__export(index_exports, {
  FFmpegExporterServer: () => FFmpegExporterServer,
  VideoFrameExtractor: () => VideoFrameExtractor,
  audioCodecs: () => audioCodecs,
  checkForAudioStream: () => checkForAudioStream,
  concatenateMedia: () => concatenateMedia,
  createSilentAudioFile: () => createSilentAudioFile,
  doesFileExist: () => doesFileExist,
  extensions: () => extensions,
  ffmpegSettings: () => ffmpegSettings,
  generateAudio: () => generateAudio,
  getSampleRate: () => getSampleRate,
  getVideoCodec: () => getVideoCodec,
  getVideoDimensions: () => getVideoDimensions,
  getVideoDuration: () => getVideoDuration,
  getVideoMetadata: () => getVideoMetadata,
  makeSureFolderExists: () => makeSureFolderExists,
  mergeAudioWithVideo: () => mergeAudioWithVideo,
  mergeMedia: () => mergeMedia,
  resolvePath: () => resolvePath
});
module.exports = __toCommonJS(index_exports);
|
|
54
|
+
|
|
55
|
+
// src/ffmpeg-exporter-server.ts
|
|
56
|
+
var import_telemetry = require("@twick/telemetry");
|
|
57
|
+
var import_fluent_ffmpeg = __toESM(require("fluent-ffmpeg"), 1);
|
|
58
|
+
var os = __toESM(require("os"), 1);
|
|
59
|
+
var path3 = __toESM(require("path"), 1);
|
|
60
|
+
|
|
61
|
+
// src/image-stream.ts
|
|
62
|
+
var import_stream = require("stream");
|
|
63
|
+
// A Readable that emits exactly the buffers handed to it via pushImage().
// Pushing `null` signals end-of-stream, as usual for Readable streams.
var ImageStream = class extends import_stream.Readable {
  constructor() {
    super(...arguments);
    // Most recently queued frame; null doubles as the end-of-stream marker.
    this.image = null;
    // True while `image` has not yet been handed to the stream machinery.
    this.hasData = false;
  }
  /** Queues one encoded frame (or null to finish) and flushes it right away. */
  pushImage(image) {
    this.image = image;
    this.hasData = true;
    this._read();
  }
  // eslint-disable-next-line @typescript-eslint/naming-convention
  _read() {
    if (!this.hasData) {
      return;
    }
    this.hasData = false;
    this.push(this.image);
  }
};
|
|
82
|
+
|
|
83
|
+
// src/settings.ts
|
|
84
|
+
var ffmpegInstaller = __toESM(require("@ffmpeg-installer/ffmpeg"), 1);
|
|
85
|
+
var ffprobeInstaller = __toESM(require("@ffprobe-installer/ffprobe"), 1);
|
|
86
|
+
// Recognised ffmpeg `-loglevel` values, ordered least to most verbose.
var ffmpegLogLevels = [
  "quiet",
  "panic",
  "fatal",
  "error",
  "warning",
  "info",
  "verbose",
  "debug",
  "trace"
];
/**
 * Holds the ffmpeg/ffprobe binary paths and the ffmpeg log level.
 * Defaults come from the installer packages; the FFMPEG_PATH,
 * FFPROBE_PATH and FFMPEG_LOG_LEVEL environment variables override them
 * (the log level only when it names a known level, otherwise "error").
 */
var FfmpegSettingState = class {
  constructor() {
    this.ffmpegPath = process.env.FFMPEG_PATH ? process.env.FFMPEG_PATH : ffmpegInstaller.path;
    this.ffprobePath = process.env.FFPROBE_PATH ? process.env.FFPROBE_PATH : ffprobeInstaller.path;
    const requestedLevel = process.env.FFMPEG_LOG_LEVEL;
    this.logLevel = requestedLevel && ffmpegLogLevels.includes(requestedLevel) ? requestedLevel : "error";
  }
  /** Path of the ffmpeg binary currently in use. */
  getFfmpegPath() {
    return this.ffmpegPath;
  }
  setFfmpegPath(ffmpegPath) {
    this.ffmpegPath = ffmpegPath;
  }
  /** Path of the ffprobe binary currently in use. */
  getFfprobePath() {
    return this.ffprobePath;
  }
  setFfprobePath(ffprobePath) {
    this.ffprobePath = ffprobePath;
  }
  /** Current ffmpeg `-loglevel` value. */
  getLogLevel() {
    return this.logLevel;
  }
  setLogLevel(logLevel) {
    this.logLevel = logLevel;
  }
};
// Shared singleton consulted by every helper in this bundle.
var ffmpegSettings = new FfmpegSettingState();
|
|
132
|
+
|
|
133
|
+
// src/ffmpeg-exporter-server.ts
|
|
134
|
+
// src/ffmpeg-exporter-server.ts
// `-pix_fmt` value per output format; webm and proRes use alpha-capable
// pixel formats, mp4 does not carry alpha.
var pixelFormats = {
  mp4: "yuv420p",
  webm: "yuva420p",
  proRes: "yuva444p10le"
};
// Container file extension per output format (proRes lives in a .mov).
var extensions = {
  mp4: "mp4",
  webm: "webm",
  proRes: "mov"
};
|
|
144
|
+
var FFmpegExporterServer = class {
  /**
   * Builds (but does not start) the ffmpeg pipeline that turns a stream of
   * piped image frames into a video file inside a per-job temp folder.
   * Throws when the settings were produced for a different exporter.
   */
  constructor(settings) {
    if (settings.exporter.name !== "@twick/core/ffmpeg") {
      throw new Error("Invalid exporter");
    }
    this.settings = settings;
    this.format = settings.exporter.options.format;
    // Scratch folder unique to this render job.
    this.jobFolder = path3.join(
      os.tmpdir(),
      `twick-${this.settings.name}-${settings.hiddenFolderId}`
    );
    this.stream = new ImageStream();
    import_fluent_ffmpeg.default.setFfmpegPath(ffmpegSettings.getFfmpegPath());
    this.command = (0, import_fluent_ffmpeg.default)();
    // Frames arrive as an image2pipe stream at the project frame rate.
    this.command.input(this.stream).inputFormat("image2pipe").inputFps(settings.fps);
    const size = {
      x: Math.round(settings.size.x * settings.resolutionScale),
      y: Math.round(settings.size.y * settings.resolutionScale)
    };
    // Container-specific pixel format; "-shortest" stops at the shortest input.
    this.command.output(path3.join(this.jobFolder, `visuals.${extensions[this.format]}`)).outputOptions([`-pix_fmt ${pixelFormats[this.format]}`, "-shortest"]).outputFps(settings.fps).size(`${size.x}x${size.y}`);
    if (this.format === "proRes") {
      // ProRes 4444 profile keeps the alpha channel.
      this.command.outputOptions(["-c:v prores_ks", "-profile:v 4444"]);
    }
    this.command.outputOptions(["-movflags +faststart"]);
    // Settles when ffmpeg finishes or fails; awaited in end()/kill().
    this.promise = new Promise((resolve, reject) => {
      this.command.on("end", resolve).on("error", reject);
    });
  }
  /** Starts the ffmpeg process; frames can then be fed via handleFrame(). */
  async start() {
    this.command.run();
  }
  /**
   * Decodes one frame delivered as a data URL ("data:...;base64,<payload>")
   * and pushes its raw bytes into the image pipe.
   */
  async handleFrame({ data }) {
    const base64Data = data.slice(data.indexOf(",") + 1);
    this.stream.pushImage(Buffer.from(base64Data, "base64"));
  }
  /**
   * Finishes the render by closing the frame stream. `result === 1`
   * (presumably an aborted render — confirm against the caller) kills
   * ffmpeg and reports any resulting error via telemetry; otherwise we
   * simply wait for ffmpeg to flush and exit.
   */
  async end(result) {
    this.stream.pushImage(null);
    if (result === 1) {
      try {
        this.command.kill("SIGKILL");
        await this.promise;
      } catch (err) {
        (0, import_telemetry.sendEvent)(import_telemetry.EventName.Error, { message: err.message });
      }
    } else {
      await this.promise;
    }
  }
  /** Force-stops ffmpeg, deliberately swallowing the resulting error. */
  async kill() {
    try {
      this.command.kill("SIGKILL");
      await this.promise;
    } catch (_) {
      return;
    }
  }
};
|
|
201
|
+
|
|
202
|
+
// src/generate-audio.ts
|
|
203
|
+
var import_fluent_ffmpeg2 = __toESM(require("fluent-ffmpeg"), 1);
|
|
204
|
+
var fs2 = __toESM(require("fs"), 1);
|
|
205
|
+
var os3 = __toESM(require("os"), 1);
|
|
206
|
+
var path5 = __toESM(require("path"), 1);
|
|
207
|
+
|
|
208
|
+
// src/utils.ts
|
|
209
|
+
var fs = __toESM(require("fs"), 1);
|
|
210
|
+
var os2 = __toESM(require("os"), 1);
|
|
211
|
+
var path4 = __toESM(require("path"), 1);
|
|
212
|
+
var import_uuid = require("uuid");
|
|
213
|
+
var fluentFfmpeg = require("fluent-ffmpeg");
|
|
214
|
+
/**
 * Maps an asset reference to a loadable location.
 * Remote URLs (http/https) and data URIs pass through untouched; anything
 * else is treated as a file under the `public` directory next to `output`.
 */
function resolvePath(output, assetPath) {
  const passThroughPrefixes = ["http://", "https://", "data:"];
  const isExternal = passThroughPrefixes.some((prefix) => assetPath.startsWith(prefix));
  return isExternal ? assetPath : path4.join(output, "../public", assetPath);
}
|
|
223
|
+
/**
 * Ensures `folderPath` exists, creating intermediate directories as needed.
 *
 * Replaces the previous access-then-mkdir sequence with a single recursive
 * mkdir: with `recursive: true`, mkdir succeeds when the directory already
 * exists, which also removes the check-then-create (TOCTOU) race.
 */
async function makeSureFolderExists(folderPath) {
  await fs.promises.mkdir(folderPath, { recursive: true });
}
|
|
228
|
+
/**
 * Concatenates media `files` into `outputFile` using ffmpeg's concat
 * demuxer. Writes a temporary "file '<path>'" list, runs a stream-copy
 * concat, and removes the list file on both success and failure.
 */
async function concatenateMedia(files, outputFile) {
  const tempFile = path4.join(os2.tmpdir(), `${(0, import_uuid.v4)()}.txt`);
  // Single quotes inside paths are escaped for the concat list format.
  const fileContent = files.map((file) => `file '${file.replace(/'/g, "\\'")}'`).join("\n");
  await fs.promises.writeFile(tempFile, fileContent);
  return new Promise((resolve, reject) => {
    fluentFfmpeg.setFfmpegPath(ffmpegSettings.getFfmpegPath());
    const ffmpegCommand = fluentFfmpeg();
    // "-safe 0" allows absolute paths in the list; the protocol whitelist
    // lets entries reference remote (http/https) inputs as well.
    ffmpegCommand.input(tempFile).inputOptions([
      "-f concat",
      "-safe 0",
      "-protocol_whitelist file,http,https,tcp,tls"
    ]).outputOptions(["-c copy"]).on("error", (err) => {
      console.error("Error:", err);
      // Best-effort cleanup of the list file before failing.
      fs.promises.unlink(tempFile).catch(console.error);
      reject(err);
    }).on("end", () => {
      fs.promises.unlink(tempFile).catch(console.error);
      resolve();
    }).save(outputFile);
  });
}
|
|
249
|
+
/**
 * Writes `duration` seconds of stereo silence (48 kHz) to `filePath` using
 * ffmpeg's anullsrc lavfi source. Resolves with the written path.
 */
async function createSilentAudioFile(filePath, duration) {
  fluentFfmpeg.setFfmpegPath(ffmpegSettings.getFfmpegPath());
  return new Promise((resolve, reject) => {
    fluentFfmpeg().addInput(`anullsrc=channel_layout=stereo:sample_rate=${48e3}`).inputFormat("lavfi").duration(duration).on("end", () => {
      resolve(filePath);
    }).on("error", (err) => {
      console.error("Error creating silent audio file:", err);
      reject(err);
    }).save(filePath);
  });
}
|
|
260
|
+
/**
 * Probes `filePath` with ffprobe and resolves with its duration in
 * seconds. Rejects when probing fails or no duration is reported.
 */
async function getVideoDuration(filePath) {
  fluentFfmpeg.setFfprobePath(ffmpegSettings.getFfprobePath());
  return new Promise((resolve, reject) => {
    fluentFfmpeg.ffprobe(filePath, (probeError, metadata) => {
      if (probeError) {
        reject(probeError);
        return;
      }
      const { duration } = metadata.format;
      if (!duration) {
        reject(new Error("Could not determine video duration."));
        return;
      }
      resolve(duration);
    });
  });
}
|
|
277
|
+
/**
 * Probes `filePath` and resolves with the width/height of its first video
 * stream. Rejects when probing fails or no usable video stream exists.
 */
async function getVideoDimensions(filePath) {
  // Consistency fix: every other probe helper resolves ffprobe through the
  // configured settings first; this one previously did not.
  fluentFfmpeg.setFfprobePath(ffmpegSettings.getFfprobePath());
  return new Promise((resolve, reject) => {
    fluentFfmpeg.ffprobe(filePath, (err, metadata) => {
      if (err) {
        console.error("Error getting video dimensions:", err);
        reject(new Error("Failed to get video dimensions"));
        return;
      }
      const videoStream = metadata.streams.find(
        (stream) => stream.codec_type === "video"
      );
      if (videoStream && videoStream.width && videoStream.height) {
        resolve({
          width: videoStream.width,
          height: videoStream.height
        });
        // Bug fix: the original fell through here and also called reject()
        // on the success path (a no-op after resolve, but wrong control flow).
        return;
      }
      reject(new Error("Could not find video dimensions in metadata"));
    });
  });
}
|
|
298
|
+
/**
 * Resolves true when `filePath` exists and is visible to this process,
 * false otherwise. Never rejects.
 */
async function doesFileExist(filePath) {
  return fs.promises.access(filePath, fs.constants.F_OK).then(
    () => true,
    () => false
  );
}
|
|
306
|
+
/**
 * Muxes `audioPath` into `videoPath` and writes the result to
 * `outputPath`. The video track is stream-copied; the audio track is
 * re-encoded with `audioCodec` (default "aac"). "-strict experimental"
 * is passed for codec compatibility with some ffmpeg builds.
 */
async function mergeAudioWithVideo(audioPath, videoPath, outputPath, audioCodec = "aac") {
  fluentFfmpeg.setFfmpegPath(ffmpegSettings.getFfmpegPath());
  return new Promise((resolve, reject) => {
    fluentFfmpeg().input(videoPath).input(audioPath).outputOptions([
      "-c:v",
      "copy",
      "-c:a",
      audioCodec,
      "-strict",
      "experimental"
    ]).on("end", () => {
      resolve();
    }).on("error", (err) => {
      console.error(`Error merging video and audio: ${err.message}`);
      reject(err);
    }).save(outputPath);
  });
}
|
|
324
|
+
/**
 * Resolves true when `file` contains at least one audio stream.
 * Rejects when ffprobe cannot inspect the file.
 */
async function checkForAudioStream(file) {
  fluentFfmpeg.setFfprobePath(ffmpegSettings.getFfprobePath());
  return new Promise((resolve, reject) => {
    fluentFfmpeg.ffprobe(file, (probeError, metadata) => {
      if (probeError) {
        console.error(`error checking for audioStream for file ${file}`, probeError);
        reject(probeError);
        return;
      }
      const hasAudio = metadata.streams.some((s) => s.codec_type === "audio");
      resolve(hasAudio);
    });
  });
}
|
|
340
|
+
/**
 * Probes `filePath` and resolves with the sample rate of its first audio
 * stream. Rejects on probe failure or when no audio stream is present.
 */
async function getSampleRate(filePath) {
  fluentFfmpeg.setFfprobePath(ffmpegSettings.getFfprobePath());
  return new Promise((resolve, reject) => {
    fluentFfmpeg.ffprobe(filePath, (probeError, metadata) => {
      if (probeError) {
        reject(probeError);
        return;
      }
      const audioStream = metadata.streams.find((s) => s.codec_type === "audio");
      if (!audioStream || !audioStream.sample_rate) {
        reject(new Error("No audio stream found"));
        return;
      }
      resolve(audioStream.sample_rate);
    });
  });
}
|
|
357
|
+
/**
 * Probes `filePath` and resolves with the codec name of its first video
 * stream. Rejects on probe failure or when no video stream is present.
 */
async function getVideoCodec(filePath) {
  fluentFfmpeg.setFfprobePath(ffmpegSettings.getFfprobePath());
  return new Promise((resolve, reject) => {
    fluentFfmpeg.ffprobe(filePath, (probeError, metadata) => {
      if (probeError) {
        reject(probeError);
        return;
      }
      const videoStream = metadata.streams.find((s) => s.codec_type === "video");
      if (!videoStream || !videoStream.codec_name) {
        reject(new Error("No video stream found"));
        return;
      }
      resolve(videoStream.codec_name);
    });
  });
}
|
|
374
|
+
/**
 * Probes `filePath` and resolves with `{ codec, width, height }` for its
 * first video stream. Rejects on probe failure or when any of the three
 * values is missing from the metadata.
 */
async function getVideoMetadata(filePath) {
  fluentFfmpeg.setFfprobePath(ffmpegSettings.getFfprobePath());
  return new Promise((resolve, reject) => {
    fluentFfmpeg.ffprobe(filePath, (err, metadata) => {
      if (err) {
        reject(err);
        return;
      }
      const videoStream = metadata.streams.find((s) => s.codec_type === "video");
      if (videoStream && videoStream.codec_name && videoStream.width && videoStream.height) {
        resolve({
          codec: videoStream.codec_name,
          width: videoStream.width,
          height: videoStream.height
        });
      } else {
        reject(new Error("Unable to retrieve complete video information"));
      }
    });
  });
}
|
|
395
|
+
|
|
396
|
+
// src/generate-audio.ts
|
|
397
|
+
// src/generate-audio.ts
// Audio codec passed to `-c:a` for each supported container format.
var audioCodecs = {
  mp4: "aac",
  webm: "libopus",
  proRes: "aac"
};
// Output sample rate (Hz) used for all intermediate WAV tracks.
var SAMPLE_RATE = 48e3;
|
|
403
|
+
/**
 * Scans per-frame asset lists and produces one placement record per
 * distinct asset key: the frame range it occupies (`startInVideo`,
 * `endInVideo`, `duration` in frames), its media-time span scaled by
 * playback rate (`durationInSeconds`), and the trim/volume/rate values
 * captured from its first appearance.
 *
 * @param frames Array indexed by frame number; each entry is the list of
 *               assets visible on that frame.
 * @returns Array of placement records, in order of first appearance.
 */
function getAssetPlacement(frames) {
  const assets = [];
  // key -> first/last media time observed for that asset.
  const assetTimeMap = /* @__PURE__ */ new Map();
  // key -> placement record. Replaces the previous `assets.find()` inside
  // the frame loop, which was accidentally O(frames x assets).
  const assetByKey = /* @__PURE__ */ new Map();
  for (let frame = 0; frame < frames.length; frame++) {
    for (const asset of frames[frame]) {
      if (!assetTimeMap.has(asset.key)) {
        assetTimeMap.set(asset.key, {
          start: asset.currentTime,
          end: asset.currentTime
        });
        const placed = {
          key: asset.key,
          src: asset.src,
          type: asset.type,
          startInVideo: frame,
          endInVideo: frame,
          duration: 0,
          // Placeholder, will be recalculated later based on frames
          durationInSeconds: 0,
          // Placeholder, will be calculated based on currentTime
          playbackRate: asset.playbackRate,
          volume: asset.volume,
          trimLeftInSeconds: asset.currentTime
        };
        assets.push(placed);
        assetByKey.set(asset.key, placed);
      } else {
        const timeInfo = assetTimeMap.get(asset.key);
        if (timeInfo) {
          // Mutating the stored object is enough; no re-`set` needed.
          timeInfo.end = asset.currentTime;
        }
        const existingAsset = assetByKey.get(asset.key);
        if (existingAsset) {
          existingAsset.endInVideo = frame;
        }
      }
    }
  }
  assets.forEach((asset) => {
    const timeInfo = assetTimeMap.get(asset.key);
    if (timeInfo) {
      asset.durationInSeconds = (timeInfo.end - timeInfo.start) / asset.playbackRate;
    }
    asset.duration = asset.endInVideo - asset.startInVideo + 1;
  });
  return assets;
}
|
|
449
|
+
/**
 * Builds the chain of ffmpeg `atempo` filters needed to realise an
 * arbitrary playback rate. A single atempo stage only accepts values in
 * [0.5, 100], so faster/slower rates are decomposed into several stages.
 */
function calculateAtempoFilters(playbackRate) {
  const filters = [];
  // Speed-up: peel off factors of 100 until the remainder fits one stage.
  let speedUp = playbackRate;
  while (speedUp > 100) {
    filters.push("atempo=100.0");
    speedUp /= 100;
  }
  if (speedUp > 1) {
    filters.push(`atempo=${speedUp}`);
  }
  // Slow-down: peel off factors of 0.5 until the remainder fits one stage.
  let slowDown = playbackRate;
  while (slowDown < 0.5) {
    filters.push("atempo=0.5");
    slowDown *= 2;
  }
  if (slowDown < 1) {
    filters.push(`atempo=${slowDown}`);
  }
  return filters;
}
|
|
469
|
+
/**
 * Renders one asset's audio into a stereo 16-bit WAV (at SAMPLE_RATE) in
 * `tempDir`, applying playback rate (atempo), trimming, end padding, start
 * delay and volume so the track lines up with frames
 * [startFrame, endFrame] of the video. Resolves with the written path.
 */
async function prepareAudio(outputDir, tempDir, asset, startFrame, endFrame, fps) {
  // Asset keys may contain path-hostile characters ("/", "[", "]").
  const sanitizedKey = asset.key.replace(/[/[\]]/g, "-");
  const outputPath = path5.join(tempDir, `${sanitizedKey}.wav`);
  const trimLeft = asset.trimLeftInSeconds / asset.playbackRate;
  // NOTE(review): trim end is one frame of slack plus the smaller of
  // "asset media runs out" / "scene window ends" — confirm this is the
  // intended atrim end point.
  const trimRight = 1 / fps + Math.min(
    trimLeft + asset.durationInSeconds,
    trimLeft + (endFrame - startFrame) / fps
  );
  // Delay before the asset starts, in milliseconds (adelay expects ms).
  const padStart = asset.startInVideo / fps * 1e3;
  const assetSampleRate = await getSampleRate(
    resolvePath(outputDir, asset.src)
  );
  // Samples of silence appended so every track spans the full scene length
  // (apad's pad_len is expressed in samples).
  const padEnd = Math.max(
    0,
    assetSampleRate * (endFrame - startFrame + 1) / fps - assetSampleRate * asset.duration / fps - assetSampleRate * padStart / 1e3
  );
  const atempoFilters = calculateAtempoFilters(asset.playbackRate);
  const resolvedPath = resolvePath(outputDir, asset.src);
  await new Promise((resolve, reject) => {
    // Filter order matters: rate change first, then trim/pad/delay/volume.
    const audioFilters = [
      ...atempoFilters,
      `atrim=start=${trimLeft}:end=${trimRight}`,
      `apad=pad_len=${padEnd}`,
      `adelay=${padStart}|${padStart}|${padStart}`,
      `volume=${asset.volume}`
    ].join(",");
    import_fluent_ffmpeg2.default.setFfmpegPath(ffmpegSettings.getFfmpegPath());
    (0, import_fluent_ffmpeg2.default)(resolvedPath).audioChannels(2).audioCodec("pcm_s16le").audioFrequency(SAMPLE_RATE).outputOptions([`-af`, audioFilters]).on("end", () => {
      resolve();
    }).on("error", (err) => {
      console.error(
        `Error processing audio for asset key: ${asset.key}`,
        err
      );
      reject(err);
    }).save(outputPath);
  });
  return outputPath;
}
|
|
508
|
+
/**
 * Mixes the given WAV tracks into a single `audio.wav` inside `tempDir`.
 * amix attenuates output by the input count, so the trailing `volume`
 * filter multiplies by the same count to restore the original levels.
 */
async function mergeAudioTracks(tempDir, audioFilenames) {
  return new Promise((resolve, reject) => {
    import_fluent_ffmpeg2.default.setFfmpegPath(ffmpegSettings.getFfmpegPath());
    const command = (0, import_fluent_ffmpeg2.default)();
    audioFilenames.forEach((filename) => {
      command.input(filename);
    });
    command.complexFilter([
      `amix=inputs=${audioFilenames.length}:duration=longest,volume=${audioFilenames.length}`
    ]).outputOptions(["-c:a", "pcm_s16le"]).on("end", () => {
      resolve();
    }).on("error", (err) => {
      console.error(`Error merging audio tracks: ${err.message}`);
      reject(err);
    }).save(path5.join(tempDir, `audio.wav`));
  });
}
|
|
525
|
+
/**
 * Renders one audio track per placed asset and mixes them into
 * `<os-tmp>/<tempDir>/audio.wav`. Assets that are muted (volume 0),
 * paused (playbackRate 0) or carry no audio stream are skipped.
 * Returns the list of per-asset WAV files that were produced.
 */
async function generateAudio({
  outputDir,
  tempDir,
  assets,
  startFrame,
  endFrame,
  fps
}) {
  const fullTempDir = path5.join(os3.tmpdir(), tempDir);
  await makeSureFolderExists(outputDir);
  await makeSureFolderExists(fullTempDir);
  const assetPositions = getAssetPlacement(assets);
  const audioFilenames = [];
  for (const asset of assetPositions) {
    // Non-audio assets (e.g. video) may or may not carry an audio stream;
    // probe them before attempting extraction.
    let hasAudioStream = true;
    if (asset.type !== "audio") {
      hasAudioStream = await checkForAudioStream(
        resolvePath(outputDir, asset.src)
      );
    }
    if (asset.playbackRate !== 0 && asset.volume !== 0 && hasAudioStream) {
      const filename = await prepareAudio(
        outputDir,
        fullTempDir,
        asset,
        startFrame,
        endFrame,
        fps
      );
      audioFilenames.push(filename);
    }
  }
  if (audioFilenames.length > 0) {
    await mergeAudioTracks(fullTempDir, audioFilenames);
  }
  return audioFilenames;
}
|
|
562
|
+
/**
 * Produces the final `<outputFilename>.<ext>` in `outputDir`: when a mixed
 * `audio.wav` exists in the temp folder it is muxed with the rendered
 * visuals, otherwise the visuals file is copied through unchanged.
 */
async function mergeMedia(outputFilename, outputDir, tempDir, format) {
  const fullTempDir = path5.join(os3.tmpdir(), tempDir);
  await makeSureFolderExists(outputDir);
  await makeSureFolderExists(fullTempDir);
  const audioWavExists = fs2.existsSync(path5.join(fullTempDir, `audio.wav`));
  if (audioWavExists) {
    await mergeAudioWithVideo(
      path5.join(fullTempDir, `audio.wav`),
      path5.join(fullTempDir, `visuals.${extensions[format]}`),
      path5.join(outputDir, `${outputFilename}.${extensions[format]}`),
      audioCodecs[format]
    );
  } else {
    const destination = path5.join(
      outputDir,
      `${outputFilename}.${extensions[format]}`
    );
    await fs2.promises.copyFile(
      path5.join(fullTempDir, `visuals.${extensions[format]}`),
      destination
    );
  }
  // NOTE(review): only temp folders whose name ends in "-undefined"
  // (presumably produced when an upstream id was missing) are removed
  // here — confirm whether every job folder should be cleaned up instead.
  if (fullTempDir.endsWith("-undefined")) {
    await fs2.promises.rm(fullTempDir, { recursive: true, force: true }).catch(() => {
    });
  }
}
|
|
589
|
+
|
|
590
|
+
// src/video-frame-extractor.ts
|
|
591
|
+
var import_telemetry2 = require("@twick/telemetry");
|
|
592
|
+
var fs3 = __toESM(require("fs"), 1);
|
|
593
|
+
var os4 = __toESM(require("os"), 1);
|
|
594
|
+
var path6 = __toESM(require("path"), 1);
|
|
595
|
+
var import_uuid2 = require("uuid");
|
|
596
|
+
var ffmpeg3 = require("fluent-ffmpeg");
|
|
597
|
+
var _VideoFrameExtractor = class _VideoFrameExtractor {
|
|
598
|
+
constructor(filePath, startTime, fps, duration) {
|
|
599
|
+
this.ffmpegPath = ffmpegSettings.getFfmpegPath();
|
|
600
|
+
this.buffer = Buffer.alloc(0);
|
|
601
|
+
this.bufferOffset = 0;
|
|
602
|
+
// Images are buffered in memory until they are requested.
|
|
603
|
+
this.imageBuffers = [];
|
|
604
|
+
this.lastImage = null;
|
|
605
|
+
this.framesProcessed = 0;
|
|
606
|
+
this.width = 0;
|
|
607
|
+
this.height = 0;
|
|
608
|
+
this.frameSize = 0;
|
|
609
|
+
this.codec = null;
|
|
610
|
+
this.process = null;
|
|
611
|
+
this.terminated = false;
|
|
612
|
+
this.state = "processing";
|
|
613
|
+
this.filePath = filePath;
|
|
614
|
+
this.downloadedFilePath = _VideoFrameExtractor.downloadedVideoMap.get(
|
|
615
|
+
filePath
|
|
616
|
+
)?.localPath;
|
|
617
|
+
this.startTimeOffset = _VideoFrameExtractor.downloadedVideoMap.get(filePath)?.startTimeOffset;
|
|
618
|
+
this.startTime = startTime;
|
|
619
|
+
this.duration = duration;
|
|
620
|
+
this.toTime = this.getEndTime(this.startTime);
|
|
621
|
+
this.fps = fps;
|
|
622
|
+
getVideoMetadata(this.downloadedFilePath).then((metadata) => {
|
|
623
|
+
this.width = metadata.width;
|
|
624
|
+
this.height = metadata.height;
|
|
625
|
+
this.frameSize = this.width * this.height * 4;
|
|
626
|
+
this.buffer = Buffer.alloc(this.frameSize);
|
|
627
|
+
this.codec = metadata.codec;
|
|
628
|
+
if (this.startTime >= this.duration) {
|
|
629
|
+
this.process = this.createFfmpegProcessToExtractFirstFrame(
|
|
630
|
+
this.downloadedFilePath,
|
|
631
|
+
this.codec
|
|
632
|
+
);
|
|
633
|
+
return;
|
|
634
|
+
}
|
|
635
|
+
this.process = this.createFfmpegProcess(
|
|
636
|
+
this.startTime - this.startTimeOffset,
|
|
637
|
+
this.toTime,
|
|
638
|
+
this.downloadedFilePath,
|
|
639
|
+
this.fps,
|
|
640
|
+
this.codec
|
|
641
|
+
);
|
|
642
|
+
});
|
|
643
|
+
}
|
|
644
|
+
static downloadVideoChunk(url, startTime, endTime) {
|
|
645
|
+
const outputDir = path6.join(os4.tmpdir(), `twick-decoder-chunks`);
|
|
646
|
+
if (!fs3.existsSync(outputDir)) {
|
|
647
|
+
fs3.mkdirSync(outputDir, { recursive: true });
|
|
648
|
+
}
|
|
649
|
+
return new Promise((resolve, reject) => {
|
|
650
|
+
ffmpeg3.ffprobe(url, (err, metadata) => {
|
|
651
|
+
if (err) {
|
|
652
|
+
reject(err);
|
|
653
|
+
return;
|
|
654
|
+
}
|
|
655
|
+
const format = metadata.format.format_name?.split(",")[-1] || "mp4";
|
|
656
|
+
const outputFileName = `chunk_${(0, import_uuid2.v4)()}.${format}`;
|
|
657
|
+
const outputPath = path6.join(outputDir, outputFileName);
|
|
658
|
+
const toleranceInSeconds = 0.5;
|
|
659
|
+
const adjustedStartTime = Math.max(startTime - toleranceInSeconds, 0);
|
|
660
|
+
ffmpeg3(url).setFfmpegPath(ffmpegSettings.getFfmpegPath()).inputOptions([
|
|
661
|
+
`-ss ${adjustedStartTime}`,
|
|
662
|
+
`-to ${endTime + toleranceInSeconds}`
|
|
663
|
+
]).outputOptions(["-c copy"]).output(outputPath).on("end", () => {
|
|
664
|
+
this.downloadedVideoMap.set(url, {
|
|
665
|
+
localPath: outputPath,
|
|
666
|
+
startTimeOffset: adjustedStartTime
|
|
667
|
+
});
|
|
668
|
+
resolve(outputPath);
|
|
669
|
+
}).on("error", (err2) => reject(err2)).run();
|
|
670
|
+
});
|
|
671
|
+
});
|
|
672
|
+
}
|
|
673
|
+
getTime() {
|
|
674
|
+
return this.startTime + this.framesProcessed / this.fps;
|
|
675
|
+
}
|
|
676
|
+
getLastTime() {
|
|
677
|
+
return this.startTime + (this.framesProcessed - 1) / this.fps;
|
|
678
|
+
}
|
|
679
|
+
getLastFrame() {
|
|
680
|
+
return this.lastImage;
|
|
681
|
+
}
|
|
682
|
+
getWidth() {
|
|
683
|
+
return this.width;
|
|
684
|
+
}
|
|
685
|
+
getHeight() {
|
|
686
|
+
return this.height;
|
|
687
|
+
}
|
|
688
|
+
getEndTime(startTime) {
|
|
689
|
+
return Math.min(
|
|
690
|
+
startTime + _VideoFrameExtractor.chunkLengthInSeconds,
|
|
691
|
+
this.duration
|
|
692
|
+
);
|
|
693
|
+
}
|
|
694
|
+
getArgs(codec, range, fps) {
|
|
695
|
+
const inputOptions = [];
|
|
696
|
+
const outputOptions = [];
|
|
697
|
+
inputOptions.push("-loglevel", ffmpegSettings.getLogLevel());
|
|
698
|
+
if (range) {
|
|
699
|
+
inputOptions.push(
|
|
700
|
+
...["-ss", range[0].toFixed(2), "-to", range[1].toFixed(2)]
|
|
701
|
+
);
|
|
702
|
+
}
|
|
703
|
+
if (codec === "vp9") {
|
|
704
|
+
inputOptions.push("-vcodec", "libvpx-vp9");
|
|
705
|
+
}
|
|
706
|
+
if (fps) {
|
|
707
|
+
outputOptions.push("-vf", `fps=fps=${fps}`);
|
|
708
|
+
}
|
|
709
|
+
if (!range) {
|
|
710
|
+
outputOptions.push("-vframes", "1");
|
|
711
|
+
}
|
|
712
|
+
outputOptions.push("-f", "rawvideo");
|
|
713
|
+
outputOptions.push("-pix_fmt", "rgba");
|
|
714
|
+
return { inputOptions, outputOptions };
|
|
715
|
+
}
|
|
716
|
+
createFfmpegProcess(startTime, toTime, filePath, fps, codec) {
|
|
717
|
+
const { inputOptions, outputOptions } = this.getArgs(
|
|
718
|
+
codec,
|
|
719
|
+
[startTime, toTime],
|
|
720
|
+
fps
|
|
721
|
+
);
|
|
722
|
+
const process2 = ffmpeg3(filePath).setFfmpegPath(this.ffmpegPath).inputOptions(inputOptions).outputOptions(outputOptions).on("end", () => {
|
|
723
|
+
this.handleClose(0);
|
|
724
|
+
}).on("error", (err) => {
|
|
725
|
+
this.handleError(err);
|
|
726
|
+
}).on("stderr", (stderrLine) => {
|
|
727
|
+
console.log(stderrLine);
|
|
728
|
+
}).on("stdout", (stderrLine) => {
|
|
729
|
+
console.log(stderrLine);
|
|
730
|
+
});
|
|
731
|
+
const ffstream = process2.pipe();
|
|
732
|
+
ffstream.on("data", (data) => {
|
|
733
|
+
this.processData(data);
|
|
734
|
+
});
|
|
735
|
+
return process2;
|
|
736
|
+
}
|
|
737
|
+
/**
|
|
738
|
+
* We call this in the case that the time requested is greater than the
|
|
739
|
+
* duration of the video. In this case, we want to display the first frame
|
|
740
|
+
* of the video.
|
|
741
|
+
*
|
|
742
|
+
* Note: This does NOT match the behavior of the old implementation
|
|
743
|
+
* inside of 2d/src/lib/components/Video.ts. In the old implementation, the
|
|
744
|
+
* last frame is shown instead of the first frame.
|
|
745
|
+
*/
|
|
746
|
+
createFfmpegProcessToExtractFirstFrame(filePath, codec) {
|
|
747
|
+
const { inputOptions, outputOptions } = this.getArgs(
|
|
748
|
+
codec,
|
|
749
|
+
void 0,
|
|
750
|
+
void 0
|
|
751
|
+
);
|
|
752
|
+
const process2 = ffmpeg3(filePath).setFfmpegPath(this.ffmpegPath).inputOptions(inputOptions).outputOptions(outputOptions).on("end", () => {
|
|
753
|
+
this.handleClose(0);
|
|
754
|
+
}).on("error", (err) => {
|
|
755
|
+
this.handleError(err);
|
|
756
|
+
}).on("stderr", (stderrLine) => {
|
|
757
|
+
console.log(stderrLine);
|
|
758
|
+
}).on("stdout", (stderrLine) => {
|
|
759
|
+
console.log(stderrLine);
|
|
760
|
+
});
|
|
761
|
+
const ffstream = process2.pipe();
|
|
762
|
+
ffstream.on("data", (data) => {
|
|
763
|
+
this.processData(data);
|
|
764
|
+
});
|
|
765
|
+
return process2;
|
|
766
|
+
}
|
|
767
|
+
processData(data) {
|
|
768
|
+
let dataOffset = 0;
|
|
769
|
+
while (dataOffset < data.length) {
|
|
770
|
+
const remainingSpace = this.frameSize - this.bufferOffset;
|
|
771
|
+
const chunkSize = Math.min(remainingSpace, data.length - dataOffset);
|
|
772
|
+
data.copy(
|
|
773
|
+
this.buffer,
|
|
774
|
+
this.bufferOffset,
|
|
775
|
+
dataOffset,
|
|
776
|
+
dataOffset + chunkSize
|
|
777
|
+
);
|
|
778
|
+
this.bufferOffset += chunkSize;
|
|
779
|
+
dataOffset += chunkSize;
|
|
780
|
+
if (this.bufferOffset === this.frameSize) {
|
|
781
|
+
this.imageBuffers.push(Buffer.from(this.buffer));
|
|
782
|
+
this.bufferOffset = 0;
|
|
783
|
+
}
|
|
784
|
+
}
|
|
785
|
+
}
|
|
786
|
+
async popImage() {
|
|
787
|
+
if (this.imageBuffers.length) {
|
|
788
|
+
const image2 = this.imageBuffers.shift();
|
|
789
|
+
this.framesProcessed++;
|
|
790
|
+
this.lastImage = image2;
|
|
791
|
+
return image2;
|
|
792
|
+
}
|
|
793
|
+
if (this.state === "error") {
|
|
794
|
+
throw new Error("An error occurred while extracting the video frames.");
|
|
795
|
+
}
|
|
796
|
+
if (this.state === "done" && this.toTime >= this.duration) {
|
|
797
|
+
return this.lastImage;
|
|
798
|
+
}
|
|
799
|
+
if (this.state === "done") {
|
|
800
|
+
this.startTime = this.toTime;
|
|
801
|
+
this.toTime = Math.min(
|
|
802
|
+
this.startTime + _VideoFrameExtractor.chunkLengthInSeconds,
|
|
803
|
+
this.duration
|
|
804
|
+
);
|
|
805
|
+
if (!this.codec) {
|
|
806
|
+
throw new Error(
|
|
807
|
+
"Can't extract frames without a codec. This error should never happen."
|
|
808
|
+
);
|
|
809
|
+
}
|
|
810
|
+
this.process = this.createFfmpegProcess(
|
|
811
|
+
this.startTime,
|
|
812
|
+
this.toTime,
|
|
813
|
+
this.downloadedFilePath,
|
|
814
|
+
this.fps,
|
|
815
|
+
this.codec
|
|
816
|
+
);
|
|
817
|
+
this.state = "processing";
|
|
818
|
+
}
|
|
819
|
+
while (this.imageBuffers.length < 1) {
|
|
820
|
+
await new Promise((resolve) => setTimeout(resolve, 50));
|
|
821
|
+
}
|
|
822
|
+
const image = this.imageBuffers.shift();
|
|
823
|
+
this.framesProcessed++;
|
|
824
|
+
this.lastImage = image;
|
|
825
|
+
return image;
|
|
826
|
+
}
|
|
827
|
+
handleClose(code) {
|
|
828
|
+
this.state = code === 0 ? "done" : "error";
|
|
829
|
+
}
|
|
830
|
+
async handleError(err) {
|
|
831
|
+
const code = err.code;
|
|
832
|
+
if (this.terminated) {
|
|
833
|
+
return;
|
|
834
|
+
}
|
|
835
|
+
if (code === "ENOENT") {
|
|
836
|
+
(0, import_telemetry2.sendEvent)(import_telemetry2.EventName.Error, { error: "ffmpeg-not-found" });
|
|
837
|
+
throw new Error(
|
|
838
|
+
"Error: ffmpeg not found. Make sure ffmpeg is installed on your system."
|
|
839
|
+
);
|
|
840
|
+
} else if (err.message.includes("SIGSEGV")) {
|
|
841
|
+
(0, import_telemetry2.sendEvent)(import_telemetry2.EventName.Error, {
|
|
842
|
+
error: "ffmpeg-sigsegv",
|
|
843
|
+
message: err.message
|
|
844
|
+
});
|
|
845
|
+
throw new Error(
|
|
846
|
+
`Error: Segmentation fault when running ffmpeg. This is a common issue on Linux, you might be able to fix it by installing nscd ('sudo apt-get install nscd'). For more information, see https://docs.re.video/common-issues/ffmpeg/`
|
|
847
|
+
);
|
|
848
|
+
} else {
|
|
849
|
+
await (0, import_telemetry2.sendEvent)(import_telemetry2.EventName.Error, {
|
|
850
|
+
error: "ffmpeg-error",
|
|
851
|
+
message: err.message
|
|
852
|
+
});
|
|
853
|
+
throw new Error(
|
|
854
|
+
`An ffmpeg error occurred while fetching frames from source video ${this.filePath}: ${err}`
|
|
855
|
+
);
|
|
856
|
+
}
|
|
857
|
+
}
|
|
858
|
+
destroy() {
|
|
859
|
+
this.terminated = true;
|
|
860
|
+
this.process?.kill("SIGTERM");
|
|
861
|
+
}
|
|
862
|
+
};
|
|
863
|
+
// Each ffmpeg invocation decodes at most this many seconds of video.
_VideoFrameExtractor.chunkLengthInSeconds = 5;
// NOTE(review): presumably caches source paths/URLs to already-downloaded
// local files so each video is fetched once — verify against the download
// logic, which is not visible in this chunk.
_VideoFrameExtractor.downloadedVideoMap = /* @__PURE__ */ new Map();
var VideoFrameExtractor = _VideoFrameExtractor;
// Annotate the CommonJS export names for ESM import in node:
// (The `0 &&` expression never executes; the bundler emits it so Node's
// static CJS export detection can discover these named exports.)
0 && (module.exports = {
  FFmpegExporterServer,
  VideoFrameExtractor,
  audioCodecs,
  checkForAudioStream,
  concatenateMedia,
  createSilentAudioFile,
  doesFileExist,
  extensions,
  ffmpegSettings,
  generateAudio,
  getSampleRate,
  getVideoCodec,
  getVideoDimensions,
  getVideoDuration,
  getVideoMetadata,
  makeSureFolderExists,
  mergeAudioWithVideo,
  mergeMedia,
  resolvePath
});
//# sourceMappingURL=index.cjs.map
|