@twick/ffmpeg 0.14.21 → 0.15.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.cjs +888 -0
- package/dist/index.cjs.map +1 -0
- package/dist/index.d.cts +138 -0
- package/dist/index.d.ts +138 -6
- package/dist/index.js +840 -22
- package/dist/index.js.map +1 -0
- package/package.json +29 -11
- package/dist/ffmpeg-exporter-server.d.ts +0 -26
- package/dist/ffmpeg-exporter-server.d.ts.map +0 -1
- package/dist/ffmpeg-exporter-server.js +0 -90
- package/dist/generate-audio.d.ts +0 -13
- package/dist/generate-audio.d.ts.map +0 -1
- package/dist/generate-audio.js +0 -195
- package/dist/image-stream.d.ts +0 -8
- package/dist/image-stream.d.ts.map +0 -1
- package/dist/image-stream.js +0 -25
- package/dist/index.d.ts.map +0 -1
- package/dist/settings.d.ts +0 -22
- package/dist/settings.d.ts.map +0 -1
- package/dist/settings.js +0 -56
- package/dist/tsconfig.tsbuildinfo +0 -1
- package/dist/utils.d.ts +0 -21
- package/dist/utils.d.ts.map +0 -1
- package/dist/utils.js +0 -232
- package/dist/video-frame-extractor.d.ts +0 -58
- package/dist/video-frame-extractor.d.ts.map +0 -1
- package/dist/video-frame-extractor.js +0 -265
package/dist/index.js
CHANGED
@@ -1,22 +1,840 @@
-"
-
-
-
-
-
-
-
-
-
-
-
-
-
-};
-
-
-
-
-
-
-
+var __require = /* @__PURE__ */ ((x) => typeof require !== "undefined" ? require : typeof Proxy !== "undefined" ? new Proxy(x, {
+  get: (a, b) => (typeof require !== "undefined" ? require : a)[b]
+}) : x)(function(x) {
+  if (typeof require !== "undefined") return require.apply(this, arguments);
+  throw Error('Dynamic require of "' + x + '" is not supported');
+});
+
+// src/ffmpeg-exporter-server.ts
+import { EventName, sendEvent } from "@twick/telemetry";
+import ffmpeg from "fluent-ffmpeg";
+import * as os from "os";
+import * as path3 from "path";
+
+// src/image-stream.ts
+import { Readable } from "stream";
+var ImageStream = class extends Readable {
+  constructor() {
+    super(...arguments);
+    this.image = null;
+    this.hasData = false;
+  }
+  pushImage(image) {
+    this.image = image;
+    this.hasData = true;
+    this._read();
+  }
+  // eslint-disable-next-line @typescript-eslint/naming-convention
+  _read() {
+    if (this.hasData) {
+      this.hasData = false;
+      this.push(this.image);
+    }
+  }
+};
+
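ImageStream is the bridge between the renderer and ffmpeg: pushImage stores the latest buffer and _read hands it to the underlying Readable, so an image2pipe input sees one image per push and a null push ends the stream. A minimal sketch of that contract, noting that ImageStream is internal to the bundle (not a package export) and the frame buffers below are placeholders:

// Sketch only: drive an ImageStream by hand and pipe it into any Writable.
import { createWriteStream } from "fs";

const stream = new ImageStream();
stream.pipe(createWriteStream("/tmp/frames.bin")); // the exporter pipes this into ffmpeg instead

stream.pushImage(Buffer.from("fake-frame-1")); // placeholder for a PNG buffer
stream.pushImage(Buffer.from("fake-frame-2"));
stream.pushImage(null); // signals end-of-stream, which is what FFmpegExporterServer.end() does
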
+// src/settings.ts
+import * as ffmpegInstaller from "@ffmpeg-installer/ffmpeg";
+import * as ffprobeInstaller from "@ffprobe-installer/ffprobe";
+var ffmpegLogLevels = [
+  "quiet",
+  "panic",
+  "fatal",
+  "error",
+  "warning",
+  "info",
+  "verbose",
+  "debug",
+  "trace"
+];
+var FfmpegSettingState = class {
+  constructor() {
+    this.ffmpegPath = ffmpegInstaller.path;
+    this.ffprobePath = ffprobeInstaller.path;
+    if (process.env.FFMPEG_PATH) {
+      this.ffmpegPath = process.env.FFMPEG_PATH;
+    }
+    if (process.env.FFPROBE_PATH) {
+      this.ffprobePath = process.env.FFPROBE_PATH;
+    }
+    this.logLevel = "error";
+    if (process.env.FFMPEG_LOG_LEVEL && ffmpegLogLevels.includes(process.env.FFMPEG_LOG_LEVEL)) {
+      this.logLevel = process.env.FFMPEG_LOG_LEVEL;
+    }
+  }
+  getFfmpegPath() {
+    return this.ffmpegPath;
+  }
+  setFfmpegPath(ffmpegPath) {
+    this.ffmpegPath = ffmpegPath;
+  }
+  getFfprobePath() {
+    return this.ffprobePath;
+  }
+  setFfprobePath(ffprobePath) {
+    this.ffprobePath = ffprobePath;
+  }
+  getLogLevel() {
+    return this.logLevel;
+  }
+  setLogLevel(logLevel) {
+    this.logLevel = logLevel;
+  }
+};
+var ffmpegSettings = new FfmpegSettingState();
+
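The settings singleton replaces the old dist/settings.js module: it defaults to the @ffmpeg-installer/@ffprobe-installer binaries and honours FFMPEG_PATH, FFPROBE_PATH and FFMPEG_LOG_LEVEL at startup. Because ffmpegSettings is exported, the binaries can also be swapped at runtime; a small sketch (paths are illustrative):

// Sketch: point the exported settings singleton at system binaries and raise the log level.
import { ffmpegSettings } from "@twick/ffmpeg";

ffmpegSettings.setFfmpegPath("/usr/local/bin/ffmpeg");   // illustrative path
ffmpegSettings.setFfprobePath("/usr/local/bin/ffprobe"); // illustrative path
ffmpegSettings.setLogLevel("verbose");                   // only the env var is validated against ffmpegLogLevels

console.log(ffmpegSettings.getFfmpegPath(), ffmpegSettings.getLogLevel());
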
+// src/ffmpeg-exporter-server.ts
+var pixelFormats = {
+  mp4: "yuv420p",
+  webm: "yuva420p",
+  proRes: "yuva444p10le"
+};
+var extensions = {
+  mp4: "mp4",
+  webm: "webm",
+  proRes: "mov"
+};
+var FFmpegExporterServer = class {
+  constructor(settings) {
+    if (settings.exporter.name !== "@twick/core/ffmpeg") {
+      throw new Error("Invalid exporter");
+    }
+    this.settings = settings;
+    this.format = settings.exporter.options.format;
+    this.jobFolder = path3.join(
+      os.tmpdir(),
+      `twick-${this.settings.name}-${settings.hiddenFolderId}`
+    );
+    this.stream = new ImageStream();
+    ffmpeg.setFfmpegPath(ffmpegSettings.getFfmpegPath());
+    this.command = ffmpeg();
+    this.command.input(this.stream).inputFormat("image2pipe").inputFps(settings.fps);
+    const size = {
+      x: Math.round(settings.size.x * settings.resolutionScale),
+      y: Math.round(settings.size.y * settings.resolutionScale)
+    };
+    this.command.output(path3.join(this.jobFolder, `visuals.${extensions[this.format]}`)).outputOptions([`-pix_fmt ${pixelFormats[this.format]}`, "-shortest"]).outputFps(settings.fps).size(`${size.x}x${size.y}`);
+    if (this.format === "proRes") {
+      this.command.outputOptions(["-c:v prores_ks", "-profile:v 4444"]);
+    }
+    this.command.outputOptions(["-movflags +faststart"]);
+    this.promise = new Promise((resolve, reject) => {
+      this.command.on("end", resolve).on("error", reject);
+    });
+  }
+  async start() {
+    this.command.run();
+  }
+  async handleFrame({ data }) {
+    const base64Data = data.slice(data.indexOf(",") + 1);
+    this.stream.pushImage(Buffer.from(base64Data, "base64"));
+  }
+  async end(result) {
+    this.stream.pushImage(null);
+    if (result === 1) {
+      try {
+        this.command.kill("SIGKILL");
+        await this.promise;
+      } catch (err) {
+        sendEvent(EventName.Error, { message: err.message });
+      }
+    } else {
+      await this.promise;
+    }
+  }
+  async kill() {
+    try {
+      this.command.kill("SIGKILL");
+      await this.promise;
+    } catch (_) {
+      return;
+    }
+  }
+};
+
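FFmpegExporterServer replaces dist/ffmpeg-exporter-server.js: frames arrive as data URLs via handleFrame, are pushed through the ImageStream into an image2pipe input, and are written to visuals.<ext> in a per-job temp folder with a format-specific pixel format (plus prores_ks for proRes). The settings object is supplied by the @twick/core renderer; the sketch below only shows the fields this constructor actually reads, with illustrative values, and is not the full settings type:

// Sketch: the subset of renderer settings consumed above. Values are illustrative.
import { FFmpegExporterServer } from "@twick/ffmpeg";

const exporter = new FFmpegExporterServer({
  name: "my-project",             // used in the temp job folder name
  fps: 30,
  size: { x: 1920, y: 1080 },
  resolutionScale: 1,
  hiddenFolderId: "job-123",
  exporter: {
    name: "@twick/core/ffmpeg",   // anything else throws "Invalid exporter"
    options: { format: "mp4" },   // "mp4" | "webm" | "proRes"
  },
});

await exporter.start();
// handleFrame expects a data URL; everything after the first comma is treated as base64.
await exporter.handleFrame({ data: "data:image/png;base64,iVBORw0KGgo..." });
await exporter.end(0); // pushes null into the stream and waits for ffmpeg to finish
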
+// src/generate-audio.ts
+import ffmpeg2 from "fluent-ffmpeg";
+import * as fs2 from "fs";
+import * as os3 from "os";
+import * as path5 from "path";
+
+// src/utils.ts
+import * as fs from "fs";
+import * as os2 from "os";
+import * as path4 from "path";
+import { v4 as uuidv4 } from "uuid";
+var fluentFfmpeg = __require("fluent-ffmpeg");
+function resolvePath(output, assetPath) {
+  let resolvedPath;
+  if (assetPath.startsWith("http://") || assetPath.startsWith("https://") || assetPath.startsWith("data:")) {
+    resolvedPath = assetPath;
+  } else {
+    resolvedPath = path4.join(output, "../public", assetPath);
+  }
+  return resolvedPath;
+}
+async function makeSureFolderExists(folderPath) {
+  if (await fs.promises.access(folderPath).then(() => false).catch(() => true)) {
+    await fs.promises.mkdir(folderPath, { recursive: true });
+  }
+}
+async function concatenateMedia(files, outputFile) {
+  const tempFile = path4.join(os2.tmpdir(), `${uuidv4()}.txt`);
+  const fileContent = files.map((file) => `file '${file.replace(/'/g, "\\'")}'`).join("\n");
+  await fs.promises.writeFile(tempFile, fileContent);
+  return new Promise((resolve, reject) => {
+    fluentFfmpeg.setFfmpegPath(ffmpegSettings.getFfmpegPath());
+    const ffmpegCommand = fluentFfmpeg();
+    ffmpegCommand.input(tempFile).inputOptions([
+      "-f concat",
+      "-safe 0",
+      "-protocol_whitelist file,http,https,tcp,tls"
+    ]).outputOptions(["-c copy"]).on("error", (err) => {
+      console.error("Error:", err);
+      fs.promises.unlink(tempFile).catch(console.error);
+      reject(err);
+    }).on("end", () => {
+      fs.promises.unlink(tempFile).catch(console.error);
+      resolve();
+    }).save(outputFile);
+  });
+}
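
concatenateMedia writes a temporary concat list and runs ffmpeg's concat demuxer with -c copy, so it only joins inputs that already share codecs and parameters. A short usage sketch (file names are placeholders):

// Sketch: losslessly join two clips that were encoded with identical settings.
import { concatenateMedia } from "@twick/ffmpeg";

await concatenateMedia(["part-1.mp4", "part-2.mp4"], "joined.mp4");
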
+async function createSilentAudioFile(filePath, duration) {
+  fluentFfmpeg.setFfmpegPath(ffmpegSettings.getFfmpegPath());
+  return new Promise((resolve, reject) => {
+    fluentFfmpeg().addInput(`anullsrc=channel_layout=stereo:sample_rate=${48e3}`).inputFormat("lavfi").duration(duration).on("end", () => {
+      resolve(filePath);
+    }).on("error", (err) => {
+      console.error("Error creating silent audio file:", err);
+      reject(err);
+    }).save(filePath);
+  });
+}
+async function getVideoDuration(filePath) {
+  fluentFfmpeg.setFfprobePath(ffmpegSettings.getFfprobePath());
+  return new Promise((resolve, reject) => {
+    fluentFfmpeg.ffprobe(filePath, (err, metadata) => {
+      if (err) {
+        reject(err);
+        return;
+      }
+      const duration = metadata.format.duration;
+      if (duration) {
+        resolve(duration);
+      } else {
+        reject(new Error("Could not determine video duration."));
+      }
+    });
+  });
+}
+async function getVideoDimensions(filePath) {
+  return new Promise((resolve, reject) => {
+    fluentFfmpeg.ffprobe(filePath, (err, metadata) => {
+      if (err) {
+        console.error("Error getting video dimensions:", err);
+        reject(new Error("Failed to get video dimensions"));
+        return;
+      }
+      const videoStream = metadata.streams.find(
+        (stream) => stream.codec_type === "video"
+      );
+      if (videoStream && videoStream.width && videoStream.height) {
+        resolve({
+          width: videoStream.width,
+          height: videoStream.height
+        });
+      }
+      reject(new Error("Could not find video dimensions in metadata"));
+    });
+  });
+}
+async function doesFileExist(filePath) {
+  try {
+    await fs.promises.access(filePath, fs.constants.F_OK);
+    return true;
+  } catch {
+    return false;
+  }
+}
+async function mergeAudioWithVideo(audioPath, videoPath, outputPath, audioCodec = "aac") {
+  fluentFfmpeg.setFfmpegPath(ffmpegSettings.getFfmpegPath());
+  return new Promise((resolve, reject) => {
+    fluentFfmpeg().input(videoPath).input(audioPath).outputOptions([
+      "-c:v",
+      "copy",
+      "-c:a",
+      audioCodec,
+      "-strict",
+      "experimental"
+    ]).on("end", () => {
+      resolve();
+    }).on("error", (err) => {
+      console.error(`Error merging video and audio: ${err.message}`);
+      reject(err);
+    }).save(outputPath);
+  });
+}
+async function checkForAudioStream(file) {
+  fluentFfmpeg.setFfprobePath(ffmpegSettings.getFfprobePath());
+  return new Promise((resolve, reject) => {
+    fluentFfmpeg.ffprobe(file, (err, metadata) => {
+      if (err) {
+        console.error(`error checking for audioStream for file ${file}`, err);
+        reject(err);
+        return;
+      }
+      const audioStreams = metadata.streams.filter(
+        (s) => s.codec_type === "audio"
+      );
+      resolve(audioStreams.length > 0);
+    });
+  });
+}
+async function getSampleRate(filePath) {
+  fluentFfmpeg.setFfprobePath(ffmpegSettings.getFfprobePath());
+  return new Promise((resolve, reject) => {
+    fluentFfmpeg.ffprobe(filePath, (err, metadata) => {
+      if (err) {
+        reject(err);
+        return;
+      }
+      const audioStream = metadata.streams.find((s) => s.codec_type === "audio");
+      if (audioStream && audioStream.sample_rate) {
+        resolve(audioStream.sample_rate);
+      } else {
+        reject(new Error("No audio stream found"));
+      }
+    });
+  });
+}
+async function getVideoCodec(filePath) {
+  fluentFfmpeg.setFfprobePath(ffmpegSettings.getFfprobePath());
+  return new Promise((resolve, reject) => {
+    fluentFfmpeg.ffprobe(filePath, (err, metadata) => {
+      if (err) {
+        reject(err);
+        return;
+      }
+      const videoStream = metadata.streams.find((s) => s.codec_type === "video");
+      if (videoStream && videoStream.codec_name) {
+        resolve(videoStream.codec_name);
+      } else {
+        reject(new Error("No video stream found"));
+      }
+    });
+  });
+}
+async function getVideoMetadata(filePath) {
+  fluentFfmpeg.setFfprobePath(ffmpegSettings.getFfprobePath());
+  return new Promise((resolve, reject) => {
+    fluentFfmpeg.ffprobe(filePath, (err, metadata) => {
+      if (err) {
+        reject(err);
+        return;
+      }
+      const videoStream = metadata.streams.find((s) => s.codec_type === "video");
+      if (videoStream && videoStream.codec_name && videoStream.width && videoStream.height) {
+        resolve({
+          codec: videoStream.codec_name,
+          width: videoStream.width,
+          height: videoStream.height
+        });
+      } else {
+        reject(new Error("Unable to retrieve complete video information"));
+      }
+    });
+  });
+}
+
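The remaining utils wrap ffprobe lookups (duration, dimensions, codec, sample rate, presence of an audio stream) and the audio/video mux, all routed through the shared ffmpegSettings paths. A small probe sketch (the file name is a placeholder):

// Sketch: inspect a local file with the exported probe helpers.
import {
  checkForAudioStream,
  getVideoDuration,
  getVideoMetadata,
} from "@twick/ffmpeg";

const duration = await getVideoDuration("input.mp4");          // seconds, from ffprobe
const { codec, width, height } = await getVideoMetadata("input.mp4");
const hasAudio = await checkForAudioStream("input.mp4");
console.log({ duration, codec, width, height, hasAudio });
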
+// src/generate-audio.ts
+var audioCodecs = {
+  mp4: "aac",
+  webm: "libopus",
+  proRes: "aac"
+};
+var SAMPLE_RATE = 48e3;
+function getAssetPlacement(frames) {
+  const assets = [];
+  const assetTimeMap = /* @__PURE__ */ new Map();
+  for (let frame = 0; frame < frames.length; frame++) {
+    for (const asset of frames[frame]) {
+      if (!assetTimeMap.has(asset.key)) {
+        assetTimeMap.set(asset.key, {
+          start: asset.currentTime,
+          end: asset.currentTime
+        });
+        assets.push({
+          key: asset.key,
+          src: asset.src,
+          type: asset.type,
+          startInVideo: frame,
+          endInVideo: frame,
+          duration: 0,
+          // Placeholder, will be recalculated later based on frames
+          durationInSeconds: 0,
+          // Placeholder, will be calculated based on currentTime
+          playbackRate: asset.playbackRate,
+          volume: asset.volume,
+          trimLeftInSeconds: asset.currentTime
+        });
+      } else {
+        const timeInfo = assetTimeMap.get(asset.key);
+        if (timeInfo) {
+          timeInfo.end = asset.currentTime;
+          assetTimeMap.set(asset.key, timeInfo);
+        }
+        const existingAsset = assets.find((a) => a.key === asset.key);
+        if (existingAsset) {
+          existingAsset.endInVideo = frame;
+        }
+      }
+    }
+  }
+  assets.forEach((asset) => {
+    const timeInfo = assetTimeMap.get(asset.key);
+    if (timeInfo) {
+      asset.durationInSeconds = (timeInfo.end - timeInfo.start) / asset.playbackRate;
+    }
+    asset.duration = asset.endInVideo - asset.startInVideo + 1;
+  });
+  return assets;
+}
+function calculateAtempoFilters(playbackRate) {
+  const atempoFilters = [];
+  let rate = playbackRate;
+  while (rate > 100) {
+    atempoFilters.push("atempo=100.0");
+    rate /= 100;
+  }
+  if (rate > 1) {
+    atempoFilters.push(`atempo=${rate}`);
+  }
+  rate = playbackRate;
+  while (rate < 0.5) {
+    atempoFilters.push("atempo=0.5");
+    rate *= 2;
+  }
+  if (rate < 1) {
+    atempoFilters.push(`atempo=${rate}`);
+  }
+  return atempoFilters;
+}
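
calculateAtempoFilters chains several atempo steps because a single atempo filter only accepts a limited factor per step; the pushed factors multiply back to the requested playbackRate. A playbackRate of 0.1, for instance, becomes atempo=0.5, atempo=0.5, atempo=0.5, atempo=0.8 (0.5 × 0.5 × 0.5 × 0.8 = 0.1). The sketch below checks that product; the function is internal to the bundle, not a package export:

// Worked check, no ffmpeg needed: the chained atempo factors multiply back to the rate.
const filters = calculateAtempoFilters(0.1); // ["atempo=0.5", "atempo=0.5", "atempo=0.5", "atempo=0.8"]
const factors = filters.map((f) => Number(f.split("=")[1]));
console.log(factors.reduce((a, b) => a * b, 1)); // ~0.1
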
+async function prepareAudio(outputDir, tempDir, asset, startFrame, endFrame, fps) {
+  const sanitizedKey = asset.key.replace(/[/[\]]/g, "-");
+  const outputPath = path5.join(tempDir, `${sanitizedKey}.wav`);
+  const trimLeft = asset.trimLeftInSeconds / asset.playbackRate;
+  const trimRight = 1 / fps + Math.min(
+    trimLeft + asset.durationInSeconds,
+    trimLeft + (endFrame - startFrame) / fps
+  );
+  const padStart = asset.startInVideo / fps * 1e3;
+  const assetSampleRate = await getSampleRate(
+    resolvePath(outputDir, asset.src)
+  );
+  const padEnd = Math.max(
+    0,
+    assetSampleRate * (endFrame - startFrame + 1) / fps - assetSampleRate * asset.duration / fps - assetSampleRate * padStart / 1e3
+  );
+  const atempoFilters = calculateAtempoFilters(asset.playbackRate);
+  const resolvedPath = resolvePath(outputDir, asset.src);
+  await new Promise((resolve, reject) => {
+    const audioFilters = [
+      ...atempoFilters,
+      `atrim=start=${trimLeft}:end=${trimRight}`,
+      `apad=pad_len=${padEnd}`,
+      `adelay=${padStart}|${padStart}|${padStart}`,
+      `volume=${asset.volume}`
+    ].join(",");
+    ffmpeg2.setFfmpegPath(ffmpegSettings.getFfmpegPath());
+    ffmpeg2(resolvedPath).audioChannels(2).audioCodec("pcm_s16le").audioFrequency(SAMPLE_RATE).outputOptions([`-af`, audioFilters]).on("end", () => {
+      resolve();
+    }).on("error", (err) => {
+      console.error(
+        `Error processing audio for asset key: ${asset.key}`,
+        err
+      );
+      reject(err);
+    }).save(outputPath);
+  });
+  return outputPath;
+}
+async function mergeAudioTracks(tempDir, audioFilenames) {
+  return new Promise((resolve, reject) => {
+    ffmpeg2.setFfmpegPath(ffmpegSettings.getFfmpegPath());
+    const command = ffmpeg2();
+    audioFilenames.forEach((filename) => {
+      command.input(filename);
+    });
+    command.complexFilter([
+      `amix=inputs=${audioFilenames.length}:duration=longest,volume=${audioFilenames.length}`
+    ]).outputOptions(["-c:a", "pcm_s16le"]).on("end", () => {
+      resolve();
+    }).on("error", (err) => {
+      console.error(`Error merging audio tracks: ${err.message}`);
+      reject(err);
+    }).save(path5.join(tempDir, `audio.wav`));
+  });
+}
+async function generateAudio({
+  outputDir,
+  tempDir,
+  assets,
+  startFrame,
+  endFrame,
+  fps
+}) {
+  const fullTempDir = path5.join(os3.tmpdir(), tempDir);
+  await makeSureFolderExists(outputDir);
+  await makeSureFolderExists(fullTempDir);
+  const assetPositions = getAssetPlacement(assets);
+  const audioFilenames = [];
+  for (const asset of assetPositions) {
+    let hasAudioStream = true;
+    if (asset.type !== "audio") {
+      hasAudioStream = await checkForAudioStream(
+        resolvePath(outputDir, asset.src)
+      );
+    }
+    if (asset.playbackRate !== 0 && asset.volume !== 0 && hasAudioStream) {
+      const filename = await prepareAudio(
+        outputDir,
+        fullTempDir,
+        asset,
+        startFrame,
+        endFrame,
+        fps
+      );
+      audioFilenames.push(filename);
+    }
+  }
+  if (audioFilenames.length > 0) {
+    await mergeAudioTracks(fullTempDir, audioFilenames);
+  }
+  return audioFilenames;
+}
+async function mergeMedia(outputFilename, outputDir, tempDir, format) {
+  const fullTempDir = path5.join(os3.tmpdir(), tempDir);
+  await makeSureFolderExists(outputDir);
+  await makeSureFolderExists(fullTempDir);
+  const audioWavExists = fs2.existsSync(path5.join(fullTempDir, `audio.wav`));
+  if (audioWavExists) {
+    await mergeAudioWithVideo(
+      path5.join(fullTempDir, `audio.wav`),
+      path5.join(fullTempDir, `visuals.${extensions[format]}`),
+      path5.join(outputDir, `${outputFilename}.${extensions[format]}`),
+      audioCodecs[format]
+    );
+  } else {
+    const destination = path5.join(
+      outputDir,
+      `${outputFilename}.${extensions[format]}`
+    );
+    await fs2.promises.copyFile(
+      path5.join(fullTempDir, `visuals.${extensions[format]}`),
+      destination
+    );
+  }
+  if (fullTempDir.endsWith("-undefined")) {
+    await fs2.promises.rm(fullTempDir, { recursive: true, force: true }).catch(() => {
+    });
+  }
+}
+
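generateAudio turns per-frame asset descriptors into one trimmed, padded and delayed WAV per asset, amixes them into audio.wav inside the temp directory, and mergeMedia then muxes that with the exporter's visuals.<ext> (or just copies the visuals when no audio was produced). A rough sketch of the calling convention; in a real render the descriptors come from the @twick/core player and tempDir has to match the folder the exporter wrote visuals into:

// Sketch: one audio asset visible on frames 0..2. The descriptor fields mirror
// what getAssetPlacement reads; values and paths are illustrative.
import { generateAudio, mergeMedia } from "@twick/ffmpeg";

const clip = (t) => ({
  key: "music-1",
  src: "audio/music.mp3",   // resolved against <outputDir>/../public by resolvePath
  type: "audio",
  playbackRate: 1,
  volume: 1,
  currentTime: t,           // seconds into the asset at this frame
});
const assets = [[clip(0)], [clip(1 / 30)], [clip(2 / 30)]]; // one array per rendered frame

await generateAudio({
  outputDir: "./output",
  tempDir: "twick-my-project-job-123", // must contain visuals.mp4 from the exporter
  assets,
  startFrame: 0,
  endFrame: 2,
  fps: 30,
});
await mergeMedia("final", "./output", "twick-my-project-job-123", "mp4");
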
+// src/video-frame-extractor.ts
+import { EventName as EventName2, sendEvent as sendEvent2 } from "@twick/telemetry";
+import * as fs3 from "fs";
+import * as os4 from "os";
+import * as path6 from "path";
+import { v4 as uuidv42 } from "uuid";
+var ffmpeg3 = __require("fluent-ffmpeg");
+var _VideoFrameExtractor = class _VideoFrameExtractor {
+  constructor(filePath, startTime, fps, duration) {
+    this.ffmpegPath = ffmpegSettings.getFfmpegPath();
+    this.buffer = Buffer.alloc(0);
+    this.bufferOffset = 0;
+    // Images are buffered in memory until they are requested.
+    this.imageBuffers = [];
+    this.lastImage = null;
+    this.framesProcessed = 0;
+    this.width = 0;
+    this.height = 0;
+    this.frameSize = 0;
+    this.codec = null;
+    this.process = null;
+    this.terminated = false;
+    this.state = "processing";
+    this.filePath = filePath;
+    this.downloadedFilePath = _VideoFrameExtractor.downloadedVideoMap.get(
+      filePath
+    )?.localPath;
+    this.startTimeOffset = _VideoFrameExtractor.downloadedVideoMap.get(filePath)?.startTimeOffset;
+    this.startTime = startTime;
+    this.duration = duration;
+    this.toTime = this.getEndTime(this.startTime);
+    this.fps = fps;
+    getVideoMetadata(this.downloadedFilePath).then((metadata) => {
+      this.width = metadata.width;
+      this.height = metadata.height;
+      this.frameSize = this.width * this.height * 4;
+      this.buffer = Buffer.alloc(this.frameSize);
+      this.codec = metadata.codec;
+      if (this.startTime >= this.duration) {
+        this.process = this.createFfmpegProcessToExtractFirstFrame(
+          this.downloadedFilePath,
+          this.codec
+        );
+        return;
+      }
+      this.process = this.createFfmpegProcess(
+        this.startTime - this.startTimeOffset,
+        this.toTime,
+        this.downloadedFilePath,
+        this.fps,
+        this.codec
+      );
+    });
+  }
+  static downloadVideoChunk(url, startTime, endTime) {
+    const outputDir = path6.join(os4.tmpdir(), `twick-decoder-chunks`);
+    if (!fs3.existsSync(outputDir)) {
+      fs3.mkdirSync(outputDir, { recursive: true });
+    }
+    return new Promise((resolve, reject) => {
+      ffmpeg3.ffprobe(url, (err, metadata) => {
+        if (err) {
+          reject(err);
+          return;
+        }
+        const format = metadata.format.format_name?.split(",")[-1] || "mp4";
+        const outputFileName = `chunk_${uuidv42()}.${format}`;
+        const outputPath = path6.join(outputDir, outputFileName);
+        const toleranceInSeconds = 0.5;
+        const adjustedStartTime = Math.max(startTime - toleranceInSeconds, 0);
+        ffmpeg3(url).setFfmpegPath(ffmpegSettings.getFfmpegPath()).inputOptions([
+          `-ss ${adjustedStartTime}`,
+          `-to ${endTime + toleranceInSeconds}`
+        ]).outputOptions(["-c copy"]).output(outputPath).on("end", () => {
+          this.downloadedVideoMap.set(url, {
+            localPath: outputPath,
+            startTimeOffset: adjustedStartTime
+          });
+          resolve(outputPath);
+        }).on("error", (err2) => reject(err2)).run();
+      });
+    });
+  }
+  getTime() {
+    return this.startTime + this.framesProcessed / this.fps;
+  }
+  getLastTime() {
+    return this.startTime + (this.framesProcessed - 1) / this.fps;
+  }
+  getLastFrame() {
+    return this.lastImage;
+  }
+  getWidth() {
+    return this.width;
+  }
+  getHeight() {
+    return this.height;
+  }
+  getEndTime(startTime) {
+    return Math.min(
+      startTime + _VideoFrameExtractor.chunkLengthInSeconds,
+      this.duration
+    );
+  }
+  getArgs(codec, range, fps) {
+    const inputOptions = [];
+    const outputOptions = [];
+    inputOptions.push("-loglevel", ffmpegSettings.getLogLevel());
+    if (range) {
+      inputOptions.push(
+        ...["-ss", range[0].toFixed(2), "-to", range[1].toFixed(2)]
+      );
+    }
+    if (codec === "vp9") {
+      inputOptions.push("-vcodec", "libvpx-vp9");
+    }
+    if (fps) {
+      outputOptions.push("-vf", `fps=fps=${fps}`);
+    }
+    if (!range) {
+      outputOptions.push("-vframes", "1");
+    }
+    outputOptions.push("-f", "rawvideo");
+    outputOptions.push("-pix_fmt", "rgba");
+    return { inputOptions, outputOptions };
+  }
+  createFfmpegProcess(startTime, toTime, filePath, fps, codec) {
+    const { inputOptions, outputOptions } = this.getArgs(
+      codec,
+      [startTime, toTime],
+      fps
+    );
+    const process2 = ffmpeg3(filePath).setFfmpegPath(this.ffmpegPath).inputOptions(inputOptions).outputOptions(outputOptions).on("end", () => {
+      this.handleClose(0);
+    }).on("error", (err) => {
+      this.handleError(err);
+    }).on("stderr", (stderrLine) => {
+      console.log(stderrLine);
+    }).on("stdout", (stderrLine) => {
+      console.log(stderrLine);
+    });
+    const ffstream = process2.pipe();
+    ffstream.on("data", (data) => {
+      this.processData(data);
+    });
+    return process2;
+  }
+  /**
+   * We call this in the case that the time requested is greater than the
+   * duration of the video. In this case, we want to display the first frame
+   * of the video.
+   *
+   * Note: This does NOT match the behavior of the old implementation
+   * inside of 2d/src/lib/components/Video.ts. In the old implementation, the
+   * last frame is shown instead of the first frame.
+   */
+  createFfmpegProcessToExtractFirstFrame(filePath, codec) {
+    const { inputOptions, outputOptions } = this.getArgs(
+      codec,
+      void 0,
+      void 0
+    );
+    const process2 = ffmpeg3(filePath).setFfmpegPath(this.ffmpegPath).inputOptions(inputOptions).outputOptions(outputOptions).on("end", () => {
+      this.handleClose(0);
+    }).on("error", (err) => {
+      this.handleError(err);
+    }).on("stderr", (stderrLine) => {
+      console.log(stderrLine);
+    }).on("stdout", (stderrLine) => {
+      console.log(stderrLine);
+    });
+    const ffstream = process2.pipe();
+    ffstream.on("data", (data) => {
+      this.processData(data);
+    });
+    return process2;
+  }
+  processData(data) {
+    let dataOffset = 0;
+    while (dataOffset < data.length) {
+      const remainingSpace = this.frameSize - this.bufferOffset;
+      const chunkSize = Math.min(remainingSpace, data.length - dataOffset);
+      data.copy(
+        this.buffer,
+        this.bufferOffset,
+        dataOffset,
+        dataOffset + chunkSize
+      );
+      this.bufferOffset += chunkSize;
+      dataOffset += chunkSize;
+      if (this.bufferOffset === this.frameSize) {
+        this.imageBuffers.push(Buffer.from(this.buffer));
+        this.bufferOffset = 0;
+      }
+    }
+  }
+  async popImage() {
+    if (this.imageBuffers.length) {
+      const image2 = this.imageBuffers.shift();
+      this.framesProcessed++;
+      this.lastImage = image2;
+      return image2;
+    }
+    if (this.state === "error") {
+      throw new Error("An error occurred while extracting the video frames.");
+    }
+    if (this.state === "done" && this.toTime >= this.duration) {
+      return this.lastImage;
+    }
+    if (this.state === "done") {
+      this.startTime = this.toTime;
+      this.toTime = Math.min(
+        this.startTime + _VideoFrameExtractor.chunkLengthInSeconds,
+        this.duration
+      );
+      if (!this.codec) {
+        throw new Error(
+          "Can't extract frames without a codec. This error should never happen."
+        );
+      }
+      this.process = this.createFfmpegProcess(
+        this.startTime,
+        this.toTime,
+        this.downloadedFilePath,
+        this.fps,
+        this.codec
+      );
+      this.state = "processing";
+    }
+    while (this.imageBuffers.length < 1) {
+      await new Promise((resolve) => setTimeout(resolve, 50));
+    }
+    const image = this.imageBuffers.shift();
+    this.framesProcessed++;
+    this.lastImage = image;
+    return image;
+  }
+  handleClose(code) {
+    this.state = code === 0 ? "done" : "error";
+  }
+  async handleError(err) {
+    const code = err.code;
+    if (this.terminated) {
+      return;
+    }
+    if (code === "ENOENT") {
+      sendEvent2(EventName2.Error, { error: "ffmpeg-not-found" });
+      throw new Error(
+        "Error: ffmpeg not found. Make sure ffmpeg is installed on your system."
+      );
+    } else if (err.message.includes("SIGSEGV")) {
+      sendEvent2(EventName2.Error, {
+        error: "ffmpeg-sigsegv",
+        message: err.message
+      });
+      throw new Error(
+        `Error: Segmentation fault when running ffmpeg. This is a common issue on Linux, you might be able to fix it by installing nscd ('sudo apt-get install nscd'). For more information, see https://docs.re.video/common-issues/ffmpeg/`
+      );
+    } else {
+      await sendEvent2(EventName2.Error, {
+        error: "ffmpeg-error",
+        message: err.message
+      });
+      throw new Error(
+        `An ffmpeg error occurred while fetching frames from source video ${this.filePath}: ${err}`
+      );
+    }
+  }
+  destroy() {
+    this.terminated = true;
+    this.process?.kill("SIGTERM");
+  }
+};
+_VideoFrameExtractor.chunkLengthInSeconds = 5;
+_VideoFrameExtractor.downloadedVideoMap = /* @__PURE__ */ new Map();
+var VideoFrameExtractor = _VideoFrameExtractor;
+export {
+  FFmpegExporterServer,
+  VideoFrameExtractor,
+  audioCodecs,
+  checkForAudioStream,
+  concatenateMedia,
+  createSilentAudioFile,
+  doesFileExist,
+  extensions,
+  ffmpegSettings,
+  generateAudio,
+  getSampleRate,
+  getVideoCodec,
+  getVideoDimensions,
+  getVideoDuration,
+  getVideoMetadata,
+  makeSureFolderExists,
+  mergeAudioWithVideo,
+  mergeMedia,
+  resolvePath
+};
+//# sourceMappingURL=index.js.map
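
VideoFrameExtractor replaces dist/video-frame-extractor.js: downloadVideoChunk pre-fetches a time range of a (possibly remote) source into os.tmpdir()/twick-decoder-chunks and records its start-time offset, and the extractor then decodes raw RGBA frames in 5-second chunks, buffering them until popImage is called. A rough sketch of the intended flow (URL and timings are placeholders):

// Sketch: fetch the first 10 s of a remote video, then pull decoded RGBA frames at 30 fps.
import { VideoFrameExtractor } from "@twick/ffmpeg";

const url = "https://example.com/source.mp4"; // placeholder
await VideoFrameExtractor.downloadVideoChunk(url, 0, 10);

const extractor = new VideoFrameExtractor(url, 0, 30, 10); // filePath, startTime, fps, duration
for (let i = 0; i < 30; i++) {
  const frame = await extractor.popImage(); // Buffer of width * height * 4 RGBA bytes
  console.log(`frame ${i}: ${frame.length} bytes at t=${extractor.getLastTime().toFixed(2)}s`);
}
extractor.destroy(); // SIGTERMs the underlying ffmpeg process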