@marcuth/movie.js 0.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/dist/clips/audio-clip.d.ts +24 -0
- package/dist/clips/audio-clip.js +127 -0
- package/dist/clips/clip.d.ts +16 -0
- package/dist/clips/clip.js +17 -0
- package/dist/clips/composition-clip.d.ts +10 -0
- package/dist/clips/composition-clip.js +73 -0
- package/dist/clips/concatenation-clip.d.ts +10 -0
- package/dist/clips/concatenation-clip.js +42 -0
- package/dist/clips/image-clip.d.ts +23 -0
- package/dist/clips/image-clip.js +96 -0
- package/dist/clips/index.d.ts +7 -0
- package/dist/clips/index.js +23 -0
- package/dist/clips/repeat-clip.d.ts +23 -0
- package/dist/clips/repeat-clip.js +31 -0
- package/dist/clips/video-clip.d.ts +18 -0
- package/dist/clips/video-clip.js +91 -0
- package/dist/ffmpeg-filter-spec.d.ts +7 -0
- package/dist/ffmpeg-filter-spec.js +2 -0
- package/dist/ffmpeg-input.d.ts +10 -0
- package/dist/ffmpeg-input.js +2 -0
- package/dist/index.d.ts +14 -0
- package/dist/index.js +44 -0
- package/dist/render-context.d.ts +14 -0
- package/dist/render-context.js +2 -0
- package/dist/render-result.d.ts +9 -0
- package/dist/render-result.js +31 -0
- package/dist/template.d.ts +17 -0
- package/dist/template.js +90 -0
- package/dist/utils/font-config.d.ts +9 -0
- package/dist/utils/font-config.js +19 -0
- package/dist/utils/index.d.ts +1 -0
- package/dist/utils/index.js +17 -0
- package/dist/utils/resolve-path.d.ts +12 -0
- package/dist/utils/resolve-path.js +9 -0
- package/package.json +35 -0

package/LICENSE
ADDED
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) 2025 Marcuth
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.

package/dist/clips/audio-clip.d.ts
ADDED
@@ -0,0 +1,24 @@
+import { Path } from "../utils/resolve-path";
+import { RenderContext } from "../render-context";
+import { FFmpegInput } from "../ffmpeg-input";
+import { Clip } from "./clip";
+export type AudioClipOptions<RenderData> = {
+    path: Path<RenderData>;
+    volume?: number;
+    loop?: boolean;
+    subClip?: [number, number];
+    fadeIn?: number;
+    fadeOut?: number;
+};
+export declare class AudioClip<RenderData> extends Clip<RenderData> {
+    readonly path: Path<RenderData>;
+    readonly volume?: number;
+    readonly loop?: boolean;
+    readonly subClip?: [number, number];
+    readonly fadeIn?: number;
+    readonly fadeOut?: number;
+    constructor({ path, volume, loop, subClip, fadeIn, fadeOut, }: AudioClipOptions<RenderData>);
+    protected getInput(path: string, inputIndex: number): FFmpegInput;
+    getDuration(path: string): Promise<number>;
+    build(data: RenderData, context: RenderContext): Promise<void>;
+}

package/dist/clips/audio-clip.js
ADDED
@@ -0,0 +1,127 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.AudioClip = void 0;
+const fluent_ffmpeg_1 = __importDefault(require("fluent-ffmpeg"));
+const resolve_path_1 = require("../utils/resolve-path");
+const clip_1 = require("./clip");
+class AudioClip extends clip_1.Clip {
+    constructor({ path, volume, loop, subClip, fadeIn, fadeOut, }) {
+        super();
+        this.path = path;
+        this.volume = volume;
+        this.loop = loop;
+        this.subClip = subClip;
+        this.fadeIn = fadeIn;
+        this.fadeOut = fadeOut;
+    }
+    getInput(path, inputIndex) {
+        const inputOptions = [];
+        if (this.loop) {
+            inputOptions.push("-stream_loop", "-1");
+        }
+        if (this.subClip) {
+            const [start, end] = this.subClip;
+            inputOptions.push("-ss", `${start}`);
+            inputOptions.push("-to", `${end}`);
+        }
+        return {
+            path: path,
+            aliases: {
+                audio: `[${inputIndex}:a]`,
+            },
+            type: "audio",
+            options: inputOptions,
+            index: inputIndex
+        };
+    }
+    async getDuration(path) {
+        var _a, _b;
+        try {
+            const metadata = await new Promise((resolve, reject) => {
+                fluent_ffmpeg_1.default.ffprobe(path, (err, data) => err ? reject(err) : resolve(data));
+            });
+            return Math.floor((_b = (_a = metadata === null || metadata === void 0 ? void 0 : metadata.format) === null || _a === void 0 ? void 0 : _a.duration) !== null && _b !== void 0 ? _b : 0);
+        }
+        catch (err) {
+            return 0;
+        }
+    }
+    async build(data, context) {
+        const path = (0, resolve_path_1.resolvePath)({ path: this.path, data: data, index: context.clipIndex });
+        const input = this.getInput(path, context.inputIndex);
+        context.command.input(input.path);
+        if (input.options && input.options.length > 0) {
+            context.command.inputOptions(input.options);
+        }
+        let duration = await this.getDuration(path);
+        if (this.subClip) {
+            const [start, end] = this.subClip;
+            duration = Math.max(end - start, 0);
+        }
+        let currentAudioOutput = input.aliases.audio;
+        if (duration > 0 && !this.loop) {
+            const trimOutput = `[trimAudio${context.inputIndex}]`;
+            context.filters.push({
+                filter: "atrim",
+                options: { end: duration },
+                inputs: currentAudioOutput,
+                outputs: trimOutput,
+            });
+            currentAudioOutput = trimOutput;
+        }
+        if (this.fadeIn && this.fadeIn > 0) {
+            const fadeInOutput = `[fadeInAudio${context.inputIndex}]`;
+            context.filters.push({
+                filter: "afade",
+                options: { t: "in", st: 0, d: this.fadeIn },
+                inputs: currentAudioOutput,
+                outputs: fadeInOutput
+            });
+            currentAudioOutput = fadeInOutput;
+        }
+        if (this.fadeOut && this.fadeOut > 0) {
+            const fadeOutOutput = `[fadeOutAudio${context.inputIndex}]`;
+            if (this.loop) {
+                context.filters.push({
+                    filter: "afade",
+                    options: {
+                        t: "out",
+                        d: this.fadeOut
+                    },
+                    inputs: currentAudioOutput,
+                    outputs: fadeOutOutput
+                });
+            }
+            else {
+                const start = Math.max((duration || 0) - this.fadeOut, 0);
+                context.filters.push({
+                    filter: "afade",
+                    options: {
+                        t: "out",
+                        st: start,
+                        d: this.fadeOut
+                    },
+                    inputs: currentAudioOutput,
+                    outputs: fadeOutOutput
+                });
+            }
+            currentAudioOutput = fadeOutOutput;
+        }
+        if (this.volume !== undefined) {
+            const volumeOutput = `[volAudio${context.inputIndex}]`;
+            context.filters.push({
+                filter: "volume",
+                options: `${this.volume}`,
+                inputs: currentAudioOutput,
+                outputs: volumeOutput
+            });
+            currentAudioOutput = volumeOutput;
+        }
+        context.labels.mixAudio.push(currentAudioOutput);
+        context.inputIndex++;
+    }
+}
+exports.AudioClip = AudioClip;
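
AudioClip.build resolves the path, registers the input (adding -stream_loop / -ss / -to when loop or subClip are set), then chains atrim, afade in/out, and volume filters before pushing the resulting label into context.labels.mixAudio. A minimal construction sketch, with a hypothetical data shape and asset path that are not part of the package:

import { AudioClip } from "@marcuth/movie.js"

// Hypothetical render data; subClip keeps seconds 5-20 of the source before the fades
type NarrationData = { audioPath: string }

const narration = new AudioClip<NarrationData>({
    path: ({ data }) => data.audioPath, // resolved per render
    subClip: [5, 20],
    fadeIn: 0.5,
    fadeOut: 1,
    volume: 0.8,
})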

package/dist/clips/clip.d.ts
ADDED
@@ -0,0 +1,16 @@
+import { FFmpegInput } from "../ffmpeg-input";
+import { RenderContext } from "../render-context";
+export type WhenOptions<Data> = {
+    data: Data;
+    index: number;
+};
+export type WhenFunction<RenderData> = (options: WhenOptions<RenderData>) => boolean;
+export declare abstract class Clip<RenderData> {
+    protected readonly when?: WhenFunction<RenderData> | undefined;
+    constructor(when?: WhenFunction<RenderData> | undefined);
+    readonly videoFilters: string[];
+    readonly audioFilters: string[];
+    shouldRender(data: RenderData, index?: number): boolean;
+    protected getInput(path: string, inputIndex: number, fps?: number): FFmpegInput;
+    abstract build(data: RenderData, context: RenderContext): void | Promise<void>;
+}

package/dist/clips/clip.js
ADDED
@@ -0,0 +1,17 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.Clip = void 0;
+class Clip {
+    constructor(when) {
+        this.when = when;
+        this.videoFilters = [];
+        this.audioFilters = [];
+    }
+    shouldRender(data, index = 0) {
+        return this.when ? this.when({ data, index }) : true;
+    }
+    getInput(path, inputIndex, fps) {
+        throw new Error("Method not implemented.");
+    }
+}
+exports.Clip = Clip;

package/dist/clips/composition-clip.d.ts
ADDED
@@ -0,0 +1,10 @@
+import { RenderContext } from "../render-context";
+import { Clip } from "./clip";
+export type CompositionClipOptions<RenderData> = {
+    clips: Clip<RenderData>[];
+};
+export declare class CompositionClip<RenderData> extends Clip<RenderData> {
+    readonly clips: Clip<RenderData>[];
+    constructor({ clips }: CompositionClipOptions<RenderData>);
+    build(data: RenderData, context: RenderContext): Promise<void>;
+}

package/dist/clips/composition-clip.js
ADDED
@@ -0,0 +1,73 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.CompositionClip = void 0;
+const clip_1 = require("./clip");
+class CompositionClip extends clip_1.Clip {
+    constructor({ clips }) {
+        super();
+        if (clips.length === 0) {
+            throw new Error("CompositionClip: at least one clip is required");
+        }
+        this.clips = clips;
+    }
+    async build(data, context) {
+        const startVideoIndex = context.labels.video.length;
+        const startAudioIndex = context.labels.structuralAudio.length;
+        const startMixAudioIndex = context.labels.mixAudio.length;
+        for (const clip of this.clips) {
+            await clip.build(data, context);
+        }
+        const videoLabels = context.labels.video.slice(startVideoIndex);
+        const audioLabels = context.labels.structuralAudio.slice(startAudioIndex);
+        const mixAudioLabels = context.labels.mixAudio.slice(startMixAudioIndex);
+        if (videoLabels.length !== audioLabels.length) {
+            throw new Error(`CompositionClip: video/audio mismatch (${videoLabels.length} videos, ${audioLabels.length} audios)`);
+        }
+        const outV = `[v${context.labels.video.length}]`;
+        const outBaseA = `[baseA${context.labels.structuralAudio.length}]`;
+        const outA = `[a${context.labels.structuralAudio.length}]`;
+        context.filters.push({
+            filter: "concat",
+            options: {
+                n: videoLabels.length,
+                v: 1,
+                a: 1
+            },
+            inputs: videoLabels.map((v, i) => `${v}${audioLabels[i]}`).join(""),
+            outputs: outV + outBaseA
+        });
+        if (mixAudioLabels.length > 0) {
+            const mixLabel = `[mix${context.labels.structuralAudio.length}]`;
+            context.filters.push({
+                filter: "amix",
+                options: {
+                    inputs: mixAudioLabels.length
+                },
+                inputs: mixAudioLabels.join(""),
+                outputs: mixLabel
+            });
+            context.filters.push({
+                filter: "amix",
+                options: {
+                    inputs: 2,
+                    duration: "first"
+                },
+                inputs: `${outBaseA}${mixLabel}`,
+                outputs: outA
+            });
+        }
+        else {
+            context.filters.push({
+                filter: "anull",
+                inputs: outBaseA,
+                outputs: outA
+            });
+        }
+        context.labels.video.splice(startVideoIndex);
+        context.labels.structuralAudio.splice(startAudioIndex);
+        context.labels.mixAudio.splice(startMixAudioIndex);
+        context.labels.video.push(outV);
+        context.labels.structuralAudio.push(outA);
+    }
+}
+exports.CompositionClip = CompositionClip;
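
CompositionClip concatenates the video/structural-audio labels its children produce and, when those children also emitted overlay audio (mixAudio labels), amixes that overlay onto the concatenated base. A small sketch with hypothetical asset paths, not taken from the package:

import { CompositionClip, ImageClip, AudioClip } from "@marcuth/movie.js"

// The two images are concatenated; the sound effect built inside the
// composition is mixed over that concatenated base track.
const intro = new CompositionClip({
    clips: [
        new ImageClip({ path: "assets/logo.png", duration: 2, fadeIn: 0.5 }),
        new ImageClip({ path: "assets/title.png", duration: 3, fadeOut: 0.5 }),
        new AudioClip({ path: "assets/whoosh.wav", volume: 0.6 }),
    ],
})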

package/dist/clips/concatenation-clip.d.ts
ADDED
@@ -0,0 +1,10 @@
+import { RenderContext } from "../render-context";
+import { Clip } from "./clip";
+export type ConcatenationClipOptions<RenderData> = {
+    clips: Clip<RenderData>[];
+};
+export declare class ConcatenationClip<RenderData> extends Clip<RenderData> {
+    readonly clips: Clip<RenderData>[];
+    constructor({ clips }: ConcatenationClipOptions<RenderData>);
+    build(data: RenderData, context: RenderContext): Promise<void>;
+}

package/dist/clips/concatenation-clip.js
ADDED
@@ -0,0 +1,42 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.ConcatenationClip = void 0;
+const clip_1 = require("./clip");
+class ConcatenationClip extends clip_1.Clip {
+    constructor({ clips }) {
+        super();
+        if (clips.length < 1) {
+            throw new Error("ConcatenationClip: at least one clip is required");
+        }
+        this.clips = clips;
+    }
+    async build(data, context) {
+        const startVideoIndex = context.labels.video.length;
+        const startAudioIndex = context.labels.structuralAudio.length;
+        for (const clip of this.clips) {
+            await clip.build(data, context);
+        }
+        const videoLabels = context.labels.video.slice(startVideoIndex);
+        const audioLabels = context.labels.structuralAudio.slice(startAudioIndex);
+        if (videoLabels.length !== audioLabels.length) {
+            throw new Error(`ConcatenationClip: video/audio mismatch (${videoLabels.length} videos, ${audioLabels.length} audios)`);
+        }
+        const outV = `[v${context.labels.video.length}]`;
+        const outA = `[a${context.labels.structuralAudio.length}]`;
+        context.filters.push({
+            filter: "concat",
+            options: {
+                n: videoLabels.length,
+                v: 1,
+                a: 1
+            },
+            inputs: videoLabels.map((v, i) => `${v}${audioLabels[i]}`).join(""),
+            outputs: outV + outA
+        });
+        context.labels.video.splice(startVideoIndex);
+        context.labels.structuralAudio.splice(startAudioIndex);
+        context.labels.video.push(outV);
+        context.labels.structuralAudio.push(outA);
+    }
+}
+exports.ConcatenationClip = ConcatenationClip;

package/dist/clips/image-clip.d.ts
ADDED
@@ -0,0 +1,23 @@
+import { RenderContext } from "../render-context";
+import { FFmpegInput } from "../ffmpeg-input";
+import { Clip } from "./clip";
+import { Path } from "../utils/resolve-path";
+export type ImageClipOptions<RenderData> = {
+    path: Path<RenderData>;
+    width?: number;
+    height?: number;
+    duration: number;
+    fadeIn?: number;
+    fadeOut?: number;
+};
+export declare class ImageClip<RenderData> extends Clip<RenderData> {
+    readonly duration: number;
+    readonly path: Path<RenderData>;
+    readonly fadeIn?: number;
+    readonly fadeOut?: number;
+    readonly width?: number;
+    readonly height?: number;
+    constructor({ duration, path, width, height, fadeIn, fadeOut }: ImageClipOptions<RenderData>);
+    protected getInput(path: string, inputIndex: number, fps: number): FFmpegInput;
+    build(data: RenderData, context: RenderContext): void;
+}

package/dist/clips/image-clip.js
ADDED
@@ -0,0 +1,96 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.ImageClip = void 0;
+const clip_1 = require("./clip");
+const resolve_path_1 = require("../utils/resolve-path");
+class ImageClip extends clip_1.Clip {
+    constructor({ duration, path, width, height, fadeIn, fadeOut }) {
+        super();
+        this.duration = duration;
+        this.path = path;
+        this.fadeIn = fadeIn;
+        this.fadeOut = fadeOut;
+        this.width = width;
+        this.height = height;
+    }
+    getInput(path, inputIndex, fps) {
+        return {
+            path: path,
+            index: Number(inputIndex),
+            aliases: {
+                video: `[${inputIndex}:v]`,
+                audio: `[a${inputIndex}]`
+            },
+            type: "image",
+            options: [
+                "-loop 1",
+                `-t ${this.duration}`,
+                `-framerate ${fps}`
+            ]
+        };
+    }
+    build(data, context) {
+        var _a, _b;
+        const path = (0, resolve_path_1.resolvePath)({ path: this.path, data: data, index: context.clipIndex });
+        const input = this.getInput(path, context.inputIndex, context.fps);
+        let currentVideoOutput = input.aliases.video;
+        const currentAudioOutput = input.aliases.audio;
+        context.command
+            .input(input.path)
+            .inputOptions(input.options);
+        const anullSrcLabel = `[anull${context.inputIndex}]`;
+        context.filters.push({
+            filter: "anullsrc",
+            options: { sample_rate: 44100, channel_layout: "stereo" },
+            outputs: anullSrcLabel,
+        });
+        context.filters.push({
+            filter: "atrim",
+            options: { end: this.duration },
+            inputs: anullSrcLabel,
+            outputs: currentAudioOutput,
+        });
+        if (this.width !== undefined || this.height !== undefined) {
+            const scaleOutput = `scale${context.inputIndex}`;
+            context.filters.push({
+                filter: "scale",
+                options: { w: (_a = this.width) !== null && _a !== void 0 ? _a : -1, h: (_b = this.height) !== null && _b !== void 0 ? _b : -1 },
+                inputs: currentVideoOutput,
+                outputs: scaleOutput,
+            });
+            currentVideoOutput = scaleOutput;
+        }
+        if (this.fadeIn && this.fadeIn > 0) {
+            const fadeInOutput = `fadeIn${context.inputIndex}`;
+            context.filters.push({
+                filter: "fade",
+                options: { t: "in", st: 0, d: this.fadeIn },
+                inputs: currentVideoOutput,
+                outputs: fadeInOutput
+            });
+            currentVideoOutput = fadeInOutput;
+        }
+        if (this.fadeOut && this.fadeOut > 0) {
+            const start = Math.max(this.duration - this.fadeOut, 0);
+            const fadeOutOutput = `[v${context.inputIndex}]`;
+            context.filters.push({
+                filter: "fade",
+                options: { t: "out", st: start, d: this.fadeOut },
+                inputs: currentVideoOutput,
+                outputs: fadeOutOutput
+            });
+            currentVideoOutput = fadeOutOutput;
+        }
+        if (!this.fadeOut || this.fadeOut <= 0) {
+            context.filters.push({
+                filter: "null",
+                inputs: currentVideoOutput,
+                outputs: `[v${context.inputIndex}]`
+            });
+        }
+        context.labels.video.push(`[v${context.inputIndex}]`);
+        context.labels.structuralAudio.push(currentAudioOutput);
+        context.inputIndex++;
+    }
+}
+exports.ImageClip = ImageClip;

package/dist/clips/index.js
ADDED
@@ -0,0 +1,23 @@
+"use strict";
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    var desc = Object.getOwnPropertyDescriptor(m, k);
+    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+      desc = { enumerable: true, get: function() { return m[k]; } };
+    }
+    Object.defineProperty(o, k2, desc);
+}) : (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    o[k2] = m[k];
+}));
+var __exportStar = (this && this.__exportStar) || function(m, exports) {
+    for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+__exportStar(require("./clip"), exports);
+__exportStar(require("./image-clip"), exports);
+__exportStar(require("./video-clip"), exports);
+__exportStar(require("./audio-clip"), exports);
+__exportStar(require("./composition-clip"), exports);
+__exportStar(require("./repeat-clip"), exports);
+__exportStar(require("./concatenation-clip"), exports);

package/dist/clips/repeat-clip.d.ts
ADDED
@@ -0,0 +1,23 @@
+import { RenderContext } from "../render-context";
+import { Clip } from "./clip";
+export type EachOptions<Data> = {
+    data: Data;
+    index: number;
+};
+export type ClipFunctionOptions = {
+    index: number;
+    length: number;
+};
+export type ClipFunction<RenderData, Item> = (item: Item, options: ClipFunctionOptions) => Clip<RenderData>;
+export type RepeatClipOptions<RenderData, Item> = {
+    each: EachFunction<RenderData, Item>;
+    clip: ClipFunction<RenderData, Item>;
+};
+export type EachFunction<Data, Item> = (options: EachOptions<Data>) => Item[];
+export declare class RepeatClip<RenderData, Item> extends Clip<RenderData> {
+    readonly each: EachFunction<RenderData, Item>;
+    readonly clip: ClipFunction<RenderData, Item>;
+    constructor({ each, clip, }: RepeatClipOptions<RenderData, Item>);
+    build(data: RenderData, context: RenderContext): Promise<void>;
+    private runBuildWithProgress;
+}

package/dist/clips/repeat-clip.js
ADDED
@@ -0,0 +1,31 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.RepeatClip = void 0;
+const cli_progress_1 = __importDefault(require("cli-progress"));
+const clip_1 = require("./clip");
+class RepeatClip extends clip_1.Clip {
+    constructor({ each, clip, }) {
+        super();
+        this.each = each;
+        this.clip = clip;
+    }
+    async build(data, context) {
+        const items = this.each({ data: data, index: 0 });
+        await this.runBuildWithProgress(items, data, context);
+    }
+    async runBuildWithProgress(items, data, context) {
+        const buildBar = new cli_progress_1.default.SingleBar({ format: "Repeat Clip Build |{bar}| {value}/{total} clips", hideCursor: true }, cli_progress_1.default.Presets.shades_classic);
+        buildBar.start(items.length, 0);
+        const length = items.length;
+        for (let i = 0; i < length; i++) {
+            const clip = this.clip(items[i], { index: i, length: length });
+            await clip.build(data, context);
+            buildBar.increment();
+        }
+        buildBar.stop();
+    }
+}
+exports.RepeatClip = RepeatClip;
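
RepeatClip expands the list returned by each into one child clip per item (via clip) and builds them sequentially behind a cli-progress bar. A sketch using a hypothetical slides array, not part of the package:

import { RepeatClip, ImageClip } from "@marcuth/movie.js"

type SlideshowData = { slides: { imagePath: string }[] }

const slides = new RepeatClip<SlideshowData, { imagePath: string }>({
    each: ({ data }) => data.slides,
    clip: (slide, { index, length }) =>
        new ImageClip<SlideshowData>({
            path: slide.imagePath,
            duration: 4,
            // fade only the last slide out; earlier slides cut straight to the next one
            fadeOut: index === length - 1 ? 1 : undefined,
        }),
})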

package/dist/clips/video-clip.d.ts
ADDED
@@ -0,0 +1,18 @@
+import { Path } from "../utils/resolve-path";
+import { RenderContext } from "../render-context";
+import { FFmpegInput } from "../ffmpeg-input";
+import { Clip } from "./clip";
+export type VideoClipOptions<RenderData> = {
+    path: Path<RenderData>;
+    fadeIn?: number;
+    fadeOut?: number;
+};
+export declare class VideoClip<RenderData> extends Clip<RenderData> {
+    readonly path: Path<RenderData>;
+    readonly fadeIn?: number;
+    readonly fadeOut?: number;
+    constructor({ path, fadeIn, fadeOut }: VideoClipOptions<RenderData>);
+    protected getInput(path: string, inputIndex: number): FFmpegInput;
+    getDuration(path: string): Promise<number>;
+    build(data: RenderData, context: RenderContext): Promise<void>;
+}

package/dist/clips/video-clip.js
ADDED
@@ -0,0 +1,91 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.VideoClip = void 0;
+const fluent_ffmpeg_1 = require("fluent-ffmpeg");
+const resolve_path_1 = require("../utils/resolve-path");
+const clip_1 = require("./clip");
+class VideoClip extends clip_1.Clip {
+    constructor({ path, fadeIn, fadeOut }) {
+        super();
+        this.path = path;
+        this.fadeIn = fadeIn;
+        this.fadeOut = fadeOut;
+    }
+    getInput(path, inputIndex) {
+        return {
+            path: path,
+            aliases: {
+                video: `[${inputIndex}:v]`,
+                audio: `[${inputIndex}:a]`
+            },
+            type: "video",
+            index: inputIndex,
+        };
+    }
+    async getDuration(path) {
+        return await new Promise((resolve, reject) => {
+            (0, fluent_ffmpeg_1.ffprobe)(path, (error, data) => {
+                if (error) {
+                    reject(error);
+                }
+                resolve(data.format.duration || 0);
+            });
+        });
+    }
+    async build(data, context) {
+        const path = (0, resolve_path_1.resolvePath)({ path: this.path, data: data, index: context.clipIndex });
+        const input = this.getInput(path, context.inputIndex);
+        let currentVideoOutput = input.aliases.video;
+        let currentAudioOutput = input.aliases.audio;
+        const duration = await this.getDuration(path);
+        context.command.input(path);
+        if (this.fadeIn !== undefined && this.fadeIn > 0) {
+            const fadeInOutput = `[fadeIn${context.inputIndex}]`;
+            const fadeInAudioOutput = `[fadeInAudio${context.inputIndex}]`;
+            context.filters.push({
+                filter: "fade",
+                options: { t: "in", st: 0, d: this.fadeIn },
+                inputs: currentVideoOutput,
+                outputs: fadeInOutput
+            });
+            context.filters.push({
+                filter: "afade",
+                options: { t: "in", st: 0, d: this.fadeIn },
+                inputs: currentAudioOutput,
+                outputs: fadeInAudioOutput
+            });
+            currentVideoOutput = fadeInOutput;
+            currentAudioOutput = fadeInAudioOutput;
+        }
+        if (this.fadeOut !== undefined && this.fadeOut > 0) {
+            const start = Math.max(duration - this.fadeOut, 0);
+            const fadeOutOutput = `[v${context.inputIndex}]`;
+            const fadeOutAudioOutput = `[fadeOutAudio${context.inputIndex}]`;
+            context.filters.push({
+                filter: "fade",
+                options: { t: "out", st: start, d: this.fadeOut },
+                inputs: currentVideoOutput,
+                outputs: fadeOutOutput
+            });
+            context.filters.push({
+                filter: "afade",
+                options: { t: "out", st: start, d: this.fadeOut },
+                inputs: currentAudioOutput,
+                outputs: fadeOutAudioOutput
+            });
+            currentVideoOutput = fadeOutOutput;
+            currentAudioOutput = fadeOutAudioOutput;
+        }
+        else {
+            context.filters.push({
+                filter: "null",
+                inputs: currentVideoOutput,
+                outputs: `[v${context.inputIndex}]`
+            });
+        }
+        context.labels.video.push(`[v${context.inputIndex}]`);
+        context.labels.structuralAudio.push(currentAudioOutput);
+        context.inputIndex++;
+    }
+}
+exports.VideoClip = VideoClip;

package/dist/index.d.ts
ADDED
@@ -0,0 +1,14 @@
+import { AudioClip, AudioClipOptions, ImageClip, ImageClipOptions, VideoClipOptions, CompositionClip, CompositionClipOptions, RepeatClip, RepeatClipOptions, VideoClip, ConcatenationClip, ConcatenationClipOptions } from "./clips";
+import { Template, TemplateOptions } from "./template";
+export * from "./clips";
+export * from "./template";
+declare const movie: {
+    template<RenderData>(options: TemplateOptions<RenderData>): Template<RenderData>;
+    audio<RenderData>(options: AudioClipOptions<RenderData>): AudioClip<RenderData>;
+    image<RenderData>(options: ImageClipOptions<RenderData>): ImageClip<RenderData>;
+    video<RenderData>(options: VideoClipOptions<RenderData>): VideoClip<RenderData>;
+    composition<RenderData>(options: CompositionClipOptions<RenderData>): CompositionClip<RenderData>;
+    repeat<RenderData, Item>(options: RepeatClipOptions<RenderData, Item>): RepeatClip<RenderData, Item>;
+    concatenation<RenderData>(options: ConcatenationClipOptions<RenderData>): ConcatenationClip<RenderData>;
+};
+export default movie;

package/dist/index.js
ADDED
@@ -0,0 +1,44 @@
+"use strict";
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    var desc = Object.getOwnPropertyDescriptor(m, k);
+    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+      desc = { enumerable: true, get: function() { return m[k]; } };
+    }
+    Object.defineProperty(o, k2, desc);
+}) : (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    o[k2] = m[k];
+}));
+var __exportStar = (this && this.__exportStar) || function(m, exports) {
+    for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+const clips_1 = require("./clips");
+const template_1 = require("./template");
+__exportStar(require("./clips"), exports);
+__exportStar(require("./template"), exports);
+const movie = {
+    template(options) {
+        return new template_1.Template(options);
+    },
+    audio(options) {
+        return new clips_1.AudioClip(options);
+    },
+    image(options) {
+        return new clips_1.ImageClip(options);
+    },
+    video(options) {
+        return new clips_1.VideoClip(options);
+    },
+    composition(options) {
+        return new clips_1.CompositionClip(options);
+    },
+    repeat(options) {
+        return new clips_1.RepeatClip(options);
+    },
+    concatenation(options) {
+        return new clips_1.ConcatenationClip(options);
+    }
+};
+exports.default = movie;
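
The compiled entry point re-exports the clip classes and Template and exposes a default factory object. An end-to-end sketch of how that API fits together, inferred from the typings above, with hypothetical asset paths and render data:

import movie from "@marcuth/movie.js"

type PromoData = { title: string }

const template = movie.template<PromoData>({
    clips: [
        movie.concatenation<PromoData>({
            clips: [
                movie.image<PromoData>({ path: "assets/cover.png", duration: 3, fadeIn: 0.5 }),
                movie.video<PromoData>({ path: "assets/intro.mp4", fadeOut: 1 }),
            ],
        }),
        movie.audio<PromoData>({ path: "assets/music.mp3", volume: 0.2, fadeOut: 1 }),
    ],
    config: { format: "mp4", fps: 30 },
})

async function main() {
    const result = await template.render({ title: "Demo" })
    await result.toFile("out/promo.mp4") // or result.toBuffer() / result.toStream()
}

main()

Image, video, concatenation, and composition clips feed the top-level concat, while audio clips are overlaid through the final amix, so in this sketch the music ends up mixed over the concatenated picture track.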

package/dist/render-context.d.ts
ADDED
@@ -0,0 +1,14 @@
+import ffmpeg from "fluent-ffmpeg";
+import { FFmpegFilterSpec } from "./ffmpeg-filter-spec";
+export type RenderContext = {
+    command: ffmpeg.FfmpegCommand;
+    fps: number;
+    inputIndex: number;
+    clipIndex: number;
+    filters: FFmpegFilterSpec[];
+    labels: {
+        video: string[];
+        structuralAudio: string[];
+        mixAudio: string[];
+    };
+};

package/dist/render-result.d.ts
ADDED
@@ -0,0 +1,9 @@
+import ffmpeg from "fluent-ffmpeg";
+export declare class RenderResult {
+    private readonly format;
+    private readonly command;
+    constructor(format: string, command: ffmpeg.FfmpegCommand);
+    toStream(): import("node:stream").Writable | import("node:stream").PassThrough;
+    toBuffer(): Promise<Buffer>;
+    toFile(filePath: string): Promise<void>;
+}

package/dist/render-result.js
ADDED
@@ -0,0 +1,31 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.RenderResult = void 0;
+class RenderResult {
+    constructor(format, command) {
+        this.format = format;
+        this.command = command;
+    }
+    toStream() {
+        return this.command.format(this.format).pipe();
+    }
+    async toBuffer() {
+        const chunks = [];
+        const stream = this.toStream();
+        return new Promise((resolve, reject) => {
+            stream.on("data", chunk => chunks.push(chunk));
+            stream.on("end", () => resolve(Buffer.concat(chunks)));
+            stream.on("error", reject);
+        });
+    }
+    async toFile(filePath) {
+        return new Promise((resolve, reject) => {
+            this.command
+                .format(this.format)
+                .save(filePath)
+                .on("end", () => resolve())
+                .on("error", reject);
+        });
+    }
+}
+exports.RenderResult = RenderResult;

package/dist/template.d.ts
ADDED
@@ -0,0 +1,17 @@
+import { Clip } from "./clips";
+import { RenderResult } from "./render-result";
+export type TemplateOptions<RenderData> = {
+    clips: Clip<RenderData>[];
+    config: {
+        format: string;
+        fps: number;
+        outputOptions?: string[];
+    };
+    debug?: boolean;
+};
+export declare class Template<RenderData> {
+    private readonly options;
+    constructor(options: TemplateOptions<RenderData>);
+    render(data: RenderData): Promise<RenderResult>;
+    private runBuildWithProgress;
+}

package/dist/template.js
ADDED
@@ -0,0 +1,90 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.Template = void 0;
+const cli_progress_1 = __importDefault(require("cli-progress"));
+const fluent_ffmpeg_1 = __importDefault(require("fluent-ffmpeg"));
+const render_result_1 = require("./render-result");
+class Template {
+    constructor(options) {
+        this.options = options;
+    }
+    async render(data) {
+        var _a;
+        const command = (0, fluent_ffmpeg_1.default)();
+        const context = {
+            command: command,
+            fps: this.options.config.fps,
+            inputIndex: 0,
+            clipIndex: 0,
+            filters: [],
+            labels: {
+                structuralAudio: [],
+                mixAudio: [],
+                video: []
+            },
+        };
+        // if (this.options.debug) {
+        //     command
+        //         .on("start", (commandLine) => {
+        //             console.log("Spawned Ffmpeg with command: " + commandLine)
+        //         })
+        //         .on("error", (err, stdout, stderr) => {
+        //             console.error("Error: " + err.message)
+        //             console.error("ffmpeg stderr: " + stderr)
+        //         })
+        // }
+        await this.runBuildWithProgress(data, context);
+        const concatFilter = context.labels.video
+            .map((v, i) => `${v}${context.labels.structuralAudio[i]}`)
+            .join("") +
+            `concat=n=${context.labels.video.length}:v=1:a=1[outv][basea]`;
+        const mixFilter = context.labels.mixAudio.length > 0
+            ? `${context.labels.mixAudio.join("")}amix=inputs=${context.labels.mixAudio.length}[mixa]`
+            : null;
+        const finalAudioFilter = mixFilter
+            ? `[basea][mixa]amix=inputs=2[outa]`
+            : `[basea]anull[outa]`;
+        const filterComplex = [
+            ...context.filters,
+            concatFilter,
+            mixFilter,
+            finalAudioFilter
+        ].filter((filter) => filter !== null);
+        // console.dir({
+        //     concatFilter,
+        //     finalAudioFilter,
+        //     mixFilter,
+        //     filterComplex,
+        //     context: {
+        //         ...context,
+        //         command: "<command>"
+        //     }
+        // }, {
+        //     depth: null
+        // })
+        command.complexFilter(filterComplex);
+        command.outputOptions([
+            "-map [outv]",
+            "-map [outa]",
+            "-c:v libx264",
+            "-c:a aac",
+            "-pix_fmt yuv420p",
+            ...((_a = this.options.config.outputOptions) !== null && _a !== void 0 ? _a : [])
+        ]);
+        return new render_result_1.RenderResult(this.options.config.format, command);
+    }
+    async runBuildWithProgress(data, context) {
+        const buildBar = new cli_progress_1.default.SingleBar({ format: "Template Build |{bar}| {value}/{total} clips", hideCursor: true }, cli_progress_1.default.Presets.shades_classic);
+        buildBar.start(this.options.clips.length, 0);
+        for (let i = 0; i < this.options.clips.length; i++) {
+            const clip = this.options.clips[i];
+            await clip.build(data, context);
+            buildBar.increment();
+        }
+        buildBar.stop();
+    }
+}
+exports.Template = Template;

package/dist/utils/font-config.d.ts
ADDED
@@ -0,0 +1,9 @@
+export type FontConfigOptions = {
+    color?: string;
+    filePath?: string;
+};
+export type PartialFontOptions = {
+    size: number;
+    color?: string;
+};
+export declare function fontConfig(options: FontConfigOptions): (partialOptions: PartialFontOptions) => TextClipFontOptions;

package/dist/utils/font-config.js
ADDED
@@ -0,0 +1,19 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.fontConfig = fontConfig;
+function fontConfig(options) {
+    return (partialOptions) => {
+        var _a;
+        const color = (_a = options.color) !== null && _a !== void 0 ? _a : partialOptions.color;
+        const filePath = options.filePath;
+        const size = partialOptions.size;
+        if (!color) {
+            throw new Error("Font color is required");
+        }
+        return {
+            size: size,
+            color: color,
+            filePath: filePath,
+        };
+    };
+}
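
fontConfig is a small curried helper: the outer call fixes the font file and an optional default color, and each inner call supplies the size plus a color fallback; if neither level provides a color it throws. The utils module is not re-exported from the package root, and the declared return type TextClipFontOptions is not present in the typings shipped in this version, so the sketch below assumes a deep import into dist and is illustrative only:

import { fontConfig } from "@marcuth/movie.js/dist/utils"

const heading = fontConfig({ color: "#ffffff", filePath: "assets/fonts/Inter-Bold.ttf" })

const h1 = heading({ size: 64 })                   // { size: 64, color: "#ffffff", filePath: "assets/fonts/Inter-Bold.ttf" }
const h2 = heading({ size: 40, color: "#cccccc" }) // base color still wins: options.color ?? partialOptions.color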

package/dist/utils/index.d.ts
ADDED
@@ -0,0 +1 @@
+export * from "./font-config";

package/dist/utils/index.js
ADDED
@@ -0,0 +1,17 @@
+"use strict";
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    var desc = Object.getOwnPropertyDescriptor(m, k);
+    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+      desc = { enumerable: true, get: function() { return m[k]; } };
+    }
+    Object.defineProperty(o, k2, desc);
+}) : (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    o[k2] = m[k];
+}));
+var __exportStar = (this && this.__exportStar) || function(m, exports) {
+    for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+__exportStar(require("./font-config"), exports);

package/dist/utils/resolve-path.d.ts
ADDED
@@ -0,0 +1,12 @@
+export type ComputePathOptions<RenderData> = {
+    data: RenderData;
+    index: number;
+};
+export type ComputePath<RenderData> = (options: ComputePathOptions<RenderData>) => string;
+export type Path<RenderData> = ComputePath<RenderData> | string;
+export type ResolvePathOptions<RenderData> = {
+    path: Path<RenderData>;
+    data: RenderData;
+    index: number;
+};
+export declare function resolvePath<RenderData>({ path, ...params }: ResolvePathOptions<RenderData>): string;
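
Path<RenderData> is either a literal string or a compute function receiving { data, index }, where the index passed by the clip implementations is the render context's clipIndex; resolve-path.js itself is not shown in this diff, but the declarations above describe the contract. A short sketch with a hypothetical data shape:

import { ImageClip } from "@marcuth/movie.js"

type GalleryData = { coverPath: string }

// A static string works too; a function defers the choice to render time.
const cover = new ImageClip<GalleryData>({
    path: ({ data }) => data.coverPath, // resolved through resolvePath during build
    duration: 3,
})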

package/package.json
ADDED
@@ -0,0 +1,35 @@
+{
+    "name": "@marcuth/movie.js",
+    "version": "0.0.1",
+    "description": "Video template builder",
+    "main": "./dist/index.js",
+    "module": "./dist/index.js",
+    "types": "./dist/index.d.ts",
+    "files": [
+        "dist/*",
+        "!/**/__tests__"
+    ],
+    "scripts": {
+        "build": "tsc"
+    },
+    "keywords": [
+        "moviepy",
+        "video",
+        "ffmpeg wrapper",
+        "video template builder"
+    ],
+    "author": "Marcuth",
+    "license": "MIT",
+    "type": "commonjs",
+    "devDependencies": {
+        "@types/cli-progress": "^3.11.6",
+        "@types/fluent-ffmpeg": "^2.1.28",
+        "@types/node": "^25.0.3",
+        "ts-node": "^10.9.2",
+        "typescript": "^5.9.3"
+    },
+    "dependencies": {
+        "cli-progress": "^3.12.0",
+        "fluent-ffmpeg": "^2.1.3"
+    }
+}