mulmocast 2.1.17 → 2.1.19
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/lib/actions/movie.d.ts
CHANGED
|
@@ -28,7 +28,7 @@ export declare const getTransitionVideoId: (transition: MulmoTransition, videoId
|
|
|
28
28
|
};
|
|
29
29
|
export declare const getConcatVideoFilter: (concatVideoId: string, videoIdsForBeats: VideoId[]) => string;
|
|
30
30
|
export declare const validateBeatSource: (studioBeat: MulmoStudioContext["studio"]["beats"][number], index: number) => string;
|
|
31
|
-
export declare const addSplitAndExtractFrames: (ffmpegContext: FfmpegContext, videoId: string,
|
|
31
|
+
export declare const addSplitAndExtractFrames: (ffmpegContext: FfmpegContext, videoId: string, firstDuration: number, lastDuration: number, isMovie: boolean, needFirst: boolean, needLast: boolean, canvasInfo: {
|
|
32
32
|
width: number;
|
|
33
33
|
height: number;
|
|
34
34
|
}) => void;
|
package/lib/actions/movie.js
CHANGED
|
@@ -147,8 +147,7 @@ const addTransitionEffects = (ffmpegContext, captionedVideoId, context, transiti
|
|
|
147
147
|
// Limit transition duration to be no longer than either beat's duration
|
|
148
148
|
const prevBeatDuration = context.studio.beats[beatIndex - 1].duration ?? 1;
|
|
149
149
|
const currentBeatDuration = context.studio.beats[beatIndex].duration ?? 1;
|
|
150
|
-
const
|
|
151
|
-
const duration = Math.min(transition.duration, maxDuration);
|
|
150
|
+
const duration = getClampedTransitionDuration(transition.duration, prevBeatDuration, currentBeatDuration);
|
|
152
151
|
const outputVideoId = `trans_${beatIndex}_o`;
|
|
153
152
|
const processedVideoId = `${transitionVideoId}_f`;
|
|
154
153
|
if (transition.type === "fade") {
|
|
@@ -277,6 +276,33 @@ export const getConcatVideoFilter = (concatVideoId, videoIdsForBeats) => {
|
|
|
277
276
|
const inputs = videoIds.map((id) => `[${id}]`).join("");
|
|
278
277
|
return `${inputs}concat=n=${videoIds.length}:v=1:a=0[${concatVideoId}]`;
|
|
279
278
|
};
|
|
279
|
+
/**
 * Clamp a transition duration so it never exceeds 90% of the shorter
 * of the two adjacent beats. The 10% margin leaves part of each beat
 * visible outside the crossfade.
 *
 * @param transitionDuration requested transition length (seconds)
 * @param prevBeatDuration   duration of the preceding beat (seconds)
 * @param currentBeatDuration duration of the following beat (seconds)
 * @returns the transition duration, capped at 90% of the shorter beat
 */
const getClampedTransitionDuration = (transitionDuration, prevBeatDuration, currentBeatDuration) => {
    const shorterBeat = Math.min(prevBeatDuration, currentBeatDuration);
    const maxDuration = shorterBeat * 0.9; // Use 90% to leave some margin
    return Math.min(transitionDuration, maxDuration);
};
|
|
283
|
+
/**
 * Compute how long the frozen first/last frames of beat `index` must be
 * held to cover the transition entering the beat and the transition
 * leaving it. Each duration is clamped against the neighboring beats'
 * lengths and floored at one frame so ffmpeg always receives a positive
 * duration.
 *
 * @param context MulmoStudioContext holding studio beats and script beats
 * @param index   beat index within context.studio.beats
 * @returns { firstDuration, lastDuration } in seconds, each >= 1/30
 */
const getTransitionFrameDurations = (context, index) => {
    const minFrame = 1 / 30; // assumes 30fps output; hold at least one frame
    const beats = context.studio.beats;
    const scriptBeats = context.studio.script.beats;

    // Transition leading *into* this beat (never applies to the first beat).
    let firstDuration = 0;
    const currentTransition = MulmoPresentationStyleMethods.getMovieTransition(context, scriptBeats[index]);
    if (currentTransition && index > 0) {
        firstDuration = getClampedTransitionDuration(currentTransition.duration, beats[index - 1].duration ?? 1, beats[index].duration ?? 1);
    }

    // Transition leading *out of* this beat (never applies to the last beat).
    let lastDuration = 0;
    const nextTransition = index < scriptBeats.length - 1 ? MulmoPresentationStyleMethods.getMovieTransition(context, scriptBeats[index + 1]) : null;
    if (nextTransition) {
        lastDuration = getClampedTransitionDuration(nextTransition.duration, beats[index].duration ?? 1, beats[index + 1].duration ?? 1);
    }

    return {
        firstDuration: Math.max(firstDuration, minFrame),
        lastDuration: Math.max(lastDuration, minFrame),
    };
};
|
|
280
306
|
export const validateBeatSource = (studioBeat, index) => {
|
|
281
307
|
const sourceFile = studioBeat.lipSyncFile ?? studioBeat.soundEffectFile ?? studioBeat.movieFile ?? studioBeat.htmlImageFile ?? studioBeat.imageFile;
|
|
282
308
|
assert(!!sourceFile, `studioBeat.imageFile or studioBeat.movieFile is not set: index=${index}`, false, createVideoSourceError(index));
|
|
@@ -284,7 +310,7 @@ export const validateBeatSource = (studioBeat, index) => {
|
|
|
284
310
|
assert(!!studioBeat.duration, `studioBeat.duration is not set: index=${index}`);
|
|
285
311
|
return sourceFile;
|
|
286
312
|
};
|
|
287
|
-
export const addSplitAndExtractFrames = (ffmpegContext, videoId,
|
|
313
|
+
export const addSplitAndExtractFrames = (ffmpegContext, videoId, firstDuration, lastDuration, isMovie, needFirst, needLast, canvasInfo) => {
|
|
288
314
|
const outputs = [`[${videoId}]`];
|
|
289
315
|
if (needFirst)
|
|
290
316
|
outputs.push(`[${videoId}_first_src]`);
|
|
@@ -294,20 +320,20 @@ export const addSplitAndExtractFrames = (ffmpegContext, videoId, duration, isMov
|
|
|
294
320
|
if (needFirst) {
|
|
295
321
|
// Create static frame using nullsrc as base for proper framerate/timebase
|
|
296
322
|
// Note: setpts must NOT be used here as it loses framerate metadata needed by xfade
|
|
297
|
-
ffmpegContext.filterComplex.push(`nullsrc=size=${canvasInfo.width}x${canvasInfo.height}:duration=${
|
|
323
|
+
ffmpegContext.filterComplex.push(`nullsrc=size=${canvasInfo.width}x${canvasInfo.height}:duration=${firstDuration}:rate=30[${videoId}_first_null]`);
|
|
298
324
|
ffmpegContext.filterComplex.push(`[${videoId}_first_src]select='eq(n,0)',scale=${canvasInfo.width}:${canvasInfo.height}[${videoId}_first_frame]`);
|
|
299
325
|
ffmpegContext.filterComplex.push(`[${videoId}_first_null][${videoId}_first_frame]overlay=format=auto,fps=30[${videoId}_first]`);
|
|
300
326
|
}
|
|
301
327
|
if (needLast) {
|
|
302
328
|
if (isMovie) {
|
|
303
329
|
// Movie beats: extract actual last frame
|
|
304
|
-
ffmpegContext.filterComplex.push(`nullsrc=size=${canvasInfo.width}x${canvasInfo.height}:duration=${
|
|
330
|
+
ffmpegContext.filterComplex.push(`nullsrc=size=${canvasInfo.width}x${canvasInfo.height}:duration=${lastDuration}:rate=30[${videoId}_last_null]`);
|
|
305
331
|
ffmpegContext.filterComplex.push(`[${videoId}_last_src]reverse,select='eq(n,0)',reverse,scale=${canvasInfo.width}:${canvasInfo.height}[${videoId}_last_frame]`);
|
|
306
332
|
ffmpegContext.filterComplex.push(`[${videoId}_last_null][${videoId}_last_frame]overlay=format=auto,fps=30[${videoId}_last]`);
|
|
307
333
|
}
|
|
308
334
|
else {
|
|
309
335
|
// Image beats: all frames are identical, so just select one
|
|
310
|
-
ffmpegContext.filterComplex.push(`nullsrc=size=${canvasInfo.width}x${canvasInfo.height}:duration=${
|
|
336
|
+
ffmpegContext.filterComplex.push(`nullsrc=size=${canvasInfo.width}x${canvasInfo.height}:duration=${lastDuration}:rate=30[${videoId}_last_null]`);
|
|
311
337
|
ffmpegContext.filterComplex.push(`[${videoId}_last_src]select='eq(n,0)',scale=${canvasInfo.width}:${canvasInfo.height}[${videoId}_last_frame]`);
|
|
312
338
|
ffmpegContext.filterComplex.push(`[${videoId}_last_null][${videoId}_last_frame]overlay=format=auto,fps=30[${videoId}_last]`);
|
|
313
339
|
}
|
|
@@ -364,7 +390,8 @@ export const createVideo = async (audioArtifactFilePath, outputVideoPath, contex
|
|
|
364
390
|
const needLast = needsLastFrame[index]; // Next beat has transition
|
|
365
391
|
videoIdsForBeats.push(videoId);
|
|
366
392
|
if (needFirst || needLast) {
|
|
367
|
-
|
|
393
|
+
const { firstDuration, lastDuration } = getTransitionFrameDurations(context, index);
|
|
394
|
+
addSplitAndExtractFrames(ffmpegContext, videoId, firstDuration, lastDuration, isMovie, needFirst, needLast, canvasInfo);
|
|
368
395
|
}
|
|
369
396
|
// Record transition info if this beat has a transition
|
|
370
397
|
const transition = MulmoPresentationStyleMethods.getMovieTransition(context, beat);
|
|
@@ -159,7 +159,7 @@ export declare const provider2LLMAgent: {
|
|
|
159
159
|
readonly agentName: "geminiAgent";
|
|
160
160
|
readonly defaultModel: "gemini-2.5-flash";
|
|
161
161
|
readonly max_tokens: 8192;
|
|
162
|
-
readonly models: readonly ["gemini-2.5-pro", "gemini-2.5-flash", "gemini-2.5-flash-lite"
|
|
162
|
+
readonly models: readonly ["gemini-2.5-pro", "gemini-2.5-flash", "gemini-2.5-flash-lite"];
|
|
163
163
|
readonly keyName: "GEMINI_API_KEY";
|
|
164
164
|
};
|
|
165
165
|
readonly groq: {
|
|
@@ -287,7 +287,7 @@ export const provider2LLMAgent = {
|
|
|
287
287
|
agentName: "geminiAgent",
|
|
288
288
|
defaultModel: "gemini-2.5-flash",
|
|
289
289
|
max_tokens: 8192,
|
|
290
|
-
models: ["gemini-2.5-pro", "gemini-2.5-flash", "gemini-2.5-flash-lite"
|
|
290
|
+
models: ["gemini-2.5-pro", "gemini-2.5-flash", "gemini-2.5-flash-lite"],
|
|
291
291
|
keyName: "GEMINI_API_KEY",
|
|
292
292
|
},
|
|
293
293
|
groq: {
|
package/lib/utils/error_cause.js
CHANGED
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "mulmocast",
|
|
3
|
-
"version": "2.1.
|
|
3
|
+
"version": "2.1.19",
|
|
4
4
|
"description": "",
|
|
5
5
|
"type": "module",
|
|
6
6
|
"main": "lib/index.node.js",
|
|
@@ -74,10 +74,10 @@
|
|
|
74
74
|
"homepage": "https://github.com/receptron/mulmocast-cli#readme",
|
|
75
75
|
"dependencies": {
|
|
76
76
|
"@google-cloud/text-to-speech": "^6.4.0",
|
|
77
|
-
"@google/genai": "^1.
|
|
77
|
+
"@google/genai": "^1.38.0",
|
|
78
78
|
"@graphai/anthropic_agent": "^2.0.12",
|
|
79
79
|
"@graphai/browserless_agent": "^2.0.1",
|
|
80
|
-
"@graphai/gemini_agent": "^2.0.
|
|
80
|
+
"@graphai/gemini_agent": "^2.0.4",
|
|
81
81
|
"@graphai/groq_agent": "^2.0.2",
|
|
82
82
|
"@graphai/input_agents": "^1.0.2",
|
|
83
83
|
"@graphai/openai_agent": "^2.0.8",
|
|
@@ -86,23 +86,23 @@
|
|
|
86
86
|
"@graphai/vanilla_node_agents": "^2.0.4",
|
|
87
87
|
"@inquirer/input": "^5.0.4",
|
|
88
88
|
"@inquirer/select": "^5.0.4",
|
|
89
|
-
"@modelcontextprotocol/sdk": "^1.25.
|
|
89
|
+
"@modelcontextprotocol/sdk": "^1.25.3",
|
|
90
90
|
"@mozilla/readability": "^0.6.0",
|
|
91
91
|
"@tavily/core": "^0.5.11",
|
|
92
92
|
"archiver": "^7.0.1",
|
|
93
|
-
"clipboardy": "^5.0
|
|
93
|
+
"clipboardy": "^5.1.0",
|
|
94
94
|
"dotenv": "^17.2.3",
|
|
95
95
|
"fluent-ffmpeg": "^2.1.3",
|
|
96
96
|
"graphai": "^2.0.16",
|
|
97
97
|
"jsdom": "^27.4.0",
|
|
98
98
|
"marked": "^17.0.1",
|
|
99
99
|
"mulmocast-vision": "^1.0.8",
|
|
100
|
-
"ora": "^9.
|
|
101
|
-
"puppeteer": "^24.
|
|
100
|
+
"ora": "^9.1.0",
|
|
101
|
+
"puppeteer": "^24.36.0",
|
|
102
102
|
"replicate": "^1.4.0",
|
|
103
103
|
"yaml": "^2.8.2",
|
|
104
104
|
"yargs": "^18.0.0",
|
|
105
|
-
"zod": "^4.3.
|
|
105
|
+
"zod": "^4.3.6"
|
|
106
106
|
},
|
|
107
107
|
"devDependencies": {
|
|
108
108
|
"@receptron/test_utils": "^2.0.3",
|
|
@@ -114,10 +114,10 @@
|
|
|
114
114
|
"eslint-config-prettier": "^10.1.8",
|
|
115
115
|
"eslint-plugin-prettier": "^5.5.5",
|
|
116
116
|
"eslint-plugin-sonarjs": "^3.0.5",
|
|
117
|
-
"prettier": "^3.8.
|
|
117
|
+
"prettier": "^3.8.1",
|
|
118
118
|
"tsx": "^4.21.0",
|
|
119
119
|
"typescript": "^5.9.3",
|
|
120
|
-
"typescript-eslint": "^8.53.
|
|
120
|
+
"typescript-eslint": "^8.53.1"
|
|
121
121
|
},
|
|
122
122
|
"engines": {
|
|
123
123
|
"node": ">=20.0.0"
|