mulmocast 1.2.16 → 1.2.18
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/assets/templates/ghibli_comic.json +3 -3
- package/assets/templates/ghibli_comic_strips.json +30 -0
- package/lib/actions/images.d.ts +2 -0
- package/lib/actions/images.js +9 -4
- package/lib/actions/translate.js +5 -4
- package/lib/agents/puppeteer_crawler_agent.d.ts +4 -0
- package/lib/agents/puppeteer_crawler_agent.js +94 -0
- package/lib/data/promptTemplates.d.ts +77 -0
- package/lib/data/promptTemplates.js +58 -2
- package/lib/data/templateDataSet.d.ts +1 -0
- package/lib/data/templateDataSet.js +5 -1
- package/lib/utils/ffmpeg_utils.js +3 -1
- package/package.json +4 -1
package/assets/templates/ghibli_comic.json
CHANGED
@@ -1,7 +1,7 @@
 {
-  "title": "Ghibli comic
-  "description": "
-  "systemPrompt": "Another AI will generate comic strips for each beat based on the
+  "title": "Ghibli-style comic.",
+  "description": "Ghibli-style comic.",
+  "systemPrompt": "Another AI will generate comic strips for each beat based on the imagePrompt of that beat. Mention the reference in one of beats, if it exists. Use the JSON below as a template.",
   "presentationStyle": {
     "$mulmocast": {
       "version": "1.1",
package/assets/templates/ghibli_comic_strips.json
ADDED
@@ -0,0 +1,30 @@
+{
+  "title": "Ghibli-style comic strips with nano banana.",
+  "description": "Ghibli-style comic strips with nano banana.",
+  "systemPrompt": "Another AI will generate comic strips for each beat based on the text description of that beat. Mention the reference in one of beats, if it exists. Use the JSON below as a template.",
+  "presentationStyle": {
+    "$mulmocast": {
+      "version": "1.1",
+      "credit": "closing"
+    },
+    "canvasSize": {
+      "width": 1536,
+      "height": 1024
+    },
+    "imageParams": {
+      "provider": "google",
+      "model": "gemini-2.5-flash-image-preview",
+      "style": "<style>Ghibli style multi-panel comic strips in landscape mode. Use speech bubbles with short, natural dialogue (1–6 words). Keep text minimal, like real comics. Let the art convey the story and emotions. Use the input image as the presenter.</style>",
+      "images": {
+        "presenter": {
+          "type": "image",
+          "source": {
+            "kind": "url",
+            "url": "https://raw.githubusercontent.com/receptron/mulmocast-media/refs/heads/main/characters/ghibli_presenter.png"
+          }
+        }
+      }
+    }
+  },
+  "scriptName": "text_only_template.json"
+}
package/lib/actions/images.d.ts
CHANGED
package/lib/actions/images.js
CHANGED
@@ -52,6 +52,8 @@ const beat_graph_data = {
 __mapIndex: {},
 forceMovie: { value: false },
 forceImage: { value: false },
+forceLipSync: { value: false },
+forceSoundEffect: { value: false },
 preprocessor: {
 agent: imagePreprocessAgent,
 inputs: {
@@ -188,7 +190,8 @@
 return { hasMovieAudio: true };
 }
 const sourceFile = namedInputs.movieFile || namedInputs.imageFile;
-if (!sourceFile
+if (!sourceFile) {
+// no need to check if the file exists (ffmpegGetMediaDuration will check it if it is local file)
 return { hasMovieAudio: false };
 }
 const { hasAudio } = await ffmpegGetMediaDuration(sourceFile);
@@ -214,7 +217,7 @@
 duration: ":preprocessor.beatDuration",
 },
 cache: {
-force: [":context.force"],
+force: [":context.force", ":forceSoundEffect"],
 file: ":preprocessor.soundEffectFile",
 index: ":__mapIndex",
 id: ":beat.id",
@@ -261,7 +264,7 @@
 duration: ":preprocessor.beatDuration",
 },
 cache: {
-force: [":context.force"],
+force: [":context.force", ":forceLipSync"],
 file: ":preprocessor.lipSyncFile",
 index: ":__mapIndex",
 id: ":beat.id",
@@ -428,7 +431,7 @@ export const images = async (context, args) => {
 // public api
 export const generateBeatImage = async (inputs) => {
 const { index, context, args } = inputs;
-const { settings, callbacks, forceMovie, forceImage } = args ?? {};
+const { settings, callbacks, forceMovie, forceImage, forceLipSync, forceSoundEffect } = args ?? {};
 const options = await graphOption(context, settings);
 const injections = await prepareGenerateImages(context);
 const graph = new GraphAI(beat_graph_data, defaultAgents, options);
@@ -441,6 +444,8 @@ export const generateBeatImage = async (inputs) => {
 graph.injectValue("beat", context.studio.script.beats[index]);
 graph.injectValue("forceMovie", forceMovie ?? false);
 graph.injectValue("forceImage", forceImage ?? false);
+graph.injectValue("forceLipSync", forceLipSync ?? false);
+graph.injectValue("forceSoundEffect", forceSoundEffect ?? false);
 if (callbacks) {
 callbacks.forEach((callback) => {
 graph.registerCallback(callback);
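For context, a minimal sketch of how a caller could pass the new per-beat force flags to generateBeatImage. Only the option names visible in the diff (index, context, args, forceLipSync, forceSoundEffect) come from the source; the import path and the way context is obtained are assumptions.

// Hypothetical usage sketch; import path and context setup are assumptions.
import { generateBeatImage } from "mulmocast"; // assumed re-export; the function lives in lib/actions/images.js

const regenerateBeat = async (context, index) =>
  generateBeatImage({
    index,
    context, // a prepared studio context, created elsewhere
    args: {
      forceLipSync: true, // new: adds ":forceLipSync" to the lip-sync cache check
      forceSoundEffect: true, // new: adds ":forceSoundEffect" to the sound-effect cache check
    },
  });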
package/lib/actions/translate.js
CHANGED
@@ -68,10 +68,9 @@ const beatGraph = {
 // for cache
 multiLingual: {
 agent: (namedInputs) => {
-const { multiLinguals, beatIndex, text
-const key = beatId(beat?.id, beatIndex);
+const { multiLinguals, beatIndex, text } = namedInputs;
 const cacheKey = hashSHA256(text ?? "");
-const multiLingual = multiLinguals?.[
+const multiLingual = multiLinguals?.[beatIndex];
 if (!multiLingual) {
 return { cacheKey, multiLingualTexts: {} };
 }
@@ -87,7 +86,6 @@
 },
 inputs: {
 text: ":beat.text",
-beat: ":beat",
 beatIndex: ":__mapIndex",
 multiLinguals: ":context.multiLingual",
 },
@@ -199,14 +197,17 @@ const localizedTextCacheAgentFilter = async (context, next) => {
 }
 // same language
 if (targetLang === lang) {
+GraphAILogger.log(`translate: ${beatIndex} same lang`);
 return { text: beat.text };
 }
 // The original text is unchanged and the target language text is present
 if (multiLingual.cacheKey === multiLingual.multiLingualTexts[targetLang]?.cacheKey) {
+GraphAILogger.log(`translate: ${beatIndex} cache hit`);
 return { text: multiLingual.multiLingualTexts[targetLang].text };
 }
 try {
 MulmoStudioContextMethods.setBeatSessionState(mulmoContext, "multiLingual", beatIndex, beat.id, true);
+GraphAILogger.log(`translate: ${beatIndex} run`);
 return await next(context);
 }
 finally {
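With the beatId lookup removed, the per-beat entry appears to be found by beat index, and cache validity is decided by comparing SHA-256 hashes of the beat text. A small sketch of that decision, using node:crypto as a stand-in for the package's internal hashSHA256 helper (the helper name comes from the diff; its exact implementation here is an assumption):

import { createHash } from "node:crypto";

// Stand-in for the package's hashSHA256 helper (assumed to be a hex SHA-256 of the text).
const hashSHA256 = (text) => createHash("sha256").update(text).digest("hex");

// Re-translate only when the hash of the current beat text differs from the cacheKey
// stored for the target language; otherwise the stored translation is reused.
const needsTranslation = (beatText, storedForTargetLang) => {
  const currentKey = hashSHA256(beatText ?? "");
  return storedForTargetLang?.cacheKey !== currentKey;
};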
package/lib/agents/puppeteer_crawler_agent.js
ADDED
@@ -0,0 +1,94 @@
+import { GraphAILogger } from "graphai";
+import puppeteer from "puppeteer";
+import { Readability } from "@mozilla/readability";
+import { JSDOM } from "jsdom";
+const NAV_TIMEOUT = 45_000;
+const normalize = (s) => s
+    .replace(/\r\n/g, "\n")
+    .replace(/[\n\t]{2,}/g, "\n")
+    .trim();
+const waitStable = async (page, ms = 1200, step = 200) => {
+    let last = -1;
+    let stable = 0;
+    while (stable < ms) {
+        const len = await page.evaluate(() => document.body?.innerText?.length || 0);
+        stable = len === last ? stable + step : 0;
+        last = len;
+        await new Promise((r) => setTimeout(r, step));
+    }
+};
+const fetchArticle = async (url) => {
+    const browser = await puppeteer.launch({
+        headless: true,
+        args: ["--no-sandbox", "--disable-setuid-sandbox", "--disable-dev-shm-usage"],
+    });
+    const page = await browser.newPage();
+    await page.setUserAgent("Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/124.0.0.0 Safari/537.36");
+    await page.setViewport({ width: 1366, height: 900 });
+    try {
+        await page.goto(url, { waitUntil: "networkidle2", timeout: NAV_TIMEOUT });
+        await Promise.race([page.waitForSelector("article, main, [role=main], .article, .post", { timeout: 8000 }), new Promise((r) => setTimeout(r, 8000))]);
+        await waitStable(page, 1200);
+        const html = await page.content();
+        const dom = new JSDOM(html, { url: page.url() });
+        const reader = new Readability(dom.window.document);
+        const a = reader.parse();
+        const title = a?.title || (await page.title()) || null;
+        const text = normalize(a?.textContent || "");
+        let finalText = text;
+        if (finalText.length < 100) {
+            const raw = await page.evaluate(() => {
+                const el = document.querySelector("article, main, [role=main], .article, .post") || document.body;
+                return el?.textContent || "";
+            });
+            finalText = normalize(raw);
+        }
+        return {
+            url,
+            title,
+            byline: a?.byline || null,
+            excerpt: a?.excerpt || null,
+            length: a?.length ?? (finalText?.length || null),
+            textContent: finalText || null,
+        };
+    }
+    finally {
+        await page.close().catch(() => { });
+        await browser.close().catch(() => { });
+    }
+};
+export const puppeteerCrawlerAgent = async ({ namedInputs }) => {
+    const { url } = namedInputs;
+    GraphAILogger.log(url);
+    try {
+        const data = await fetchArticle(url);
+        GraphAILogger.log(JSON.stringify({ ok: true, ...data }));
+        return {
+            data,
+            content: data.textContent,
+        };
+    }
+    catch (e) {
+        const errorMessage = e instanceof Error ? e.message : String(e);
+        GraphAILogger.log(JSON.stringify({ ok: false, url, error: errorMessage }));
+        return { content: errorMessage };
+    }
+};
+const puppeteerCrawlerAgentInfo = {
+    name: "puppeteerCrawlerAgent",
+    agent: puppeteerCrawlerAgent,
+    mock: puppeteerCrawlerAgent,
+    samples: [
+        {
+            params: {},
+            inputs: {},
+            result: {},
+        },
+    ],
+    description: "Puppeteer Crawler Agent",
+    category: ["net"],
+    repository: "https://github.com/receptron/mulmocast-cli",
+    author: "Receptron team",
+    license: "MIT",
+};
+export default puppeteerCrawlerAgentInfo;
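The new agent can also be invoked outside a graph. A minimal sketch, assuming the file is importable at its published path (the package may not expose a subpath export, so treat the import as hypothetical):

// Hypothetical direct invocation of the new crawler agent.
import puppeteerCrawlerAgentInfo from "mulmocast/lib/agents/puppeteer_crawler_agent.js"; // assumed path

const { agent } = puppeteerCrawlerAgentInfo;
// The agent reads `url` from namedInputs and returns { data, content } on success,
// or { content: errorMessage } when the crawl fails.
const { data, content } = await agent({ namedInputs: { url: "https://example.com/article" } });
console.log(data?.title, content?.slice(0, 200));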
package/lib/data/promptTemplates.d.ts
CHANGED
@@ -37,6 +37,7 @@ export declare const promptTemplates: ({
 };
 style: string;
 provider?: undefined;
+model?: undefined;
 };
 movieParams: {
 provider: string;
@@ -106,6 +107,7 @@ export declare const promptTemplates: ({
 };
 style: string;
 provider?: undefined;
+model?: undefined;
 };
 movieParams: {
 model: string;
@@ -178,6 +180,7 @@ export declare const promptTemplates: ({
 };
 provider: string;
 style?: undefined;
+model?: undefined;
 };
 movieParams: {
 provider: string;
@@ -231,6 +234,7 @@ export declare const promptTemplates: ({
 style: string;
 images?: undefined;
 provider?: undefined;
+model?: undefined;
 };
 movieParams: {
 provider: string;
@@ -297,6 +301,74 @@ export declare const promptTemplates: ({
 };
 style: string;
 provider?: undefined;
+model?: undefined;
+};
+movieParams: {
+provider: string;
+model?: undefined;
+};
+soundEffectParams: {
+provider: string;
+};
+speechParams: {
+speakers: {
+Presenter: {
+displayName: {
+en: string;
+};
+voiceId: string;
+lang?: undefined;
+speechOptions?: undefined;
+};
+Announcer?: undefined;
+Student?: undefined;
+Teacher?: undefined;
+};
+};
+};
+scriptName: string;
+systemPrompt: string;
+title: string;
+} | {
+description: string;
+filename: string;
+presentationStyle: {
+$mulmocast: {
+credit: string;
+version: string;
+};
+audioParams: {
+audioVolume: number;
+bgmVolume: number;
+closingPadding: number;
+introPadding: number;
+outroPadding: number;
+padding: number;
+suppressSpeech: boolean;
+bgm?: undefined;
+};
+canvasSize: {
+height: number;
+width: number;
+};
+imageParams: {
+images: {
+presenter: {
+source: {
+kind: string;
+url: string;
+};
+type: string;
+};
+girl?: undefined;
+ani?: undefined;
+optimus?: undefined;
+"[CHARACTER_1_ID]"?: undefined;
+"[CHARACTER_2_ID]"?: undefined;
+};
+model: string;
+provider: string;
+style: string;
 };
 movieParams: {
 provider: string;
@@ -369,6 +441,7 @@ export declare const promptTemplates: ({
 };
 style: string;
 provider?: undefined;
+model?: undefined;
 };
 movieParams: {
 provider: string;
@@ -422,6 +495,7 @@ export declare const promptTemplates: ({
 style: string;
 images?: undefined;
 provider?: undefined;
+model?: undefined;
 };
 movieParams: {
 provider: string;
@@ -486,6 +560,7 @@ export declare const promptTemplates: ({
 style: string;
 images?: undefined;
 provider?: undefined;
+model?: undefined;
 };
 movieParams: {
 provider: string;
@@ -552,6 +627,7 @@ export declare const promptTemplates: ({
 };
 style: string;
 provider?: undefined;
+model?: undefined;
 };
 movieParams: {
 provider: string;
@@ -608,6 +684,7 @@ export declare const promptTemplates: ({
 style: string;
 images?: undefined;
 provider?: undefined;
+model?: undefined;
 };
 movieParams: {
 provider: string;
package/lib/data/promptTemplates.js
CHANGED
@@ -323,7 +323,7 @@ export const promptTemplates = [
 title: "Dr. Slump Style",
 },
 {
-description: "
+description: "Ghibli-style comic.",
 filename: "ghibli_comic",
 presentationStyle: {
 $mulmocast: {
@@ -373,8 +373,64 @@ export const promptTemplates = [
 },
 },
 scriptName: "image_prompts_template.json",
+systemPrompt: "Another AI will generate comic strips for each beat based on the imagePrompt of that beat. Mention the reference in one of beats, if it exists. Use the JSON below as a template.",
+title: "Ghibli-style comic.",
+},
+{
+description: "Ghibli-style comic strips with nano banana.",
+filename: "ghibli_comic_strips",
+presentationStyle: {
+$mulmocast: {
+credit: "closing",
+version: "1.1",
+},
+audioParams: {
+audioVolume: 1,
+bgmVolume: 0.2,
+closingPadding: 0.8,
+introPadding: 1,
+outroPadding: 1,
+padding: 0.3,
+suppressSpeech: false,
+},
+canvasSize: {
+height: 1024,
+width: 1536,
+},
+imageParams: {
+images: {
+presenter: {
+source: {
+kind: "url",
+url: "https://raw.githubusercontent.com/receptron/mulmocast-media/refs/heads/main/characters/ghibli_presenter.png",
+},
+type: "image",
+},
+},
+model: "gemini-2.5-flash-image-preview",
+provider: "google",
+style: "<style>Ghibli style multi-panel comic strips in landscape mode. Use speech bubbles with short, natural dialogue (1–6 words). Keep text minimal, like real comics. Let the art convey the story and emotions. Use the input image as the presenter.</style>",
+},
+movieParams: {
+provider: "replicate",
+},
+soundEffectParams: {
+provider: "replicate",
+},
+speechParams: {
+speakers: {
+Presenter: {
+displayName: {
+en: "Presenter",
+},
+voiceId: "shimmer",
+},
+},
+},
+},
+scriptName: "text_only_template.json",
 systemPrompt: "Another AI will generate comic strips for each beat based on the text description of that beat. Mention the reference in one of beats, if it exists. Use the JSON below as a template.",
-title: "Ghibli comic
+title: "Ghibli-style comic strips with nano banana.",
 },
 {
 description: "Template for Ghost in the shell style comic presentation.",
package/lib/data/templateDataSet.js
CHANGED
@@ -31,10 +31,14 @@ export const templateDataSet = {
 "```JSON\n" +
 `{"$mulmocast":{"version":"1.1","credit":"closing"},"title":"[TITLE: Brief, engaging title for the topic]","lang":"en","references":[{"url":"[SOURCE_URL: URL of the source material]","title":"[SOURCE_TITLE: Title of the referenced article, or paper]","type":"[SOURCE_TYPE: article, paper]"}],"beats":[{"text":"[OPENING_BEAT: Introduce the topic with a hook. Reference the source material and set up why this topic matters. Usually 2-3 sentences that grab attention and provide context.]","imagePrompt":"[IMAGE_PROMPT: A prompt for the image to be generated for this beat.]"},{"text":"[MAIN_CONCEPT: Define or explain the core concept/idea. This should be the central focus of your narrative. Keep it clear and accessible.]","imagePrompt":"[IMAGE_PROMPT: A prompt for the image to be generated for this beat.]"},{"text":"[SUPPORTING_DETAIL_1: Additional context, examples, or elaboration that helps illustrate the main concept. This could include how it works, why it's important, or real-world applications.]","imagePrompt":"[IMAGE_PROMPT: A prompt for the image to be generated for this beat.]"},{"text":"[SUPPORTING_DETAIL_2: Continue with more examples, deeper explanation, or different aspects of the topic if needed.]","imagePrompt":"[IMAGE_PROMPT: A prompt for the image to be generated for this beat.]"},{"text":"[ADDITIONAL_BEATS: Add more beats as necessary to fully explore the topic. Complex topics may require 6-10+ beats to cover adequately. Each beat should advance the narrative or provide valuable information.]","imagePrompt":"[IMAGE_PROMPT: A prompt for the image to be generated for this beat.]"},{"text":"[CONCLUSION/IMPACT: Wrap up with the significance, implications, or key takeaway. Help the audience understand why this matters to them.]","imagePrompt":"[IMAGE_PROMPT: A prompt for the image to be generated for this beat.]"}],"canvasSize":{"width":1536,"height":1024},"imageParams":{"style":"<style>Dragon Ball/Dr. Slump aesthetic.</style>","images":{"girl":{"type":"image","source":{"kind":"url","url":"https://raw.githubusercontent.com/receptron/mulmocast-media/refs/heads/main/characters/slump_presenter.png"}}}}}\n` +
 "```",
-ghibli_comic: "Another AI will generate comic strips for each beat based on the
+ghibli_comic: "Another AI will generate comic strips for each beat based on the imagePrompt of that beat. Mention the reference in one of beats, if it exists. Use the JSON below as a template.\n" +
 "```JSON\n" +
 `{"$mulmocast":{"version":"1.1","credit":"closing"},"title":"[TITLE: Brief, engaging title for the topic]","lang":"en","references":[{"url":"[SOURCE_URL: URL of the source material]","title":"[SOURCE_TITLE: Title of the referenced article, or paper]","type":"[SOURCE_TYPE: article, paper]"}],"beats":[{"text":"[OPENING_BEAT: Introduce the topic with a hook. Reference the source material and set up why this topic matters. Usually 2-3 sentences that grab attention and provide context.]","imagePrompt":"[IMAGE_PROMPT: A prompt for the image to be generated for this beat.]"},{"text":"[MAIN_CONCEPT: Define or explain the core concept/idea. This should be the central focus of your narrative. Keep it clear and accessible.]","imagePrompt":"[IMAGE_PROMPT: A prompt for the image to be generated for this beat.]"},{"text":"[SUPPORTING_DETAIL_1: Additional context, examples, or elaboration that helps illustrate the main concept. This could include how it works, why it's important, or real-world applications.]","imagePrompt":"[IMAGE_PROMPT: A prompt for the image to be generated for this beat.]"},{"text":"[SUPPORTING_DETAIL_2: Continue with more examples, deeper explanation, or different aspects of the topic if needed.]","imagePrompt":"[IMAGE_PROMPT: A prompt for the image to be generated for this beat.]"},{"text":"[ADDITIONAL_BEATS: Add more beats as necessary to fully explore the topic. Complex topics may require 6-10+ beats to cover adequately. Each beat should advance the narrative or provide valuable information.]","imagePrompt":"[IMAGE_PROMPT: A prompt for the image to be generated for this beat.]"},{"text":"[CONCLUSION/IMPACT: Wrap up with the significance, implications, or key takeaway. Help the audience understand why this matters to them.]","imagePrompt":"[IMAGE_PROMPT: A prompt for the image to be generated for this beat.]"}],"canvasSize":{"width":1536,"height":1024},"imageParams":{"style":"<style>Ghibli style</style>","images":{"presenter":{"type":"image","source":{"kind":"url","url":"https://raw.githubusercontent.com/receptron/mulmocast-media/refs/heads/main/characters/ghibli_presenter.png"}}}}}\n` +
 "```",
+ghibli_comic_strips: "Another AI will generate comic strips for each beat based on the text description of that beat. Mention the reference in one of beats, if it exists. Use the JSON below as a template.\n" +
+"```JSON\n" +
+`{"$mulmocast":{"version":"1.1","credit":"closing"},"title":"[TITLE: Brief, engaging title for the topic]","lang":"en","references":[{"url":"[SOURCE_URL: URL of the source material]","title":"[SOURCE_TITLE: Title of the referenced article, or paper]","type":"[SOURCE_TYPE: article, paper]"}],"beats":[{"text":"[OPENING_BEAT: Introduce the topic with a hook. Reference the source material and set up why this topic matters. Usually 2-3 sentences that grab attention and provide context.]"},{"text":"[MAIN_CONCEPT: Define or explain the core concept/idea. This should be the central focus of your narrative. Keep it clear and accessible.]"},{"text":"[SUPPORTING_DETAIL_1: Additional context, examples, or elaboration that helps illustrate the main concept. This could include how it works, why it's important, or real-world applications.]"},{"text":"[SUPPORTING_DETAIL_2: Continue with more examples, deeper explanation, or different aspects of the topic if needed.]"},{"text":"[ADDITIONAL_BEATS: Add more beats as necessary to fully explore the topic. Complex topics may require 6-10+ beats to cover adequately. Each beat should advance the narrative or provide valuable information.]"},{"text":"[CONCLUSION/IMPACT: Wrap up with the significance, implications, or key takeaway. Help the audience understand why this matters to them.]"}],"canvasSize":{"width":1536,"height":1024},"imageParams":{"provider":"google","model":"gemini-2.5-flash-image-preview","style":"<style>Ghibli style multi-panel comic strips in landscape mode. Use speech bubbles with short, natural dialogue (1–6 words). Keep text minimal, like real comics. Let the art convey the story and emotions. Use the input image as the presenter.</style>","images":{"presenter":{"type":"image","source":{"kind":"url","url":"https://raw.githubusercontent.com/receptron/mulmocast-media/refs/heads/main/characters/ghibli_presenter.png"}}}}}\n` +
+"```",
 ghost_comic: "Another AI will generate images for each beat based on the image prompt of that beat. Mention the reference in one of beats, if it exists. Use the JSON below as a template.\n" +
 "```JSON\n" +
 `{"$mulmocast":{"version":"1.1","credit":"closing"},"title":"[TITLE: Brief, engaging title for the topic]","lang":"en","references":[{"url":"[SOURCE_URL: URL of the source material]","title":"[SOURCE_TITLE: Title of the referenced article, or paper]","type":"[SOURCE_TYPE: article, paper]"}],"beats":[{"text":"[OPENING_BEAT: Introduce the topic with a hook. Reference the source material and set up why this topic matters. Usually 2-3 sentences that grab attention and provide context.]","imagePrompt":"[IMAGE_PROMPT: A prompt for the image to be generated for this beat.]"},{"text":"[MAIN_CONCEPT: Define or explain the core concept/idea. This should be the central focus of your narrative. Keep it clear and accessible.]","imagePrompt":"[IMAGE_PROMPT: A prompt for the image to be generated for this beat.]"},{"text":"[SUPPORTING_DETAIL_1: Additional context, examples, or elaboration that helps illustrate the main concept. This could include how it works, why it's important, or real-world applications.]","imagePrompt":"[IMAGE_PROMPT: A prompt for the image to be generated for this beat.]"},{"text":"[SUPPORTING_DETAIL_2: Continue with more examples, deeper explanation, or different aspects of the topic if needed.]","imagePrompt":"[IMAGE_PROMPT: A prompt for the image to be generated for this beat.]"},{"text":"[ADDITIONAL_BEATS: Add more beats as necessary to fully explore the topic. Complex topics may require 6-10+ beats to cover adequately. Each beat should advance the narrative or provide valuable information.]","imagePrompt":"[IMAGE_PROMPT: A prompt for the image to be generated for this beat.]"},{"text":"[CONCLUSION/IMPACT: Wrap up with the significance, implications, or key takeaway. Help the audience understand why this matters to them.]","imagePrompt":"[IMAGE_PROMPT: A prompt for the image to be generated for this beat.]"}],"canvasSize":{"width":1536,"height":1024},"imageParams":{"style":"<style>Ghost in the shell aesthetic.</style>","images":{"presenter":{"type":"image","source":{"kind":"url","url":"https://raw.githubusercontent.com/receptron/mulmocast-media/refs/heads/main/characters/ghost_presenter.png"}},"optimus":{"type":"image","source":{"kind":"url","url":"https://raw.githubusercontent.com/receptron/mulmocast-media/refs/heads/main/characters/optimus.png"}}}}}\n` +
package/lib/utils/ffmpeg_utils.js
CHANGED
@@ -64,7 +64,9 @@ export const ffmpegGetMediaDuration = (filePath) => {
 return new Promise((resolve, reject) => {
 // Only check file existence for local paths, not URLs
 if (!filePath.startsWith("http://") && !filePath.startsWith("https://") && !fs.existsSync(filePath)) {
-reject
+// NOTE: We don't reject here for scripts/test/test_hello_image.json, which uses mock image agent.
+// reject(new Error(`File not found: ${filePath}`));
+resolve({ duration: 0, hasAudio: false });
 return;
 }
 ffmpeg.ffprobe(filePath, (err, metadata) => {
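The change above makes ffmpegGetMediaDuration resolve with a zero-duration, no-audio result for a missing local file instead of rejecting. A minimal sketch of the resulting caller-side behavior (import path is an assumption based on the file location):

// Hypothetical caller: a nonexistent local path now resolves instead of throwing.
import { ffmpegGetMediaDuration } from "mulmocast/lib/utils/ffmpeg_utils.js"; // assumed path

const { duration, hasAudio } = await ffmpegGetMediaDuration("./output/missing_beat.mp4");
console.log(duration, hasAudio); // 0 false when the file does not exist locally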
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
 "name": "mulmocast",
-"version": "1.2.
+"version": "1.2.18",
 "description": "",
 "type": "module",
 "main": "lib/index.node.js",
@@ -79,11 +79,13 @@
 "@inquirer/input": "^4.2.2",
 "@inquirer/select": "^4.3.2",
 "@modelcontextprotocol/sdk": "^1.17.4",
+"@mozilla/readability": "^0.6.0",
 "@tavily/core": "^0.5.11",
 "clipboardy": "^4.0.0",
 "dotenv": "^17.2.1",
 "fluent-ffmpeg": "^2.1.3",
 "graphai": "^2.0.14",
+"jsdom": "^26.1.0",
 "marked": "^16.2.0",
 "ora": "^8.2.0",
 "puppeteer": "^24.17.0",
@@ -96,6 +98,7 @@
 "devDependencies": {
 "@receptron/test_utils": "^2.0.3",
 "@types/fluent-ffmpeg": "^2.1.26",
+"@types/jsdom": "^21.1.7",
 "@types/yargs": "^17.0.33",
 "eslint": "^9.34.0",
 "eslint-config-prettier": "^10.1.8",