mulmocast 2.6.3 → 2.6.5
- package/README.md +2 -1
- package/assets/html/chart.html +40 -7
- package/lib/actions/image_agents.d.ts +6 -0
- package/lib/actions/image_agents.js +28 -2
- package/lib/actions/image_references.d.ts +1 -0
- package/lib/actions/image_references.js +49 -11
- package/lib/actions/images.d.ts +26 -0
- package/lib/actions/images.js +2 -0
- package/lib/agents/movie_genai_agent.js +36 -6
- package/lib/agents/movie_replicate_agent.js +20 -3
- package/lib/methods/mulmo_presentation_style.d.ts +6 -0
- package/lib/slide/layouts/index.js +3 -0
- package/lib/slide/layouts/waterfall.d.ts +2 -0
- package/lib/slide/layouts/waterfall.js +63 -0
- package/lib/slide/render.js +4 -1
- package/lib/slide/schema.d.ts +176 -0
- package/lib/slide/schema.js +18 -0
- package/lib/slide/utils.d.ts +1 -0
- package/lib/slide/utils.js +21 -1
- package/lib/types/agent.d.ts +6 -0
- package/lib/types/provider2agent.d.ts +6 -11
- package/lib/types/provider2agent.js +10 -0
- package/lib/types/schema.d.ts +532 -0
- package/lib/types/schema.js +14 -0
- package/lib/types/slide.d.ts +176 -0
- package/lib/types/slide.js +18 -0
- package/lib/utils/context.d.ts +167 -0
- package/lib/utils/html_render.js +4 -3
- package/lib/utils/image_plugins/chart.js +19 -6
- package/lib/utils/image_plugins/mermaid.js +5 -1
- package/package.json +7 -7
- package/scripts/test/test_image_prompt_reference.json +55 -0
- package/scripts/test/test_ir_visualizations.json +317 -0
- package/scripts/test/test_movie_references.json +101 -0
- package/scripts/test/test_plugin_features.json +151 -0
package/README.md
CHANGED
```diff
@@ -396,8 +396,9 @@ MulmoCast includes a powerful **Slide DSL** (`type: "slide"`) for creating struc
 
 ### Features
 
-- **
+- **12 Layouts**: title, columns, comparison, grid, bigQuote, stats, timeline, split, matrix, table, funnel, waterfall
 - **10 Content Block Types**: text, bullets, code, callout, metric, divider, image, imageRef, chart, mermaid
+- **Chart.js Plugins**: sankey (`chartjs-chart-sankey`) and treemap (`chartjs-chart-treemap`) auto-loaded by chart type
 - **13-Color Theme System**: Semantic color palette with dark/light support
 - **6 Preset Themes**: dark, pop, warm, creative, minimal, corporate
 
```
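As a rough illustration of the new README entries, a slide using the added `waterfall` layout might be authored like this. This is a sketch only: the field names (`layout`, `title`, `unit`, `items`, `label`, `value`, `isTotal`) are inferred from the `layoutWaterfall` implementation further down in this diff, not taken from the package docs.

```js
// Hypothetical slide definition for the new "waterfall" layout.
// All field names here are inferred from layoutWaterfall() below.
const slide = {
  layout: "waterfall",
  title: "Q4 Cash Bridge",
  unit: "M",
  items: [
    { label: "Opening", value: 120, isTotal: true }, // totals reset the baseline
    { label: "Sales", value: 45 },                   // positive step ("success" color)
    { label: "Opex", value: -30 },                   // negative step ("danger" color)
    { label: "Closing", value: 135, isTotal: true },
  ],
};
```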
package/assets/html/chart.html
CHANGED
```diff
@@ -5,14 +5,18 @@
     <meta name="viewport" content="width=device-width, initial-scale=1.0">
     <title>Simple Chart.js Bar Chart</title>
     <style>
+      html, body { height: 100%; margin: 0; }
+      body { display: flex; flex-direction: column; align-items: center; justify-content: center; }
       ${style}
       .chart-container {
         width: ${chart_width}px;
-
+        height: 70vh;
+        position: relative;
       }
     </style>
     <!-- Include Chart.js from CDN -->
-    <script src="https://cdn.jsdelivr.net/npm/chart.js"></script>
+    <script src="https://cdn.jsdelivr.net/npm/chart.js@4"></script>
+    ${chart_plugins}
   </head>
   <body>
     <h1>${title}</h1>
@@ -20,9 +24,7 @@
       <canvas id="myChart" data-chart-ready="false"></canvas>
     </div>
 
-    <!-- Plain JavaScript instead of TypeScript -->
     <script>
-      // Wait for DOM and Chart.js to be ready, then render.
       function initChart() {
         if (!window.Chart) return false;
         const ctx = document.getElementById('myChart');
@@ -32,10 +34,41 @@
         // Disable animation for static image generation
         if (!chartData.options) chartData.options = {};
         chartData.options.animation = false;
+        chartData.options.responsive = true;
+        chartData.options.maintainAspectRatio = false;
 
-        //
+        // Treemap: convert backgroundColor array to scriptable function
+        if (chartData.type === 'treemap') {
+          chartData.data.datasets.forEach((ds) => {
+            if (Array.isArray(ds.backgroundColor)) {
+              const colors = ds.backgroundColor;
+              ds.backgroundColor = (c) => colors[c.dataIndex % colors.length];
+            }
+          });
+        }
+
+        // Sankey: convert colorFrom/colorTo objects to lookup functions
+        if (chartData.type === 'sankey') {
+          chartData.data.datasets.forEach((ds) => {
+            if (ds.colorFrom && typeof ds.colorFrom === 'object') {
+              const fromMap = ds.colorFrom;
+              ds.colorFrom = (c) => fromMap[c.dataset.data[c.dataIndex].from] || '#999';
+            }
+            if (ds.colorTo && typeof ds.colorTo === 'object') {
+              const toMap = ds.colorTo;
+              ds.colorTo = (c) => toMap[c.dataset.data[c.dataIndex].to] || '#999';
+            }
+          });
+        }
+
+        try {
+          const chart = new Chart(ctx, chartData);
+          chart.resize();
+          chart.update();
+        } catch (e) {
+          console.error('Chart init error:', e);
+        }
 
-        new Chart(ctx, chartData);
         requestAnimationFrame(() => {
           requestAnimationFrame(() => {
             ctx.dataset.chartReady = "true";
@@ -49,7 +82,7 @@
         setTimeout(waitForChart, 50);
       }
 
-      document.addEventListener('DOMContentLoaded',
+      document.addEventListener('DOMContentLoaded', () => {
        waitForChart();
      });
    </script>
```
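The treemap/sankey branches above exist because `chartjs-chart-sankey` and `chartjs-chart-treemap` expect scriptable color options, while the authored chart JSON carries plain arrays and maps. A payload the sankey branch would rewrite might look like the following. This is a sketch: the `from`/`to`/`flow` dataset shape is the documented `chartjs-chart-sankey` format, but the authored object-map form of `colorFrom`/`colorTo` is inferred from the conversion code above.

```js
// Hypothetical chartData as injected into the template. The object-map
// colorFrom/colorTo below is what initChart() converts into
// (c) => map[nodeName] lookup functions before calling new Chart().
const chartData = {
  type: "sankey",
  data: {
    datasets: [{
      data: [
        { from: "Revenue", to: "Gross Profit", flow: 60 },
        { from: "Revenue", to: "COGS", flow: 40 },
      ],
      colorFrom: { Revenue: "#4e79a7" },                       // node -> color map
      colorTo: { "Gross Profit": "#59a14f", COGS: "#e15759" }, // node -> color map
    }],
  },
};
```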
package/lib/actions/image_agents.d.ts
CHANGED
```diff
@@ -22,6 +22,12 @@ type ImagePreprocessAgentReturnValue = {
         agent: string;
         movieParams: MulmoMovieParams;
     };
+    firstFrameImagePath?: string;
+    lastFrameImagePath?: string;
+    movieReferenceImages?: {
+        imagePath: string;
+        referenceType: "ASSET" | "STYLE";
+    }[];
 };
 type ImagePreprocessAgentResponseBase = ImagePreprocessAgentReturnValue & {
     imagePath?: string;
```
package/lib/actions/image_agents.js
CHANGED
```diff
@@ -66,6 +66,28 @@ export const imagePreprocessAgent = async (namedInputs) => {
         }
     }
     returnValue.movieAgentInfo = MulmoPresentationStyleMethods.getMovieAgentInfo(context.presentationStyle, beat);
+    // Resolve movie reference images from imageRefs
+    const movieParams = beat.movieParams ?? context.presentationStyle.movieParams;
+    if (movieParams?.firstFrameImageName && imageRefs) {
+        const firstFramePath = imageRefs[movieParams.firstFrameImageName];
+        if (firstFramePath) {
+            returnValue.firstFrameImagePath = firstFramePath;
+        }
+    }
+    if (movieParams?.lastFrameImageName && imageRefs) {
+        const lastFramePath = imageRefs[movieParams.lastFrameImageName];
+        if (lastFramePath) {
+            returnValue.lastFrameImagePath = lastFramePath;
+        }
+    }
+    if (movieParams?.referenceImages && imageRefs) {
+        returnValue.movieReferenceImages = movieParams.referenceImages
+            .map((ref) => {
+                const refPath = imageRefs[ref.imageName];
+                return refPath ? { imagePath: refPath, referenceType: ref.referenceType } : undefined;
+            })
+            .filter((r) => r !== undefined);
+    }
     if (beat.image) {
         const plugin = MulmoBeatMethods.getPlugin(beat);
         const pluginPath = plugin.path({ beat, context, imagePath, ...htmlStyle(context, beat) });
@@ -106,13 +128,17 @@ export const imagePreprocessAgent = async (namedInputs) => {
     }
     if (beat.moviePrompt && !beat.imagePrompt) {
         // ImageOnlyMoviePreprocessAgentResponse
-
+        // If firstFrameImageName is specified, use the resolved ref image as the movie's first frame
+        const base = { ...returnValue, imagePath, imageFromMovie: true };
+        return returnValue.firstFrameImagePath ? { ...base, referenceImageForMovie: returnValue.firstFrameImagePath } : base;
     }
     // referenceImages for "edit_image", openai agent.
     const referenceImages = MulmoBeatMethods.getImageReferenceForImageGenerator(beat, imageRefs ?? {});
     const prompt = imagePrompt(beat, imageAgentInfo.imageParams.style);
     // ImageGenearalPreprocessAgentResponse
-
+    // firstFrameImagePath (from movieParams.firstFrameImageName) takes precedence over generated image
+    const movieFirstFramePath = returnValue.firstFrameImagePath ?? imagePath;
+    return { ...returnValue, imagePath, referenceImageForMovie: movieFirstFramePath, imageAgentInfo, prompt, referenceImages };
 };
 export const imagePluginAgent = async (namedInputs) => {
     const { context, beat, index, imageRefs, movieRefs } = namedInputs;
```
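The resolution step added above turns name-based `movieParams` fields into concrete file paths via `imageRefs`. A minimal sketch of the authored side (field names match the `.d.ts` additions above; the key names and paths are invented):

```js
// Hypothetical movieParams on a beat (or presentationStyle). Each *Name
// field must match a key in the resolved imageRefs map.
const movieParams = {
  firstFrameImageName: "hero_start", // -> returnValue.firstFrameImagePath
  lastFrameImageName: "hero_end",    // -> returnValue.lastFrameImagePath
  referenceImages: [
    { imageName: "brand_style", referenceType: "STYLE" },
  ],
};
// With imageRefs = { hero_start: "out/hero_start.png", ... }, the
// preprocessor copies the matching paths onto its return value; reference
// entries whose names resolve to nothing are dropped by the .filter().
```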
package/lib/actions/image_references.js
CHANGED
```diff
@@ -7,12 +7,13 @@ import { agentGenerationError, imageReferenceAction, imageFileTarget, movieFileT
 // public api
 // Application may call this function directly to generate reference image.
 export const generateReferenceImage = async (inputs) => {
-    const { context, key, index, image, force } = inputs;
+    const { context, key, index, image, referenceImagePath, force } = inputs;
     const imagePath = getReferenceImagePath(context, key, "png");
     // generate image
     const imageAgentInfo = MulmoPresentationStyleMethods.getImageAgentInfo(context.presentationStyle);
     const prompt = `${image.prompt}\n${imageAgentInfo.imageParams.style || ""}`;
     GraphAILogger.info(`Generating reference image for ${key}: ${prompt}`);
+    const referenceImages = referenceImagePath ? [referenceImagePath] : undefined;
     const image_graph_data = {
         version: 0.5,
         nodes: {
@@ -22,6 +23,7 @@ export const generateReferenceImage = async (inputs) => {
             inputs: {
                 media: "image",
                 prompt,
+                referenceImages,
                 cache: {
                     force: [context.force, force ?? false],
                     file: imagePath,
@@ -58,12 +60,14 @@ export const getMediaRefs = async (context) => {
     }
     const imageRefs = {};
     const movieRefs = {};
+    // Stage 1: resolve non-referencing entries (image, imagePrompt without referenceImageName, movie)
     await Promise.all(Object.keys(images)
         .sort()
         .map(async (key, index) => {
         const image = images[key];
-        if (image.type === "imagePrompt") {
-
+        if (image.type === "imagePrompt" && !image.referenceImageName) {
+            const refPath = image.referenceImage ? await MulmoMediaSourceMethods.imageReference(image.referenceImage, context, key) : undefined;
+            imageRefs[key] = await generateReferenceImage({ context, key, index, image, referenceImagePath: refPath, force: false });
         }
         else if (image.type === "image") {
             imageRefs[key] = await MulmoMediaSourceMethods.imageReference(image.source, context, key);
@@ -72,10 +76,23 @@ export const getMediaRefs = async (context) => {
             movieRefs[key] = await resolveMovieReference(image, context, key);
         }
     }));
+    // Stage 2: resolve imagePrompt with referenceImageName (depends on Stage 1 results)
+    await Promise.all(Object.keys(images)
+        .sort()
+        .map(async (key, index) => {
+        const image = images[key];
+        if (image.type === "imagePrompt" && image.referenceImageName) {
+            const refPath = imageRefs[image.referenceImageName];
+            if (!refPath) {
+                GraphAILogger.warn(`imagePrompt "${key}": referenceImageName "${image.referenceImageName}" not found in imageRefs — generating without reference`);
+            }
+            imageRefs[key] = await generateReferenceImage({ context, key, index, image, referenceImagePath: refPath, force: false });
+        }
+    }));
     return { imageRefs, movieRefs };
 };
-const resolveMovieReference = async (
-    return MulmoMediaSourceMethods.imageReference(
+const resolveMovieReference = async (media, context, key) => {
+    return MulmoMediaSourceMethods.imageReference(media.source, context, key);
 };
 const generateReferenceMovie = async (inputs) => {
     const { context, key, index, moviePrompt, imagePath } = inputs;
@@ -124,17 +141,19 @@ const generateReferenceMovie = async (inputs) => {
 const resolveLocalRefs = async (context, images, beatIndex, globalImageRefs) => {
     const localImageRefs = {};
     const localMovieRefs = {};
-    // Stage 1: image, imagePrompt, movie (parallel)
+    // Stage 1: image, imagePrompt (without referenceImageName), movie (parallel)
     await Promise.all(Object.keys(images)
         .sort()
         .map(async (key, i) => {
         const entry = images[key];
-        if (entry.type === "imagePrompt") {
+        if (entry.type === "imagePrompt" && !entry.referenceImageName) {
+            const refPath = entry.referenceImage ? await MulmoMediaSourceMethods.imageReference(entry.referenceImage, context, key) : undefined;
             localImageRefs[key] = await generateReferenceImage({
                 context,
                 key,
                 index: beatIndex * 100 + i,
                 image: entry,
+                referenceImagePath: refPath,
             });
         }
         else if (entry.type === "image") {
@@ -144,20 +163,39 @@ const resolveLocalRefs = async (context, images, beatIndex, globalImageRefs) =>
             localMovieRefs[key] = await resolveMovieReference(entry, context, key);
         }
     }));
-    // Stage 2:
+    // Stage 2: imagePrompt with referenceImageName (depends on Stage 1)
+    const combinedImageRefsForImagePrompt = { ...globalImageRefs, ...localImageRefs };
+    await Promise.all(Object.keys(images)
+        .sort()
+        .map(async (key, i) => {
+        const entry = images[key];
+        if (entry.type === "imagePrompt" && entry.referenceImageName) {
+            const refPath = combinedImageRefsForImagePrompt[entry.referenceImageName];
+            if (!refPath) {
+                GraphAILogger.warn(`imagePrompt "${key}": referenceImageName "${entry.referenceImageName}" not found — generating without reference`);
+            }
+            localImageRefs[key] = await generateReferenceImage({
+                context,
+                key,
+                index: beatIndex * 100 + i,
+                image: entry,
+                referenceImagePath: refPath,
+            });
+        }
+    }));
+    // Stage 3: moviePrompt (imageName references imageRefs only)
     const combinedImageRefs = { ...globalImageRefs, ...localImageRefs };
     await Promise.all(Object.keys(images)
         .sort()
         .map(async (key, i) => {
         const entry = images[key];
         if (entry.type === "moviePrompt") {
-            const
-            const refImagePath = mp.imageName ? combinedImageRefs[mp.imageName] : undefined;
+            const refImagePath = entry.imageName ? combinedImageRefs[entry.imageName] : undefined;
             localMovieRefs[key] = await generateReferenceMovie({
                 context,
                 key,
                 index: beatIndex * 100 + i,
-                moviePrompt:
+                moviePrompt: entry,
                 imagePath: refImagePath,
             });
         }
```
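The two-stage split above lets one generated reference image seed another. A sketch of an `images` map that exercises it (keys and prompt text are invented; `referenceImageName` is the field the new code checks):

```js
// Hypothetical images map. "character" resolves in Stage 1; since
// "character_night" names it via referenceImageName, it is deferred to
// Stage 2 and receives Stage 1's output path as referenceImagePath.
const images = {
  character: { type: "imagePrompt", prompt: "a cat astronaut, full body" },
  character_night: {
    type: "imagePrompt",
    prompt: "the same cat astronaut on a night street",
    referenceImageName: "character",
  },
};
```

Note that Stage 2 appears to look only one level deep: the lookup into `imageRefs` happens before any Stage 2 generation completes, so an imagePrompt referencing another referencing imagePrompt would hit the warning path and generate without a reference.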
package/lib/actions/images.d.ts
CHANGED
```diff
@@ -70,6 +70,12 @@ export declare const beat_graph_data: {
         agent: string;
         movieParams: import("../types/type.js").MulmoMovieParams;
     };
+    firstFrameImagePath?: string;
+    lastFrameImagePath?: string;
+    movieReferenceImages?: {
+        imagePath: string;
+        referenceType: "ASSET" | "STYLE";
+    }[];
 } & {
     imagePath?: string;
 }) | ({
@@ -95,6 +101,12 @@ export declare const beat_graph_data: {
         agent: string;
         movieParams: import("../types/type.js").MulmoMovieParams;
     };
+    firstFrameImagePath?: string;
+    lastFrameImagePath?: string;
+    movieReferenceImages?: {
+        imagePath: string;
+        referenceType: "ASSET" | "STYLE";
+    }[];
 } & {
     imagePath?: string;
 } & {
@@ -131,6 +143,12 @@ export declare const beat_graph_data: {
         agent: string;
         movieParams: import("../types/type.js").MulmoMovieParams;
     };
+    firstFrameImagePath?: string;
+    lastFrameImagePath?: string;
+    movieReferenceImages?: {
+        imagePath: string;
+        referenceType: "ASSET" | "STYLE";
+    }[];
 } & {
     imagePath?: string;
 } & {
@@ -159,6 +177,12 @@ export declare const beat_graph_data: {
         agent: string;
         movieParams: import("../types/type.js").MulmoMovieParams;
     };
+    firstFrameImagePath?: string;
+    lastFrameImagePath?: string;
+    movieReferenceImages?: {
+        imagePath: string;
+        referenceType: "ASSET" | "STYLE";
+    }[];
 } & {
     imagePath?: string;
 } & {
@@ -282,6 +306,8 @@ export declare const beat_graph_data: {
     onComplete: string[];
     prompt: string;
     imagePath: string;
+    lastFrameImagePath: string;
+    referenceImages: string;
     movieFile: string;
     cache: {
         force: string[];
```
package/lib/actions/images.js
CHANGED
```diff
@@ -177,6 +177,8 @@ export const beat_graph_data = {
             onComplete: [":imageGenerator", ":imagePlugin"], // to wait for imageGenerator to finish
             prompt: ":beat.moviePrompt",
             imagePath: ":preprocessor.referenceImageForMovie",
+            lastFrameImagePath: ":preprocessor.lastFrameImagePath",
+            referenceImages: ":preprocessor.movieReferenceImages",
             movieFile: ":preprocessor.movieFile", // for google genai agent
             cache: {
                 force: [":context.force", ":forceMovie"],
```
package/lib/agents/movie_genai_agent.js
CHANGED
```diff
@@ -94,25 +94,55 @@ const generateExtendedVideo = async (ai, model, prompt, aspectRatio, imagePath,
     }
     return downloadVideo(ai, result.video, movieFile, isVertexAI);
 };
-const generateStandardVideo = async (ai, model, prompt, aspectRatio, imagePath, duration, movieFile, isVertexAI) => {
-    const
+const generateStandardVideo = async (ai, model, prompt, aspectRatio, imagePath, lastFrameImagePath, referenceImages, duration, movieFile, isVertexAI) => {
+    const capabilities = provider2MovieAgent.google.modelParams[model];
     const payload = {
         model,
         prompt,
         config: {
-            durationSeconds:
+            durationSeconds: capabilities?.supportsPersonGeneration === false ? undefined : duration,
             aspectRatio,
-            personGeneration: imagePath ? undefined : PersonGeneration.ALLOW_ALL,
+            personGeneration: imagePath || !capabilities?.supportsPersonGeneration ? undefined : PersonGeneration.ALLOW_ALL,
         },
         image: imagePath ? loadImageAsBase64(imagePath) : undefined,
     };
+    // Validate and apply lastFrame
+    if (lastFrameImagePath) {
+        if (!capabilities?.supportsLastFrame) {
+            GraphAILogger.warn(`movieGenAIAgent: model ${model} does not support lastFrame — ignoring lastFrameImageName`);
+        }
+        else if (!imagePath) {
+            GraphAILogger.warn(`movieGenAIAgent: lastFrame requires a first frame image (imagePrompt or firstFrameImageName) — ignoring lastFrameImageName`);
+        }
+        else {
+            payload.config.lastFrame = loadImageAsBase64(lastFrameImagePath);
+        }
+    }
+    // Validate and apply referenceImages (mutually exclusive with image/lastFrame)
+    if (referenceImages && referenceImages.length > 0) {
+        if (!capabilities?.supportsReferenceImages) {
+            GraphAILogger.warn(`movieGenAIAgent: model ${model} does not support referenceImages — ignoring`);
+        }
+        else if (imagePath) {
+            GraphAILogger.warn(`movieGenAIAgent: referenceImages cannot be combined with first frame image — ignoring referenceImages`);
+        }
+        else if (lastFrameImagePath) {
+            GraphAILogger.warn(`movieGenAIAgent: referenceImages cannot be combined with lastFrame — ignoring referenceImages`);
+        }
+        else {
+            payload.config.referenceImages = referenceImages.map((ref) => ({
+                image: loadImageAsBase64(ref.imagePath),
+                referenceType: ref.referenceType,
+            }));
+        }
+    }
     const operation = await ai.models.generateVideos(payload);
     const response = await pollUntilDone(ai, operation);
     const video = getVideoFromResponse(response);
     return downloadVideo(ai, video, movieFile, isVertexAI);
 };
 export const movieGenAIAgent = async ({ namedInputs, params, config, }) => {
-    const { prompt, imagePath, movieFile } = namedInputs;
+    const { prompt, imagePath, lastFrameImagePath, referenceImages, movieFile } = namedInputs;
     const aspectRatio = getAspectRatio(params.canvasSize, ASPECT_RATIOS);
     const model = params.model ?? provider2MovieAgent.google.defaultModel;
     const apiKey = config?.apiKey;
@@ -144,7 +174,7 @@ export const movieGenAIAgent = async ({ namedInputs, params, config, }) => {
         return generateExtendedVideo(ai, model, prompt, aspectRatio, imagePath, requestedDuration, movieFile, isVertexAI);
     }
     // Standard mode
-    return generateStandardVideo(ai, model, prompt, aspectRatio, imagePath, duration, movieFile, isVertexAI);
+    return generateStandardVideo(ai, model, prompt, aspectRatio, imagePath, lastFrameImagePath, referenceImages, duration, movieFile, isVertexAI);
 }
 catch (error) {
     GraphAILogger.info("Failed to generate movie:", error.message);
```
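The validation ladder above encodes the constraints: `lastFrame` needs both model support and a first-frame image, and `referenceImages` is exclusive with both. A sketch of the namedInputs for the supported first-plus-last-frame combination (paths and prompt are invented; capability flags such as `supportsLastFrame` live in `provider2MovieAgent.google.modelParams`):

```js
// Hypothetical namedInputs reaching generateStandardVideo via movieGenAIAgent.
const namedInputs = {
  prompt: "slow dolly shot across a coastline at dawn",
  imagePath: "output/beat_0_first.png",         // first frame (image-to-video)
  lastFrameImagePath: "output/beat_0_last.png", // applied only if the model supportsLastFrame
  referenceImages: undefined,                   // would be ignored here: exclusive with image/lastFrame
  movieFile: "output/beat_0.mov",
};
```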
package/lib/agents/movie_replicate_agent.js
CHANGED
```diff
@@ -3,7 +3,7 @@ import { GraphAILogger } from "graphai";
 import Replicate from "replicate";
 import { apiKeyMissingError, agentGenerationError, agentInvalidResponseError, imageAction, movieFileTarget, videoDurationTarget, unsupportedModelTarget, } from "../utils/error_cause.js";
 import { provider2MovieAgent, getModelDuration } from "../types/provider2agent.js";
-async function generateMovie(model, apiKey, prompt, imagePath, aspectRatio, duration) {
+async function generateMovie(model, apiKey, prompt, imagePath, lastFrameImagePath, aspectRatio, duration) {
     const replicate = new Replicate({
         auth: apiKey,
     });
@@ -37,6 +37,23 @@ async function generateMovie(model, apiKey, prompt, imagePath, aspectRatio, dura
             input.image = base64Image;
         }
     }
+    // Add last frame image if provided and model supports it
+    if (lastFrameImagePath) {
+        const lastImageParam = provider2MovieAgent.replicate.modelParams[model]?.last_image;
+        if (lastImageParam) {
+            if (!imagePath) {
+                GraphAILogger.warn(`movieReplicateAgent: model ${model} requires a first frame image to use lastFrame — ignoring lastFrameImageName`);
+            }
+            else {
+                const buffer = readFileSync(lastFrameImagePath);
+                const base64Image = `data:image/png;base64,${buffer.toString("base64")}`;
+                input[lastImageParam] = base64Image;
+            }
+        }
+        else {
+            GraphAILogger.warn(`movieReplicateAgent: model ${model} does not support lastFrame — ignoring lastFrameImageName`);
+        }
+    }
     try {
         const output = await replicate.run(model, { input });
         // Download the generated video
@@ -72,7 +89,7 @@ export const getAspectRatio = (canvasSize) => {
     return "9:16";
 };
 export const movieReplicateAgent = async ({ namedInputs, params, config, }) => {
-    const { prompt, imagePath } = namedInputs;
+    const { prompt, imagePath, lastFrameImagePath } = namedInputs;
     const aspectRatio = getAspectRatio(params.canvasSize);
     const model = params.model ?? provider2MovieAgent.replicate.defaultModel;
     if (!provider2MovieAgent.replicate.modelParams[model]) {
@@ -93,7 +110,7 @@ export const movieReplicateAgent = async ({ namedInputs, params, config, }) => {
         });
     }
     try {
-        const buffer = await generateMovie(model, apiKey, prompt, imagePath, aspectRatio, duration);
+        const buffer = await generateMovie(model, apiKey, prompt, imagePath, lastFrameImagePath, aspectRatio, duration);
         if (buffer) {
             return { buffer };
         }
```
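On the Replicate side the closing-frame input name is model-specific, so the agent looks it up as `modelParams[model]?.last_image`. Presumably the `provider2agent.js` change (+10 lines, not shown in this diff) registers those names; a hypothetical entry might look like:

```js
// Hypothetical modelParams entry: both the model id and the "end_image"
// input name are placeholders, not taken from the actual provider2agent.js.
const modelParams = {
  "some-vendor/some-video-model": { last_image: "end_image" },
};
```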
package/lib/methods/mulmo_presentation_style.d.ts
CHANGED
```diff
@@ -173,6 +173,12 @@ export declare const MulmoPresentationStyleMethods: {
         })[] | undefined;
         vertexai_project?: string | undefined;
         vertexai_location?: string | undefined;
+        firstFrameImageName?: string | undefined;
+        lastFrameImageName?: string | undefined;
+        referenceImages?: {
+            imageName: string;
+            referenceType: "ASSET" | "STYLE";
+        }[] | undefined;
         speed?: number | undefined;
     };
     keyName: string;
```
package/lib/slide/layouts/index.js
CHANGED
```diff
@@ -9,6 +9,7 @@ import { layoutSplit } from "./split.js";
 import { layoutMatrix } from "./matrix.js";
 import { layoutTable } from "./table.js";
 import { layoutFunnel } from "./funnel.js";
+import { layoutWaterfall } from "./waterfall.js";
 import { escapeHtml } from "../utils.js";
 /** Render the inner content of a slide (without the wrapper div) */
 export const renderSlideContent = (slide) => {
@@ -35,6 +36,8 @@ export const renderSlideContent = (slide) => {
             return layoutTable(slide);
         case "funnel":
             return layoutFunnel(slide);
+        case "waterfall":
+            return layoutWaterfall(slide);
         default: {
             const _exhaustive = slide;
             return `<p class="text-white p-8">Unknown layout: ${escapeHtml(String(_exhaustive.layout))}</p>`;
```
package/lib/slide/layouts/waterfall.js
ADDED
```diff
@@ -0,0 +1,63 @@
+import { renderInlineMarkup, c, slideHeader, renderOptionalCallout } from "../utils.js";
+/** Height of the chart area as percentage of available space */
+const CHART_HEIGHT_PCT = 75;
+export const layoutWaterfall = (data) => {
+    const parts = [slideHeader(data)];
+    const items = data.items || [];
+    const positions = buildWaterfallPositions(items);
+    const globalMax = Math.max(...positions.map((p) => p.top));
+    const globalMin = Math.min(...positions.map((p) => p.bottom));
+    const range = globalMax - globalMin || 1;
+    parts.push(`<div class="flex gap-1 px-12 mt-4 flex-1" style="min-height: 0">`);
+    items.forEach((item, i) => {
+        const pos = positions[i];
+        const isTotal = item.isTotal ?? false;
+        const isPositive = item.value >= 0;
+        const color = resolveBarColor(item.color, isTotal, isPositive);
+        const bottomPct = ((pos.bottom - globalMin) / range) * CHART_HEIGHT_PCT;
+        const heightPct = Math.max(((pos.top - pos.bottom) / range) * CHART_HEIGHT_PCT, 1.5);
+        const topOfBar = bottomPct + heightPct;
+        const labelTopPct = 100 - topOfBar;
+        const formattedValue = formatValue(item.value, data.unit, isTotal);
+        parts.push(`<div class="flex-1 relative" style="height: 100%">`);
+        // Value label (above bar)
+        parts.push(` <p class="absolute w-full text-xs font-bold text-d-text font-body text-center" style="top: ${labelTopPct - 4}%">${renderInlineMarkup(formattedValue)}</p>`);
+        // Bar (absolute positioned from bottom)
+        parts.push(` <div class="absolute left-1 right-1 bg-${c(color)} rounded-t" style="bottom: ${bottomPct}%; height: ${heightPct}%"></div>`);
+        // Bottom label
+        parts.push(` <p class="absolute bottom-0 w-full text-xs text-d-muted font-body text-center" style="transform: translateY(100%)">${renderInlineMarkup(item.label)}</p>`);
+        parts.push(`</div>`);
+    });
+    parts.push(`</div>`);
+    // Labels area
+    parts.push(`<div class="h-10 shrink-0"></div>`);
+    parts.push(renderOptionalCallout(data.callout));
+    return parts.join("\n");
+};
+const buildWaterfallPositions = (items) => {
+    let runningTotal = 0;
+    return items.map((item) => {
+        if (item.isTotal) {
+            runningTotal = item.value;
+            return { top: Math.max(item.value, 0), bottom: Math.min(item.value, 0) };
+        }
+        const prevTotal = runningTotal;
+        runningTotal += item.value;
+        if (item.value >= 0) {
+            return { top: runningTotal, bottom: prevTotal };
+        }
+        return { top: prevTotal, bottom: runningTotal };
+    });
+};
+const resolveBarColor = (itemColor, isTotal, isPositive) => {
+    if (itemColor)
+        return itemColor;
+    if (isTotal)
+        return "primary";
+    return isPositive ? "success" : "danger";
+};
+const formatValue = (value, unit, isTotal) => {
+    const prefix = !isTotal && value > 0 ? "+" : "";
+    const suffix = unit ? ` ${unit}` : "";
+    return `${prefix}${value}${suffix}`;
+};
```
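The geometry is a classic waterfall: `buildWaterfallPositions` keeps a running total, floats each delta bar between the old and new total, and lets `isTotal` bars reset the baseline to zero. A worked example (values invented):

```js
// Expected output of buildWaterfallPositions for a small series:
const items = [
  { label: "Start", value: 100, isTotal: true }, // -> { top: 100, bottom: 0 }
  { label: "Up", value: 30 },                    // -> { top: 130, bottom: 100 }
  { label: "Down", value: -50 },                 // -> { top: 130, bottom: 80 }
  { label: "End", value: 80, isTotal: true },    // -> { top: 80, bottom: 0 }
];
// globalMax = 130 and globalMin = 0, so each bar's CSS bottom/height come
// from (pos - globalMin) / range scaled into the 75% CHART_HEIGHT_PCT band.
```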
package/lib/slide/render.js
CHANGED
```diff
@@ -9,10 +9,13 @@ const isDarkBg = (hex) => {
 };
 /** Build CDN script tags for chart/mermaid when needed */
 const buildCdnScripts = (theme, slide) => {
-    const { hasChart, hasMermaid } = detectBlockTypes(slide);
+    const { hasChart, hasMermaid, chartPlugins } = detectBlockTypes(slide);
     const scripts = [];
     if (hasChart) {
         scripts.push('<script src="https://cdn.jsdelivr.net/npm/chart.js"></script>');
+        chartPlugins.forEach((cdn) => {
+            scripts.push(`<script src="${cdn}"></script>`);
+        });
     }
     if (hasMermaid) {
         const mermaidTheme = isDarkBg(theme.colors.bg) ? "dark" : "default";
```