@conceptcraft/mindframes 0.1.4 → 0.1.6
This diff shows the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in the public registry.
- package/dist/index.js +2665 -207
- package/dist/index.js.map +1 -1
- package/package.json +1 -1
package/dist/index.js
CHANGED
|
@@ -293,6 +293,12 @@ function progressBar(current, total, width = 30, showPercentage = true) {
|
|
|
293
293
|
const bar = chalk.green("\u2588".repeat(filled)) + chalk.gray("\u2591".repeat(empty));
|
|
294
294
|
return showPercentage ? `[${bar}] ${percentage}%` : `[${bar}]`;
|
|
295
295
|
}
|
|
296
|
+
function formatJson(data) {
|
|
297
|
+
return JSON.stringify(data, null, 2);
|
|
298
|
+
}
|
|
299
|
+
function printJson(data) {
|
|
300
|
+
console.log(formatJson(data));
|
|
301
|
+
}
|
|
296
302
|
var init_output = __esm({
|
|
297
303
|
"src/lib/output.ts"() {
|
|
298
304
|
"use strict";
|
|
@@ -300,11 +306,19 @@ var init_output = __esm({
|
|
|
300
306
|
}
|
|
301
307
|
});
|
|
302
308
|
|
|
309
|
+
// src/types/media.ts
|
|
310
|
+
var init_media = __esm({
|
|
311
|
+
"src/types/media.ts"() {
|
|
312
|
+
"use strict";
|
|
313
|
+
}
|
|
314
|
+
});
|
|
315
|
+
|
|
303
316
|
// src/types/index.ts
|
|
304
317
|
var EXIT_CODES;
|
|
305
318
|
var init_types = __esm({
|
|
306
319
|
"src/types/index.ts"() {
|
|
307
320
|
"use strict";
|
|
321
|
+
init_media();
|
|
308
322
|
EXIT_CODES = {
|
|
309
323
|
SUCCESS: 0,
|
|
310
324
|
GENERAL_ERROR: 1,
|
|
@@ -889,6 +903,132 @@ async function validateGeneration(mode, slideCount, teamId) {
|
|
|
889
903
|
}
|
|
890
904
|
return limits;
|
|
891
905
|
}
|
|
906
|
+
async function generateSpeech(ttsRequest) {
|
|
907
|
+
const apiUrl = getApiUrl();
|
|
908
|
+
if (!hasAuth()) {
|
|
909
|
+
throw new ApiError(
|
|
910
|
+
"Not authenticated. Run 'cc login' or set CC_SLIDES_API_KEY environment variable.",
|
|
911
|
+
401,
|
|
912
|
+
2
|
|
913
|
+
);
|
|
914
|
+
}
|
|
915
|
+
const authHeaders = await getAuthHeaders();
|
|
916
|
+
let response;
|
|
917
|
+
try {
|
|
918
|
+
response = await fetch(`${apiUrl}/api/cli/tts`, {
|
|
919
|
+
method: "POST",
|
|
920
|
+
headers: {
|
|
921
|
+
"Content-Type": "application/json",
|
|
922
|
+
...authHeaders
|
|
923
|
+
},
|
|
924
|
+
body: JSON.stringify(ttsRequest)
|
|
925
|
+
});
|
|
926
|
+
} catch (error2) {
|
|
927
|
+
throw new ApiError(
|
|
928
|
+
`Network error: ${error2 instanceof Error ? error2.message : "Unknown error"}`,
|
|
929
|
+
0,
|
|
930
|
+
5
|
|
931
|
+
);
|
|
932
|
+
}
|
|
933
|
+
if (!response.ok) {
|
|
934
|
+
const errorText = await response.text().catch(() => "Unknown error");
|
|
935
|
+
let errorMessage;
|
|
936
|
+
try {
|
|
937
|
+
const errorJson = JSON.parse(errorText);
|
|
938
|
+
errorMessage = errorJson.error || errorJson.message || errorText;
|
|
939
|
+
} catch {
|
|
940
|
+
errorMessage = errorText;
|
|
941
|
+
}
|
|
942
|
+
throw new ApiError(errorMessage, response.status, response.status === 401 ? 2 : 1);
|
|
943
|
+
}
|
|
944
|
+
const audioData = Buffer.from(await response.arrayBuffer());
|
|
945
|
+
const duration = parseFloat(response.headers.get("X-Duration-Seconds") || "0");
|
|
946
|
+
const cost = parseFloat(response.headers.get("X-Cost-USD") || "0");
|
|
947
|
+
const provider = response.headers.get("X-Provider") || "unknown";
|
|
948
|
+
const format = response.headers.get("X-Audio-Format") || "mp3";
|
|
949
|
+
let timestamps;
|
|
950
|
+
const timestampsHeader = response.headers.get("X-Timestamps");
|
|
951
|
+
if (timestampsHeader) {
|
|
952
|
+
try {
|
|
953
|
+
timestamps = JSON.parse(timestampsHeader);
|
|
954
|
+
} catch {
|
|
955
|
+
}
|
|
956
|
+
}
|
|
957
|
+
return {
|
|
958
|
+
audioData,
|
|
959
|
+
duration,
|
|
960
|
+
cost,
|
|
961
|
+
provider,
|
|
962
|
+
format,
|
|
963
|
+
timestamps
|
|
964
|
+
};
|
|
965
|
+
}
|
|
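The added `generateSpeech` helper posts the TTS request, decodes the response body into a `Buffer`, and reads duration, cost, provider, format, and optional timestamps from `X-*` response headers. A minimal consumer sketch follows, assuming the helper were importable from the built module; the `{ text, voice }` request shape is inferred from the CLI flags shown later in this diff and may not match the real field names.

```ts
// Hypothetical consumer of generateSpeech; the import path and request fields are assumptions.
import { writeFileSync } from "node:fs";
import { generateSpeech } from "@conceptcraft/mindframes/dist/index.js";

async function saveVoiceover(): Promise<void> {
  const tts = await generateSpeech({ text: "Narration text", voice: "Kore" });
  // audioData is a Buffer decoded from the response body; format, duration, cost, and
  // provider come from the X-* headers parsed above.
  writeFileSync(`voiceover.${tts.format}`, tts.audioData);
  console.log(`${tts.duration}s of audio via ${tts.provider} (cost $${tts.cost})`);
}

saveVoiceover().catch((err) => {
  console.error(err);
  process.exit(1);
});
```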
966
|
+
async function getVoices() {
|
|
967
|
+
return request("/api/cli/tts");
|
|
968
|
+
}
|
|
969
|
+
async function generateMusic(musicRequest) {
|
|
970
|
+
const response = await request("/api/cli/music", {
|
|
971
|
+
method: "POST",
|
|
972
|
+
body: musicRequest
|
|
973
|
+
});
|
|
974
|
+
if (!response.data) {
|
|
975
|
+
throw new ApiError(`Invalid API response: ${JSON.stringify(response)}`, 500, 1);
|
|
976
|
+
}
|
|
977
|
+
return {
|
|
978
|
+
requestId: response.data.id,
|
|
979
|
+
status: response.data.status,
|
|
980
|
+
audioUrl: response.data.audioUrl,
|
|
981
|
+
duration: response.data.duration,
|
|
982
|
+
cost: response.data.cost,
|
|
983
|
+
error: response.data.error
|
|
984
|
+
};
|
|
985
|
+
}
|
|
986
|
+
async function checkMusicStatus(requestId) {
|
|
987
|
+
const response = await request(
|
|
988
|
+
`/api/cli/music?requestId=${encodeURIComponent(requestId)}`
|
|
989
|
+
);
|
|
990
|
+
return {
|
|
991
|
+
requestId: response.data.id,
|
|
992
|
+
status: response.data.status,
|
|
993
|
+
audioUrl: response.data.audioUrl,
|
|
994
|
+
duration: response.data.duration,
|
|
995
|
+
cost: response.data.cost,
|
|
996
|
+
error: response.data.error
|
|
997
|
+
};
|
|
998
|
+
}
|
|
999
|
+
async function mixAudio(mixRequest) {
|
|
1000
|
+
return request("/api/cli/mix", {
|
|
1001
|
+
method: "POST",
|
|
1002
|
+
body: mixRequest
|
|
1003
|
+
});
|
|
1004
|
+
}
|
|
1005
|
+
async function checkMixStatus(requestId) {
|
|
1006
|
+
return request(
|
|
1007
|
+
`/api/cli/mix?requestId=${encodeURIComponent(requestId)}`
|
|
1008
|
+
);
|
|
1009
|
+
}
|
|
1010
|
+
async function searchImages(searchRequest) {
|
|
1011
|
+
return request("/api/cli/images/search", {
|
|
1012
|
+
method: "POST",
|
|
1013
|
+
body: searchRequest
|
|
1014
|
+
});
|
|
1015
|
+
}
|
|
1016
|
+
async function searchVideos(searchRequest) {
|
|
1017
|
+
return request("/api/cli/videos/search", {
|
|
1018
|
+
method: "POST",
|
|
1019
|
+
body: searchRequest
|
|
1020
|
+
});
|
|
1021
|
+
}
|
|
1022
|
+
async function pollForCompletion(checkFn, maxAttempts = 60, intervalMs = 2e3) {
|
|
1023
|
+
for (let i = 0; i < maxAttempts; i++) {
|
|
1024
|
+
const result = await checkFn();
|
|
1025
|
+
if (result.status === "completed" || result.status === "failed") {
|
|
1026
|
+
return result;
|
|
1027
|
+
}
|
|
1028
|
+
await new Promise((resolve5) => setTimeout(resolve5, intervalMs));
|
|
1029
|
+
}
|
|
1030
|
+
throw new ApiError("Operation timed out", 408, 1);
|
|
1031
|
+
}
|
|
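The music helpers added in this hunk follow an asynchronous-job pattern: `generateMusic` starts a job and returns a `requestId`, `checkMusicStatus` reports its progress, and `pollForCompletion` loops (2 s interval, 60 attempts by default) until the status settles. A sketch of how they might compose; the import path and the `{ prompt, duration }` request shape are assumptions rather than confirmed exports.

```ts
// Hypothetical composition of the new helpers; module path and request fields are assumptions.
import { generateMusic, checkMusicStatus, pollForCompletion } from "@conceptcraft/mindframes/dist/index.js";

async function createBackgroundTrack(): Promise<string> {
  // Kick off the job; the API responds with a request id to poll.
  const job = await generateMusic({ prompt: "uplifting corporate", duration: 30 });

  // Poll every 2 s (the helper's default) until the job reports "completed" or "failed".
  const result = await pollForCompletion(() => checkMusicStatus(job.requestId));

  if (result.status === "failed") {
    throw new Error(result.error ?? "Music generation failed");
  }
  return result.audioUrl; // URL of the rendered track
}
```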
892
1032
|
var ApiError;
|
|
893
1033
|
var init_api = __esm({
|
|
894
1034
|
"src/lib/api.ts"() {
|
|
@@ -992,10 +1132,10 @@ function generateState() {
|
|
|
992
1132
|
async function findAvailablePort(start, end) {
|
|
993
1133
|
for (let port = start; port <= end; port++) {
|
|
994
1134
|
try {
|
|
995
|
-
await new Promise((
|
|
1135
|
+
await new Promise((resolve5, reject) => {
|
|
996
1136
|
const server = http.createServer();
|
|
997
1137
|
server.listen(port, () => {
|
|
998
|
-
server.close(() =>
|
|
1138
|
+
server.close(() => resolve5());
|
|
999
1139
|
});
|
|
1000
1140
|
server.on("error", reject);
|
|
1001
1141
|
});
|
|
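For readers skimming the hunk above: the patched `findAvailablePort` probes each candidate port by briefly binding an HTTP server and resolving once it closes cleanly, while a busy port emits an error and rejects. A standalone restatement of the loop follows; the return and throw behaviour outside the hunk is an assumption.

```ts
import http from "node:http";

// Probe ports in order: listening succeeds on a free port (close and resolve),
// while an occupied port emits "error" (typically EADDRINUSE) and rejects.
async function findAvailablePort(start: number, end: number): Promise<number> {
  for (let port = start; port <= end; port++) {
    try {
      await new Promise<void>((resolve, reject) => {
        const server = http.createServer();
        server.listen(port, () => {
          server.close(() => resolve());
        });
        server.on("error", reject);
      });
      return port; // assumption: the first port that binds successfully is returned
    } catch {
      // Port in use; try the next candidate.
    }
  }
  throw new Error(`No free port between ${start} and ${end}`); // assumption: behaviour outside the hunk
}
```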
@@ -1053,7 +1193,21 @@ async function exchangeCodeForTokens(tokenEndpoint, code, codeVerifier, redirect
|
|
|
1053
1193
|
return response.json();
|
|
1054
1194
|
}
|
|
1055
1195
|
function startCallbackServer(port, expectedState) {
|
|
1056
|
-
return new Promise((
|
|
1196
|
+
return new Promise((resolve5, reject) => {
|
|
1197
|
+
let timeoutId;
|
|
1198
|
+
let settled = false;
|
|
1199
|
+
const cleanup = () => {
|
|
1200
|
+
if (settled) return;
|
|
1201
|
+
settled = true;
|
|
1202
|
+
clearTimeout(timeoutId);
|
|
1203
|
+
process.off("SIGINT", onCancel);
|
|
1204
|
+
process.off("SIGTERM", onCancel);
|
|
1205
|
+
server.close();
|
|
1206
|
+
};
|
|
1207
|
+
const onCancel = () => {
|
|
1208
|
+
cleanup();
|
|
1209
|
+
reject(new Error("Login cancelled"));
|
|
1210
|
+
};
|
|
1057
1211
|
const server = http.createServer((req, res) => {
|
|
1058
1212
|
const url = new URL(req.url || "", `http://localhost:${port}`);
|
|
1059
1213
|
if (url.pathname !== "/callback") {
|
|
@@ -1078,7 +1232,7 @@ function startCallbackServer(port, expectedState) {
|
|
|
1078
1232
|
</body>
|
|
1079
1233
|
</html>
|
|
1080
1234
|
`);
|
|
1081
|
-
|
|
1235
|
+
cleanup();
|
|
1082
1236
|
reject(new Error(errorDescription || errorParam));
|
|
1083
1237
|
return;
|
|
1084
1238
|
}
|
|
@@ -1094,7 +1248,7 @@ function startCallbackServer(port, expectedState) {
|
|
|
1094
1248
|
</body>
|
|
1095
1249
|
</html>
|
|
1096
1250
|
`);
|
|
1097
|
-
|
|
1251
|
+
cleanup();
|
|
1098
1252
|
reject(new Error("Missing authorization code or state"));
|
|
1099
1253
|
return;
|
|
1100
1254
|
}
|
|
@@ -1110,7 +1264,7 @@ function startCallbackServer(port, expectedState) {
|
|
|
1110
1264
|
</body>
|
|
1111
1265
|
</html>
|
|
1112
1266
|
`);
|
|
1113
|
-
|
|
1267
|
+
cleanup();
|
|
1114
1268
|
reject(new Error("State mismatch"));
|
|
1115
1269
|
return;
|
|
1116
1270
|
}
|
|
@@ -1124,20 +1278,14 @@ function startCallbackServer(port, expectedState) {
|
|
|
1124
1278
|
</body>
|
|
1125
1279
|
</html>
|
|
1126
1280
|
`);
|
|
1127
|
-
|
|
1128
|
-
|
|
1281
|
+
cleanup();
|
|
1282
|
+
resolve5({ code, state });
|
|
1129
1283
|
});
|
|
1130
1284
|
server.listen(port);
|
|
1131
|
-
|
|
1132
|
-
|
|
1133
|
-
|
|
1134
|
-
|
|
1135
|
-
process.once("SIGINT", cleanup);
|
|
1136
|
-
process.once("SIGTERM", cleanup);
|
|
1137
|
-
setTimeout(() => {
|
|
1138
|
-
process.off("SIGINT", cleanup);
|
|
1139
|
-
process.off("SIGTERM", cleanup);
|
|
1140
|
-
server.close();
|
|
1285
|
+
process.once("SIGINT", onCancel);
|
|
1286
|
+
process.once("SIGTERM", onCancel);
|
|
1287
|
+
timeoutId = setTimeout(() => {
|
|
1288
|
+
cleanup();
|
|
1141
1289
|
reject(new Error("Login timed out - no callback received within 5 minutes"));
|
|
1142
1290
|
}, 5 * 60 * 1e3);
|
|
1143
1291
|
});
|
|
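The hunks above rework `startCallbackServer` so that every exit path (OAuth error, missing code, state mismatch, success, SIGINT/SIGTERM, and the five-minute timeout) goes through a single `cleanup()` guarded by a `settled` flag, releasing the timer, the signal handlers, and the HTTP server exactly once. A generic sketch of that settle-once pattern follows; the names and response body are illustrative, not the package's own code.

```ts
import http from "node:http";

// Settle-once helper: whichever path finishes first cleans up the timer, the signal
// handlers, and the server, and every later call becomes a no-op.
function waitForCallback(port: number, timeoutMs = 5 * 60 * 1000): Promise<string> {
  return new Promise((resolve, reject) => {
    let settled = false;
    let timeoutId: ReturnType<typeof setTimeout>;

    const cleanup = () => {
      if (settled) return;
      settled = true;
      clearTimeout(timeoutId);
      process.off("SIGINT", onCancel);
      process.off("SIGTERM", onCancel);
      server.close();
    };
    const onCancel = () => {
      cleanup();
      reject(new Error("Cancelled"));
    };

    const server = http.createServer((req, res) => {
      res.end("You can close this window.");
      cleanup();
      resolve(req.url ?? "/");
    });

    server.listen(port);
    process.once("SIGINT", onCancel);
    process.once("SIGTERM", onCancel);
    timeoutId = setTimeout(() => {
      cleanup();
      reject(new Error("Timed out waiting for callback"));
    }, timeoutMs);
  });
}
```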
@@ -1238,6 +1386,7 @@ var init_login = __esm({
|
|
|
1238
1386
|
}
|
|
1239
1387
|
try {
|
|
1240
1388
|
await runLoginFlow(options);
|
|
1389
|
+
process.exit(0);
|
|
1241
1390
|
} catch {
|
|
1242
1391
|
process.exit(1);
|
|
1243
1392
|
}
|
|
@@ -1246,7 +1395,7 @@ var init_login = __esm({
|
|
|
1246
1395
|
});
|
|
1247
1396
|
|
|
1248
1397
|
// src/index.ts
|
|
1249
|
-
import { Command as
|
|
1398
|
+
import { Command as Command20 } from "commander";
|
|
1250
1399
|
import chalk13 from "chalk";
|
|
1251
1400
|
|
|
1252
1401
|
// src/lib/brand.ts
|
|
@@ -2273,21 +2422,21 @@ Uploading ${options.file.length} file(s)...`));
|
|
|
2273
2422
|
}
|
|
2274
2423
|
});
|
|
2275
2424
|
async function readStdin() {
|
|
2276
|
-
return new Promise((
|
|
2425
|
+
return new Promise((resolve5) => {
|
|
2277
2426
|
let data = "";
|
|
2278
2427
|
process.stdin.setEncoding("utf8");
|
|
2279
2428
|
if (process.stdin.isTTY) {
|
|
2280
|
-
|
|
2429
|
+
resolve5("");
|
|
2281
2430
|
return;
|
|
2282
2431
|
}
|
|
2283
2432
|
process.stdin.on("data", (chunk) => {
|
|
2284
2433
|
data += chunk;
|
|
2285
2434
|
});
|
|
2286
2435
|
process.stdin.on("end", () => {
|
|
2287
|
-
|
|
2436
|
+
resolve5(data.trim());
|
|
2288
2437
|
});
|
|
2289
2438
|
setTimeout(() => {
|
|
2290
|
-
|
|
2439
|
+
resolve5(data.trim());
|
|
2291
2440
|
}, 100);
|
|
2292
2441
|
});
|
|
2293
2442
|
}
|
|
@@ -2985,6 +3134,7 @@ import chalk12 from "chalk";
|
|
|
2985
3134
|
import { mkdirSync, writeFileSync, existsSync as existsSync2 } from "fs";
|
|
2986
3135
|
import { join } from "path";
|
|
2987
3136
|
import { homedir } from "os";
|
|
3137
|
+
import { execSync } from "child_process";
|
|
2988
3138
|
function generateSkillContent(b) {
|
|
2989
3139
|
const cmd2 = b.name;
|
|
2990
3140
|
const pkg = b.packageName;
|
|
@@ -2992,34 +3142,105 @@ function generateSkillContent(b) {
|
|
|
2992
3142
|
const name = b.displayName;
|
|
2993
3143
|
return `---
|
|
2994
3144
|
name: ${cmd2}
|
|
2995
|
-
description: Create AI-powered presentations
|
|
3145
|
+
description: Create AI-powered presentations and videos. Use for slides, decks, video content, voiceovers, and music generation.
|
|
3146
|
+
metadata:
|
|
3147
|
+
tags: presentations, video, tts, music, animation, remotion
|
|
3148
|
+
video:
|
|
3149
|
+
description: End-to-end AI video creation workflow with TTS voiceover and music generation. Use this skill when users want to create videos, promotional content, explainers, tourism videos, product demos, or any video content from an idea or topic. Handles the complete workflow - research, script writing, asset gathering, audio generation (voiceover + music), and orchestrates video creation. Use together with remotion-best-practices skill for Remotion-specific patterns. Triggers on requests like "create a video about X", "make a promotional video", "build a video for Y", or any video content creation task.
|
|
2996
3150
|
---
|
|
2997
3151
|
|
|
2998
3152
|
# ${name} CLI
|
|
2999
3153
|
|
|
3000
|
-
Create professional presentations directly from your terminal.
|
|
3154
|
+
Create professional presentations and videos directly from your terminal.
|
|
3001
3155
|
|
|
3002
3156
|
## Prerequisites
|
|
3003
3157
|
|
|
3004
3158
|
\`\`\`bash
|
|
3005
|
-
# Install globally
|
|
3006
3159
|
npm install -g ${pkg}
|
|
3160
|
+
${cmd2} login # Authenticate (opens browser)
|
|
3161
|
+
${cmd2} whoami # Verify setup
|
|
3162
|
+
\`\`\`
|
|
3163
|
+
|
|
3164
|
+
## Rules
|
|
3165
|
+
|
|
3166
|
+
Read these for detailed usage:
|
|
3167
|
+
|
|
3168
|
+
- [rules/presentations.md](rules/presentations.md) - Creating AI-powered presentations
|
|
3169
|
+
- [rules/video.md](rules/video.md) - Video creation workflow and commands
|
|
3170
|
+
- [rules/motion-standards.md](rules/motion-standards.md) - Animation quality standards
|
|
3171
|
+
- [rules/micro-interactions.md](rules/micro-interactions.md) - Animation components and patterns
|
|
3172
|
+
- **remotion-best-practices** skill (auto-installed) - Remotion-specific patterns
|
|
3173
|
+
|
|
3174
|
+
## Quick Reference
|
|
3175
|
+
|
|
3176
|
+
### Presentations
|
|
3177
|
+
|
|
3178
|
+
\`\`\`bash
|
|
3179
|
+
# Create from context
|
|
3180
|
+
cat README.md | ${cmd2} create "Project Overview"
|
|
3181
|
+
|
|
3182
|
+
# With files
|
|
3183
|
+
${cmd2} create "Product Demo" --file ./deck.pptx --file ./logo.png
|
|
3184
|
+
|
|
3185
|
+
# With options
|
|
3186
|
+
${cmd2} create "API Docs" --slides 8 --tone educational --goal inform
|
|
3187
|
+
\`\`\`
|
|
3188
|
+
|
|
3189
|
+
### Video Creation
|
|
3190
|
+
|
|
3191
|
+
\`\`\`bash
|
|
3192
|
+
# Scaffold project
|
|
3193
|
+
${cmd2} video init my-video
|
|
3007
3194
|
|
|
3008
|
-
#
|
|
3009
|
-
${cmd2}
|
|
3195
|
+
# Generate all assets (voiceover, music, images)
|
|
3196
|
+
${cmd2} video create --script "Your narration..." --output ./public
|
|
3197
|
+
|
|
3198
|
+
# Search for stock content
|
|
3199
|
+
${cmd2} image search -q "tropical beach" -n 5
|
|
3200
|
+
${cmd2} video search "tech workspace" -n 5
|
|
3201
|
+
\`\`\`
|
|
3202
|
+
|
|
3203
|
+
### Audio Generation
|
|
3204
|
+
|
|
3205
|
+
\`\`\`bash
|
|
3206
|
+
# Text-to-speech
|
|
3207
|
+
${cmd2} tts generate -t "Narration text" -o voice.wav --voice Kore
|
|
3208
|
+
|
|
3209
|
+
# Music generation
|
|
3210
|
+
${cmd2} music generate -p "uplifting corporate" -d 30 -o music.mp3
|
|
3211
|
+
|
|
3212
|
+
# Mix audio into video
|
|
3213
|
+
${cmd2} mix create --video video.mp4 --voice voice.wav --music music.mp3 -o final.mp4
|
|
3010
3214
|
\`\`\`
|
|
3011
3215
|
|
|
3012
|
-
##
|
|
3216
|
+
## Assets
|
|
3217
|
+
|
|
3218
|
+
Copy animation components from \`assets/animation-components.tsx\` for Remotion videos.
|
|
3219
|
+
|
|
3220
|
+
## Asking Questions
|
|
3221
|
+
|
|
3222
|
+
When you need to ask the user for preferences (voice, music, duration, etc.), use the \`AskUserQuestion\` tool if available. This provides a better UX with selectable options. See \`rules/video.md\` for the question format.
|
|
3223
|
+
`;
|
|
3224
|
+
}
|
|
3225
|
+
function generatePresentationsRule(b) {
|
|
3226
|
+
const cmd2 = b.name;
|
|
3227
|
+
const url = b.apiUrl;
|
|
3228
|
+
return `---
|
|
3229
|
+
name: presentations
|
|
3230
|
+
description: Creating AI-powered presentations
|
|
3231
|
+
---
|
|
3232
|
+
|
|
3233
|
+
# Presentations
|
|
3234
|
+
|
|
3235
|
+
## Workflow
|
|
3013
3236
|
|
|
3014
3237
|
1. **Gather context** - Read relevant files, code, or documentation
|
|
3015
3238
|
2. **Create presentation** - Pass context to \`${cmd2} create\`
|
|
3016
3239
|
3. **Share URL** - Return the presentation link to the user
|
|
3017
3240
|
|
|
3018
|
-
##
|
|
3241
|
+
## Create Command
|
|
3019
3242
|
|
|
3020
|
-
|
|
3021
|
-
|
|
3022
|
-
Context is **required**. Provide it via one of these methods:
|
|
3243
|
+
Context is **required**. Provide via:
|
|
3023
3244
|
|
|
3024
3245
|
\`\`\`bash
|
|
3025
3246
|
# Upload files (PDFs, PPTX, images, docs)
|
|
@@ -3031,19 +3252,19 @@ ${cmd2} create "Topic Title" --context "Key points, data, facts..."
|
|
|
3031
3252
|
# From a text file
|
|
3032
3253
|
${cmd2} create "Topic Title" --context-file ./notes.md
|
|
3033
3254
|
|
|
3034
|
-
# Pipe content
|
|
3255
|
+
# Pipe content
|
|
3035
3256
|
cat README.md | ${cmd2} create "Project Overview"
|
|
3036
3257
|
|
|
3037
|
-
# From URLs
|
|
3258
|
+
# From URLs
|
|
3038
3259
|
${cmd2} create "Competitor Analysis" --sources https://example.com/report
|
|
3039
3260
|
|
|
3040
|
-
# Combine
|
|
3261
|
+
# Combine sources
|
|
3041
3262
|
cat src/auth/*.ts | ${cmd2} create "Auth System" \\
|
|
3042
3263
|
--file ./architecture.png \\
|
|
3043
3264
|
--context "Focus on security patterns"
|
|
3044
3265
|
\`\`\`
|
|
3045
3266
|
|
|
3046
|
-
|
|
3267
|
+
## Options
|
|
3047
3268
|
|
|
3048
3269
|
| Option | Description | Default |
|
|
3049
3270
|
|--------|-------------|---------|
|
|
@@ -3053,251 +3274,2483 @@ cat src/auth/*.ts | ${cmd2} create "Auth System" \\
|
|
|
3053
3274
|
| \`--amount <amount>\` | Density: \`minimal\`, \`concise\`, \`detailed\`, \`extensive\` | concise |
|
|
3054
3275
|
| \`--audience <text>\` | Target audience | General Audience |
|
|
3055
3276
|
| \`-g, --goal <type>\` | Purpose: \`inform\`, \`persuade\`, \`train\`, \`learn\`, \`entertain\`, \`report\` | - |
|
|
3056
|
-
|
|
|
3057
|
-
| \`-f, --file <paths...>\` | Files to upload (PDF, PPTX, images, docs) | - |
|
|
3277
|
+
| \`-f, --file <paths...>\` | Files to upload | - |
|
|
3058
3278
|
| \`-l, --language <lang>\` | Output language | en |
|
|
3059
|
-
| \`-b, --brand <id>\` | Branding ID
|
|
3060
|
-
| \`-o, --output <format>\` | Output: \`human\`, \`json\`, \`quiet\` | human |
|
|
3279
|
+
| \`-b, --brand <id>\` | Branding ID | - |
|
|
3061
3280
|
|
|
3062
|
-
|
|
3281
|
+
## Other Commands
|
|
3063
3282
|
|
|
3064
3283
|
\`\`\`bash
|
|
3065
|
-
#
|
|
3066
|
-
${cmd2}
|
|
3284
|
+
${cmd2} list # List presentations
|
|
3285
|
+
${cmd2} get <id-or-slug> # Get details
|
|
3286
|
+
${cmd2} export <id> -o deck.zip # Export to ZIP
|
|
3287
|
+
${cmd2} import ./deck.zip # Import presentation
|
|
3288
|
+
${cmd2} branding list # List brandings
|
|
3289
|
+
${cmd2} branding extract https://... # Extract branding from URL
|
|
3290
|
+
\`\`\`
|
|
3291
|
+
|
|
3292
|
+
## Output
|
|
3293
|
+
|
|
3294
|
+
\`\`\`
|
|
3295
|
+
\u2713 Presentation created successfully
|
|
3296
|
+
|
|
3297
|
+
Title: Authentication System
|
|
3298
|
+
Slides: 8
|
|
3299
|
+
Generated in: 45s \xB7 12,500 tokens
|
|
3300
|
+
|
|
3301
|
+
Open: ${url}/en/view/presentations/auth-system-v1-abc123
|
|
3302
|
+
\`\`\`
|
|
3303
|
+
`;
|
|
3304
|
+
}
|
|
3305
|
+
function generateVideoRule(b) {
|
|
3306
|
+
const cmd2 = b.name;
|
|
3307
|
+
return `---
|
|
3308
|
+
name: video
|
|
3309
|
+
description: Video creation workflow - project-based UI replication AND stock-based videos
|
|
3310
|
+
---
|
|
3311
|
+
|
|
3312
|
+
# Video Creation
|
|
3313
|
+
|
|
3314
|
+
**Replicate the app's UI AS CLOSELY AS POSSIBLE - almost an exact copy.**
|
|
3315
|
+
|
|
3316
|
+
The video should look like the REAL app. Same layout. Same colors. Same buttons. Same everything. If someone watches the video and then opens the app, they should recognize it immediately.
|
|
3317
|
+
|
|
3318
|
+
---
|
|
3319
|
+
|
|
3320
|
+
## \u26D4 HARD RULES
|
|
3321
|
+
|
|
3322
|
+
1. **NO GENERIC SHAPES** - Don't draw random rectangles. Replicate what the app actually looks like.
|
|
3323
|
+
2. **NO MADE-UP CONTENT** - Don't invent "Finding 1: Performance improved 45%". Use real content from the app.
|
|
3324
|
+
3. **READ BEFORE BUILDING** - Read the app's components to understand their visual structure before writing any code.
|
|
3325
|
+
4. **MATCH THE BRAND** - Use exact colors from tailwind.config, exact fonts, exact visual style.
|
|
3326
|
+
5. **ALWAYS FRESH PROJECT** - Delete existing video project, create new with \`${cmd2} video init\`.
|
|
3327
|
+
|
|
3328
|
+
---
|
|
3329
|
+
|
|
3330
|
+
## \u{1F534} PHASE 0: READ REFERENCES FIRST
|
|
3331
|
+
|
|
3332
|
+
**Before doing ANYTHING, read these files:**
|
|
3067
3333
|
|
|
3068
|
-
|
|
3069
|
-
|
|
3070
|
-
|
|
3334
|
+
1. Read: rules/motion-standards.md (animation quality)
|
|
3335
|
+
2. Read: rules/micro-interactions.md (animation patterns)
|
|
3336
|
+
3. Read: rules/component-integration.md (patterns)
|
|
3337
|
+
4. Read: rules/project-video-workflow.md (full workflow)
|
|
3338
|
+
5. Skill: remotion-best-practices
|
|
3071
3339
|
|
|
3072
|
-
|
|
3073
|
-
|
|
3340
|
+
---
|
|
3341
|
+
|
|
3342
|
+
## \u{1F3AF} TWO VIDEO MODES
|
|
3343
|
+
|
|
3344
|
+
### Mode A: Project-Based Video (PREFERRED)
|
|
3345
|
+
Use when user has a project/app and wants to showcase it.
|
|
3346
|
+
- **Triggers:** "create video for my app", "product demo", "feature walkthrough", "promotional video for [project]"
|
|
3347
|
+
- **Approach:** Read components \u2192 replicate UI pixel-perfect \u2192 add animations
|
|
3348
|
+
- **Result:** Video looks IDENTICAL to the real app
|
|
3349
|
+
|
|
3350
|
+
### Mode B: Stock-Based Video
|
|
3351
|
+
Use ONLY when user has NO project or explicitly wants stock content.
|
|
3352
|
+
- **Triggers:** "create a video about tourism", "make a generic explainer"
|
|
3353
|
+
- **Approach:** Use \`${cmd2} video create\` with stock images
|
|
3354
|
+
- **Result:** Generic video with stock imagery
|
|
3074
3355
|
|
|
3075
|
-
|
|
3076
|
-
${cmd2} export <id-or-slug> -o presentation.zip
|
|
3356
|
+
**DEFAULT TO MODE A if user mentions their app/project.**
|
|
3077
3357
|
|
|
3078
|
-
|
|
3079
|
-
|
|
3358
|
+
---
|
|
3359
|
+
|
|
3360
|
+
## Pre-Creation Questions
|
|
3080
3361
|
|
|
3081
|
-
|
|
3082
|
-
${cmd2} branding list
|
|
3083
|
-
${cmd2} branding extract https://company.com
|
|
3362
|
+
Before creating a video, use \`AskUserQuestion\` tool (if available) to ask:
|
|
3084
3363
|
|
|
3085
|
-
|
|
3086
|
-
|
|
3087
|
-
|
|
3364
|
+
\`\`\`json
|
|
3365
|
+
{
|
|
3366
|
+
"questions": [
|
|
3367
|
+
{
|
|
3368
|
+
"question": "Which voice would you prefer for the narration?",
|
|
3369
|
+
"header": "Voice",
|
|
3370
|
+
"options": [
|
|
3371
|
+
{ "label": "Kore (Recommended)", "description": "Female, professional voice - best for narration" },
|
|
3372
|
+
{ "label": "Puck", "description": "Male, energetic voice - good for promos" },
|
|
3373
|
+
{ "label": "Rachel", "description": "Female, calm voice" },
|
|
3374
|
+
{ "label": "No voiceover", "description": "Music only, no narration" }
|
|
3375
|
+
],
|
|
3376
|
+
"multiSelect": false
|
|
3377
|
+
},
|
|
3378
|
+
{
|
|
3379
|
+
"question": "What background music style fits your video?",
|
|
3380
|
+
"header": "Music",
|
|
3381
|
+
"options": [
|
|
3382
|
+
{ "label": "Uplifting/positive", "description": "Energetic and inspiring" },
|
|
3383
|
+
{ "label": "Corporate/professional", "description": "Modern, polished business feel" },
|
|
3384
|
+
{ "label": "Cinematic/dramatic", "description": "Epic, impactful presentation" },
|
|
3385
|
+
{ "label": "Calm ambient", "description": "Soft, subtle background" }
|
|
3386
|
+
],
|
|
3387
|
+
"multiSelect": false
|
|
3388
|
+
},
|
|
3389
|
+
{
|
|
3390
|
+
"question": "How long should the video be?",
|
|
3391
|
+
"header": "Duration",
|
|
3392
|
+
"options": [
|
|
3393
|
+
{ "label": "15 seconds", "description": "Quick teaser" },
|
|
3394
|
+
{ "label": "30 seconds", "description": "Social media friendly" },
|
|
3395
|
+
{ "label": "60 seconds", "description": "Standard length" }
|
|
3396
|
+
],
|
|
3397
|
+
"multiSelect": false
|
|
3398
|
+
}
|
|
3399
|
+
]
|
|
3400
|
+
}
|
|
3088
3401
|
\`\`\`
|
|
3089
3402
|
|
|
3090
|
-
|
|
3403
|
+
If \`AskUserQuestion\` tool is not available, ask these questions in text format.
|
|
3404
|
+
|
|
3405
|
+
## Audio-First Workflow
|
|
3406
|
+
|
|
3407
|
+
**IMPORTANT:** This workflow ensures video and audio are always in sync. The CLI generates audio first, parses the script into sections, and calculates exact timing for each section. Scenes MUST use these timings.
|
|
3408
|
+
|
|
3409
|
+
### Step 1: Write Script
|
|
3091
3410
|
|
|
3092
|
-
|
|
3411
|
+
Write narration for the target duration. Structure: Hook \u2192 Key points \u2192 CTA
|
|
3412
|
+
|
|
3413
|
+
Tip: ~2.5 words per second for natural pacing.
|
|
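A quick sanity check of that pacing guideline, as an illustrative calculation:

```ts
// Rough narration-length estimate before TTS confirms the exact timing.
const script = "Puerto Rico. La Isla del Encanto. Discover five hundred years of history.";
const words = script.trim().split(/\s+/).length;   // 12
const estimatedSeconds = words / 2.5;              // ≈ 4.8 s of narration
console.log(`${words} words ≈ ${estimatedSeconds.toFixed(1)}s`);
```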
3414
|
+
|
|
3415
|
+
### Step 2: Generate Assets (Audio-First)
|
|
3093
3416
|
|
|
3094
3417
|
\`\`\`bash
|
|
3095
|
-
|
|
3096
|
-
|
|
3097
|
-
--
|
|
3098
|
-
--
|
|
3418
|
+
${cmd2} video create \\
|
|
3419
|
+
--script "Your narration script..." \\
|
|
3420
|
+
--topic "topic for image search" \\
|
|
3421
|
+
--voice Kore \\
|
|
3422
|
+
--music-prompt "uplifting corporate" \\
|
|
3423
|
+
--num-images 5 \\
|
|
3424
|
+
--output ./public
|
|
3099
3425
|
\`\`\`
|
|
3100
3426
|
|
|
3101
|
-
|
|
3427
|
+
This generates:
|
|
3428
|
+
- \`public/audio/voiceover.wav\` - TTS voiceover (determines total duration)
|
|
3429
|
+
- \`public/audio/music.mp3\` - Background music (auto-matches voiceover length)
|
|
3430
|
+
- \`public/images/scene-*.jpg\` - Stock images
|
|
3431
|
+
- \`public/video-manifest.json\` - **Contains sections with exact TTS timestamps**
|
|
3102
3432
|
|
|
3103
|
-
|
|
3104
|
-
|
|
3105
|
-
|
|
3106
|
-
|
|
3107
|
-
|
|
3108
|
-
|
|
3433
|
+
### Step 3: Read Manifest Sections
|
|
3434
|
+
|
|
3435
|
+
The manifest includes a \`sections\` array with **exact timing from TTS character-level timestamps**:
|
|
3436
|
+
|
|
3437
|
+
\`\`\`json
|
|
3438
|
+
{
|
|
3439
|
+
"voiceover": {
|
|
3440
|
+
"path": "audio/voiceover.wav",
|
|
3441
|
+
"duration": 15.2,
|
|
3442
|
+
"timestamps": {
|
|
3443
|
+
"characters": ["P", "u", "e", "r", "t", "o", " ", ...],
|
|
3444
|
+
"characterStartTimesSeconds": [0, 0.05, 0.1, ...],
|
|
3445
|
+
"characterEndTimesSeconds": [0.05, 0.1, 0.15, ...]
|
|
3446
|
+
}
|
|
3447
|
+
},
|
|
3448
|
+
"sections": [
|
|
3449
|
+
{
|
|
3450
|
+
"id": 1,
|
|
3451
|
+
"text": "Puerto Rico. La Isla del Encanto.",
|
|
3452
|
+
"wordCount": 5,
|
|
3453
|
+
"startTime": 0,
|
|
3454
|
+
"endTime": 2.8,
|
|
3455
|
+
"durationInSeconds": 2.8,
|
|
3456
|
+
"durationInFrames": 84,
|
|
3457
|
+
"imagePath": "images/scene-1.jpg"
|
|
3458
|
+
},
|
|
3459
|
+
{
|
|
3460
|
+
"id": 2,
|
|
3461
|
+
"text": "Discover five hundred years of history.",
|
|
3462
|
+
"wordCount": 7,
|
|
3463
|
+
"startTime": 2.8,
|
|
3464
|
+
"endTime": 8.2,
|
|
3465
|
+
"durationInSeconds": 5.4,
|
|
3466
|
+
"durationInFrames": 162,
|
|
3467
|
+
"imagePath": "images/scene-2.jpg"
|
|
3468
|
+
}
|
|
3469
|
+
],
|
|
3470
|
+
"totalDurationInFrames": 450,
|
|
3471
|
+
"fps": 30
|
|
3472
|
+
}
|
|
3109
3473
|
\`\`\`
|
|
3110
3474
|
|
|
3111
|
-
|
|
3475
|
+
**Key points:**
|
|
3476
|
+
- Section timing is derived from actual TTS audio timestamps (not estimated)
|
|
3477
|
+
- \`voiceover.timestamps\` contains character-level timing for word-by-word animations
|
|
3478
|
+
- Video duration will always match voiceover duration exactly (see the arithmetic sketch below)
|
|
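The arithmetic behind those frame counts is simply the TTS section boundaries multiplied by the composition frame rate, rounded to whole frames (illustrative, using the example manifest values above):

```ts
// durationInFrames in the manifest = (endTime - startTime) * fps, rounded to whole frames.
const fps = 30;
const section = { startTime: 2.8, endTime: 8.2 };                                  // seconds, from TTS timestamps
const durationInFrames = Math.round((section.endTime - section.startTime) * fps);  // 162
const startFrame = Math.round(section.startTime * fps);                            // 84, where this section begins
```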
3112
3479
|
|
|
3113
|
-
|
|
3114
|
-
|
|
3115
|
-
|
|
3480
|
+
### Step 4: Create Scenes (Match Section Timing)
|
|
3481
|
+
|
|
3482
|
+
**CRITICAL:** Use \`durationInFrames\` from each section. This ensures audio/video sync.
|
|
3483
|
+
|
|
3484
|
+
\`\`\`tsx
|
|
3485
|
+
// Read manifest sections and create matching scenes
|
|
3486
|
+
import manifest from '../../public/video-manifest.json';
|
|
3487
|
+
|
|
3488
|
+
// Scene durations MUST match manifest sections
|
|
3489
|
+
export const SECTION_1_DURATION = manifest.sections[0].durationInFrames; // 84
|
|
3490
|
+
export const SECTION_2_DURATION = manifest.sections[1].durationInFrames; // 162
|
|
3491
|
+
// ... etc
|
|
3492
|
+
|
|
3493
|
+
export const FULL_VIDEO_DURATION = manifest.totalDurationInFrames; // 450
|
|
3494
|
+
\`\`\`
|
|
3495
|
+
|
|
3496
|
+
Example scene component:
|
|
3497
|
+
|
|
3498
|
+
\`\`\`tsx
|
|
3499
|
+
// src/remotion/scenes/Scene1.tsx
|
|
3500
|
+
import { AbsoluteFill, Img, staticFile, useCurrentFrame, useVideoConfig, spring } from "remotion";
|
|
3501
|
+
import manifest from '../../../public/video-manifest.json';
|
|
3502
|
+
|
|
3503
|
+
const section = manifest.sections[0];
|
|
3504
|
+
export const SCENE_1_DURATION = section.durationInFrames;
|
|
3505
|
+
|
|
3506
|
+
export const Scene1: React.FC = () => {
|
|
3507
|
+
const frame = useCurrentFrame();
|
|
3508
|
+
const { fps } = useVideoConfig();
|
|
3509
|
+
const progress = spring({ frame, fps, config: { damping: 15, stiffness: 100 } });
|
|
3510
|
+
|
|
3511
|
+
return (
|
|
3512
|
+
<AbsoluteFill>
|
|
3513
|
+
<Img src={staticFile(section.imagePath)} style={{ width: '100%', height: '100%', objectFit: 'cover' }} />
|
|
3514
|
+
<div style={{
|
|
3515
|
+
position: 'absolute',
|
|
3516
|
+
bottom: 100,
|
|
3517
|
+
left: 0,
|
|
3518
|
+
right: 0,
|
|
3519
|
+
textAlign: 'center',
|
|
3520
|
+
opacity: progress,
|
|
3521
|
+
transform: \`translateY(\${(1 - progress) * 20}px)\`,
|
|
3522
|
+
}}>
|
|
3523
|
+
<h1 style={{ color: 'white', fontSize: 60, textShadow: '2px 2px 8px rgba(0,0,0,0.8)' }}>
|
|
3524
|
+
{section.text}
|
|
3525
|
+
</h1>
|
|
3526
|
+
</div>
|
|
3527
|
+
</AbsoluteFill>
|
|
3528
|
+
);
|
|
3529
|
+
};
|
|
3530
|
+
\`\`\`
|
|
3531
|
+
|
|
3532
|
+
### Step 5: Update FullVideo.tsx
|
|
3533
|
+
|
|
3534
|
+
\`\`\`tsx
|
|
3535
|
+
import { AbsoluteFill, Series, Audio, staticFile, useCurrentFrame, interpolate } from "remotion";
|
|
3536
|
+
import manifest from '../../public/video-manifest.json';
|
|
3537
|
+
import { Scene1, SCENE_1_DURATION } from "./scenes/Scene1";
|
|
3538
|
+
import { Scene2, SCENE_2_DURATION } from "./scenes/Scene2";
|
|
3539
|
+
// ... import all scenes
|
|
3540
|
+
|
|
3541
|
+
export const FULL_VIDEO_DURATION = manifest.totalDurationInFrames;
|
|
3542
|
+
|
|
3543
|
+
const BackgroundMusic: React.FC = () => {
|
|
3544
|
+
const frame = useCurrentFrame();
|
|
3545
|
+
const fadeIn = interpolate(frame, [0, 10], [0, 1], { extrapolateRight: "clamp" });
|
|
3546
|
+
const fadeOut = interpolate(frame, [FULL_VIDEO_DURATION - 20, FULL_VIDEO_DURATION], [1, 0], { extrapolateLeft: "clamp" });
|
|
3547
|
+
return <Audio src={staticFile("audio/music.mp3")} volume={fadeIn * fadeOut * 0.25} />;
|
|
3548
|
+
};
|
|
3549
|
+
|
|
3550
|
+
export const FullVideo: React.FC = () => {
|
|
3551
|
+
return (
|
|
3552
|
+
<AbsoluteFill>
|
|
3553
|
+
<Series>
|
|
3554
|
+
<Series.Sequence durationInFrames={SCENE_1_DURATION}>
|
|
3555
|
+
<Scene1 />
|
|
3556
|
+
</Series.Sequence>
|
|
3557
|
+
<Series.Sequence durationInFrames={SCENE_2_DURATION}>
|
|
3558
|
+
<Scene2 />
|
|
3559
|
+
</Series.Sequence>
|
|
3560
|
+
{/* Add all sections */}
|
|
3561
|
+
</Series>
|
|
3562
|
+
|
|
3563
|
+
<Audio src={staticFile("audio/voiceover.wav")} volume={1} />
|
|
3564
|
+
<BackgroundMusic />
|
|
3565
|
+
</AbsoluteFill>
|
|
3566
|
+
);
|
|
3567
|
+
};
|
|
3116
3568
|
\`\`\`
|
|
3117
3569
|
|
|
3118
|
-
###
|
|
3570
|
+
### Step 6: Preview & Render
|
|
3119
3571
|
|
|
3120
3572
|
\`\`\`bash
|
|
3121
|
-
|
|
3122
|
-
|
|
3123
|
-
--goal persuade \\
|
|
3124
|
-
--audience "Enterprise buyers" \\
|
|
3125
|
-
--tone professional
|
|
3573
|
+
npm run dev # Preview in Remotion Studio
|
|
3574
|
+
npm run render # Output to out/video.mp4
|
|
3126
3575
|
\`\`\`
|
|
3127
3576
|
|
|
3128
|
-
|
|
3577
|
+
## CLI Commands Reference
|
|
3578
|
+
|
|
3579
|
+
### ${cmd2} video create
|
|
3580
|
+
|
|
3581
|
+
| Option | Required | Default | Description |
|
|
3582
|
+
|--------|----------|---------|-------------|
|
|
3583
|
+
| \`-s, --script <text>\` | Yes* | - | Narration script |
|
|
3584
|
+
| \`--script-file <path>\` | Yes* | - | Path to script file |
|
|
3585
|
+
| \`-t, --topic <text>\` | No | auto | Topic for image search |
|
|
3586
|
+
| \`-v, --voice <name>\` | No | Kore | TTS voice |
|
|
3587
|
+
| \`-m, --music-prompt <text>\` | No | auto | Music description |
|
|
3588
|
+
| \`-n, --num-images <n>\` | No | 5 | Number of images |
|
|
3589
|
+
| \`-o, --output <dir>\` | No | ./public | Output directory |
|
|
3590
|
+
|
|
3591
|
+
### ${cmd2} tts generate
|
|
3129
3592
|
|
|
3130
3593
|
\`\`\`bash
|
|
3131
|
-
${cmd2}
|
|
3132
|
-
|
|
3133
|
-
--sources https://report.com/industry.pdf \\
|
|
3134
|
-
--tone formal --audience "Executive team" \\
|
|
3135
|
-
--goal report
|
|
3594
|
+
${cmd2} tts generate -t "Narration text" -o voice.wav --voice Kore
|
|
3595
|
+
${cmd2} tts voices # List all voices
|
|
3136
3596
|
\`\`\`
|
|
3137
3597
|
|
|
3138
|
-
|
|
3598
|
+
**Voices:** Kore (professional female), Puck (energetic male), Rachel (calm female), alloy (neutral)
|
|
3599
|
+
|
|
3600
|
+
### ${cmd2} music generate
|
|
3139
3601
|
|
|
3140
|
-
|
|
3602
|
+
\`\`\`bash
|
|
3603
|
+
${cmd2} music generate -p "uplifting corporate" -d 30 -o music.mp3
|
|
3141
3604
|
\`\`\`
|
|
3142
|
-
\u2713 Presentation created successfully
|
|
3143
3605
|
|
|
3144
|
-
|
|
3145
|
-
Slides: 8
|
|
3146
|
-
Generated in: 45s \xB7 12,500 tokens
|
|
3606
|
+
**Good prompts:** "uplifting corporate", "calm ambient, soft piano", "cinematic orchestral"
|
|
3147
3607
|
|
|
3148
|
-
|
|
3608
|
+
### ${cmd2} image search / video search
|
|
3609
|
+
|
|
3610
|
+
\`\`\`bash
|
|
3611
|
+
${cmd2} image search -q "tropical beach" -n 5 -s large
|
|
3612
|
+
${cmd2} video search "tech workspace" -n 5
|
|
3149
3613
|
\`\`\`
|
|
3150
3614
|
|
|
3151
|
-
|
|
3615
|
+
### ${cmd2} mix create
|
|
3616
|
+
|
|
3617
|
+
Post-process audio into existing video:
|
|
3618
|
+
|
|
3152
3619
|
\`\`\`bash
|
|
3153
|
-
|
|
3620
|
+
${cmd2} mix create --video video.mp4 --voice voice.wav --music music.mp3 -o final.mp4
|
|
3154
3621
|
\`\`\`
|
|
3155
3622
|
|
|
3156
|
-
##
|
|
3623
|
+
## Audio Guidelines
|
|
3157
3624
|
|
|
3158
|
-
|
|
3159
|
-
|
|
3160
|
-
|
|
3161
|
-
|
|
3162
|
-
5. **Specify audience** - Helps tailor complexity and terminology.
|
|
3163
|
-
6. **Combine sources** - Pipe multiple files for comprehensive presentations.
|
|
3625
|
+
| Element | Volume | Notes |
|
|
3626
|
+
|---------|--------|-------|
|
|
3627
|
+
| Voiceover | 100% | Primary audio |
|
|
3628
|
+
| Background music | 20-30% | Fade in/out over ~10-20 frames |
|
|
3164
3629
|
|
|
3165
|
-
|
|
3630
|
+
Generate music 5s longer than video for fade out.
|
|
3166
3631
|
|
|
3167
|
-
|
|
3168
|
-
- **Images**: JPEG, PNG, GIF, WebP
|
|
3169
|
-
- **Text**: Markdown, TXT, CSV, JSON
|
|
3632
|
+
## Animation Quality Checklist
|
|
3170
3633
|
|
|
3171
|
-
|
|
3634
|
+
Before rendering, ensure your video follows these standards from motion-standards.md:
|
|
3635
|
+
|
|
3636
|
+
1. **Physics over linearity** - Use \`spring()\` for all animations; never use linear interpolation for movement
|
|
3637
|
+
2. **Orchestration** - Stagger element entrances (3-8 frame delays), never animate all at once
|
|
3638
|
+
3. **Virtual camera** - Add subtle zoom/scale even on static scenes (1.0 \u2192 1.03 over duration)
|
|
3639
|
+
4. **Micro-interactions** - Use components from micro-interactions.md for buttons, text reveals, highlights
|
|
3640
|
+
|
|
3641
|
+
---
|
|
3642
|
+
|
|
3643
|
+
# \u{1F3AC} PROJECT-BASED VIDEO WORKFLOW (Mode A)
|
|
3644
|
+
|
|
3645
|
+
**Use this when user has a project/app to showcase.**
|
|
3646
|
+
|
|
3647
|
+
## \u{1F4CB} PHASE 1: EXPLORE THE APP
|
|
3648
|
+
|
|
3649
|
+
### 1.1 Find Brand Assets
|
|
3172
3650
|
|
|
3173
3651
|
\`\`\`bash
|
|
3174
|
-
#
|
|
3175
|
-
|
|
3652
|
+
# Logo
|
|
3653
|
+
find src -name "*[Ll]ogo*" 2>/dev/null
|
|
3654
|
+
find public -name "*logo*" 2>/dev/null
|
|
3176
3655
|
|
|
3177
|
-
#
|
|
3178
|
-
|
|
3656
|
+
# Colors - THIS IS CRITICAL
|
|
3657
|
+
cat tailwind.config.* | grep -A 30 "colors"
|
|
3658
|
+
cat src/app/globals.css | head -50
|
|
3179
3659
|
|
|
3180
|
-
#
|
|
3181
|
-
|
|
3660
|
+
# Fonts
|
|
3661
|
+
grep -r "fontFamily" tailwind.config.* src/app/layout.tsx
|
|
3182
3662
|
\`\`\`
|
|
3183
|
-
`;
|
|
3184
|
-
}
|
|
3185
|
-
var EDITORS = [
|
|
3186
|
-
{ name: "Claude Code", dir: ".claude" },
|
|
3187
|
-
{ name: "Cursor", dir: ".cursor" },
|
|
3188
|
-
{ name: "Codex", dir: ".codex" },
|
|
3189
|
-
{ name: "OpenCode", dir: ".opencode" },
|
|
3190
|
-
{ name: "Windsurf", dir: ".windsurf" },
|
|
3191
|
-
{ name: "Agent", dir: ".agent" }
|
|
3192
|
-
];
|
|
3193
|
-
var skillCommand = new Command14("skill").description(`Manage ${brand.displayName} skill for AI coding assistants`).addHelpText(
|
|
3194
|
-
"after",
|
|
3195
|
-
`
|
|
3196
|
-
${chalk12.bold("Examples:")}
|
|
3197
|
-
${chalk12.gray("# Install skill for all detected editors")}
|
|
3198
|
-
$ ${brand.name} skill install
|
|
3199
3663
|
|
|
3200
|
-
|
|
3201
|
-
$ ${brand.name} skill install --dir ~/.claude
|
|
3664
|
+
### 1.2 Read Key UI Components
|
|
3202
3665
|
|
|
3203
|
-
|
|
3204
|
-
|
|
3205
|
-
|
|
3206
|
-
|
|
3207
|
-
|
|
3208
|
-
|
|
3209
|
-
|
|
3210
|
-
|
|
3211
|
-
|
|
3212
|
-
|
|
3213
|
-
|
|
3214
|
-
|
|
3215
|
-
|
|
3216
|
-
|
|
3217
|
-
|
|
3218
|
-
|
|
3219
|
-
|
|
3220
|
-
|
|
3221
|
-
|
|
3222
|
-
|
|
3223
|
-
|
|
3224
|
-
|
|
3225
|
-
|
|
3666
|
+
**Don't copy - just read to understand the visual structure:**
|
|
3667
|
+
|
|
3668
|
+
\`\`\`bash
|
|
3669
|
+
# Find main components
|
|
3670
|
+
find src/components -name "*.tsx" | head -30
|
|
3671
|
+
|
|
3672
|
+
# Read them to understand layout, colors, structure
|
|
3673
|
+
cat src/components/slides/SlidesSidebar.tsx
|
|
3674
|
+
cat src/components/tools/ToolsPanel.tsx
|
|
3675
|
+
cat src/components/ui/button.tsx
|
|
3676
|
+
\`\`\`
|
|
3677
|
+
|
|
3678
|
+
**For each component, note:**
|
|
3679
|
+
- Layout structure (sidebar? grid? list?)
|
|
3680
|
+
- Colors used (bg-slate-900, text-teal-400, etc.)
|
|
3681
|
+
- Visual elements (badges, icons, thumbnails)
|
|
3682
|
+
- Typography (font sizes, weights)
|
|
3683
|
+
|
|
3684
|
+
### 1.3 Document Your Findings
|
|
3685
|
+
|
|
3686
|
+
\`\`\`markdown
|
|
3687
|
+
## Brand Discovery: [App Name]
|
|
3688
|
+
|
|
3689
|
+
### Colors (from tailwind.config)
|
|
3690
|
+
- Background: #0f172a (slate-900)
|
|
3691
|
+
- Surface: #1e293b (slate-800)
|
|
3692
|
+
- Primary: #14b8a6 (teal-500)
|
|
3693
|
+
- Accent: #f472b6 (pink-400)
|
|
3694
|
+
- Text: #ffffff / #94a3b8 (slate-400)
|
|
3695
|
+
|
|
3696
|
+
### Key UI Elements I Observed
|
|
3697
|
+
1. **Sidebar** - Dark bg, slide thumbnails with numbers
|
|
3698
|
+
2. **Main viewer** - Light slide content area
|
|
3699
|
+
3. **Tools panel** - Grid of cards with icons
|
|
3700
|
+
\`\`\`
|
|
3701
|
+
|
|
3702
|
+
---
|
|
3703
|
+
|
|
3704
|
+
## \u{1F4CB} PHASE 2: PLAN THE VIDEO
|
|
3705
|
+
|
|
3706
|
+
### Scene Structure
|
|
3707
|
+
|
|
3708
|
+
\`\`\`markdown
|
|
3709
|
+
## Video Plan: [App Name] Demo
|
|
3710
|
+
|
|
3711
|
+
### Scene 1: Intro (3s / 90 frames)
|
|
3712
|
+
**What to show:** Logo + tagline on dark background
|
|
3713
|
+
**Colors:** bg #0f172a, logo centered
|
|
3714
|
+
**Animation:** Logo scales in with spring, tagline fades up
|
|
3715
|
+
|
|
3716
|
+
### Scene 2: Sidebar UI (5s / 150 frames)
|
|
3717
|
+
**What to show:** Replicate the slides sidebar
|
|
3718
|
+
**Reference:** Read src/components/slides/SlidesSidebar.tsx
|
|
3719
|
+
**Build:** Dark sidebar with slide items, thumbnails
|
|
3720
|
+
**Animation:** Sidebar slides in, items stagger
|
|
3721
|
+
|
|
3722
|
+
### Scene 3: Main Editor (5s / 150 frames)
|
|
3723
|
+
**What to show:** Replicate the slide viewer
|
|
3724
|
+
**Reference:** Read src/components/slides/SlideViewer.tsx
|
|
3725
|
+
**Animation:** Content fades in
|
|
3726
|
+
|
|
3727
|
+
### Scene 4: CTA (3s / 90 frames)
|
|
3728
|
+
**What to show:** Logo + CTA button + URL
|
|
3729
|
+
**Animation:** Logo fades in, button pulses
|
|
3730
|
+
\`\`\`
|
|
3731
|
+
|
|
3732
|
+
---
|
|
3733
|
+
|
|
3734
|
+
## \u{1F528} PHASE 3: BUILD
|
|
3735
|
+
|
|
3736
|
+
### 3.1 Create Fresh Project
|
|
3737
|
+
|
|
3738
|
+
\`\`\`bash
|
|
3739
|
+
rm -rf ../appname-video
|
|
3740
|
+
${cmd2} video init ../appname-video
|
|
3741
|
+
cd ../appname-video
|
|
3742
|
+
\`\`\`
|
|
3743
|
+
|
|
3744
|
+
### 3.2 Copy Brand Assets Only
|
|
3745
|
+
|
|
3746
|
+
\`\`\`bash
|
|
3747
|
+
# Logo
|
|
3748
|
+
cp ../myapp/public/logo.svg ./public/
|
|
3749
|
+
|
|
3750
|
+
# Tailwind config (for colors/fonts)
|
|
3751
|
+
cp ../myapp/tailwind.config.* ./
|
|
3752
|
+
|
|
3753
|
+
# Global CSS
|
|
3754
|
+
cp ../myapp/src/app/globals.css ./src/styles/
|
|
3755
|
+
\`\`\`
|
|
3756
|
+
|
|
3757
|
+
### 3.3 Build Scene Components - PIXEL PERFECT
|
|
3758
|
+
|
|
3759
|
+
**Each scene replicates what you observed, using Remotion:**
|
|
3760
|
+
|
|
3761
|
+
\`\`\`tsx
|
|
3762
|
+
// src/remotion/scenes/SidebarScene.tsx
|
|
3763
|
+
// Replicates: src/components/slides/SlidesSidebar.tsx
|
|
3764
|
+
|
|
3765
|
+
import React from "react";
|
|
3766
|
+
import { AbsoluteFill, useCurrentFrame, spring, useVideoConfig } from "remotion";
|
|
3767
|
+
|
|
3768
|
+
const mockSlides = [
|
|
3769
|
+
{ id: 1, title: "Title Slide", selected: true },
|
|
3770
|
+
{ id: 2, title: "Overview", selected: false },
|
|
3771
|
+
{ id: 3, title: "Key Players", selected: false },
|
|
3772
|
+
];
|
|
3773
|
+
|
|
3774
|
+
export const SIDEBAR_SCENE_DURATION = 150;
|
|
3775
|
+
|
|
3776
|
+
export const SidebarScene: React.FC = () => {
|
|
3777
|
+
const frame = useCurrentFrame();
|
|
3778
|
+
const { fps } = useVideoConfig();
|
|
3779
|
+
|
|
3780
|
+
const sidebarProgress = spring({ frame, fps, config: { damping: 20, stiffness: 100 } });
|
|
3781
|
+
const sidebarX = (1 - sidebarProgress) * -280;
|
|
3782
|
+
|
|
3783
|
+
return (
|
|
3784
|
+
<AbsoluteFill style={{ backgroundColor: "#0f172a" }}>
|
|
3785
|
+
{/* Sidebar - EXACT colors from tailwind.config */}
|
|
3786
|
+
<div style={{
|
|
3787
|
+
width: 280,
|
|
3788
|
+
height: "100%",
|
|
3789
|
+
backgroundColor: "#0f172a",
|
|
3790
|
+
borderRight: "1px solid #1e293b",
|
|
3791
|
+
transform: \`translateX(\${sidebarX}px)\`,
|
|
3792
|
+
padding: 16,
|
|
3793
|
+
}}>
|
|
3794
|
+
{/* Header - EXACT styling from component */}
|
|
3795
|
+
<div style={{ display: "flex", alignItems: "center", gap: 8, marginBottom: 16 }}>
|
|
3796
|
+
<span style={{ color: "#14b8a6", fontSize: 14, fontWeight: 500 }}>
|
|
3797
|
+
SLIDES CONTROL
|
|
3798
|
+
</span>
|
|
3799
|
+
</div>
|
|
3800
|
+
|
|
3801
|
+
{/* Slide items - staggered animation */}
|
|
3802
|
+
{mockSlides.map((slide, i) => {
|
|
3803
|
+
const itemProgress = spring({
|
|
3804
|
+
frame: frame - 10 - i * 8,
|
|
3805
|
+
fps,
|
|
3806
|
+
config: { damping: 15, stiffness: 100 },
|
|
3807
|
+
});
|
|
3808
|
+
|
|
3809
|
+
return (
|
|
3810
|
+
<div key={slide.id} style={{
|
|
3811
|
+
opacity: itemProgress,
|
|
3812
|
+
transform: \`translateX(\${(1 - itemProgress) * -20}px)\`,
|
|
3813
|
+
marginBottom: 8,
|
|
3814
|
+
padding: 12,
|
|
3815
|
+
borderRadius: 8,
|
|
3816
|
+
backgroundColor: slide.selected ? "#1e293b" : "transparent",
|
|
3817
|
+
border: slide.selected ? "1px solid #14b8a6" : "1px solid transparent",
|
|
3818
|
+
}}>
|
|
3819
|
+
<div style={{ display: "flex", alignItems: "center", gap: 12 }}>
|
|
3820
|
+
<div style={{ width: 48, height: 32, backgroundColor: "#334155", borderRadius: 4 }} />
|
|
3821
|
+
<div>
|
|
3822
|
+
<span style={{ color: "#64748b", fontSize: 12 }}>
|
|
3823
|
+
SLIDE {String(i + 1).padStart(2, "0")}
|
|
3824
|
+
</span>
|
|
3825
|
+
{slide.selected && <span style={{ color: "#f87171", fontSize: 12, marginLeft: 8 }}>SELECTED</span>}
|
|
3826
|
+
<p style={{ color: "#ffffff", fontSize: 14, margin: 0 }}>{slide.title}</p>
|
|
3827
|
+
</div>
|
|
3828
|
+
</div>
|
|
3829
|
+
</div>
|
|
3830
|
+
);
|
|
3831
|
+
})}
|
|
3832
|
+
</div>
|
|
3833
|
+
</AbsoluteFill>
|
|
3834
|
+
);
|
|
3835
|
+
};
|
|
3836
|
+
\`\`\`
|
|
3837
|
+
|
|
3838
|
+
### 3.4 Key Principles: PIXEL-PERFECT Replication
|
|
3839
|
+
|
|
3840
|
+
**The video UI should be indistinguishable from the real app.**
|
|
3841
|
+
|
|
3842
|
+
1. **EXACT colors** - Copy hex values directly from tailwind.config
|
|
3843
|
+
2. **EXACT spacing** - If \`p-4 gap-3\`, use \`padding: 16px, gap: 12px\`
|
|
3844
|
+
3. **EXACT typography** - Same font size, weight, color
|
|
3845
|
+
4. **EXACT borders** - Same border width, color, radius
|
|
3846
|
+
5. **EXACT layout** - Same flex direction, alignment, widths
|
|
3847
|
+
6. **Then add animations** - spring() entrances, stagger delays
|
|
3848
|
+
|
|
3849
|
+
---
|
|
3850
|
+
|
|
3851
|
+
## \u{1F3AC} PHASE 4: AUDIO & RENDER
|
|
3852
|
+
|
|
3853
|
+
### Generate Audio
|
|
3854
|
+
|
|
3855
|
+
\`\`\`bash
|
|
3856
|
+
${cmd2} video create \\
|
|
3857
|
+
--script "Your narration..." \\
|
|
3858
|
+
--music-prompt "modern uplifting tech" \\
|
|
3859
|
+
--output ./public
|
|
3860
|
+
\`\`\`
|
|
3861
|
+
|
|
3862
|
+
### Preview & Render
|
|
3863
|
+
|
|
3864
|
+
\`\`\`bash
|
|
3865
|
+
npm run dev # Preview
|
|
3866
|
+
npm run render # Output to out/video.mp4
|
|
3867
|
+
\`\`\`
|
|
3868
|
+
|
|
3869
|
+
---
|
|
3870
|
+
|
|
3871
|
+
## \u274C WHAT NOT TO DO
|
|
3872
|
+
|
|
3873
|
+
### Bad: Generic rectangles
|
|
3874
|
+
\`\`\`tsx
|
|
3875
|
+
// \u274C NO
|
|
3876
|
+
<div style={{ background: "linear-gradient(#667eea, #764ba2)", width: 200, height: 150 }} />
|
|
3877
|
+
\`\`\`
|
|
3878
|
+
|
|
3879
|
+
### Bad: Made-up content
|
|
3880
|
+
\`\`\`tsx
|
|
3881
|
+
// \u274C NO
|
|
3882
|
+
<h2>Key Insights from Research</h2>
|
|
3883
|
+
<li>Finding 1: Performance improved by 45%</li>
|
|
3884
|
+
\`\`\`
|
|
3885
|
+
|
|
3886
|
+
### Bad: Not matching the app
|
|
3887
|
+
\`\`\`tsx
|
|
3888
|
+
// \u274C NO - App uses slate-900, not gray-800
|
|
3889
|
+
<div style={{ backgroundColor: "#1f2937" }}>
|
|
3890
|
+
\`\`\`
|
|
3891
|
+
|
|
3892
|
+
### Good: Replicated UI with correct brand
|
|
3893
|
+
\`\`\`tsx
|
|
3894
|
+
// \u2705 YES - Matches actual app colors and structure
|
|
3895
|
+
<div style={{ backgroundColor: "#0f172a", borderColor: "#1e293b" }}>
|
|
3896
|
+
<span style={{ color: "#14b8a6" }}>SLIDES CONTROL</span>
|
|
3897
|
+
</div>
|
|
3898
|
+
\`\`\`
|
|
3899
|
+
|
|
3900
|
+
---
|
|
3901
|
+
|
|
3902
|
+
## \u2705 Project Video Checklist
|
|
3903
|
+
|
|
3904
|
+
### Before Building
|
|
3905
|
+
- [ ] Read motion-standards.md and micro-interactions.md
|
|
3906
|
+
- [ ] Found logo path
|
|
3907
|
+
- [ ] Found colors from tailwind.config
|
|
3908
|
+
- [ ] Read key components to understand visual structure
|
|
3909
|
+
- [ ] Documented findings
|
|
3910
|
+
- [ ] Planned scenes
|
|
3911
|
+
|
|
3912
|
+
### While Building
|
|
3913
|
+
- [ ] Using exact colors from tailwind.config
|
|
3914
|
+
- [ ] Matching layout structure of real app
|
|
3915
|
+
- [ ] Using spring() for animations
|
|
3916
|
+
- [ ] Mock data is realistic
|
|
3917
|
+
|
|
3918
|
+
### Before Render
|
|
3919
|
+
- [ ] Logo appears in intro and CTA
|
|
3920
|
+
- [ ] Colors match the app exactly
|
|
3921
|
+
- [ ] All scenes have smooth animations
|
|
3922
|
+
`;
|
|
3923
|
+
}
|
|
3924
|
+
function generateMotionStandardsRule() {
|
|
3925
|
+
return `---
|
|
3926
|
+
name: motion-standards
|
|
3927
|
+
description: Animation quality standards for high-end video production
|
|
3928
|
+
---
|
|
3929
|
+
|
|
3930
|
+
# Motion Design Standards
|
|
3931
|
+
|
|
3932
|
+
Generate videos that feel like high-end productions (Apple, Stripe, Linear quality).
|
|
3933
|
+
|
|
3934
|
+
**Follow these standards for every Remotion component.**
|
|
3935
|
+
|
|
3936
|
+
## STANDARD 01: PHYSICS OVER LINEARITY
|
|
3937
|
+
|
|
3938
|
+
- **Rule:** Never use linear interpolation for movement or scaling
|
|
3939
|
+
- **Implementation:** Use \`spring()\` for ALL entrance/exit animations
|
|
3940
|
+
- **Default config:** \`{ mass: 0.8, stiffness: 150, damping: 15 }\`
|
|
3941
|
+
|
|
3942
|
+
\`\`\`tsx
|
|
3943
|
+
// BAD
|
|
3944
|
+
const opacity = interpolate(frame, [0, 30], [0, 1]);
|
|
3945
|
+
|
|
3946
|
+
// GOOD
|
|
3947
|
+
const progress = spring({ frame, fps, config: { mass: 0.8, stiffness: 150, damping: 15 } });
|
|
3948
|
+
\`\`\`
|
|
3949
|
+
|
|
3950
|
+
## STANDARD 02: ORCHESTRATION & CASCADE
|
|
3951
|
+
|
|
3952
|
+
- **Rule:** NEVER animate all elements simultaneously
|
|
3953
|
+
- **Implementation:** Staggered entrances with 3-5 frames between items
|
|
3954
|
+
|
|
3955
|
+
\`\`\`tsx
|
|
3956
|
+
// GOOD - cascading entrance
|
|
3957
|
+
<FadeIn delay={0}><Header /></FadeIn>
|
|
3958
|
+
<FadeIn delay={8}><Content /></FadeIn>
|
|
3959
|
+
<FadeIn delay={16}><Footer /></FadeIn>
|
|
3960
|
+
|
|
3961
|
+
// GOOD - staggered list
|
|
3962
|
+
{items.map((item, i) => (
|
|
3963
|
+
<SlideUp key={item.id} delay={i * 4}>
|
|
3964
|
+
<ListItem data={item} />
|
|
3965
|
+
</SlideUp>
|
|
3966
|
+
))}
|
|
3967
|
+
\`\`\`
|
|
3968
|
+
|
|
3969
|
+
## STANDARD 03: THE VIRTUAL CAMERA
|
|
3970
|
+
|
|
3971
|
+
- **Rule:** Even when UI is idle, add subtle movement
|
|
3972
|
+
- **Implementation:** Dolly zoom (slow push in)
|
|
3973
|
+
|
|
3974
|
+
\`\`\`tsx
|
|
3975
|
+
const CinematicContainer = ({ children }) => {
|
|
3976
|
+
const frame = useCurrentFrame();
|
|
3977
|
+
const { durationInFrames } = useVideoConfig();
|
|
3978
|
+
const scale = interpolate(frame, [0, durationInFrames], [1, 1.03]);
|
|
3979
|
+
|
|
3980
|
+
return (
|
|
3981
|
+
<AbsoluteFill style={{ transform: \`scale(\${scale})\` }}>
|
|
3982
|
+
{children}
|
|
3983
|
+
</AbsoluteFill>
|
|
3984
|
+
);
|
|
3985
|
+
};
|
|
3986
|
+
\`\`\`
|
|
3987
|
+
|
|
3988
|
+
## STANDARD 04: HUMAN SIMULATION
|
|
3989
|
+
|
|
3990
|
+
- **Rule:** NEVER move cursor in straight lines
|
|
3991
|
+
- **Implementation:** Use curved/Bezier paths for cursor movement
|
|
3992
|
+
|
|
3993
|
+
## STANDARD 05: TECHNICAL CONSTRAINTS
|
|
3994
|
+
|
|
3995
|
+
1. **Styling:** Tailwind CSS or inline styles
|
|
3996
|
+
2. **Layout:** Use \`AbsoluteFill\` for scene composition
|
|
3997
|
+
3. **State:** NO \`useState\` or \`useEffect\` - derive from \`useCurrentFrame()\`
|
|
3998
|
+
|
|
3999
|
+
## Execution Checklist
|
|
4000
|
+
|
|
4001
|
+
1. Analyze UI hierarchy
|
|
4002
|
+
2. Choreograph order of appearance
|
|
4003
|
+
3. Apply \`spring()\` physics
|
|
4004
|
+
4. Add subtle camera movement
|
|
4005
|
+
5. Human touches for interactions
|
|
4006
|
+
`;
|
|
4007
|
+
}
|
|
4008
|
+
function generateMicroInteractionsRule() {
|
|
4009
|
+
return `---
|
|
4010
|
+
name: micro-interactions
|
|
4011
|
+
description: Animation components and patterns
|
|
4012
|
+
---
|
|
4013
|
+
|
|
4014
|
+
# Micro-Interactions
|
|
4015
|
+
|
|
4016
|
+
## Core Principles
|
|
4017
|
+
|
|
4018
|
+
1. **Subtle** - Effects enhance, never distract
|
|
4019
|
+
2. **Purposeful** - Every animation communicates something
|
|
4020
|
+
3. **Physics-based** - Use \`spring()\`, not linear easing
|
|
4021
|
+
4. **Continuous** - Always have something moving subtly
|
|
4022
|
+
|
|
4023
|
+
## Spring Configurations
|
|
4024
|
+
|
|
4025
|
+
\`\`\`tsx
|
|
4026
|
+
const SPRING_CONFIGS = {
|
|
4027
|
+
snappy: { damping: 15, stiffness: 200, mass: 0.5 },
|
|
4028
|
+
smooth: { damping: 20, stiffness: 100, mass: 1 },
|
|
4029
|
+
bouncy: { damping: 8, stiffness: 150, mass: 0.8 },
|
|
4030
|
+
gentle: { damping: 30, stiffness: 50, mass: 1 },
|
|
4031
|
+
};
|
|
4032
|
+
\`\`\`
|
|
4033
|
+
|
|
4034
|
+
## Entry Animations
|
|
4035
|
+
|
|
4036
|
+
### Fade + Slide
|
|
4037
|
+
|
|
4038
|
+
\`\`\`tsx
|
|
4039
|
+
const AnimatedEntry = ({ delay = 0, direction = 'up', children }) => {
|
|
4040
|
+
const frame = useCurrentFrame();
|
|
4041
|
+
const { fps } = useVideoConfig();
|
|
4042
|
+
|
|
4043
|
+
const progress = spring({
|
|
4044
|
+
frame: frame - delay,
|
|
4045
|
+
fps,
|
|
4046
|
+
config: { damping: 20, stiffness: 100 }
|
|
4047
|
+
});
|
|
4048
|
+
|
|
4049
|
+
const directions = {
|
|
4050
|
+
up: { x: 0, y: 30 },
|
|
4051
|
+
down: { x: 0, y: -30 },
|
|
4052
|
+
left: { x: 30, y: 0 },
|
|
4053
|
+
right: { x: -30, y: 0 },
|
|
4054
|
+
};
|
|
4055
|
+
|
|
4056
|
+
const { x, y } = directions[direction];
|
|
4057
|
+
|
|
4058
|
+
return (
|
|
4059
|
+
<div style={{
|
|
4060
|
+
opacity: progress,
|
|
4061
|
+
transform: \`translate(\${x * (1 - progress)}px, \${y * (1 - progress)}px)\`,
|
|
4062
|
+
}}>
|
|
4063
|
+
{children}
|
|
4064
|
+
</div>
|
|
4065
|
+
);
|
|
4066
|
+
};
|
|
4067
|
+
\`\`\`
|
|
4068
|
+
|
|
4069
|
+
### Staggered List
|
|
4070
|
+
|
|
4071
|
+
\`\`\`tsx
|
|
4072
|
+
const StaggeredList = ({ children, itemDelay = 5 }) => (
|
|
4073
|
+
<>
|
|
4074
|
+
{React.Children.map(children, (child, i) => (
|
|
4075
|
+
<AnimatedEntry delay={i * itemDelay}>{child}</AnimatedEntry>
|
|
4076
|
+
))}
|
|
4077
|
+
</>
|
|
4078
|
+
);
|
|
4079
|
+
\`\`\`
|
|
4080
|
+
|
|
4081
|
+
## Interaction Simulation
|
|
4082
|
+
|
|
4083
|
+
### Button Press
|
|
4084
|
+
|
|
4085
|
+
\`\`\`tsx
|
|
4086
|
+
const ButtonPress = ({ pressFrame, children }) => {
|
|
4087
|
+
const frame = useCurrentFrame();
|
|
4088
|
+
const { fps } = useVideoConfig();
|
|
4089
|
+
|
|
4090
|
+
const isPressing = frame >= pressFrame && frame < pressFrame + 3;
|
|
4091
|
+
const isReleasing = frame >= pressFrame + 3;
|
|
4092
|
+
|
|
4093
|
+
const releaseProgress = isReleasing ? spring({
|
|
4094
|
+
frame: frame - pressFrame - 3,
|
|
4095
|
+
fps,
|
|
4096
|
+
config: { damping: 10, stiffness: 300 }
|
|
4097
|
+
}) : 0;
|
|
4098
|
+
|
|
4099
|
+
const scale = isPressing ? 0.95 : (0.95 + releaseProgress * 0.05);
|
|
4100
|
+
|
|
4101
|
+
return <div style={{ transform: \`scale(\${scale})\` }}>{children}</div>;
|
|
4102
|
+
};
|
|
4103
|
+
\`\`\`
|
|
4104
|
+
|
|
4105
|
+
### Typed Text
|
|
4106
|
+
|
|
4107
|
+
\`\`\`tsx
|
|
4108
|
+
const TypedText = ({ text, startFrame = 0, speed = 2 }) => {
|
|
4109
|
+
const frame = useCurrentFrame();
|
|
4110
|
+
const charsToShow = Math.floor((frame - startFrame) / speed);
|
|
4111
|
+
|
|
4112
|
+
if (frame < startFrame) return null;
|
|
4113
|
+
|
|
4114
|
+
return (
|
|
4115
|
+
<span>
|
|
4116
|
+
{text.slice(0, Math.min(charsToShow, text.length))}
|
|
4117
|
+
{charsToShow < text.length && (
|
|
4118
|
+
<span style={{ opacity: frame % 15 < 8 ? 1 : 0 }}>|</span>
|
|
4119
|
+
)}
|
|
4120
|
+
</span>
|
|
4121
|
+
);
|
|
4122
|
+
};
|
|
4123
|
+
\`\`\`
|
|
4124
|
+
|
|
4125
|
+
### Counting Number
|
|
4126
|
+
|
|
4127
|
+
\`\`\`tsx
|
|
4128
|
+
const CountingNumber = ({ from = 0, to, startFrame = 0, duration = 30 }) => {
|
|
4129
|
+
const frame = useCurrentFrame();
|
|
4130
|
+
const progress = interpolate(frame - startFrame, [0, duration], [0, 1], {
|
|
4131
|
+
extrapolateLeft: 'clamp', extrapolateRight: 'clamp'
|
|
4132
|
+
});
|
|
4133
|
+
const eased = 1 - Math.pow(1 - progress, 3);
|
|
4134
|
+
return <span>{Math.round(from + (to - from) * eased)}</span>;
|
|
4135
|
+
};
|
|
4136
|
+
\`\`\`
|
|
4137
|
+
|
|
4138
|
+
## Timing Guidelines
|
|
4139
|
+
|
|
4140
|
+
| Effect | Duration |
|
|
4141
|
+
|--------|----------|
|
|
4142
|
+
| Entry animation | 15-25 frames |
|
|
4143
|
+
| Button press | 10-15 frames |
|
|
4144
|
+
| Highlight/focus | 30-60 frames |
|
|
4145
|
+
| Stagger delay | 3-8 frames |
|
|
4146
|
+
`;
|
|
4147
|
+
}
|
|
4148
|
+
function generateAnimationComponents() {
|
|
4149
|
+
return `/**
|
|
4150
|
+
* Remotion Animation Components
|
|
4151
|
+
* Copy these into your project as needed.
|
|
4152
|
+
*/
|
|
4153
|
+
|
|
4154
|
+
import React from 'react';
|
|
4155
|
+
import { useCurrentFrame, useVideoConfig, interpolate, spring, Easing } from 'remotion';
|
|
4156
|
+
|
|
4157
|
+
// Spring configurations
|
|
4158
|
+
export const SPRING_CONFIGS = {
|
|
4159
|
+
snappy: { damping: 15, stiffness: 200, mass: 0.5 },
|
|
4160
|
+
smooth: { damping: 20, stiffness: 100, mass: 1 },
|
|
4161
|
+
bouncy: { damping: 8, stiffness: 150, mass: 0.8 },
|
|
4162
|
+
gentle: { damping: 30, stiffness: 50, mass: 1 },
|
|
4163
|
+
};
|
|
4164
|
+
|
|
4165
|
+
// Animated entry with direction
|
|
4166
|
+
export const AnimatedEntry: React.FC<{
|
|
4167
|
+
children: React.ReactNode;
|
|
4168
|
+
delay?: number;
|
|
4169
|
+
direction?: 'up' | 'down' | 'left' | 'right' | 'none';
|
|
4170
|
+
distance?: number;
|
|
4171
|
+
}> = ({ children, delay = 0, direction = 'up', distance = 30 }) => {
|
|
4172
|
+
const frame = useCurrentFrame();
|
|
4173
|
+
const { fps } = useVideoConfig();
|
|
4174
|
+
|
|
4175
|
+
const progress = spring({
|
|
4176
|
+
frame: frame - delay,
|
|
4177
|
+
fps,
|
|
4178
|
+
config: SPRING_CONFIGS.smooth,
|
|
4179
|
+
});
|
|
4180
|
+
|
|
4181
|
+
const directions = {
|
|
4182
|
+
up: { x: 0, y: distance },
|
|
4183
|
+
down: { x: 0, y: -distance },
|
|
4184
|
+
left: { x: distance, y: 0 },
|
|
4185
|
+
right: { x: -distance, y: 0 },
|
|
4186
|
+
none: { x: 0, y: 0 },
|
|
4187
|
+
};
|
|
4188
|
+
|
|
4189
|
+
const { x, y } = directions[direction];
|
|
4190
|
+
|
|
4191
|
+
return (
|
|
4192
|
+
<div style={{
|
|
4193
|
+
opacity: interpolate(progress, [0, 1], [0, 1]),
|
|
4194
|
+
transform: \`translate(\${x * (1 - progress)}px, \${y * (1 - progress)}px)\`,
|
|
4195
|
+
}}>
|
|
4196
|
+
{children}
|
|
4197
|
+
</div>
|
|
4198
|
+
);
|
|
4199
|
+
};
|
|
4200
|
+
|
|
4201
|
+
// Scale in animation
|
|
4202
|
+
export const ScaleIn: React.FC<{
|
|
4203
|
+
children: React.ReactNode;
|
|
4204
|
+
delay?: number;
|
|
4205
|
+
from?: number;
|
|
4206
|
+
}> = ({ children, delay = 0, from = 0.8 }) => {
|
|
4207
|
+
const frame = useCurrentFrame();
|
|
4208
|
+
const { fps } = useVideoConfig();
|
|
4209
|
+
|
|
4210
|
+
const progress = spring({
|
|
4211
|
+
frame: frame - delay,
|
|
4212
|
+
fps,
|
|
4213
|
+
config: SPRING_CONFIGS.bouncy,
|
|
4214
|
+
});
|
|
4215
|
+
|
|
4216
|
+
return (
|
|
4217
|
+
<div style={{
|
|
4218
|
+
opacity: interpolate(progress, [0, 0.5], [0, 1], { extrapolateRight: 'clamp' }),
|
|
4219
|
+
transform: \`scale(\${interpolate(progress, [0, 1], [from, 1])})\`,
|
|
4220
|
+
}}>
|
|
4221
|
+
{children}
|
|
4222
|
+
</div>
|
|
4223
|
+
);
|
|
4224
|
+
};
|
|
4225
|
+
|
|
4226
|
+
// Staggered list
|
|
4227
|
+
export const StaggeredList: React.FC<{
|
|
4228
|
+
children: React.ReactNode;
|
|
4229
|
+
itemDelay?: number;
|
|
4230
|
+
startFrame?: number;
|
|
4231
|
+
}> = ({ children, itemDelay = 5, startFrame = 0 }) => (
|
|
4232
|
+
<>
|
|
4233
|
+
{React.Children.map(children, (child, i) => (
|
|
4234
|
+
<AnimatedEntry delay={startFrame + i * itemDelay}>{child}</AnimatedEntry>
|
|
4235
|
+
))}
|
|
4236
|
+
</>
|
|
4237
|
+
);
|
|
4238
|
+
|
|
4239
|
+
// Button press animation
|
|
4240
|
+
export const ButtonPress: React.FC<{
|
|
4241
|
+
children: React.ReactNode;
|
|
4242
|
+
pressFrame: number;
|
|
4243
|
+
}> = ({ children, pressFrame }) => {
|
|
4244
|
+
const frame = useCurrentFrame();
|
|
4245
|
+
const { fps } = useVideoConfig();
|
|
4246
|
+
|
|
4247
|
+
const isPressing = frame >= pressFrame && frame < pressFrame + 3;
|
|
4248
|
+
const isReleasing = frame >= pressFrame + 3;
|
|
4249
|
+
|
|
4250
|
+
const releaseProgress = isReleasing ? spring({
|
|
4251
|
+
frame: frame - pressFrame - 3,
|
|
4252
|
+
fps,
|
|
4253
|
+
config: { damping: 10, stiffness: 300 },
|
|
4254
|
+
}) : 0;
|
|
4255
|
+
|
|
4256
|
+
const scale = isPressing ? 0.95 : 0.95 + releaseProgress * 0.05;
|
|
4257
|
+
|
|
4258
|
+
return <div style={{ transform: \`scale(\${Math.min(1, scale)})\` }}>{children}</div>;
|
|
4259
|
+
};
|
|
4260
|
+
|
|
4261
|
+
// Typed text effect
|
|
4262
|
+
export const TypedText: React.FC<{
|
|
4263
|
+
text: string;
|
|
4264
|
+
startFrame?: number;
|
|
4265
|
+
speed?: number;
|
|
4266
|
+
showCursor?: boolean;
|
|
4267
|
+
}> = ({ text, startFrame = 0, speed = 2, showCursor = true }) => {
|
|
4268
|
+
const frame = useCurrentFrame();
|
|
4269
|
+
const charsToShow = Math.floor((frame - startFrame) / speed);
|
|
4270
|
+
|
|
4271
|
+
if (frame < startFrame) return null;
|
|
4272
|
+
|
|
4273
|
+
const isTyping = charsToShow < text.length;
|
|
4274
|
+
|
|
4275
|
+
return (
|
|
4276
|
+
<span>
|
|
4277
|
+
{text.slice(0, Math.min(charsToShow, text.length))}
|
|
4278
|
+
{showCursor && isTyping && (
|
|
4279
|
+
<span style={{ opacity: frame % 15 < 8 ? 1 : 0 }}>|</span>
|
|
4280
|
+
)}
|
|
4281
|
+
</span>
|
|
4282
|
+
);
|
|
4283
|
+
};
|
|
4284
|
+
|
|
4285
|
+
// Counting number
|
|
4286
|
+
export const CountingNumber: React.FC<{
|
|
4287
|
+
from?: number;
|
|
4288
|
+
to: number;
|
|
4289
|
+
startFrame?: number;
|
|
4290
|
+
duration?: number;
|
|
4291
|
+
format?: (n: number) => string;
|
|
4292
|
+
}> = ({ from = 0, to, startFrame = 0, duration = 30, format = String }) => {
|
|
4293
|
+
const frame = useCurrentFrame();
|
|
4294
|
+
|
|
4295
|
+
const progress = interpolate(frame - startFrame, [0, duration], [0, 1], {
|
|
4296
|
+
extrapolateLeft: 'clamp',
|
|
4297
|
+
extrapolateRight: 'clamp',
|
|
4298
|
+
});
|
|
4299
|
+
|
|
4300
|
+
const eased = 1 - Math.pow(1 - progress, 3);
|
|
4301
|
+
const value = Math.round(from + (to - from) * eased);
|
|
4302
|
+
|
|
4303
|
+
return <span>{format(value)}</span>;
|
|
4304
|
+
};
|
|
4305
|
+
|
|
4306
|
+
// Floating element
|
|
4307
|
+
export const FloatingElement: React.FC<{
|
|
4308
|
+
children: React.ReactNode;
|
|
4309
|
+
amplitude?: number;
|
|
4310
|
+
speed?: number;
|
|
4311
|
+
}> = ({ children, amplitude = 3, speed = 0.05 }) => {
|
|
4312
|
+
const frame = useCurrentFrame();
|
|
4313
|
+
const y = Math.sin(frame * speed) * amplitude;
|
|
4314
|
+
|
|
4315
|
+
return <div style={{ transform: \`translateY(\${y}px)\` }}>{children}</div>;
|
|
4316
|
+
};
|
|
4317
|
+
|
|
4318
|
+
// Highlight effect
|
|
4319
|
+
export const Highlight: React.FC<{
|
|
4320
|
+
children: React.ReactNode;
|
|
4321
|
+
startFrame: number;
|
|
4322
|
+
duration?: number;
|
|
4323
|
+
}> = ({ children, startFrame, duration = 45 }) => {
|
|
4324
|
+
const frame = useCurrentFrame();
|
|
4325
|
+
const { fps } = useVideoConfig();
|
|
4326
|
+
|
|
4327
|
+
const isActive = frame >= startFrame && frame < startFrame + duration;
|
|
4328
|
+
const progress = spring({
|
|
4329
|
+
frame: isActive ? frame - startFrame : 0,
|
|
4330
|
+
fps,
|
|
4331
|
+
config: SPRING_CONFIGS.snappy,
|
|
4332
|
+
});
|
|
4333
|
+
|
|
4334
|
+
const scale = isActive ? 1 + progress * 0.03 : 1;
|
|
4335
|
+
|
|
4336
|
+
return (
|
|
4337
|
+
<div style={{
|
|
4338
|
+
transform: \`scale(\${scale})\`,
|
|
4339
|
+
boxShadow: isActive ? \`0 \${8 + progress * 12}px \${16 + progress * 24}px rgba(0,0,0,0.15)\` : undefined,
|
|
4340
|
+
}}>
|
|
4341
|
+
{children}
|
|
4342
|
+
</div>
|
|
4343
|
+
);
|
|
4344
|
+
};
|
|
4345
|
+
|
|
4346
|
+
// Cursor pointer
|
|
4347
|
+
export const CursorPointer: React.FC<{
|
|
4348
|
+
path: Array<{ x: number; y: number; frame: number }>;
|
|
4349
|
+
size?: number;
|
|
4350
|
+
}> = ({ path, size = 24 }) => {
|
|
4351
|
+
const frame = useCurrentFrame();
|
|
4352
|
+
const { fps } = useVideoConfig();
|
|
4353
|
+
|
|
4354
|
+
let x = path[0].x;
|
|
4355
|
+
let y = path[0].y;
|
|
4356
|
+
|
|
4357
|
+
for (let i = 0; i < path.length - 1; i++) {
|
|
4358
|
+
const from = path[i];
|
|
4359
|
+
const to = path[i + 1];
|
|
4360
|
+
|
|
4361
|
+
if (frame >= from.frame && frame <= to.frame) {
|
|
4362
|
+
const progress = spring({
|
|
4363
|
+
frame: frame - from.frame,
|
|
4364
|
+
fps,
|
|
4365
|
+
config: { damping: 20, stiffness: 80 },
|
|
4366
|
+
});
|
|
4367
|
+
|
|
4368
|
+
x = interpolate(progress, [0, 1], [from.x, to.x]);
|
|
4369
|
+
y = interpolate(progress, [0, 1], [from.y, to.y]);
|
|
4370
|
+
break;
|
|
4371
|
+
} else if (frame > to.frame) {
|
|
4372
|
+
x = to.x;
|
|
4373
|
+
y = to.y;
|
|
4374
|
+
}
|
|
4375
|
+
}
|
|
4376
|
+
|
|
4377
|
+
return (
|
|
4378
|
+
<div style={{
|
|
4379
|
+
position: 'absolute',
|
|
4380
|
+
left: \`\${x}%\`,
|
|
4381
|
+
top: \`\${y}%\`,
|
|
4382
|
+
transform: 'translate(-50%, -50%)',
|
|
4383
|
+
zIndex: 1000,
|
|
4384
|
+
pointerEvents: 'none',
|
|
4385
|
+
}}>
|
|
4386
|
+
<svg width={size} height={size} viewBox="0 0 24 24">
|
|
4387
|
+
<path
|
|
4388
|
+
d="M4 4 L4 20 L9 15 L13 22 L16 20 L12 13 L19 13 Z"
|
|
4389
|
+
fill="white"
|
|
4390
|
+
stroke="black"
|
|
4391
|
+
strokeWidth="1.5"
|
|
4392
|
+
/>
|
|
4393
|
+
</svg>
|
|
4394
|
+
</div>
|
|
4395
|
+
);
|
|
4396
|
+
};
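// Example usage (illustrative values; x/y are percentages of the frame):
//   <CursorPointer path={[
//     { x: 20, y: 30, frame: 0 },
//     { x: 60, y: 55, frame: 40 },
//     { x: 60, y: 55, frame: 70 },
//   ]} />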
|
|
4397
|
+
`;
|
|
4398
|
+
}
|
|
4399
|
+
function generateComponentIntegrationRule(b) {
|
|
4400
|
+
const cmd2 = b.name;
|
|
4401
|
+
return `---
|
|
4402
|
+
name: component-integration
|
|
4403
|
+
description: Integrating app components into Remotion videos
|
|
4404
|
+
---
|
|
4405
|
+
|
|
4406
|
+
# Integrating App Components into Remotion
|
|
4407
|
+
|
|
4408
|
+
Use your actual React components OR replicate them pixel-perfect in Remotion videos.
|
|
4409
|
+
|
|
4410
|
+
## Two Approaches
|
|
4411
|
+
|
|
4412
|
+
### Approach A: Replicate UI (Recommended)
|
|
4413
|
+
Read your app's components, note every visual detail, and build identical-looking components in Remotion.
|
|
4414
|
+
|
|
4415
|
+
**Why?** Your app components have hooks, state, and dependencies that don't work in Remotion. Replication is cleaner.
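
A minimal sketch of the replication approach, assuming a stat card from your app; the component name, colors, and copy are placeholders for whatever your UI actually renders:

\`\`\`tsx
// Looks like the app's StatCard, but is a plain presentational component:
// no hooks, no data fetching, just hard-coded demo values.
const StatCardReplica: React.FC<{ label: string; value: string }> = ({ label, value }) => (
  <div style={{ background: '#1e293b', borderRadius: 12, padding: 24 }}>
    <div style={{ color: '#94a3b8', fontSize: 14 }}>{label}</div>
    <div style={{ color: '#ffffff', fontSize: 32, fontWeight: 700 }}>{value}</div>
  </div>
);
\`\`\`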
|
|
4416
|
+
|
|
4417
|
+
### Approach B: Copy Components (When simple enough)
|
|
4418
|
+
For truly simple presentational components, you can copy them directly.
|
|
4419
|
+
|
|
4420
|
+
\`\`\`bash
|
|
4421
|
+
cp -r ../my-app/src/components/Card ./src/app-components/
|
|
4422
|
+
cp ../my-app/tailwind.config.js ./
|
|
4423
|
+
\`\`\`
|
|
4424
|
+
|
|
4425
|
+
---
|
|
4426
|
+
|
|
4427
|
+
## Adapting Components
|
|
4428
|
+
|
|
4429
|
+
### 1. Remove Interactivity
|
|
4430
|
+
|
|
4431
|
+
\`\`\`tsx
|
|
4432
|
+
// BEFORE (interactive app)
|
|
4433
|
+
<Button onClick={handleSubmit}>Submit</Button>
|
|
4434
|
+
|
|
4435
|
+
// AFTER (video-ready)
|
|
4436
|
+
<Button disabled style={{ pointerEvents: 'none' }}>Submit</Button>
|
|
4437
|
+
\`\`\`
|
|
4438
|
+
|
|
4439
|
+
### 2. Replace Dynamic Data
|
|
4440
|
+
|
|
4441
|
+
\`\`\`tsx
|
|
4442
|
+
// BEFORE (fetches from API)
|
|
4443
|
+
const { data } = useQuery('GET_USERS');
|
|
4444
|
+
|
|
4445
|
+
// AFTER (scripted data)
|
|
4446
|
+
const data = [
|
|
4447
|
+
{ id: 1, name: 'Sarah Chen', role: 'Designer' },
|
|
4448
|
+
{ id: 2, name: 'Alex Rivera', role: 'Developer' },
|
|
4449
|
+
];
|
|
4450
|
+
\`\`\`
|
|
4451
|
+
|
|
4452
|
+
### 3. Wrap with Animation
|
|
4453
|
+
|
|
4454
|
+
\`\`\`tsx
|
|
4455
|
+
import { FadeIn, SlideUp } from '../shared';
|
|
4456
|
+
|
|
4457
|
+
<FadeIn delay={0}>
|
|
4458
|
+
<Navbar />
|
|
4459
|
+
</FadeIn>
|
|
4460
|
+
|
|
4461
|
+
<SlideUp delay={15}>
|
|
4462
|
+
<Sidebar />
|
|
4463
|
+
</SlideUp>
|
|
4464
|
+
\`\`\`
|
|
4465
|
+
|
|
4466
|
+
---
|
|
4467
|
+
|
|
4468
|
+
## Common Showcase Patterns
|
|
4469
|
+
|
|
4470
|
+
### Dashboard with Staggered Widgets
|
|
4471
|
+
|
|
4472
|
+
\`\`\`tsx
|
|
4473
|
+
const DashboardShowcase = () => {
|
|
4474
|
+
return (
|
|
4475
|
+
<DashboardLayout>
|
|
4476
|
+
<FadeIn delay={0}>
|
|
4477
|
+
<Header user={mockUser} />
|
|
4478
|
+
</FadeIn>
|
|
4479
|
+
|
|
4480
|
+
<div className="grid grid-cols-3 gap-4">
|
|
4481
|
+
<SlideUp delay={15}><StatsWidget data={revenueData} /></SlideUp>
|
|
4482
|
+
<SlideUp delay={23}><StatsWidget data={usersData} /></SlideUp>
|
|
4483
|
+
<SlideUp delay={31}><StatsWidget data={ordersData} /></SlideUp>
|
|
4484
|
+
</div>
|
|
4485
|
+
|
|
4486
|
+
<FadeIn delay={45}>
|
|
4487
|
+
<ChartWidget data={chartData} />
|
|
4488
|
+
</FadeIn>
|
|
4489
|
+
</DashboardLayout>
|
|
4490
|
+
);
|
|
4491
|
+
};
|
|
4492
|
+
\`\`\`
|
|
4493
|
+
|
|
4494
|
+
### Form with Typing Simulation
|
|
4495
|
+
|
|
4496
|
+
\`\`\`tsx
|
|
4497
|
+
const FormShowcase = () => {
|
|
4498
|
+
const frame = useCurrentFrame();
|
|
4499
|
+
const { fps } = useVideoConfig();
|
|
4500
|
+
|
|
4501
|
+
return (
|
|
4502
|
+
<LoginForm>
|
|
4503
|
+
<Input
|
|
4504
|
+
label="Email"
|
|
4505
|
+
value={<TextReveal text="sarah@example.com" startFrame={0} />}
|
|
4506
|
+
/>
|
|
4507
|
+
<Input
|
|
4508
|
+
label="Password"
|
|
4509
|
+
type="password"
|
|
4510
|
+
value={frame > fps * 2 ? '\u2022\u2022\u2022\u2022\u2022\u2022\u2022\u2022' : ''}
|
|
4511
|
+
/>
|
|
4512
|
+
</LoginForm>
|
|
4513
|
+
);
|
|
4514
|
+
};
|
|
4515
|
+
\`\`\`
|
|
4516
|
+
|
|
4517
|
+
### Modal Slide-In
|
|
4518
|
+
|
|
4519
|
+
\`\`\`tsx
|
|
4520
|
+
const ModalShowcase = () => {
|
|
4521
|
+
const frame = useCurrentFrame();
|
|
4522
|
+
const showModal = frame > 30;
|
|
4523
|
+
|
|
4524
|
+
return (
|
|
4525
|
+
<>
|
|
4526
|
+
<PageBackground />
|
|
4527
|
+
{showModal && (
|
|
4528
|
+
<>
|
|
4529
|
+
<FadeIn delay={30}>
|
|
4530
|
+
<div className="absolute inset-0 bg-black/50" />
|
|
4531
|
+
</FadeIn>
|
|
4532
|
+
<SlideUp delay={35}>
|
|
4533
|
+
<ConfirmationModal title="Confirm Delete" message="Are you sure?" isOpen />
|
|
4534
|
+
</SlideUp>
|
|
4535
|
+
</>
|
|
4536
|
+
)}
|
|
4537
|
+
</>
|
|
4538
|
+
);
|
|
4539
|
+
};
|
|
4540
|
+
\`\`\`
|
|
4541
|
+
|
|
4542
|
+
---
|
|
4543
|
+
|
|
4544
|
+
## Troubleshooting
|
|
4545
|
+
|
|
4546
|
+
### Component uses hooks that don't work
|
|
4547
|
+
\`\`\`tsx
|
|
4548
|
+
// PROBLEM: useRouter, useAuth won't work
|
|
4549
|
+
// SOLUTION: Pass as props or mock the context
|
|
4550
|
+
const MockAuthProvider = ({ children }) => (
|
|
4551
|
+
<AuthContext.Provider value={{ user: mockUser }}>
|
|
4552
|
+
{children}
|
|
4553
|
+
</AuthContext.Provider>
|
|
4554
|
+
);
|
|
4555
|
+
\`\`\`
|
|
4556
|
+
|
|
4557
|
+
### Component too large for frame
|
|
4558
|
+
\`\`\`tsx
|
|
4559
|
+
// Use transform scale to fit
|
|
4560
|
+
<div style={{ transform: 'scale(0.8)', transformOrigin: 'top left' }}>
|
|
4561
|
+
<LargeComponent />
|
|
4562
|
+
</div>
|
|
4563
|
+
\`\`\`
|
|
4564
|
+
`;
|
|
4565
|
+
}
|
|
4566
|
+
function generateProjectVideoWorkflowRule(b) {
|
|
4567
|
+
const cmd2 = b.name;
|
|
4568
|
+
return `---
|
|
4569
|
+
name: project-video-workflow
|
|
4570
|
+
description: Create promotional videos using actual project UI
|
|
4571
|
+
---
|
|
4572
|
+
|
|
4573
|
+
# Project-Based Video Workflow
|
|
4574
|
+
|
|
4575
|
+
Create promotional videos using **your actual project's UI** replicated in Remotion.
|
|
4576
|
+
|
|
4577
|
+
## When to Use
|
|
4578
|
+
|
|
4579
|
+
- User has an existing React/Next.js/Vue project
|
|
4580
|
+
- User wants "product demo", "feature walkthrough", or "promotional video"
|
|
4581
|
+
- User mentions showcasing specific features/UI
|
|
4582
|
+
- User wants to animate their actual app interface
|
|
4583
|
+
|
|
4584
|
+
## Quick Start
|
|
4585
|
+
|
|
4586
|
+
\`\`\`bash
|
|
4587
|
+
# 1. Scaffold video project
|
|
4588
|
+
${cmd2} video init my-app-promo
|
|
4589
|
+
cd my-app-promo
|
|
4590
|
+
|
|
4591
|
+
# 2. Generate audio assets
|
|
4592
|
+
${cmd2} video create \\
|
|
4593
|
+
--script "Introducing our new app..." \\
|
|
4594
|
+
--output ./public
|
|
4595
|
+
|
|
4596
|
+
# 3. Build scenes replicating your app's UI
|
|
4597
|
+
|
|
4598
|
+
# 4. Preview & Render
|
|
4599
|
+
npm run dev
|
|
4600
|
+
npm run render
|
|
4601
|
+
\`\`\`
|
|
4602
|
+
|
|
4603
|
+
---
|
|
4604
|
+
|
|
4605
|
+
## Full Workflow
|
|
4606
|
+
|
|
4607
|
+
### Step 1: Analyze Project
|
|
4608
|
+
|
|
4609
|
+
\`\`\`bash
|
|
4610
|
+
# Check framework
|
|
4611
|
+
cat package.json | grep -E "react|next|vue"
|
|
4612
|
+
|
|
4613
|
+
# List components
|
|
4614
|
+
ls -la src/components/
|
|
4615
|
+
|
|
4616
|
+
# Get colors
|
|
4617
|
+
cat tailwind.config.* | grep -A 30 "colors"
|
|
4618
|
+
\`\`\`
|
|
4619
|
+
|
|
4620
|
+
**Identify:**
|
|
4621
|
+
- Framework: React, Next.js, Vue
|
|
4622
|
+
- Styling: Tailwind, CSS modules, styled-components
|
|
4623
|
+
- Key components: Forms, cards, modals, dashboards
|
|
4624
|
+
- Views to showcase
|
|
4625
|
+
|
|
4626
|
+
### Step 2: Document Brand
|
|
4627
|
+
|
|
4628
|
+
\`\`\`markdown
|
|
4629
|
+
## Brand: [App Name]
|
|
4630
|
+
|
|
4631
|
+
### Colors (from tailwind.config)
|
|
4632
|
+
- Background: #0f172a
|
|
4633
|
+
- Surface: #1e293b
|
|
4634
|
+
- Primary: #14b8a6
|
|
4635
|
+
- Text: #ffffff
|
|
4636
|
+
|
|
4637
|
+
### Key Components
|
|
4638
|
+
1. Sidebar - Dark bg, navigation items
|
|
4639
|
+
2. Dashboard - Stats cards, charts
|
|
4640
|
+
3. Modal - Overlay, card
|
|
4641
|
+
\`\`\`
|
|
4642
|
+
|
|
4643
|
+
### Step 3: Plan Scenes
|
|
4644
|
+
|
|
4645
|
+
\`\`\`markdown
|
|
4646
|
+
## Scene Plan
|
|
4647
|
+
|
|
4648
|
+
### Scene 1: Intro (3s)
|
|
4649
|
+
- Logo centered
|
|
4650
|
+
- Tagline fades up
|
|
4651
|
+
|
|
4652
|
+
### Scene 2: Dashboard (5s)
|
|
4653
|
+
- Stats widgets stagger in
|
|
4654
|
+
- Chart animates
|
|
4655
|
+
|
|
4656
|
+
### Scene 3: Feature Demo (5s)
|
|
4657
|
+
- Sidebar slides in
|
|
4658
|
+
- Selection animates
|
|
4659
|
+
|
|
4660
|
+
### Scene 4: CTA (3s)
|
|
4661
|
+
- Logo + button
|
|
4662
|
+
\`\`\`
|
|
4663
|
+
|
|
4664
|
+
### Step 4: Build Scenes
|
|
4665
|
+
|
|
4666
|
+
Create scenes in \`src/remotion/scenes/\` that replicate your UI:
|
|
4667
|
+
|
|
4668
|
+
\`\`\`tsx
|
|
4669
|
+
// src/remotion/scenes/DashboardScene.tsx
|
|
4670
|
+
import { AbsoluteFill, useCurrentFrame, spring, useVideoConfig } from "remotion";
|
|
4671
|
+
|
|
4672
|
+
export const DASHBOARD_SCENE_DURATION = 150;
|
|
4673
|
+
|
|
4674
|
+
const mockData = {
|
|
4675
|
+
revenue: 125000,
|
|
4676
|
+
users: 1234,
|
|
4677
|
+
orders: 567,
|
|
4678
|
+
};
|
|
4679
|
+
|
|
4680
|
+
export const DashboardScene: React.FC = () => {
|
|
4681
|
+
const frame = useCurrentFrame();
|
|
4682
|
+
const { fps } = useVideoConfig();
|
|
4683
|
+
|
|
4684
|
+
return (
|
|
4685
|
+
<AbsoluteFill style={{ backgroundColor: "#0f172a", padding: 40 }}>
|
|
4686
|
+
{/* Replicate your dashboard layout here */}
|
|
4687
|
+
{/* Use EXACT colors from your tailwind.config */}
|
|
4688
|
+
</AbsoluteFill>
|
|
4689
|
+
);
|
|
4690
|
+
};
|
|
4691
|
+
\`\`\`
|
|
4692
|
+
|
|
4693
|
+
### Step 5: Generate Audio
|
|
4694
|
+
|
|
4695
|
+
\`\`\`bash
|
|
4696
|
+
${cmd2} video create \\
|
|
4697
|
+
--script "Introducing [App]. The fastest way to..." \\
|
|
4698
|
+
--music-prompt "modern uplifting tech" \\
|
|
4699
|
+
--output ./public
|
|
4700
|
+
\`\`\`
|
|
4701
|
+
|
|
4702
|
+
### Step 6: Render
|
|
4703
|
+
|
|
4704
|
+
\`\`\`bash
|
|
4705
|
+
npm run dev # Preview
|
|
4706
|
+
npm run render # Final video
|
|
4707
|
+
\`\`\`
|
|
4708
|
+
|
|
4709
|
+
---
|
|
4710
|
+
|
|
4711
|
+
## Tips
|
|
4712
|
+
|
|
4713
|
+
1. **Start simple** - Get basic scenes working before adding complex animations
|
|
4714
|
+
2. **Use mock data** - Pre-define realistic demo data
|
|
4715
|
+
3. **Match voiceover timing** - Sync visual transitions with narration (see the timing sketch after this list)
|
|
4716
|
+
4. **Keep scenes focused** - One main idea per scene
|
|
4717
|
+
5. **Test at 1x speed** - Preview at normal speed to catch timing issues
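
A rough sketch of tip 3, assuming the \`video-manifest.json\` written by the \`video create\` command above (adjust the import path to your layout; \`SectionVisual\` is a placeholder for your own scene content):

\`\`\`tsx
import { Audio, Sequence, staticFile } from 'remotion';
import manifest from '../public/video-manifest.json';

export const NarratedTimeline: React.FC = () => (
  <>
    <Audio src={staticFile(manifest.voiceover.path)} />
    {manifest.sections.map((section) => (
      <Sequence
        key={section.id}
        from={Math.round(section.startTime * manifest.fps)}
        durationInFrames={section.durationInFrames}
      >
        {/* one focused visual per narration section */}
        <SectionVisual text={section.text} image={section.imagePath} />
      </Sequence>
    ))}
  </>
);
\`\`\`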
|
|
4718
|
+
`;
|
|
4719
|
+
}
|
|
4720
|
+
function generateAllSkillFiles(b) {
|
|
4721
|
+
return {
|
|
4722
|
+
"SKILL.md": generateSkillContent(b),
|
|
4723
|
+
"rules/presentations.md": generatePresentationsRule(b),
|
|
4724
|
+
"rules/video.md": generateVideoRule(b),
|
|
4725
|
+
"rules/motion-standards.md": generateMotionStandardsRule(),
|
|
4726
|
+
"rules/micro-interactions.md": generateMicroInteractionsRule(),
|
|
4727
|
+
"rules/component-integration.md": generateComponentIntegrationRule(b),
|
|
4728
|
+
"rules/project-video-workflow.md": generateProjectVideoWorkflowRule(b),
|
|
4729
|
+
"assets/animation-components.tsx": generateAnimationComponents()
|
|
4730
|
+
};
|
|
4731
|
+
}
|
|
4732
|
+
var EDITORS = [
|
|
4733
|
+
{ name: "Claude Code", dir: ".claude" },
|
|
4734
|
+
{ name: "Cursor", dir: ".cursor" },
|
|
4735
|
+
{ name: "Codex", dir: ".codex" },
|
|
4736
|
+
{ name: "OpenCode", dir: ".opencode" },
|
|
4737
|
+
{ name: "Windsurf", dir: ".windsurf" },
|
|
4738
|
+
{ name: "Agent", dir: ".agent" }
|
|
4739
|
+
];
|
|
4740
|
+
var skillCommand = new Command14("skill").description(`Manage ${brand.displayName} skill for AI coding assistants`).addHelpText(
|
|
4741
|
+
"after",
|
|
4742
|
+
`
|
|
4743
|
+
${chalk12.bold("Examples:")}
|
|
4744
|
+
${chalk12.gray("# Install skill for all detected editors")}
|
|
4745
|
+
$ ${brand.name} skill install
|
|
4746
|
+
|
|
4747
|
+
${chalk12.gray("# Install to specific directory")}
|
|
4748
|
+
$ ${brand.name} skill install --dir ~/.claude
|
|
4749
|
+
|
|
4750
|
+
${chalk12.gray("# Install without remotion-best-practices")}
|
|
4751
|
+
$ ${brand.name} skill install --skip-remotion
|
|
4752
|
+
|
|
4753
|
+
${chalk12.gray("# Show skill content")}
|
|
4754
|
+
$ ${brand.name} skill show
|
|
4755
|
+
`
|
|
4756
|
+
);
|
|
4757
|
+
skillCommand.command("install").description(`Install the ${brand.displayName} skill for AI coding assistants`).option("-d, --dir <path>", "Install to specific directory").option("-g, --global", "Install globally (to home directory)", true).option("-l, --local", "Install locally (to current directory)").option("-f, --force", "Overwrite existing skill files").option("--skip-remotion", "Skip installing remotion-best-practices skill").action(async (options) => {
|
|
4758
|
+
const installed = [];
|
|
4759
|
+
const skipped = [];
|
|
4760
|
+
const errors = [];
|
|
4761
|
+
const baseDir = options.local ? process.cwd() : homedir();
|
|
4762
|
+
const skillFiles = generateAllSkillFiles(brand);
|
|
4763
|
+
if (options.dir) {
|
|
4764
|
+
const skillPath = join(options.dir, "skills", brand.name);
|
|
4765
|
+
try {
|
|
4766
|
+
installSkill(skillPath, skillFiles, options.force);
|
|
4767
|
+
installed.push(options.dir);
|
|
4768
|
+
} catch (err) {
|
|
4769
|
+
errors.push(`${options.dir}: ${err instanceof Error ? err.message : String(err)}`);
|
|
4770
|
+
}
|
|
4771
|
+
} else {
|
|
4772
|
+
for (const editor of EDITORS) {
|
|
4773
|
+
const editorDir = join(baseDir, editor.dir);
|
|
4774
|
+
const skillPath = join(editorDir, "skills", brand.name);
|
|
4775
|
+
const skillFile = join(skillPath, "SKILL.md");
|
|
3226
4776
|
if (!existsSync2(editorDir)) {
|
|
3227
4777
|
continue;
|
|
3228
4778
|
}
|
|
3229
|
-
if (existsSync2(skillFile) && !options.force) {
|
|
3230
|
-
skipped.push(editor.name);
|
|
3231
|
-
continue;
|
|
4779
|
+
if (existsSync2(skillFile) && !options.force) {
|
|
4780
|
+
skipped.push(editor.name);
|
|
4781
|
+
continue;
|
|
4782
|
+
}
|
|
4783
|
+
try {
|
|
4784
|
+
installSkill(skillPath, skillFiles, options.force);
|
|
4785
|
+
installed.push(editor.name);
|
|
4786
|
+
} catch (err) {
|
|
4787
|
+
errors.push(`${editor.name}: ${err instanceof Error ? err.message : String(err)}`);
|
|
4788
|
+
}
|
|
4789
|
+
}
|
|
4790
|
+
}
|
|
4791
|
+
console.log();
|
|
4792
|
+
if (installed.length > 0) {
|
|
4793
|
+
success("Skill installed successfully");
|
|
4794
|
+
console.log();
|
|
4795
|
+
keyValue("Installed to", installed.join(", "));
|
|
4796
|
+
keyValue("Files", Object.keys(skillFiles).length.toString());
|
|
4797
|
+
}
|
|
4798
|
+
if (skipped.length > 0) {
|
|
4799
|
+
console.log();
|
|
4800
|
+
info(`Skipped (already exists): ${skipped.join(", ")}`);
|
|
4801
|
+
console.log(chalk12.gray(" Use --force to overwrite"));
|
|
4802
|
+
}
|
|
4803
|
+
if (errors.length > 0) {
|
|
4804
|
+
console.log();
|
|
4805
|
+
for (const err of errors) {
|
|
4806
|
+
error(err);
|
|
4807
|
+
}
|
|
4808
|
+
}
|
|
4809
|
+
if (installed.length === 0 && skipped.length === 0 && errors.length === 0) {
|
|
4810
|
+
info("No supported AI coding assistants detected.");
|
|
4811
|
+
console.log();
|
|
4812
|
+
console.log(chalk12.gray("Supported editors: " + EDITORS.map((e) => e.name).join(", ")));
|
|
4813
|
+
console.log(chalk12.gray("Use --dir <path> to install to a specific directory"));
|
|
4814
|
+
}
|
|
4815
|
+
if (installed.length > 0 && !options.skipRemotion) {
|
|
4816
|
+
console.log();
|
|
4817
|
+
info("Installing remotion-best-practices skill...");
|
|
4818
|
+
try {
|
|
4819
|
+
execSync("npx -y skills add https://github.com/remotion-dev/skills --skill remotion-best-practices --all", {
|
|
4820
|
+
stdio: "inherit",
|
|
4821
|
+
timeout: 6e4
|
|
4822
|
+
});
|
|
4823
|
+
success("remotion-best-practices skill installed");
|
|
4824
|
+
} catch (err) {
|
|
4825
|
+
warn("Could not install remotion-best-practices skill automatically");
|
|
4826
|
+
console.log(chalk12.gray(" Run manually: npx skills add remotion-dev/skills"));
|
|
4827
|
+
}
|
|
4828
|
+
}
|
|
4829
|
+
console.log();
|
|
4830
|
+
});
|
|
4831
|
+
skillCommand.command("show").description("Display the skill content").option("-a, --all", "Show all files").action((options) => {
|
|
4832
|
+
const files = generateAllSkillFiles(brand);
|
|
4833
|
+
if (options.all) {
|
|
4834
|
+
for (const [path2, content] of Object.entries(files)) {
|
|
4835
|
+
console.log(chalk12.bold.cyan(`
|
|
4836
|
+
=== ${path2} ===
|
|
4837
|
+
`));
|
|
4838
|
+
console.log(content);
|
|
4839
|
+
}
|
|
4840
|
+
} else {
|
|
4841
|
+
console.log(files["SKILL.md"]);
|
|
4842
|
+
console.log(chalk12.gray("\nUse --all to show all files"));
|
|
4843
|
+
}
|
|
4844
|
+
});
|
|
4845
|
+
skillCommand.command("uninstall").description(`Remove the ${brand.displayName} skill from AI coding assistants`).option("-g, --global", "Uninstall globally (from home directory)", true).option("-l, --local", "Uninstall locally (from current directory)").action(async (options) => {
|
|
4846
|
+
const { rmSync } = await import("fs");
|
|
4847
|
+
const removed = [];
|
|
4848
|
+
const baseDir = options.local ? process.cwd() : homedir();
|
|
4849
|
+
for (const editor of EDITORS) {
|
|
4850
|
+
const skillPath = join(baseDir, editor.dir, "skills", brand.name);
|
|
4851
|
+
if (existsSync2(skillPath)) {
|
|
4852
|
+
try {
|
|
4853
|
+
rmSync(skillPath, { recursive: true });
|
|
4854
|
+
removed.push(editor.name);
|
|
4855
|
+
} catch {
|
|
4856
|
+
}
|
|
4857
|
+
}
|
|
4858
|
+
}
|
|
4859
|
+
console.log();
|
|
4860
|
+
if (removed.length > 0) {
|
|
4861
|
+
success("Skill uninstalled");
|
|
4862
|
+
keyValue("Removed from", removed.join(", "));
|
|
4863
|
+
} else {
|
|
4864
|
+
info("No installed skills found");
|
|
4865
|
+
}
|
|
4866
|
+
console.log();
|
|
4867
|
+
});
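// installSkill writes every generated file under <skillPath>/ (SKILL.md, rules/*, assets/*).
// The per-editor "already exists" check happens in the caller; files at this path are
// overwritten unconditionally, so the force parameter is not re-checked here.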
|
|
4868
|
+
function installSkill(skillPath, files, force) {
|
|
4869
|
+
mkdirSync(join(skillPath, "rules"), { recursive: true });
|
|
4870
|
+
mkdirSync(join(skillPath, "assets"), { recursive: true });
|
|
4871
|
+
for (const [relativePath, content] of Object.entries(files)) {
|
|
4872
|
+
const filePath = join(skillPath, relativePath);
|
|
4873
|
+
writeFileSync(filePath, content, "utf-8");
|
|
4874
|
+
}
|
|
4875
|
+
}
|
|
4876
|
+
|
|
4877
|
+
// src/commands/tts.ts
|
|
4878
|
+
init_api();
|
|
4879
|
+
init_output();
|
|
4880
|
+
init_types();
|
|
4881
|
+
import { Command as Command15 } from "commander";
|
|
4882
|
+
import ora8 from "ora";
|
|
4883
|
+
import { writeFile as writeFile2 } from "fs/promises";
|
|
4884
|
+
var generateCommand = new Command15("generate").description("Generate speech from text").requiredOption("-t, --text <text>", "Text to convert to speech").requiredOption("-o, --output <path>", "Output file path").option("-v, --voice <voice>", "Voice name or ID (e.g., Kore, Rachel, alloy)").option("-p, --provider <provider>", "Provider: gemini, elevenlabs, openai").option("-m, --model <model>", "Model (provider-specific)").option("-s, --speed <speed>", "Speech speed 0.25-4.0 (default: 1.0)").option("-f, --format <format>", "Output format: human, json, quiet", "human").action(async (options) => {
|
|
4885
|
+
const format = options.format;
|
|
4886
|
+
const spinner = format === "human" ? ora8("Generating speech...").start() : null;
|
|
4887
|
+
let speed;
|
|
4888
|
+
if (options.speed) {
|
|
4889
|
+
speed = parseFloat(options.speed);
|
|
4890
|
+
if (isNaN(speed) || speed < 0.25 || speed > 4) {
|
|
4891
|
+
spinner?.stop();
|
|
4892
|
+
error("Speed must be between 0.25 and 4.0");
|
|
4893
|
+
process.exit(EXIT_CODES.INVALID_INPUT);
|
|
4894
|
+
}
|
|
4895
|
+
}
|
|
4896
|
+
try {
|
|
4897
|
+
const result = await generateSpeech({
|
|
4898
|
+
text: options.text,
|
|
4899
|
+
options: {
|
|
4900
|
+
provider: options.provider,
|
|
4901
|
+
voice: options.voice,
|
|
4902
|
+
model: options.model,
|
|
4903
|
+
speed
|
|
4904
|
+
}
|
|
4905
|
+
});
|
|
4906
|
+
spinner?.stop();
|
|
4907
|
+
const outputPath = options.output.endsWith(`.${result.format}`) ? options.output : `${options.output}.${result.format}`;
|
|
4908
|
+
await writeFile2(outputPath, result.audioData);
|
|
4909
|
+
if (format === "json") {
|
|
4910
|
+
printJson({
|
|
4911
|
+
status: "completed",
|
|
4912
|
+
output: outputPath,
|
|
4913
|
+
duration: result.duration,
|
|
4914
|
+
cost: result.cost,
|
|
4915
|
+
provider: result.provider,
|
|
4916
|
+
format: result.format
|
|
4917
|
+
});
|
|
4918
|
+
return;
|
|
4919
|
+
}
|
|
4920
|
+
if (format === "quiet") {
|
|
4921
|
+
console.log(outputPath);
|
|
4922
|
+
return;
|
|
4923
|
+
}
|
|
4924
|
+
success(`Saved to: ${outputPath}`);
|
|
4925
|
+
info(`Duration: ${result.duration.toFixed(2)}s`);
|
|
4926
|
+
info(`Provider: ${result.provider}`);
|
|
4927
|
+
info(`Cost: $${result.cost.toFixed(6)}`);
|
|
4928
|
+
} catch (err) {
|
|
4929
|
+
spinner?.stop();
|
|
4930
|
+
error(err instanceof Error ? err.message : "Unknown error");
|
|
4931
|
+
process.exit(EXIT_CODES.GENERAL_ERROR);
|
|
4932
|
+
}
|
|
4933
|
+
});
|
|
4934
|
+
var voicesCommand = new Command15("voices").description("List available voices").option("-p, --provider <provider>", "Filter by provider: gemini, elevenlabs, openai").option("-f, --format <format>", "Output format: human, json", "human").action(async (options) => {
|
|
4935
|
+
const spinner = options.format === "human" ? ora8("Fetching voices...").start() : null;
|
|
4936
|
+
try {
|
|
4937
|
+
const result = await getVoices();
|
|
4938
|
+
spinner?.stop();
|
|
4939
|
+
if (options.format === "json") {
|
|
4940
|
+
if (options.provider) {
|
|
4941
|
+
const providerVoices = result.voices[options.provider];
|
|
4942
|
+
printJson(providerVoices || []);
|
|
4943
|
+
} else {
|
|
4944
|
+
printJson(result.voices);
|
|
4945
|
+
}
|
|
4946
|
+
return;
|
|
4947
|
+
}
|
|
4948
|
+
const providers = options.provider ? [options.provider] : ["gemini", "elevenlabs", "openai"];
|
|
4949
|
+
for (const provider of providers) {
|
|
4950
|
+
const voices = result.voices[provider];
|
|
4951
|
+
if (!voices || voices.length === 0) continue;
|
|
4952
|
+
console.log();
|
|
4953
|
+
console.log(`${provider.toUpperCase()} Voices:`);
|
|
4954
|
+
console.log("-".repeat(50));
|
|
4955
|
+
for (const voice of voices) {
|
|
4956
|
+
console.log(` ${voice.name} (${voice.id})`);
|
|
4957
|
+
console.log(` ${voice.description}`);
|
|
4958
|
+
}
|
|
4959
|
+
}
|
|
4960
|
+
} catch (err) {
|
|
4961
|
+
spinner?.stop();
|
|
4962
|
+
error(err instanceof Error ? err.message : "Unknown error");
|
|
4963
|
+
process.exit(EXIT_CODES.GENERAL_ERROR);
|
|
4964
|
+
}
|
|
4965
|
+
});
|
|
4966
|
+
var ttsCommand = new Command15("tts").description("Text-to-speech commands").addCommand(generateCommand).addCommand(voicesCommand);
|
|
4967
|
+
|
|
4968
|
+
// src/commands/music.ts
|
|
4969
|
+
init_api();
|
|
4970
|
+
init_output();
|
|
4971
|
+
init_types();
|
|
4972
|
+
import { Command as Command16 } from "commander";
|
|
4973
|
+
import ora9 from "ora";
|
|
4974
|
+
import { writeFile as writeFile3 } from "fs/promises";
|
|
4975
|
+
function outputResult(result, format) {
|
|
4976
|
+
if (format === "json") {
|
|
4977
|
+
printJson(result);
|
|
4978
|
+
return;
|
|
4979
|
+
}
|
|
4980
|
+
if (format === "quiet") {
|
|
4981
|
+
if (result.audioUrl) {
|
|
4982
|
+
console.log(result.audioUrl);
|
|
4983
|
+
} else {
|
|
4984
|
+
console.log(result.requestId);
|
|
4985
|
+
}
|
|
4986
|
+
return;
|
|
4987
|
+
}
|
|
4988
|
+
info(`Request ID: ${result.requestId}`);
|
|
4989
|
+
info(`Status: ${result.status}`);
|
|
4990
|
+
if (result.duration) {
|
|
4991
|
+
info(`Duration: ${result.duration}s`);
|
|
4992
|
+
}
|
|
4993
|
+
if (result.audioUrl) {
|
|
4994
|
+
success(`Audio URL: ${result.audioUrl}`);
|
|
4995
|
+
}
|
|
4996
|
+
if (result.cost !== void 0) {
|
|
4997
|
+
info(`Cost: $${result.cost.toFixed(4)}`);
|
|
4998
|
+
}
|
|
4999
|
+
if (result.error) {
|
|
5000
|
+
error(`Error: ${result.error}`);
|
|
5001
|
+
}
|
|
5002
|
+
}
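// downloadFile accepts either a base64 "data:" URL (decoded and written locally) or an
// http(s) URL (fetched and written to disk), so provider responses that inline the audio work too.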
|
|
5003
|
+
async function downloadFile(url, outputPath) {
|
|
5004
|
+
if (url.startsWith("data:")) {
|
|
5005
|
+
const matches = url.match(/^data:[^;]+;base64,(.+)$/);
|
|
5006
|
+
if (!matches) {
|
|
5007
|
+
throw new Error("Invalid data URL format");
|
|
5008
|
+
}
|
|
5009
|
+
const buffer2 = Buffer.from(matches[1], "base64");
|
|
5010
|
+
await writeFile3(outputPath, buffer2);
|
|
5011
|
+
return;
|
|
5012
|
+
}
|
|
5013
|
+
const response = await fetch(url);
|
|
5014
|
+
if (!response.ok) {
|
|
5015
|
+
throw new Error(`Failed to download: ${response.status}`);
|
|
5016
|
+
}
|
|
5017
|
+
const buffer = await response.arrayBuffer();
|
|
5018
|
+
await writeFile3(outputPath, Buffer.from(buffer));
|
|
5019
|
+
}
|
|
5020
|
+
var generateCommand2 = new Command16("generate").description("Generate music from a text prompt").requiredOption("-p, --prompt <text>", "Music description").option("-d, --duration <seconds>", "Duration in seconds (3-30)", "30").option("-s, --style <style>", "Style preset").option("--provider <provider>", "Provider (elevenlabs, suno)").option("-o, --output <path>", "Output file path").option("--no-wait", "Do not wait for completion").option("-f, --format <format>", "Output format: human, json, quiet", "human").action(async (options) => {
|
|
5021
|
+
const duration = parseInt(options.duration, 10);
|
|
5022
|
+
if (isNaN(duration) || duration < 3 || duration > 30) {
|
|
5023
|
+
error("Duration must be between 3 and 30 seconds");
|
|
5024
|
+
process.exit(EXIT_CODES.INVALID_INPUT);
|
|
5025
|
+
}
|
|
5026
|
+
const format = options.format;
|
|
5027
|
+
const spinner = format === "human" ? ora9("Generating music...").start() : null;
|
|
5028
|
+
try {
|
|
5029
|
+
const result = await generateMusic({
|
|
5030
|
+
prompt: options.prompt,
|
|
5031
|
+
duration,
|
|
5032
|
+
options: {
|
|
5033
|
+
provider: options.provider,
|
|
5034
|
+
style: options.style
|
|
5035
|
+
}
|
|
5036
|
+
});
|
|
5037
|
+
if (!options.wait) {
|
|
5038
|
+
spinner?.stop();
|
|
5039
|
+
outputResult(result, format);
|
|
5040
|
+
return;
|
|
5041
|
+
}
|
|
5042
|
+
let finalResult = result;
|
|
5043
|
+
if (result.status !== "completed" && result.status !== "failed") {
|
|
5044
|
+
if (spinner) spinner.text = `Processing (ID: ${result.requestId})...`;
|
|
5045
|
+
finalResult = await pollForCompletion(
|
|
5046
|
+
() => checkMusicStatus(result.requestId),
|
|
5047
|
+
60,
|
|
5048
|
+
2e3
|
|
5049
|
+
);
|
|
5050
|
+
}
|
|
5051
|
+
spinner?.stop();
|
|
5052
|
+
if (finalResult.status === "failed") {
|
|
5053
|
+
error(finalResult.error || "Music generation failed");
|
|
5054
|
+
process.exit(EXIT_CODES.GENERAL_ERROR);
|
|
5055
|
+
}
|
|
5056
|
+
outputResult(finalResult, format);
|
|
5057
|
+
if (options.output && finalResult.audioUrl) {
|
|
5058
|
+
const downloadSpinner = format === "human" ? ora9("Downloading...").start() : null;
|
|
5059
|
+
try {
|
|
5060
|
+
await downloadFile(finalResult.audioUrl, options.output);
|
|
5061
|
+
downloadSpinner?.stop();
|
|
5062
|
+
if (format === "human") {
|
|
5063
|
+
success(`Saved to: ${options.output}`);
|
|
5064
|
+
}
|
|
5065
|
+
} catch (err) {
|
|
5066
|
+
downloadSpinner?.stop();
|
|
5067
|
+
warn(`Failed to download: ${err instanceof Error ? err.message : "Unknown error"}`);
|
|
5068
|
+
}
|
|
5069
|
+
}
|
|
5070
|
+
} catch (err) {
|
|
5071
|
+
spinner?.stop();
|
|
5072
|
+
error(err instanceof Error ? err.message : "Unknown error");
|
|
5073
|
+
process.exit(EXIT_CODES.GENERAL_ERROR);
|
|
5074
|
+
}
|
|
5075
|
+
});
|
|
5076
|
+
var statusCommand = new Command16("status").description("Check status of a music generation request").argument("<id>", "Request ID").option("-f, --format <format>", "Output format: human, json, quiet", "human").action(async (id, options) => {
|
|
5077
|
+
const spinner = options.format === "human" ? ora9("Checking status...").start() : null;
|
|
5078
|
+
try {
|
|
5079
|
+
const result = await checkMusicStatus(id);
|
|
5080
|
+
spinner?.stop();
|
|
5081
|
+
outputResult(result, options.format);
|
|
5082
|
+
} catch (err) {
|
|
5083
|
+
spinner?.stop();
|
|
5084
|
+
error(err instanceof Error ? err.message : "Unknown error");
|
|
5085
|
+
process.exit(EXIT_CODES.GENERAL_ERROR);
|
|
5086
|
+
}
|
|
5087
|
+
});
|
|
5088
|
+
var musicCommand = new Command16("music").description("Music generation commands").addCommand(generateCommand2).addCommand(statusCommand);
|
|
5089
|
+
|
|
5090
|
+
// src/commands/mix.ts
|
|
5091
|
+
init_api();
|
|
5092
|
+
init_output();
|
|
5093
|
+
init_types();
|
|
5094
|
+
import { Command as Command17 } from "commander";
|
|
5095
|
+
import ora10 from "ora";
|
|
5096
|
+
import { writeFile as writeFile4 } from "fs/promises";
|
|
5097
|
+
function outputResult2(result, format) {
|
|
5098
|
+
if (format === "json") {
|
|
5099
|
+
printJson(result);
|
|
5100
|
+
return;
|
|
5101
|
+
}
|
|
5102
|
+
if (format === "quiet") {
|
|
5103
|
+
if (result.outputUrl) {
|
|
5104
|
+
console.log(result.outputUrl);
|
|
5105
|
+
} else {
|
|
5106
|
+
console.log(result.requestId);
|
|
5107
|
+
}
|
|
5108
|
+
return;
|
|
5109
|
+
}
|
|
5110
|
+
info(`Request ID: ${result.requestId}`);
|
|
5111
|
+
info(`Status: ${result.status}`);
|
|
5112
|
+
if (result.duration) {
|
|
5113
|
+
info(`Duration: ${result.duration}s`);
|
|
5114
|
+
}
|
|
5115
|
+
if (result.outputUrl) {
|
|
5116
|
+
success(`Output URL: ${result.outputUrl}`);
|
|
5117
|
+
}
|
|
5118
|
+
if (result.cost !== void 0) {
|
|
5119
|
+
info(`Cost: $${result.cost.toFixed(4)}`);
|
|
5120
|
+
}
|
|
5121
|
+
if (result.error) {
|
|
5122
|
+
error(`Error: ${result.error}`);
|
|
5123
|
+
}
|
|
5124
|
+
}
|
|
5125
|
+
async function downloadFile2(url, outputPath) {
|
|
5126
|
+
const response = await fetch(url);
|
|
5127
|
+
if (!response.ok) {
|
|
5128
|
+
throw new Error(`Failed to download: ${response.status}`);
|
|
5129
|
+
}
|
|
5130
|
+
const buffer = await response.arrayBuffer();
|
|
5131
|
+
await writeFile4(outputPath, Buffer.from(buffer));
|
|
5132
|
+
}
|
|
5133
|
+
var mixCommand = new Command17("create").description("Mix audio tracks into a video").requiredOption("--video <url>", "Input video file/URL").option("--music <url>", "Background music file/URL").option("--voice <url>", "Voiceover file/URL").option("--music-volume <percent>", "Music volume 0-100", "50").option("--voice-volume <percent>", "Voice volume 0-200", "100").option("-o, --output <path>", "Output file path").option("--no-wait", "Do not wait for completion").option("-f, --format <format>", "Output format: human, json, quiet", "human").action(async (options) => {
|
|
5134
|
+
if (!options.music && !options.voice) {
|
|
5135
|
+
error("At least one of --music or --voice must be provided");
|
|
5136
|
+
process.exit(EXIT_CODES.INVALID_INPUT);
|
|
5137
|
+
}
|
|
5138
|
+
const musicVolume = parseInt(options.musicVolume, 10) / 100;
|
|
5139
|
+
const voiceVolume = parseInt(options.voiceVolume, 10) / 100;
|
|
5140
|
+
if (isNaN(musicVolume) || musicVolume < 0 || musicVolume > 1) {
|
|
5141
|
+
error("Music volume must be between 0 and 100");
|
|
5142
|
+
process.exit(EXIT_CODES.INVALID_INPUT);
|
|
5143
|
+
}
|
|
5144
|
+
if (isNaN(voiceVolume) || voiceVolume < 0 || voiceVolume > 2) {
|
|
5145
|
+
error("Voice volume must be between 0 and 200");
|
|
5146
|
+
process.exit(EXIT_CODES.INVALID_INPUT);
|
|
5147
|
+
}
|
|
5148
|
+
const format = options.format;
|
|
5149
|
+
const spinner = format === "human" ? ora10("Mixing audio...").start() : null;
|
|
5150
|
+
const inputs = [{ url: options.video, role: "video" }];
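// Note: each input also carries a per-track volume (music scaled x5, voice x2 relative to the
// normalized 0-1 values above), in addition to the musicVolume/voiceVolume options passed below.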
|
|
5151
|
+
if (options.music) {
|
|
5152
|
+
inputs.push({ url: options.music, role: "background", volume: musicVolume * 5 });
|
|
5153
|
+
}
|
|
5154
|
+
if (options.voice) {
|
|
5155
|
+
inputs.push({ url: options.voice, role: "voice", volume: voiceVolume * 2 });
|
|
5156
|
+
}
|
|
5157
|
+
try {
|
|
5158
|
+
const result = await mixAudio({
|
|
5159
|
+
operation: "add-to-video",
|
|
5160
|
+
inputs,
|
|
5161
|
+
options: {
|
|
5162
|
+
musicVolume,
|
|
5163
|
+
voiceVolume
|
|
3232
5164
|
}
|
|
5165
|
+
});
|
|
5166
|
+
if (!options.wait) {
|
|
5167
|
+
spinner?.stop();
|
|
5168
|
+
outputResult2(result, format);
|
|
5169
|
+
return;
|
|
5170
|
+
}
|
|
5171
|
+
if (spinner) spinner.text = `Processing (ID: ${result.requestId})...`;
|
|
5172
|
+
const finalResult = await pollForCompletion(
|
|
5173
|
+
() => checkMixStatus(result.requestId),
|
|
5174
|
+
120,
|
|
5175
|
+
3e3
|
|
5176
|
+
);
|
|
5177
|
+
spinner?.stop();
|
|
5178
|
+
if (finalResult.status === "failed") {
|
|
5179
|
+
error(finalResult.error || "Audio mixing failed");
|
|
5180
|
+
process.exit(EXIT_CODES.GENERAL_ERROR);
|
|
5181
|
+
}
|
|
5182
|
+
outputResult2(finalResult, format);
|
|
5183
|
+
if (options.output && finalResult.outputUrl) {
|
|
5184
|
+
const downloadSpinner = format === "human" ? ora10("Downloading...").start() : null;
|
|
3233
5185
|
try {
|
|
3234
|
-
|
|
3235
|
-
|
|
5186
|
+
await downloadFile2(finalResult.outputUrl, options.output);
|
|
5187
|
+
downloadSpinner?.stop();
|
|
5188
|
+
if (format === "human") {
|
|
5189
|
+
success(`Saved to: ${options.output}`);
|
|
5190
|
+
}
|
|
3236
5191
|
} catch (err) {
|
|
3237
|
-
|
|
5192
|
+
downloadSpinner?.stop();
|
|
5193
|
+
warn(`Failed to download: ${err instanceof Error ? err.message : "Unknown error"}`);
|
|
3238
5194
|
}
|
|
3239
5195
|
}
|
|
5196
|
+
} catch (err) {
|
|
5197
|
+
spinner?.stop();
|
|
5198
|
+
error(err instanceof Error ? err.message : "Unknown error");
|
|
5199
|
+
process.exit(EXIT_CODES.GENERAL_ERROR);
|
|
3240
5200
|
}
|
|
3241
|
-
|
|
3242
|
-
|
|
3243
|
-
|
|
3244
|
-
|
|
3245
|
-
|
|
5201
|
+
});
|
|
5202
|
+
var statusCommand2 = new Command17("status").description("Check status of an audio mix request").argument("<id>", "Request ID").option("-f, --format <format>", "Output format: human, json, quiet", "human").action(async (id, options) => {
|
|
5203
|
+
const spinner = options.format === "human" ? ora10("Checking status...").start() : null;
|
|
5204
|
+
try {
|
|
5205
|
+
const result = await checkMixStatus(id);
|
|
5206
|
+
spinner?.stop();
|
|
5207
|
+
outputResult2(result, options.format);
|
|
5208
|
+
} catch (err) {
|
|
5209
|
+
spinner?.stop();
|
|
5210
|
+
error(err instanceof Error ? err.message : "Unknown error");
|
|
5211
|
+
process.exit(EXIT_CODES.GENERAL_ERROR);
|
|
3246
5212
|
}
|
|
3247
|
-
|
|
3248
|
-
|
|
3249
|
-
|
|
3250
|
-
|
|
5213
|
+
});
|
|
5214
|
+
var mixAudioCommand = new Command17("mix").description("Audio mixing commands").addCommand(mixCommand).addCommand(statusCommand2);
|
|
5215
|
+
|
|
5216
|
+
// src/commands/image.ts
|
|
5217
|
+
init_api();
|
|
5218
|
+
init_output();
|
|
5219
|
+
init_types();
|
|
5220
|
+
import { Command as Command18 } from "commander";
|
|
5221
|
+
import ora11 from "ora";
|
|
5222
|
+
var searchCommand = new Command18("search").description("Search for images").requiredOption("-q, --query <query>", "Search query").option("-n, --max-results <number>", "Maximum number of results (default: 10)").option("-s, --size <size>", "Image size: small, medium, large, any", "large").option("--safe-search", "Enable safe search (default: true)", true).option("--no-safe-search", "Disable safe search").option("-f, --format <format>", "Output format: human, json, quiet", "human").action(async (options) => {
|
|
5223
|
+
const format = options.format;
|
|
5224
|
+
const spinner = format === "human" ? ora11("Searching for images...").start() : null;
|
|
5225
|
+
let maxResults;
|
|
5226
|
+
if (options.maxResults) {
|
|
5227
|
+
maxResults = parseInt(options.maxResults, 10);
|
|
5228
|
+
if (isNaN(maxResults) || maxResults < 1) {
|
|
5229
|
+
spinner?.stop();
|
|
5230
|
+
error("Max results must be a positive number");
|
|
5231
|
+
process.exit(EXIT_CODES.INVALID_INPUT);
|
|
5232
|
+
}
|
|
3251
5233
|
}
|
|
3252
|
-
|
|
5234
|
+
try {
|
|
5235
|
+
const result = await searchImages({
|
|
5236
|
+
query: options.query,
|
|
5237
|
+
options: {
|
|
5238
|
+
maxResults: maxResults || 10,
|
|
5239
|
+
size: options.size,
|
|
5240
|
+
safeSearch: options.safeSearch
|
|
5241
|
+
}
|
|
5242
|
+
});
|
|
5243
|
+
spinner?.stop();
|
|
5244
|
+
if (!result.success) {
|
|
5245
|
+
error("Search failed");
|
|
5246
|
+
process.exit(EXIT_CODES.GENERAL_ERROR);
|
|
5247
|
+
}
|
|
5248
|
+
const allImages = result.data.results.flatMap(
|
|
5249
|
+
(providerResult) => providerResult.results.map((img) => ({
|
|
5250
|
+
...img,
|
|
5251
|
+
provider: providerResult.providerName
|
|
5252
|
+
}))
|
|
5253
|
+
);
|
|
5254
|
+
if (format === "json") {
|
|
5255
|
+
printJson({
|
|
5256
|
+
success: true,
|
|
5257
|
+
query: options.query,
|
|
5258
|
+
totalResults: allImages.length,
|
|
5259
|
+
totalCost: result.data.totalCost,
|
|
5260
|
+
images: allImages
|
|
5261
|
+
});
|
|
5262
|
+
return;
|
|
5263
|
+
}
|
|
5264
|
+
if (format === "quiet") {
|
|
5265
|
+
for (const img of allImages) {
|
|
5266
|
+
console.log(img.url);
|
|
5267
|
+
}
|
|
5268
|
+
return;
|
|
5269
|
+
}
|
|
5270
|
+
if (allImages.length === 0) {
|
|
5271
|
+
info("No images found");
|
|
5272
|
+
return;
|
|
5273
|
+
}
|
|
5274
|
+
success(`Found ${allImages.length} images for "${options.query}"`);
|
|
3253
5275
|
console.log();
|
|
3254
|
-
for (
|
|
3255
|
-
|
|
5276
|
+
for (let i = 0; i < allImages.length; i++) {
|
|
5277
|
+
const img = allImages[i];
|
|
5278
|
+
console.log(`[${i + 1}] ${img.title || "Untitled"}`);
|
|
5279
|
+
console.log(` URL: ${img.url}`);
|
|
5280
|
+
console.log(` Size: ${img.width}x${img.height}`);
|
|
5281
|
+
if (img.author) {
|
|
5282
|
+
console.log(` Author: ${img.author}`);
|
|
5283
|
+
}
|
|
5284
|
+
console.log(` Provider: ${img.provider}`);
|
|
5285
|
+
console.log();
|
|
3256
5286
|
}
|
|
5287
|
+
info(`Total cost: $${result.data.totalCost.toFixed(4)}`);
|
|
5288
|
+
} catch (err) {
|
|
5289
|
+
spinner?.stop();
|
|
5290
|
+
error(err instanceof Error ? err.message : "Unknown error");
|
|
5291
|
+
process.exit(EXIT_CODES.GENERAL_ERROR);
|
|
3257
5292
|
}
|
|
3258
|
-
|
|
3259
|
-
|
|
5293
|
+
});
|
|
5294
|
+
var imageCommand = new Command18("image").description("Image search commands").addCommand(searchCommand);
|
|
5295
|
+
|
|
5296
|
+
// src/commands/video.ts
|
|
5297
|
+
init_api();
|
|
5298
|
+
init_output();
|
|
5299
|
+
init_types();
|
|
5300
|
+
import { Command as Command19 } from "commander";
|
|
5301
|
+
import ora12 from "ora";
|
|
5302
|
+
import { mkdir, writeFile as writeFile5, readFile as readFile2, access, rm } from "fs/promises";
|
|
5303
|
+
import { join as join2, resolve as resolve4 } from "path";
|
|
5304
|
+
import { execSync as execSync2, spawn } from "child_process";
|
|
5305
|
+
var DEFAULT_TEMPLATE = "inizio-inc/remotion-composition";
|
|
5306
|
+
var DEFAULT_FPS = 30;
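// parseScriptIntoSections: if the script contains explicit "---" or "[Section N]" markers,
// split on those; otherwise split into sentences and fold very short sentences (under 5 words)
// into a neighboring sentence so each section reads as a complete beat.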
|
|
5307
|
+
function parseScriptIntoSections(script) {
|
|
5308
|
+
if (script.includes("---") || script.includes("[Section")) {
|
|
5309
|
+
const parts = script.split(/---|\[Section \d+\]/i).filter((s) => s.trim());
|
|
5310
|
+
if (parts.length > 1) {
|
|
5311
|
+
return parts.map((p) => p.trim());
|
|
5312
|
+
}
|
|
5313
|
+
}
|
|
5314
|
+
const sentences = script.split(/(?<=[.!?])\s+/).map((s) => s.trim()).filter((s) => s.length > 0);
|
|
5315
|
+
const sections = [];
|
|
5316
|
+
let pendingShort = "";
|
|
5317
|
+
for (const sentence of sentences) {
|
|
5318
|
+
const wordCount = sentence.split(/\s+/).length;
|
|
5319
|
+
if (pendingShort) {
|
|
5320
|
+
sections.push(`${pendingShort} ${sentence}`);
|
|
5321
|
+
pendingShort = "";
|
|
5322
|
+
} else if (wordCount < 5 && sections.length < sentences.length - 1) {
|
|
5323
|
+
pendingShort = sentence;
|
|
5324
|
+
} else {
|
|
5325
|
+
sections.push(sentence);
|
|
5326
|
+
}
|
|
5327
|
+
}
|
|
5328
|
+
if (pendingShort) {
|
|
5329
|
+
if (sections.length > 0) {
|
|
5330
|
+
sections[sections.length - 1] += ` ${pendingShort}`;
|
|
5331
|
+
} else {
|
|
5332
|
+
sections.push(pendingShort);
|
|
5333
|
+
}
|
|
5334
|
+
}
|
|
5335
|
+
return sections;
|
|
5336
|
+
}
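// calculateSectionTiming: prefer character-level TTS timestamps when the provider returned them;
// otherwise distribute the total voiceover duration across sections in proportion to word count.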
|
|
5337
|
+
function calculateSectionTiming(sections, totalDuration, fps = DEFAULT_FPS, timestamps) {
|
|
5338
|
+
if (timestamps && timestamps.characters.length > 0) {
|
|
5339
|
+
return calculateSectionTimingFromTimestamps(sections, timestamps, fps);
|
|
5340
|
+
}
|
|
5341
|
+
const totalWords = sections.reduce((sum, s) => sum + s.split(/\s+/).length, 0);
|
|
5342
|
+
let currentTime = 0;
|
|
5343
|
+
return sections.map((text, index) => {
|
|
5344
|
+
const wordCount = text.split(/\s+/).length;
|
|
5345
|
+
const proportion = wordCount / totalWords;
|
|
5346
|
+
const durationInSeconds = totalDuration * proportion;
|
|
5347
|
+
const durationInFrames = Math.round(durationInSeconds * fps);
|
|
5348
|
+
const section = {
|
|
5349
|
+
id: index + 1,
|
|
5350
|
+
text,
|
|
5351
|
+
wordCount,
|
|
5352
|
+
startTime: currentTime,
|
|
5353
|
+
endTime: currentTime + durationInSeconds,
|
|
5354
|
+
durationInSeconds,
|
|
5355
|
+
durationInFrames
|
|
5356
|
+
};
|
|
5357
|
+
currentTime += durationInSeconds;
|
|
5358
|
+
return section;
|
|
5359
|
+
});
|
|
5360
|
+
}
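// calculateSectionTimingFromTimestamps: walk the character-level timestamp arrays, mapping each
// section's text span to its first and last non-whitespace characters to get real start/end times.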
|
|
5361
|
+
function calculateSectionTimingFromTimestamps(sections, timestamps, fps) {
|
|
5362
|
+
const { characters, characterStartTimesSeconds, characterEndTimesSeconds } = timestamps;
|
|
5363
|
+
const fullText = characters.join("");
|
|
5364
|
+
const results = [];
|
|
5365
|
+
let charIndex = 0;
|
|
5366
|
+
for (let i = 0; i < sections.length; i++) {
|
|
5367
|
+
const sectionText = sections[i];
|
|
5368
|
+
const sectionLength = sectionText.length;
|
|
5369
|
+
while (charIndex < characters.length && characters[charIndex].match(/^\s*$/)) {
|
|
5370
|
+
charIndex++;
|
|
5371
|
+
}
|
|
5372
|
+
const startCharIndex = charIndex;
|
|
5373
|
+
const startTime = characterStartTimesSeconds[startCharIndex] || 0;
|
|
5374
|
+
charIndex += sectionLength;
|
|
5375
|
+
let endCharIndex = charIndex - 1;
|
|
5376
|
+
while (endCharIndex > startCharIndex && characters[endCharIndex]?.match(/^\s*$/)) {
|
|
5377
|
+
endCharIndex--;
|
|
5378
|
+
}
|
|
5379
|
+
const endTime = characterEndTimesSeconds[Math.min(endCharIndex, characterEndTimesSeconds.length - 1)] || startTime + 1;
|
|
5380
|
+
const durationInSeconds = endTime - startTime;
|
|
5381
|
+
const durationInFrames = Math.round(durationInSeconds * fps);
|
|
5382
|
+
results.push({
|
|
5383
|
+
id: i + 1,
|
|
5384
|
+
text: sectionText,
|
|
5385
|
+
wordCount: sectionText.split(/\s+/).length,
|
|
5386
|
+
startTime,
|
|
5387
|
+
endTime,
|
|
5388
|
+
durationInSeconds,
|
|
5389
|
+
durationInFrames
|
|
5390
|
+
});
|
|
5391
|
+
}
|
|
5392
|
+
return results;
|
|
5393
|
+
}
|
|
5394
|
+
async function downloadFile3(url, outputPath) {
|
|
5395
|
+
if (url.startsWith("data:")) {
|
|
5396
|
+
const matches = url.match(/^data:[^;]+;base64,(.+)$/);
|
|
5397
|
+
if (!matches) {
|
|
5398
|
+
throw new Error("Invalid data URL format");
|
|
5399
|
+
}
|
|
5400
|
+
const buffer2 = Buffer.from(matches[1], "base64");
|
|
5401
|
+
await writeFile5(outputPath, buffer2);
|
|
5402
|
+
return;
|
|
5403
|
+
}
|
|
5404
|
+
const response = await fetch(url);
|
|
5405
|
+
if (!response.ok) {
|
|
5406
|
+
throw new Error(`Failed to download: ${response.status}`);
|
|
5407
|
+
}
|
|
5408
|
+
const buffer = await response.arrayBuffer();
|
|
5409
|
+
await writeFile5(outputPath, Buffer.from(buffer));
|
|
5410
|
+
}
|
|
5411
|
+
function getExtension(url) {
|
|
5412
|
+
try {
|
|
5413
|
+
const urlObj = new URL(url);
|
|
5414
|
+
const pathname = urlObj.pathname;
|
|
5415
|
+
const ext = pathname.split(".").pop()?.toLowerCase();
|
|
5416
|
+
if (ext && ["jpg", "jpeg", "png", "gif", "webp"].includes(ext)) {
|
|
5417
|
+
return ext;
|
|
5418
|
+
}
|
|
5419
|
+
} catch {
|
|
5420
|
+
}
|
|
5421
|
+
return "jpg";
|
|
5422
|
+
}
|
|
5423
|
+
var createCommand2 = new Command19("create").description("Create video assets (voiceover, music, images)").option("-s, --script <text>", "Narration script text").option("--script-file <path>", "Path to script file").option("-t, --topic <text>", "Topic for image search (inferred from script if not provided)").option("-d, --duration <seconds>", "Target duration (auto-calculated from script if not set)").option("-v, --voice <name>", "TTS voice (Kore, Puck, Rachel, alloy)", "Kore").option("-m, --music-prompt <text>", "Music description (auto-generated if not provided)").option("-n, --num-images <number>", "Number of images to search/download", "5").option("-o, --output <dir>", "Output directory", "./public").option("-f, --format <format>", "Output format: human, json, quiet", "human").action(async (options) => {
|
|
5424
|
+
const format = options.format;
|
|
5425
|
+
const spinner = format === "human" ? ora12("Initializing...").start() : null;
|
|
5426
|
+
try {
|
|
5427
|
+
let script = options.script;
|
|
5428
|
+
if (options.scriptFile) {
|
|
5429
|
+
try {
|
|
5430
|
+
script = await readFile2(options.scriptFile, "utf-8");
|
|
5431
|
+
} catch (err) {
|
|
5432
|
+
spinner?.stop();
|
|
5433
|
+
error(`Failed to read script file: ${err instanceof Error ? err.message : "Unknown error"}`);
|
|
5434
|
+
process.exit(EXIT_CODES.INVALID_INPUT);
|
|
5435
|
+
}
|
|
5436
|
+
}
|
|
5437
|
+
if (!script || script.trim().length === 0) {
|
|
5438
|
+
spinner?.stop();
|
|
5439
|
+
error("Either --script or --script-file is required");
|
|
5440
|
+
process.exit(EXIT_CODES.INVALID_INPUT);
|
|
5441
|
+
}
|
|
5442
|
+
script = script.trim();
|
|
5443
|
+
const topic = options.topic || script.split(".")[0].slice(0, 50);
|
|
5444
|
+
const numImages = parseInt(options.numImages, 10);
|
|
5445
|
+
if (isNaN(numImages) || numImages < 1 || numImages > 20) {
|
|
5446
|
+
spinner?.stop();
|
|
5447
|
+
error("Number of images must be between 1 and 20");
|
|
5448
|
+
process.exit(EXIT_CODES.INVALID_INPUT);
|
|
5449
|
+
}
|
|
5450
|
+
const audioDir = join2(options.output, "audio");
|
|
5451
|
+
const imagesDir = join2(options.output, "images");
|
|
5452
|
+
if (spinner) spinner.text = "Creating directories...";
|
|
5453
|
+
await mkdir(audioDir, { recursive: true });
|
|
5454
|
+
await mkdir(imagesDir, { recursive: true });
|
|
5455
|
+
let totalCost = 0;
|
|
5456
|
+
if (spinner) spinner.text = "Generating voiceover...";
|
|
5457
|
+
const ttsResult = await generateSpeech({
|
|
5458
|
+
text: script,
|
|
5459
|
+
options: { voice: options.voice }
|
|
5460
|
+
});
|
|
5461
|
+
const voiceoverPath = join2(audioDir, `voiceover.${ttsResult.format}`);
|
|
5462
|
+
await writeFile5(voiceoverPath, ttsResult.audioData);
|
|
5463
|
+
totalCost += ttsResult.cost;
|
|
5464
|
+
const voiceoverInfo = {
|
|
5465
|
+
path: `audio/voiceover.${ttsResult.format}`,
|
|
5466
|
+
duration: ttsResult.duration,
|
|
5467
|
+
voice: options.voice,
|
|
5468
|
+
provider: ttsResult.provider,
|
|
5469
|
+
cost: ttsResult.cost,
|
|
5470
|
+
timestamps: ttsResult.timestamps
|
|
5471
|
+
// Include for word-level sync
|
|
5472
|
+
};
|
|
5473
|
+
if (format === "human") {
|
|
5474
|
+
spinner?.stop();
|
|
5475
|
+
success(`Voiceover: ${voiceoverPath} (${ttsResult.duration.toFixed(1)}s)`);
|
|
5476
|
+
spinner?.start();
|
|
5477
|
+
}
|
|
5478
|
+
if (spinner) spinner.text = "Analyzing script sections...";
|
|
5479
|
+
const sectionTexts = parseScriptIntoSections(script);
|
|
5480
|
+
const sections = calculateSectionTiming(sectionTexts, ttsResult.duration, DEFAULT_FPS, ttsResult.timestamps);
|
|
5481
|
+
if (format === "human") {
|
|
5482
|
+
spinner?.stop();
|
|
5483
|
+
const timingSource = ttsResult.timestamps ? "TTS timestamps" : "word estimation";
|
|
5484
|
+
success(`Sections: ${sections.length} sections (timing from ${timingSource})`);
|
|
5485
|
+
spinner?.start();
|
|
5486
|
+
}
     5487 | + const musicDuration = Math.min(30, Math.ceil(ttsResult.duration) + 5);
     5488 | + const musicPrompt = options.musicPrompt || "uplifting background music, positive energy";
     5489 | + if (spinner) spinner.text = "Generating music...";
     5490 | + let musicResult = await generateMusic({
     5491 | + prompt: musicPrompt,
     5492 | + duration: musicDuration
     5493 | + });
     5494 | + if (musicResult.status !== "completed" && musicResult.status !== "failed") {
     5495 | + if (spinner) spinner.text = `Processing music (ID: ${musicResult.requestId})...`;
     5496 | + musicResult = await pollForCompletion(
     5497 | + () => checkMusicStatus(musicResult.requestId),
     5498 | + 60,
     5499 | + 2e3
     5500 | + );
     5501 | + }
     5502 | + if (musicResult.status === "failed") {
     5503 | + spinner?.stop();
     5504 | + error(`Music generation failed: ${musicResult.error || "Unknown error"}`);
     5505 | + process.exit(EXIT_CODES.GENERAL_ERROR);
     5506 | + }
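Music generation is asynchronous: if the first response is neither completed nor failed, the command polls `checkMusicStatus` up to 60 times at 2-second intervals. `pollForCompletion` itself is defined elsewhere in the bundle; a generic helper consistent with this call site (signature inferred from the arguments, so treat the details as assumptions) might look like:

    // Sketch only: a polling helper consistent with the call site above
    // (a status-check function, a maximum attempt count, and a delay in ms).
    // The package's actual pollForCompletion may differ.
    async function pollForCompletionSketch<T extends { status: string }>(
      check: () => Promise<T>,
      maxAttempts = 60,
      intervalMs = 2000
    ): Promise<T> {
      let last: T | undefined;
      for (let attempt = 0; attempt < maxAttempts; attempt++) {
        last = await check();
        if (last.status === "completed" || last.status === "failed") return last;
        await new Promise((resolve) => setTimeout(resolve, intervalMs));
      }
      throw new Error(`Timed out after ${maxAttempts} attempts (last status: ${last?.status ?? "unknown"})`);
    }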
     5507 | + const musicPath = join2(audioDir, "music.mp3");
     5508 | + if (musicResult.audioUrl) {
     5509 | + await downloadFile3(musicResult.audioUrl, musicPath);
     5510 | + }
     5511 | + totalCost += musicResult.cost || 0;
     5512 | + const musicInfo = {
     5513 | + path: "audio/music.mp3",
     5514 | + duration: musicResult.duration || musicDuration,
     5515 | + prompt: musicPrompt,
     5516 | + cost: musicResult.cost || 0
     5517 | + };
     5518 | + if (format === "human") {
     5519 | + spinner?.stop();
     5520 | + success(`Music: ${musicPath} (${musicInfo.duration}s)`);
     5521 | + spinner?.start();
     5522 | + }
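As with the TTS result, the shape of the music job object can be read off its usage in this hunk; the interface below is inferred from those reads and is not the package's declared type.

    // Inferred from usage only; field types are assumptions.
    interface MusicResultSketch {
      status: string;    // "completed", "failed", or an in-progress value
      requestId: string; // used to poll checkMusicStatus
      audioUrl?: string; // downloaded to audio/music.mp3 when present
      duration?: number; // seconds; falls back to the requested musicDuration
      cost?: number;     // USD; defaults to 0 when absent
      error?: string;    // surfaced when status === "failed"
    }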
     5523 | + if (spinner) spinner.text = "Searching for images...";
     5524 | + const imageResults = await searchImages({
     5525 | + query: topic,
     5526 | + options: {
     5527 | + maxResults: numImages,
     5528 | + size: "large",
     5529 | + safeSearch: true
     5530 | + }
     5531 | + });
     5532 | + const allImages = imageResults.data.results.flatMap(
     5533 | + (providerResult) => providerResult.results.map((img) => ({
     5534 | + ...img,
     5535 | + provider: providerResult.providerName
     5536 | + }))
     5537 | + );
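The image search returns results grouped by provider, and the command flattens them into a single list while stamping each image with its provider name. A sketch of the response shape implied by that flattening (inferred from usage; the real item type may carry more fields):

    // Inferred from usage only.
    interface ImageSearchResponseSketch {
      data: {
        totalCost: number;      // added to the running totalCost
        results: Array<{
          providerName: string; // copied onto each image as `provider`
          results: Array<{
            url: string;
            width: number;
            height: number;
          }>;
        }>;
      };
    }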
     5538 | + totalCost += imageResults.data.totalCost;
     5539 | + const downloadedImages = [];
     5540 | + for (let i = 0; i < Math.min(allImages.length, numImages); i++) {
     5541 | + const img = allImages[i];
     5542 | + const ext = getExtension(img.url);
     5543 | + const filename = `scene-${i + 1}.${ext}`;
     5544 | + const imagePath = join2(imagesDir, filename);
     5545 | + if (spinner) spinner.text = `Downloading image ${i + 1}/${Math.min(allImages.length, numImages)}...`;
     5546 | + try {
     5547 | + await downloadFile3(img.url, imagePath);
     5548 | + downloadedImages.push({
     5549 | + path: `images/${filename}`,
     5550 | + url: img.url,
     5551 | + width: img.width,
     5552 | + height: img.height,
     5553 | + query: topic
     5554 | + });
     5555 | + } catch (err) {
     5556 | + if (format === "human") {
     5557 | + spinner?.stop();
     5558 | + warn(`Failed to download image ${i + 1}: ${err instanceof Error ? err.message : "Unknown error"}`);
     5559 | + spinner?.start();
     5560 | + }
     5561 | + }
     5562 | + }
     5563 | + if (format === "human") {
     5564 | + spinner?.stop();
     5565 | + success(`Images: Downloaded ${downloadedImages.length} images to ${imagesDir}`);
     5566 | + spinner?.start();
     5567 | + }
     5568 | + const sectionsWithImages = sections.map((section, index) => {
     5569 | + const imageIndex = index % downloadedImages.length;
     5570 | + return {
     5571 | + ...section,
     5572 | + imagePath: downloadedImages[imageIndex]?.path
     5573 | + };
     5574 | + });
     5575 | + if (spinner) spinner.text = "Writing manifest...";
     5576 | + const totalDurationInFrames = Math.round(ttsResult.duration * DEFAULT_FPS);
     5577 | + const manifest = {
     5578 | + topic,
     5579 | + script,
     5580 | + voiceover: voiceoverInfo,
     5581 | + music: musicInfo,
     5582 | + images: downloadedImages,
     5583 | + sections: sectionsWithImages,
     5584 | + totalDurationInFrames,
     5585 | + fps: DEFAULT_FPS,
     5586 | + totalCost,
     5587 | + createdAt: (/* @__PURE__ */ new Date()).toISOString()
     5588 | + };
     5589 | + const manifestPath = join2(options.output, "video-manifest.json");
     5590 | + await writeFile5(manifestPath, JSON.stringify(manifest, null, 2));
     5591 | + spinner?.stop();
     5592 | + if (format === "json") {
     5593 | + printJson(manifest);
     5594 | + return;
     5595 | + }
     5596 | + if (format === "quiet") {
     5597 | + console.log(manifestPath);
     5598 | + return;
     5599 | + }
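Everything generated above is recorded in video-manifest.json inside the output directory, alongside audio/ and images/. Collecting the fields written into `manifest` gives the following sketch of the file's shape; it is inferred from this hunk rather than taken from the package's type definitions, and per-section fields beyond durationInFrames and imagePath are not visible here.

    // Shape of video-manifest.json as assembled above; a sketch, not a declared type.
    interface VideoManifestSketch {
      topic: string;
      script: string;
      voiceover: {
        path: string;       // "audio/voiceover.<format>"
        duration: number;
        voice: string;
        provider: string;
        cost: number;
        timestamps?: unknown;
      };
      music: {
        path: string;       // "audio/music.mp3"
        duration: number;
        prompt: string;
        cost: number;
      };
      images: Array<{ path: string; url: string; width: number; height: number; query: string }>;
      sections: Array<{ durationInFrames: number; imagePath?: string }>; // plus timing/text fields
      totalDurationInFrames: number;
      fps: number;
      totalCost: number;
      createdAt: string;    // ISO timestamp
    }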
3260 5600 |   console.log();
3261      | -
3262      | - console.log(
     5601 | + success("Video assets created successfully!");
     5602 | + console.log();
     5603 | + info(`Topic: ${topic}`);
     5604 | + info(`Voiceover: ${voiceoverInfo.path} (${voiceoverInfo.duration.toFixed(1)}s, ${voiceoverInfo.voice})`);
     5605 | + info(`Music: ${musicInfo.path} (${musicInfo.duration}s)`);
     5606 | + info(`Sections: ${sections.length} (${totalDurationInFrames} frames at ${DEFAULT_FPS}fps)`);
     5607 | + info(`Images: ${downloadedImages.length} downloaded`);
     5608 | + info(`Manifest: ${manifestPath}`);
     5609 | + console.log();
     5610 | + info(`Total cost: $${totalCost.toFixed(4)}`);
     5611 | + console.log();
     5612 | + info("Next steps:");
     5613 | + info(" 1. Create Remotion scenes matching section timings in manifest");
     5614 | + info(" 2. Each section has exact durationInFrames - use these for sync");
     5615 | + info(" 3. Run: npx remotion render FullVideo out/video.mp4");
     5616 | + } catch (err) {
     5617 | + spinner?.stop();
     5618 | + error(err instanceof Error ? err.message : "Unknown error");
     5619 | + process.exit(EXIT_CODES.GENERAL_ERROR);
3263 5620 |   }
3264      | - console.log();
3265      | - });
3266      | - skillCommand.command("show").description("Display the skill content").action(() => {
3267      | - console.log(generateSkillContent(brand));
3268 5621 |   });
3269      | -
3270      | - const {
3271      | - const
3272      | -
3273      | -
3274      | -
3275      | -
3276      | -
3277      | -
3278      | -
3279      | - } catch {
     5622 | + var searchCommand2 = new Command19("search").description("Search for stock videos").argument("<query>", "Search query").option("-n, --max-results <count>", "Maximum number of results", "10").option("-o, --orientation <type>", "Video orientation: landscape, portrait, square, any", "any").option("-l, --license <type>", "License type: free, premium, any", "any").option("-f, --format <format>", "Output format: human, json, quiet", "human").action(async (query, options) => {
     5623 | + const { maxResults, orientation, license, format } = options;
     5624 | + const spinner = format === "human" ? ora12("Searching for videos...").start() : null;
     5625 | + try {
     5626 | + const result = await searchVideos({
     5627 | + query,
     5628 | + options: {
     5629 | + maxResults: parseInt(maxResults, 10),
     5630 | + orientation,
     5631 | + license
3280 5632 |   }
     5633 | + });
     5634 | + spinner?.stop();
     5635 | + const allVideos = result.data.results.flatMap((provider) => provider.results);
     5636 | + if (format === "json") {
     5637 | + printJson(result);
     5638 | + return;
     5639 | + }
     5640 | + if (format === "quiet") {
     5641 | + allVideos.forEach((video) => {
     5642 | + console.log(video.previewUrl || video.thumbnailUrl);
     5643 | + });
     5644 | + return;
3281 5645 |   }
     5646 | + if (allVideos.length === 0) {
     5647 | + info("No videos found");
     5648 | + return;
     5649 | + }
     5650 | + success(`Found ${allVideos.length} videos for "${query}"`);
     5651 | + console.log();
     5652 | + allVideos.forEach((video, index) => {
     5653 | + console.log(`[${index + 1}] ${video.title}`);
     5654 | + console.log(` URL: ${video.previewUrl || video.thumbnailUrl}`);
     5655 | + console.log(` Duration: ${video.duration}s | Size: ${video.width}x${video.height}`);
     5656 | + console.log(` Provider: ${video.provider}`);
     5657 | + console.log();
     5658 | + });
     5659 | + info(`Total cost: $${result.data.totalCost.toFixed(4)}`);
     5660 | + } catch (err) {
     5661 | + spinner?.stop();
     5662 | + error(err instanceof Error ? err.message : "Unknown error");
     5663 | + process.exit(EXIT_CODES.GENERAL_ERROR);
3282 5664 |   }
3283      | -
3284      | -
3285      | -
3286      | -
3287      | -
3288      | -
     5665 | + });
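The new `search` subcommand declares its flags inline (`-n/--max-results`, `-o/--orientation`, `-l/--license`, `-f/--format`), and its three output modes mirror the create command: `json` prints the raw result, `quiet` prints one preview or thumbnail URL per line, and `human` prints the numbered listing. The per-video fields it prints imply at least the following shape (inferred from usage only):

    // Inferred from the human-format output above; real items may carry more fields.
    interface StockVideoSketch {
      title: string;
      previewUrl?: string;
      thumbnailUrl?: string;
      duration: number; // seconds
      width: number;
      height: number;
      provider: string;
    }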
     5666 | + var initCommand = new Command19("init").description("Create a new Remotion video project from template").argument("<name>", "Project directory name").option("-t, --template <repo>", "GitHub repo (user/repo)", DEFAULT_TEMPLATE).option("--no-install", "Skip pnpm install").option("-f, --format <format>", "Output format: human, json, quiet", "human").action(async (name, options) => {
     5667 | + const format = options.format;
     5668 | + const spinner = format === "human" ? ora12("Initializing video project...").start() : null;
     5669 | + try {
     5670 | + const targetDir = resolve4(process.cwd(), name);
     5671 | + try {
     5672 | + await access(targetDir);
     5673 | + spinner?.stop();
     5674 | + error(`Directory "${name}" already exists`);
     5675 | + process.exit(EXIT_CODES.INVALID_INPUT);
     5676 | + } catch {
     5677 | + }
     5678 | + if (spinner) spinner.text = `Downloading template from ${options.template}...`;
     5679 | + try {
     5680 | + execSync2(`npx --yes degit ${options.template} "${targetDir}"`, {
     5681 | + stdio: "pipe"
     5682 | + });
     5683 | + } catch {
     5684 | + if (spinner) spinner.text = "Cloning template...";
     5685 | + execSync2(`git clone --depth 1 https://github.com/${options.template}.git "${targetDir}"`, {
     5686 | + stdio: "pipe"
     5687 | + });
     5688 | + await rm(join2(targetDir, ".git"), { recursive: true, force: true });
     5689 | + }
     5690 | + if (format === "human") {
     5691 | + spinner?.stop();
     5692 | + success(`Template downloaded to ${name}/`);
     5693 | + spinner?.start();
     5694 | + }
     5695 | + if (options.install) {
     5696 | + if (spinner) spinner.text = "Installing dependencies...";
     5697 | + await new Promise((resolvePromise, reject) => {
     5698 | + const child = spawn("pnpm", ["install"], {
     5699 | + cwd: targetDir,
     5700 | + stdio: "pipe",
     5701 | + shell: true
     5702 | + });
     5703 | + child.on("close", (code) => {
     5704 | + if (code === 0) {
     5705 | + resolvePromise();
     5706 | + } else {
     5707 | + reject(new Error(`pnpm install failed with code ${code}`));
     5708 | + }
     5709 | + });
     5710 | + child.on("error", reject);
     5711 | + });
     5712 | + if (format === "human") {
     5713 | + spinner?.stop();
     5714 | + success("Dependencies installed");
     5715 | + spinner?.start();
     5716 | + }
     5717 | + }
     5718 | + spinner?.stop();
     5719 | + if (format === "json") {
     5720 | + printJson({
     5721 | + name,
     5722 | + path: targetDir,
     5723 | + template: options.template,
     5724 | + installed: options.install
     5725 | + });
     5726 | + return;
     5727 | + }
     5728 | + if (format === "quiet") {
     5729 | + console.log(targetDir);
     5730 | + return;
     5731 | + }
     5732 | + console.log();
     5733 | + success(`Video project "${name}" created successfully!`);
     5734 | + console.log();
     5735 | + info("Next steps:");
     5736 | + info(` cd ${name}`);
     5737 | + if (!options.install) {
     5738 | + info(" pnpm install");
     5739 | + }
     5740 | + info(" pnpm dev    # Preview in Remotion Studio");
     5741 | + info(" cc video create ...  # Generate assets to public/");
     5742 | + info(" pnpm render  # Render final video");
     5743 | + } catch (err) {
     5744 | + spinner?.stop();
     5745 | + error(err instanceof Error ? err.message : "Unknown error");
     5746 | + process.exit(EXIT_CODES.GENERAL_ERROR);
3289 5747 |   }
3290      | - console.log();
3291 5748 |   });
3292      | -
3293      | - const skillFile = join(skillPath, "SKILL.md");
3294      | - mkdirSync(skillPath, { recursive: true });
3295      | - writeFileSync(skillFile, content, "utf-8");
3296      | - }
     5749 | + var videoCommand = new Command19("video").description("Video asset generation commands").addCommand(initCommand).addCommand(createCommand2).addCommand(searchCommand2);
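The three subcommands are then grouped under `cc video` (`init`, `create`, `search`). The create command's "Next steps" hints point at consuming video-manifest.json from a Remotion project; a minimal sketch of that consumption follows, assuming a project like the one `cc video init` scaffolds (the template repository itself is not shown in this diff), with the generated assets copied into public/ and the manifest handed to the composition as a prop. The component name FullVideo echoes the render hint printed above; only the durationInFrames and imagePath section fields written by the create command are relied on, and everything else here is a hypothetical illustration.

    import React from "react";
    import { AbsoluteFill, Audio, Img, Sequence, staticFile } from "remotion";

    type Section = { durationInFrames: number; imagePath?: string };
    type VideoManifest = {
      voiceover: { path: string };
      music: { path: string };
      sections: Section[];
    };

    // Sketch only: the manifest is assumed to be passed in as a prop (for
    // example via defaultProps on the <Composition>), with the audio/ and
    // images/ paths it references resolving against the project's public/.
    export const FullVideo: React.FC<{ manifest: VideoManifest }> = ({ manifest }) => {
      let from = 0;
      return (
        <AbsoluteFill>
          <Audio src={staticFile(manifest.voiceover.path)} />
          <Audio src={staticFile(manifest.music.path)} volume={0.2} />
          {manifest.sections.map((section, i) => {
            // One <Sequence> per section; start frames accumulate from the
            // durationInFrames values recorded in the manifest.
            const sequence = (
              <Sequence key={i} from={from} durationInFrames={section.durationInFrames}>
                {section.imagePath ? <Img src={staticFile(section.imagePath)} /> : null}
              </Sequence>
            );
            from += section.durationInFrames;
            return sequence;
          })}
        </AbsoluteFill>
      );
    };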
3297 5750 |
3298 5751 |   // src/index.ts
3299      | - var VERSION = "0.1.
3300      | - var program = new
     5752 | + var VERSION = "0.1.6";
     5753 | + var program = new Command20();
3301 5754 |   var cmdName = brand.commands[0];
3302 5755 |   program.name(cmdName).description(brand.description).version(VERSION, "-v, --version", "Show version number").option("--debug", "Enable debug logging").option("--no-color", "Disable colored output").configureOutput({
3303 5756 |   outputError: (str, write) => {
@@ -3317,6 +5770,11 @@ program.addCommand(brandingCommand);
3317 5770 |   program.addCommand(ideasCommand);
3318 5771 |   program.addCommand(whoamiCommand);
3319 5772 |   program.addCommand(skillCommand);
     5773 | + program.addCommand(ttsCommand);
     5774 | + program.addCommand(musicCommand);
     5775 | + program.addCommand(mixAudioCommand);
     5776 | + program.addCommand(imageCommand);
     5777 | + program.addCommand(videoCommand);
3320 5778 |   var deriveCommand = buildDeriveCommand();
3321 5779 |   if (deriveCommand.commands.length > 0) {
3322 5780 |   program.addCommand(deriveCommand);