@conceptcraft/mindframes 0.1.5 → 0.1.6

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.js CHANGED
@@ -946,12 +946,21 @@ async function generateSpeech(ttsRequest) {
  const cost = parseFloat(response.headers.get("X-Cost-USD") || "0");
  const provider = response.headers.get("X-Provider") || "unknown";
  const format = response.headers.get("X-Audio-Format") || "mp3";
+ let timestamps;
+ const timestampsHeader = response.headers.get("X-Timestamps");
+ if (timestampsHeader) {
+ try {
+ timestamps = JSON.parse(timestampsHeader);
+ } catch {
+ }
+ }
  return {
  audioData,
  duration,
  cost,
  provider,
- format
+ format,
+ timestamps
  };
  }
  async function getVoices() {
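The functional change in this hunk is that `generateSpeech` now returns an optional `timestamps` field, parsed from the `X-Timestamps` response header and left undefined when the header is missing or not valid JSON. A minimal sketch of how calling code might branch on the new field — the import path, the request shape, and the exact timestamp structure are assumptions inferred from this diff, not a documented API:

```ts
// Hypothetical consumer of the 0.1.6 generateSpeech() result. The import path and
// request fields are assumptions; only the result fields shown here appear in the diff.
import { generateSpeech } from "@conceptcraft/mindframes";

async function narrate(text: string) {
  const result = await generateSpeech({ text, voice: "Kore" });

  // New in 0.1.6: optional timing data parsed from the X-Timestamps header.
  if (result.timestamps) {
    // Assumed shape, mirroring the video-manifest example later in this diff:
    // { characters, characterStartTimesSeconds, characterEndTimesSeconds }
    const { characters, characterStartTimesSeconds } = result.timestamps;
    console.log(`"${characters[0]}" starts at ${characterStartTimesSeconds[0]}s`);
  } else {
    // Older servers, or an unparseable header, simply leave the field undefined.
    console.log("No timestamp data returned");
  }

  return result.audioData;
}
```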
@@ -1016,7 +1025,7 @@ async function pollForCompletion(checkFn, maxAttempts = 60, intervalMs = 2e3) {
  if (result.status === "completed" || result.status === "failed") {
  return result;
  }
- await new Promise((resolve4) => setTimeout(resolve4, intervalMs));
+ await new Promise((resolve5) => setTimeout(resolve5, intervalMs));
  }
  throw new ApiError("Operation timed out", 408, 1);
  }
@@ -1123,10 +1132,10 @@ function generateState() {
  async function findAvailablePort(start, end) {
  for (let port = start; port <= end; port++) {
  try {
- await new Promise((resolve4, reject) => {
+ await new Promise((resolve5, reject) => {
  const server = http.createServer();
  server.listen(port, () => {
- server.close(() => resolve4());
+ server.close(() => resolve5());
  });
  server.on("error", reject);
  });
@@ -1184,7 +1193,7 @@ async function exchangeCodeForTokens(tokenEndpoint, code, codeVerifier, redirect
  return response.json();
  }
  function startCallbackServer(port, expectedState) {
- return new Promise((resolve4, reject) => {
+ return new Promise((resolve5, reject) => {
  let timeoutId;
  let settled = false;
  const cleanup = () => {
@@ -1270,7 +1279,7 @@ function startCallbackServer(port, expectedState) {
  </html>
  `);
  cleanup();
- resolve4({ code, state });
+ resolve5({ code, state });
  });
  server.listen(port);
  process.once("SIGINT", onCancel);
@@ -2413,21 +2422,21 @@ Uploading ${options.file.length} file(s)...`));
  }
  });
  async function readStdin() {
- return new Promise((resolve4) => {
+ return new Promise((resolve5) => {
  let data = "";
  process.stdin.setEncoding("utf8");
  if (process.stdin.isTTY) {
- resolve4("");
+ resolve5("");
  return;
  }
  process.stdin.on("data", (chunk) => {
  data += chunk;
  });
  process.stdin.on("end", () => {
- resolve4(data.trim());
+ resolve5(data.trim());
  });
  setTimeout(() => {
- resolve4(data.trim());
+ resolve5(data.trim());
  }, 100);
  });
  }
@@ -3125,6 +3134,7 @@ import chalk12 from "chalk";
  import { mkdirSync, writeFileSync, existsSync as existsSync2 } from "fs";
  import { join } from "path";
  import { homedir } from "os";
+ import { execSync } from "child_process";
  function generateSkillContent(b) {
  const cmd2 = b.name;
  const pkg = b.packageName;
@@ -3132,34 +3142,105 @@ function generateSkillContent(b) {
  const name = b.displayName;
  return `---
  name: ${cmd2}
- description: Create AI-powered presentations from code, documentation, files, or any content. Use when the user wants to generate slides, presentations, or decks about their project, codebase, research, or ideas.
+ description: Create AI-powered presentations and videos. Use for slides, decks, video content, voiceovers, and music generation.
+ metadata:
+ tags: presentations, video, tts, music, animation, remotion
+ video:
+ description: End-to-end AI video creation workflow with TTS voiceover and music generation. Use this skill when users want to create videos, promotional content, explainers, tourism videos, product demos, or any video content from an idea or topic. Handles the complete workflow - research, script writing, asset gathering, audio generation (voiceover + music), and orchestrates video creation. Use together with remotion-best-practices skill for Remotion-specific patterns. Triggers on requests like "create a video about X", "make a promotional video", "build a video for Y", or any video content creation task.
  ---

  # ${name} CLI

- Create professional presentations directly from your terminal. The CLI generates AI-powered slides from any context you provide - text, files, URLs, or piped content.
+ Create professional presentations and videos directly from your terminal.

  ## Prerequisites

  \`\`\`bash
- # Install globally
  npm install -g ${pkg}
+ ${cmd2} login # Authenticate (opens browser)
+ ${cmd2} whoami # Verify setup
+ \`\`\`
+
+ ## Rules
+
+ Read these for detailed usage:
+
+ - [rules/presentations.md](rules/presentations.md) - Creating AI-powered presentations
+ - [rules/video.md](rules/video.md) - Video creation workflow and commands
+ - [rules/motion-standards.md](rules/motion-standards.md) - Animation quality standards
+ - [rules/micro-interactions.md](rules/micro-interactions.md) - Animation components and patterns
+ - **remotion-best-practices** skill (auto-installed) - Remotion-specific patterns
+
+ ## Quick Reference
+
+ ### Presentations
+
+ \`\`\`bash
+ # Create from context
+ cat README.md | ${cmd2} create "Project Overview"
+
+ # With files
+ ${cmd2} create "Product Demo" --file ./deck.pptx --file ./logo.png
+
+ # With options
+ ${cmd2} create "API Docs" --slides 8 --tone educational --goal inform
+ \`\`\`

- # Configure API key (get from ${url}/settings/api-keys)
- ${cmd2} config init
+ ### Video Creation
+
+ \`\`\`bash
+ # Scaffold project
+ ${cmd2} video init my-video
+
+ # Generate all assets (voiceover, music, images)
+ ${cmd2} video create --script "Your narration..." --output ./public
+
+ # Search for stock content
+ ${cmd2} image search -q "tropical beach" -n 5
+ ${cmd2} video search "tech workspace" -n 5
+ \`\`\`
+
+ ### Audio Generation
+
+ \`\`\`bash
+ # Text-to-speech
+ ${cmd2} tts generate -t "Narration text" -o voice.wav --voice Kore
+
+ # Music generation
+ ${cmd2} music generate -p "uplifting corporate" -d 30 -o music.mp3
+
+ # Mix audio into video
+ ${cmd2} mix create --video video.mp4 --voice voice.wav --music music.mp3 -o final.mp4
  \`\`\`

- ## Core Workflow
+ ## Assets
+
+ Copy animation components from \`assets/animation-components.tsx\` for Remotion videos.
+
+ ## Asking Questions
+
+ When you need to ask the user for preferences (voice, music, duration, etc.), use the \`AskUserQuestion\` tool if available. This provides a better UX with selectable options. See \`rules/video.md\` for the question format.
+ `;
+ }
+ function generatePresentationsRule(b) {
+ const cmd2 = b.name;
+ const url = b.apiUrl;
+ return `---
+ name: presentations
+ description: Creating AI-powered presentations
+ ---
+
+ # Presentations
+
+ ## Workflow

  1. **Gather context** - Read relevant files, code, or documentation
  2. **Create presentation** - Pass context to \`${cmd2} create\`
  3. **Share URL** - Return the presentation link to the user

- ## Commands
-
- ### Create Presentation
+ ## Create Command

- Context is **required**. Provide it via one of these methods:
+ Context is **required**. Provide via:

  \`\`\`bash
  # Upload files (PDFs, PPTX, images, docs)
@@ -3171,19 +3252,19 @@ ${cmd2} create "Topic Title" --context "Key points, data, facts..."
  # From a text file
  ${cmd2} create "Topic Title" --context-file ./notes.md

- # Pipe content (auto-detected)
+ # Pipe content
  cat README.md | ${cmd2} create "Project Overview"

- # From URLs (scraped automatically)
+ # From URLs
  ${cmd2} create "Competitor Analysis" --sources https://example.com/report

- # Combine multiple sources
+ # Combine sources
  cat src/auth/*.ts | ${cmd2} create "Auth System" \\
  --file ./architecture.png \\
  --context "Focus on security patterns"
  \`\`\`

- ### Create Options
+ ## Options

  | Option | Description | Default |
  |--------|-------------|---------|
@@ -3193,174 +3274,1503 @@ cat src/auth/*.ts | ${cmd2} create "Auth System" \\
  | \`--amount <amount>\` | Density: \`minimal\`, \`concise\`, \`detailed\`, \`extensive\` | concise |
  | \`--audience <text>\` | Target audience | General Audience |
  | \`-g, --goal <type>\` | Purpose: \`inform\`, \`persuade\`, \`train\`, \`learn\`, \`entertain\`, \`report\` | - |
- | \`--custom-goal <text>\` | Custom goal description | - |
- | \`-f, --file <paths...>\` | Files to upload (PDF, PPTX, images, docs) | - |
+ | \`-f, --file <paths...>\` | Files to upload | - |
  | \`-l, --language <lang>\` | Output language | en |
- | \`-b, --brand <id>\` | Branding ID to apply | - |
- | \`-o, --output <format>\` | Output: \`human\`, \`json\`, \`quiet\` | human |
+ | \`-b, --brand <id>\` | Branding ID | - |

- ### Other Commands
+ ## Other Commands

  \`\`\`bash
- # Check authentication
- ${cmd2} whoami
+ ${cmd2} list # List presentations
+ ${cmd2} get <id-or-slug> # Get details
+ ${cmd2} export <id> -o deck.zip # Export to ZIP
+ ${cmd2} import ./deck.zip # Import presentation
+ ${cmd2} branding list # List brandings
+ ${cmd2} branding extract https://... # Extract branding from URL
+ \`\`\`
+
+ ## Output
+
+ \`\`\`
+ \u2713 Presentation created successfully
+
+ Title: Authentication System
+ Slides: 8
+ Generated in: 45s \xB7 12,500 tokens
+
+ Open: ${url}/en/view/presentations/auth-system-v1-abc123
+ \`\`\`
+ `;
+ }
3305
+ function generateVideoRule(b) {
3306
+ const cmd2 = b.name;
3307
+ return `---
3308
+ name: video
3309
+ description: Video creation workflow - project-based UI replication AND stock-based videos
3310
+ ---
3311
+
3312
+ # Video Creation
3313
+
3314
+ **Replicate the app's UI AS CLOSELY AS POSSIBLE - almost an exact copy.**
3315
+
3316
+ The video should look like the REAL app. Same layout. Same colors. Same buttons. Same everything. If someone watches the video and then opens the app, they should recognize it immediately.
3317
+
3318
+ ---
3319
+
3320
+ ## \u26D4 HARD RULES
3321
+
3322
+ 1. **NO GENERIC SHAPES** - Don't draw random rectangles. Replicate what the app actually looks like.
3323
+ 2. **NO MADE-UP CONTENT** - Don't invent "Finding 1: Performance improved 45%". Use real content from the app.
3324
+ 3. **READ BEFORE BUILDING** - Read the app's components to understand their visual structure before writing any code.
3325
+ 4. **MATCH THE BRAND** - Use exact colors from tailwind.config, exact fonts, exact visual style.
3326
+ 5. **ALWAYS FRESH PROJECT** - Delete existing video project, create new with \`${cmd2} video init\`.
3327
+
3328
+ ---
3329
+
3330
+ ## \u{1F534} PHASE 0: READ REFERENCES FIRST
3331
+
3332
+ **Before doing ANYTHING, read these files:**
3333
+
3334
+ 1. Read: rules/motion-standards.md (animation quality)
3335
+ 2. Read: rules/micro-interactions.md (animation patterns)
3336
+ 3. Read: rules/component-integration.md (patterns)
3337
+ 4. Read: rules/project-video-workflow.md (full workflow)
3338
+ 5. Skill: remotion-best-practices
3339
+
3340
+ ---
3341
+
3342
+ ## \u{1F3AF} TWO VIDEO MODES
3343
+
3344
+ ### Mode A: Project-Based Video (PREFERRED)
3345
+ Use when user has a project/app and wants to showcase it.
3346
+ - **Triggers:** "create video for my app", "product demo", "feature walkthrough", "promotional video for [project]"
3347
+ - **Approach:** Read components \u2192 replicate UI pixel-perfect \u2192 add animations
3348
+ - **Result:** Video looks IDENTICAL to the real app
3207
3349
 
3208
- # List presentations
3209
- ${cmd2} list
3210
- ${cmd2} list --format json
3350
+ ### Mode B: Stock-Based Video
3351
+ Use ONLY when user has NO project or explicitly wants stock content.
3352
+ - **Triggers:** "create a video about tourism", "make a generic explainer"
3353
+ - **Approach:** Use \`${cmd2} video create\` with stock images
3354
+ - **Result:** Generic video with stock imagery
3211
3355
 
3212
- # Get presentation details
3213
- ${cmd2} get <id-or-slug>
3356
+ **DEFAULT TO MODE A if user mentions their app/project.**
3214
3357
 
3215
- # Export to ZIP
3216
- ${cmd2} export <id-or-slug> -o presentation.zip
3358
+ ---
3217
3359
 
3218
- # Import presentation
3219
- ${cmd2} import ./presentation.zip
3360
+ ## Pre-Creation Questions
3220
3361
 
3221
- # Manage branding
3222
- ${cmd2} branding list
3223
- ${cmd2} branding extract https://company.com
3362
+ Before creating a video, use \`AskUserQuestion\` tool (if available) to ask:
3224
3363
 
3225
- # Install/manage this skill
3226
- ${cmd2} skill install
3227
- ${cmd2} skill show
3364
+ \`\`\`json
3365
+ {
3366
+ "questions": [
3367
+ {
3368
+ "question": "Which voice would you prefer for the narration?",
3369
+ "header": "Voice",
3370
+ "options": [
3371
+ { "label": "Kore (Recommended)", "description": "Female, professional voice - best for narration" },
3372
+ { "label": "Puck", "description": "Male, energetic voice - good for promos" },
3373
+ { "label": "Rachel", "description": "Female, calm voice" },
3374
+ { "label": "No voiceover", "description": "Music only, no narration" }
3375
+ ],
3376
+ "multiSelect": false
3377
+ },
3378
+ {
3379
+ "question": "What background music style fits your video?",
3380
+ "header": "Music",
3381
+ "options": [
3382
+ { "label": "Uplifting/positive", "description": "Energetic and inspiring" },
3383
+ { "label": "Corporate/professional", "description": "Modern, polished business feel" },
3384
+ { "label": "Cinematic/dramatic", "description": "Epic, impactful presentation" },
3385
+ { "label": "Calm ambient", "description": "Soft, subtle background" }
3386
+ ],
3387
+ "multiSelect": false
3388
+ },
3389
+ {
3390
+ "question": "How long should the video be?",
3391
+ "header": "Duration",
3392
+ "options": [
3393
+ { "label": "15 seconds", "description": "Quick teaser" },
3394
+ { "label": "30 seconds", "description": "Social media friendly" },
3395
+ { "label": "60 seconds", "description": "Standard length" }
3396
+ ],
3397
+ "multiSelect": false
3398
+ }
3399
+ ]
3400
+ }
3228
3401
  \`\`\`
3229
3402
 
3230
- ## Examples
3403
+ If \`AskUserQuestion\` tool is not available, ask these questions in text format.
3231
3404
 
3232
- ### Present a Codebase Feature
3405
+ ## Audio-First Workflow
3406
+
3407
+ **IMPORTANT:** This workflow ensures video and audio are always in sync. The CLI generates audio first, parses the script into sections, and calculates exact timing for each section. Scenes MUST use these timings.
3408
+
3409
+ ### Step 1: Write Script
3410
+
3411
+ Write narration for the target duration. Structure: Hook \u2192 Key points \u2192 CTA
3412
+
3413
+ Tip: ~2.5 words per second for natural pacing.
3414
+
3415
+ ### Step 2: Generate Assets (Audio-First)
3233
3416
 
3234
3417
  \`\`\`bash
3235
- # Read the relevant files and create presentation
3236
- cat src/lib/auth.ts src/lib/session.ts | ${cmd2} create "Authentication System" \\
3237
- --slides 8 --tone educational --audience "New developers" \\
3238
- --goal train
3418
+ ${cmd2} video create \\
3419
+ --script "Your narration script..." \\
3420
+ --topic "topic for image search" \\
3421
+ --voice Kore \\
3422
+ --music-prompt "uplifting corporate" \\
3423
+ --num-images 5 \\
3424
+ --output ./public
3425
+ \`\`\`
3426
+
3427
+ This generates:
3428
+ - \`public/audio/voiceover.wav\` - TTS voiceover (determines total duration)
3429
+ - \`public/audio/music.mp3\` - Background music (auto-matches voiceover length)
3430
+ - \`public/images/scene-*.jpg\` - Stock images
3431
+ - \`public/video-manifest.json\` - **Contains sections with exact TTS timestamps**
3432
+
3433
+ ### Step 3: Read Manifest Sections
3434
+
3435
+ The manifest includes a \`sections\` array with **exact timing from TTS character-level timestamps**:
3436
+
3437
+ \`\`\`json
3438
+ {
3439
+ "voiceover": {
3440
+ "path": "audio/voiceover.wav",
3441
+ "duration": 15.2,
3442
+ "timestamps": {
3443
+ "characters": ["P", "u", "e", "r", "t", "o", " ", ...],
3444
+ "characterStartTimesSeconds": [0, 0.05, 0.1, ...],
3445
+ "characterEndTimesSeconds": [0.05, 0.1, 0.15, ...]
3446
+ }
3447
+ },
3448
+ "sections": [
3449
+ {
3450
+ "id": 1,
3451
+ "text": "Puerto Rico. La Isla del Encanto.",
3452
+ "wordCount": 5,
3453
+ "startTime": 0,
3454
+ "endTime": 2.8,
3455
+ "durationInSeconds": 2.8,
3456
+ "durationInFrames": 84,
3457
+ "imagePath": "images/scene-1.jpg"
3458
+ },
3459
+ {
3460
+ "id": 2,
3461
+ "text": "Discover five hundred years of history.",
3462
+ "wordCount": 7,
3463
+ "startTime": 2.8,
3464
+ "endTime": 8.2,
3465
+ "durationInSeconds": 5.4,
3466
+ "durationInFrames": 162,
3467
+ "imagePath": "images/scene-2.jpg"
3468
+ }
3469
+ ],
3470
+ "totalDurationInFrames": 450,
3471
+ "fps": 30
3472
+ }
3473
+ \`\`\`
3474
+
3475
+ **Key points:**
3476
+ - Section timing is derived from actual TTS audio timestamps (not estimated)
3477
+ - \`voiceover.timestamps\` contains character-level timing for word-by-word animations
3478
+ - Video duration will always match voiceover duration exactly
3479
+
3480
+ ### Step 4: Create Scenes (Match Section Timing)
3481
+
3482
+ **CRITICAL:** Use \`durationInFrames\` from each section. This ensures audio/video sync.
3483
+
3484
+ \`\`\`tsx
3485
+ // Read manifest sections and create matching scenes
3486
+ import manifest from '../../public/video-manifest.json';
3487
+
3488
+ // Scene durations MUST match manifest sections
3489
+ export const SECTION_1_DURATION = manifest.sections[0].durationInFrames; // 84
3490
+ export const SECTION_2_DURATION = manifest.sections[1].durationInFrames; // 162
3491
+ // ... etc
3492
+
3493
+ export const FULL_VIDEO_DURATION = manifest.totalDurationInFrames; // 450
3494
+ \`\`\`
3495
+
3496
+ Example scene component:
3497
+
3498
+ \`\`\`tsx
3499
+ // src/remotion/scenes/Scene1.tsx
3500
+ import { AbsoluteFill, Img, staticFile, useCurrentFrame, useVideoConfig, spring } from "remotion";
3501
+ import manifest from '../../../public/video-manifest.json';
3502
+
3503
+ const section = manifest.sections[0];
3504
+ export const SCENE_1_DURATION = section.durationInFrames;
3505
+
3506
+ export const Scene1: React.FC = () => {
3507
+ const frame = useCurrentFrame();
3508
+ const { fps } = useVideoConfig();
3509
+ const progress = spring({ frame, fps, config: { damping: 15, stiffness: 100 } });
3510
+
3511
+ return (
3512
+ <AbsoluteFill>
3513
+ <Img src={staticFile(section.imagePath)} style={{ width: '100%', height: '100%', objectFit: 'cover' }} />
3514
+ <div style={{
3515
+ position: 'absolute',
3516
+ bottom: 100,
3517
+ left: 0,
3518
+ right: 0,
3519
+ textAlign: 'center',
3520
+ opacity: progress,
3521
+ transform: \`translateY(\${(1 - progress) * 20}px)\`,
3522
+ }}>
3523
+ <h1 style={{ color: 'white', fontSize: 60, textShadow: '2px 2px 8px rgba(0,0,0,0.8)' }}>
3524
+ {section.text}
3525
+ </h1>
3526
+ </div>
3527
+ </AbsoluteFill>
3528
+ );
3529
+ };
3239
3530
  \`\`\`
3240
3531
 
3241
- ### Technical Documentation with Diagrams
3532
+ ### Step 5: Update FullVideo.tsx
3533
+
3534
+ \`\`\`tsx
3535
+ import { AbsoluteFill, Series, Audio, staticFile, useCurrentFrame, interpolate } from "remotion";
3536
+ import manifest from '../../public/video-manifest.json';
3537
+ import { Scene1, SCENE_1_DURATION } from "./scenes/Scene1";
3538
+ import { Scene2, SCENE_2_DURATION } from "./scenes/Scene2";
3539
+ // ... import all scenes
3540
+
3541
+ export const FULL_VIDEO_DURATION = manifest.totalDurationInFrames;
3542
+
3543
+ const BackgroundMusic: React.FC = () => {
3544
+ const frame = useCurrentFrame();
3545
+ const fadeIn = interpolate(frame, [0, 10], [0, 1], { extrapolateRight: "clamp" });
3546
+ const fadeOut = interpolate(frame, [FULL_VIDEO_DURATION - 20, FULL_VIDEO_DURATION], [1, 0], { extrapolateLeft: "clamp" });
3547
+ return <Audio src={staticFile("audio/music.mp3")} volume={fadeIn * fadeOut * 0.25} />;
3548
+ };
3549
+
3550
+ export const FullVideo: React.FC = () => {
3551
+ return (
3552
+ <AbsoluteFill>
3553
+ <Series>
3554
+ <Series.Sequence durationInFrames={SCENE_1_DURATION}>
3555
+ <Scene1 />
3556
+ </Series.Sequence>
3557
+ <Series.Sequence durationInFrames={SCENE_2_DURATION}>
3558
+ <Scene2 />
3559
+ </Series.Sequence>
3560
+ {/* Add all sections */}
3561
+ </Series>
3562
+
3563
+ <Audio src={staticFile("audio/voiceover.wav")} volume={1} />
3564
+ <BackgroundMusic />
3565
+ </AbsoluteFill>
3566
+ );
3567
+ };
3568
+ \`\`\`
3569
+
3570
+ ### Step 6: Preview & Render
3242
3571
 
3243
3572
  \`\`\`bash
3244
- ${cmd2} create "API Reference" \\
3245
- --file ./docs/api.md \\
3246
- --file ./diagrams/architecture.png \\
3247
- --mode best --amount detailed \\
3248
- --goal inform
3573
+ npm run dev # Preview in Remotion Studio
3574
+ npm run render # Output to out/video.mp4
3249
3575
  \`\`\`
3250
3576
 
3251
- ### Quick Project Overview
3577
+ ## CLI Commands Reference
3578
+
3579
+ ### ${cmd2} video create
3580
+
3581
+ | Option | Required | Default | Description |
3582
+ |--------|----------|---------|-------------|
3583
+ | \`-s, --script <text>\` | Yes* | - | Narration script |
3584
+ | \`--script-file <path>\` | Yes* | - | Path to script file |
3585
+ | \`-t, --topic <text>\` | No | auto | Topic for image search |
3586
+ | \`-v, --voice <name>\` | No | Kore | TTS voice |
3587
+ | \`-m, --music-prompt <text>\` | No | auto | Music description |
3588
+ | \`-n, --num-images <n>\` | No | 5 | Number of images |
3589
+ | \`-o, --output <dir>\` | No | ./public | Output directory |
3590
+
3591
+ ### ${cmd2} tts generate
3252
3592
 
3253
3593
  \`\`\`bash
3254
- cat README.md package.json | ${cmd2} create "Project Introduction" \\
3255
- -m instant --slides 5
3594
+ ${cmd2} tts generate -t "Narration text" -o voice.wav --voice Kore
3595
+ ${cmd2} tts voices # List all voices
3256
3596
  \`\`\`
3257
3597
 
3258
- ### Sales Deck from Existing Presentation
3598
+ **Voices:** Kore (professional female), Puck (energetic male), Rachel (calm female), alloy (neutral)
3599
+
3600
+ ### ${cmd2} music generate
3259
3601
 
3260
3602
  \`\`\`bash
3261
- ${cmd2} create "Product Demo" \\
3262
- --file ./existing-deck.pptx \\
3263
- --goal persuade \\
3264
- --audience "Enterprise buyers" \\
3265
- --tone professional
3603
+ ${cmd2} music generate -p "uplifting corporate" -d 30 -o music.mp3
3266
3604
  \`\`\`
3267
3605
 
3268
- ### Research Presentation
3606
+ **Good prompts:** "uplifting corporate", "calm ambient, soft piano", "cinematic orchestral"
3607
+
3608
+ ### ${cmd2} image search / video search
3269
3609
 
3270
3610
  \`\`\`bash
3271
- ${cmd2} create "Market Analysis" \\
3272
- --file ./research.pdf \\
3273
- --sources https://report.com/industry.pdf \\
3274
- --tone formal --audience "Executive team" \\
3275
- --goal report
3611
+ ${cmd2} image search -q "tropical beach" -n 5 -s large
3612
+ ${cmd2} video search "tech workspace" -n 5
3276
3613
  \`\`\`
3277
3614
 
3278
- ## Output
3615
+ ### ${cmd2} mix create
3279
3616
 
3280
- Successful creation returns:
3617
+ Post-process audio into existing video:
3618
+
3619
+ \`\`\`bash
3620
+ ${cmd2} mix create --video video.mp4 --voice voice.wav --music music.mp3 -o final.mp4
3281
3621
  \`\`\`
3282
- \u2713 Presentation created successfully
3283
3622
 
3284
- Title: Authentication System
3285
- Slides: 8
3286
- Generated in: 45s \xB7 12,500 tokens
3623
+ ## Audio Guidelines
3287
3624
 
3288
- Open: ${url}/en/view/presentations/auth-system-v1-abc123
3625
+ | Element | Volume | Notes |
3626
+ |---------|--------|-------|
3627
+ | Voiceover | 100% | Primary audio |
3628
+ | Background music | 20-30% | Fade in/out over ~10-20 frames |
3629
+
3630
+ Generate music 5s longer than video for fade out.
3631
+
3632
+ ## Animation Quality Checklist
3633
+
3634
+ Before rendering, ensure your video follows these standards from motion-standards.md:
3635
+
3636
+ 1. **Physics over linearity** - Use \`spring()\` for all animations, never linear interpolate for movement
3637
+ 2. **Orchestration** - Stagger element entrances (3-8 frame delays), never animate all at once
3638
+ 3. **Virtual camera** - Add subtle zoom/scale even on static scenes (1.0 \u2192 1.03 over duration)
3639
+ 4. **Micro-interactions** - Use components from micro-interactions.md for buttons, text reveals, highlights
3640
+
3641
+ ---
3642
+
3643
+ # \u{1F3AC} PROJECT-BASED VIDEO WORKFLOW (Mode A)
3644
+
3645
+ **Use this when user has a project/app to showcase.**
3646
+
3647
+ ## \u{1F4CB} PHASE 1: EXPLORE THE APP
3648
+
3649
+ ### 1.1 Find Brand Assets
3650
+
3651
+ \`\`\`bash
3652
+ # Logo
3653
+ find src -name "*[Ll]ogo*" 2>/dev/null
3654
+ find public -name "*logo*" 2>/dev/null
3655
+
3656
+ # Colors - THIS IS CRITICAL
3657
+ cat tailwind.config.* | grep -A 30 "colors"
3658
+ cat src/app/globals.css | head -50
3659
+
3660
+ # Fonts
3661
+ grep -r "fontFamily" tailwind.config.* src/app/layout.tsx
3289
3662
  \`\`\`
3290
3663
 
3291
- For scripting, use JSON output:
3664
+ ### 1.2 Read Key UI Components
3665
+
3666
+ **Don't copy - just read to understand the visual structure:**
3667
+
3292
3668
  \`\`\`bash
3293
- URL=$(${cmd2} create "Demo" --context "..." -o json | jq -r '.viewUrl')
3669
+ # Find main components
3670
+ find src/components -name "*.tsx" | head -30
3671
+
3672
+ # Read them to understand layout, colors, structure
3673
+ cat src/components/slides/SlidesSidebar.tsx
3674
+ cat src/components/tools/ToolsPanel.tsx
3675
+ cat src/components/ui/button.tsx
3294
3676
  \`\`\`
3295
3677
 
3296
- ## Best Practices
3678
+ **For each component, note:**
3679
+ - Layout structure (sidebar? grid? list?)
3680
+ - Colors used (bg-slate-900, text-teal-400, etc.)
3681
+ - Visual elements (badges, icons, thumbnails)
3682
+ - Typography (font sizes, weights)
3297
3683
 
3298
- 1. **Provide rich context** - More context = better slides. Include code, docs, data.
3299
- 2. **Use file uploads for binary content** - PDFs, images, PPTX files need \`--file\`.
3300
- 3. **Specify a goal** - Helps tailor the presentation structure and messaging.
3301
- 4. **Use appropriate mode** - \`instant\` for quick drafts, \`best\` for important presentations.
3302
- 5. **Specify audience** - Helps tailor complexity and terminology.
3303
- 6. **Combine sources** - Pipe multiple files for comprehensive presentations.
3684
+ ### 1.3 Document Your Findings
3304
3685
 
3305
- ## Supported File Types
3686
+ \`\`\`markdown
3687
+ ## Brand Discovery: [App Name]
3306
3688
 
3307
- - **Documents**: PDF, DOCX, XLSX, PPTX
3308
- - **Images**: JPEG, PNG, GIF, WebP
3309
- - **Text**: Markdown, TXT, CSV, JSON
3689
+ ### Colors (from tailwind.config)
3690
+ - Background: #0f172a (slate-900)
3691
+ - Surface: #1e293b (slate-800)
3692
+ - Primary: #14b8a6 (teal-500)
3693
+ - Accent: #f472b6 (pink-400)
3694
+ - Text: #ffffff / #94a3b8 (slate-400)
3310
3695
 
3311
- ## Troubleshooting
3696
+ ### Key UI Elements I Observed
3697
+ 1. **Sidebar** - Dark bg, slide thumbnails with numbers
3698
+ 2. **Main viewer** - Light slide content area
3699
+ 3. **Tools panel** - Grid of cards with icons
3700
+ \`\`\`
3701
+
3702
+ ---
3703
+
3704
+ ## \u{1F4CB} PHASE 2: PLAN THE VIDEO
3705
+
3706
+ ### Scene Structure
3707
+
3708
+ \`\`\`markdown
3709
+ ## Video Plan: [App Name] Demo
3710
+
3711
+ ### Scene 1: Intro (3s / 90 frames)
3712
+ **What to show:** Logo + tagline on dark background
3713
+ **Colors:** bg #0f172a, logo centered
3714
+ **Animation:** Logo scales in with spring, tagline fades up
3715
+
3716
+ ### Scene 2: Sidebar UI (5s / 150 frames)
3717
+ **What to show:** Replicate the slides sidebar
3718
+ **Reference:** Read src/components/slides/SlidesSidebar.tsx
3719
+ **Build:** Dark sidebar with slide items, thumbnails
3720
+ **Animation:** Sidebar slides in, items stagger
3721
+
3722
+ ### Scene 3: Main Editor (5s / 150 frames)
3723
+ **What to show:** Replicate the slide viewer
3724
+ **Reference:** Read src/components/slides/SlideViewer.tsx
3725
+ **Animation:** Content fades in
3726
+
3727
+ ### Scene 4: CTA (3s / 90 frames)
3728
+ **What to show:** Logo + CTA button + URL
3729
+ **Animation:** Logo fades in, button pulses
3730
+ \`\`\`
3731
+
3732
+ ---
3733
+
3734
+ ## \u{1F528} PHASE 3: BUILD
3735
+
3736
+ ### 3.1 Create Fresh Project
3737
+
3738
+ \`\`\`bash
3739
+ rm -rf ../appname-video
3740
+ ${cmd2} video init ../appname-video
3741
+ cd ../appname-video
3742
+ \`\`\`
3743
+
3744
+ ### 3.2 Copy Brand Assets Only
3745
+
3746
+ \`\`\`bash
3747
+ # Logo
3748
+ cp ../myapp/public/logo.svg ./public/
3749
+
3750
+ # Tailwind config (for colors/fonts)
3751
+ cp ../myapp/tailwind.config.* ./
3752
+
3753
+ # Global CSS
3754
+ cp ../myapp/src/app/globals.css ./src/styles/
3755
+ \`\`\`
3756
+
3757
+ ### 3.3 Build Scene Components - PIXEL PERFECT
3758
+
3759
+ **Each scene replicates what you observed, using Remotion:**
3760
+
3761
+ \`\`\`tsx
3762
+ // src/remotion/scenes/SidebarScene.tsx
3763
+ // Replicates: src/components/slides/SlidesSidebar.tsx
3764
+
3765
+ import React from "react";
3766
+ import { AbsoluteFill, useCurrentFrame, spring, useVideoConfig } from "remotion";
3767
+
3768
+ const mockSlides = [
3769
+ { id: 1, title: "Title Slide", selected: true },
3770
+ { id: 2, title: "Overview", selected: false },
3771
+ { id: 3, title: "Key Players", selected: false },
3772
+ ];
3773
+
3774
+ export const SIDEBAR_SCENE_DURATION = 150;
3775
+
3776
+ export const SidebarScene: React.FC = () => {
3777
+ const frame = useCurrentFrame();
3778
+ const { fps } = useVideoConfig();
3779
+
3780
+ const sidebarProgress = spring({ frame, fps, config: { damping: 20, stiffness: 100 } });
3781
+ const sidebarX = (1 - sidebarProgress) * -280;
3782
+
3783
+ return (
3784
+ <AbsoluteFill style={{ backgroundColor: "#0f172a" }}>
3785
+ {/* Sidebar - EXACT colors from tailwind.config */}
3786
+ <div style={{
3787
+ width: 280,
3788
+ height: "100%",
3789
+ backgroundColor: "#0f172a",
3790
+ borderRight: "1px solid #1e293b",
3791
+ transform: \`translateX(\${sidebarX}px)\`,
3792
+ padding: 16,
3793
+ }}>
3794
+ {/* Header - EXACT styling from component */}
3795
+ <div style={{ display: "flex", alignItems: "center", gap: 8, marginBottom: 16 }}>
3796
+ <span style={{ color: "#14b8a6", fontSize: 14, fontWeight: 500 }}>
3797
+ SLIDES CONTROL
3798
+ </span>
3799
+ </div>
3800
+
3801
+ {/* Slide items - staggered animation */}
3802
+ {mockSlides.map((slide, i) => {
3803
+ const itemProgress = spring({
3804
+ frame: frame - 10 - i * 8,
3805
+ fps,
3806
+ config: { damping: 15, stiffness: 100 },
3807
+ });
3808
+
3809
+ return (
3810
+ <div key={slide.id} style={{
3811
+ opacity: itemProgress,
3812
+ transform: \`translateX(\${(1 - itemProgress) * -20}px)\`,
3813
+ marginBottom: 8,
3814
+ padding: 12,
3815
+ borderRadius: 8,
3816
+ backgroundColor: slide.selected ? "#1e293b" : "transparent",
3817
+ border: slide.selected ? "1px solid #14b8a6" : "1px solid transparent",
3818
+ }}>
3819
+ <div style={{ display: "flex", alignItems: "center", gap: 12 }}>
3820
+ <div style={{ width: 48, height: 32, backgroundColor: "#334155", borderRadius: 4 }} />
3821
+ <div>
3822
+ <span style={{ color: "#64748b", fontSize: 12 }}>
3823
+ SLIDE {String(i + 1).padStart(2, "0")}
3824
+ </span>
3825
+ {slide.selected && <span style={{ color: "#f87171", fontSize: 12, marginLeft: 8 }}>SELECTED</span>}
3826
+ <p style={{ color: "#ffffff", fontSize: 14, margin: 0 }}>{slide.title}</p>
3827
+ </div>
3828
+ </div>
3829
+ </div>
3830
+ );
3831
+ })}
3832
+ </div>
3833
+ </AbsoluteFill>
3834
+ );
3835
+ };
3836
+ \`\`\`
3837
+
3838
+ ### 3.4 Key Principles: PIXEL-PERFECT Replication
3839
+
3840
+ **The video UI should be indistinguishable from the real app.**
3841
+
3842
+ 1. **EXACT colors** - Copy hex values directly from tailwind.config
3843
+ 2. **EXACT spacing** - If \`p-4 gap-3\`, use \`padding: 16px, gap: 12px\`
3844
+ 3. **EXACT typography** - Same font size, weight, color
3845
+ 4. **EXACT borders** - Same border width, color, radius
3846
+ 5. **EXACT layout** - Same flex direction, alignment, widths
3847
+ 6. **Then add animations** - spring() entrances, stagger delays
3848
+
3849
+ ---
3850
+
3851
+ ## \u{1F3AC} PHASE 4: AUDIO & RENDER
3852
+
3853
+ ### Generate Audio
3854
+
3855
+ \`\`\`bash
3856
+ ${cmd2} video create \\
3857
+ --script "Your narration..." \\
3858
+ --music-prompt "modern uplifting tech" \\
3859
+ --output ./public
3860
+ \`\`\`
3861
+
3862
+ ### Preview & Render
3312
3863
 
3313
3864
  \`\`\`bash
3314
- # Check if authenticated
3315
- ${cmd2} whoami
3865
+ npm run dev # Preview
3866
+ npm run render # Output to out/video.mp4
3867
+ \`\`\`
3868
+
3869
+ ---
3316
3870
 
3317
- # Verify API key
3318
- ${cmd2} config show
3871
+ ## \u274C WHAT NOT TO DO
3319
3872
 
3320
- # Debug mode
3321
- ${cmd2} create "Test" --context "test" --debug
3873
+ ### Bad: Generic rectangles
3874
+ \`\`\`tsx
3875
+ // \u274C NO
3876
+ <div style={{ background: "linear-gradient(#667eea, #764ba2)", width: 200, height: 150 }} />
3322
3877
  \`\`\`
3878
+
3879
+ ### Bad: Made-up content
3880
+ \`\`\`tsx
3881
+ // \u274C NO
3882
+ <h2>Key Insights from Research</h2>
3883
+ <li>Finding 1: Performance improved by 45%</li>
3884
+ \`\`\`
3885
+
3886
+ ### Bad: Not matching the app
3887
+ \`\`\`tsx
3888
+ // \u274C NO - App uses slate-900, not gray-800
3889
+ <div style={{ backgroundColor: "#1f2937" }}>
3890
+ \`\`\`
3891
+
3892
+ ### Good: Replicated UI with correct brand
3893
+ \`\`\`tsx
3894
+ // \u2705 YES - Matches actual app colors and structure
3895
+ <div style={{ backgroundColor: "#0f172a", borderColor: "#1e293b" }}>
3896
+ <span style={{ color: "#14b8a6" }}>SLIDES CONTROL</span>
3897
+ </div>
3898
+ \`\`\`
3899
+
3900
+ ---
3901
+
3902
+ ## \u2705 Project Video Checklist
3903
+
3904
+ ### Before Building
3905
+ - [ ] Read motion-standards.md and micro-interactions.md
3906
+ - [ ] Found logo path
3907
+ - [ ] Found colors from tailwind.config
3908
+ - [ ] Read key components to understand visual structure
3909
+ - [ ] Documented findings
3910
+ - [ ] Planned scenes
3911
+
3912
+ ### While Building
3913
+ - [ ] Using exact colors from tailwind.config
3914
+ - [ ] Matching layout structure of real app
3915
+ - [ ] Using spring() for animations
3916
+ - [ ] Mock data is realistic
3917
+
3918
+ ### Before Render
3919
+ - [ ] Logo appears in intro and CTA
3920
+ - [ ] Colors match the app exactly
3921
+ - [ ] All scenes have smooth animations
3323
3922
  `;
3324
3923
  }
3325
- var EDITORS = [
3326
- { name: "Claude Code", dir: ".claude" },
3327
- { name: "Cursor", dir: ".cursor" },
3328
- { name: "Codex", dir: ".codex" },
3329
- { name: "OpenCode", dir: ".opencode" },
3330
- { name: "Windsurf", dir: ".windsurf" },
3331
- { name: "Agent", dir: ".agent" }
3332
- ];
3333
- var skillCommand = new Command14("skill").description(`Manage ${brand.displayName} skill for AI coding assistants`).addHelpText(
3334
- "after",
3335
- `
3336
- ${chalk12.bold("Examples:")}
3337
- ${chalk12.gray("# Install skill for all detected editors")}
3338
- $ ${brand.name} skill install
3924
+ function generateMotionStandardsRule() {
3925
+ return `---
3926
+ name: motion-standards
3927
+ description: Animation quality standards for high-end video production
3928
+ ---
3339
3929
 
3340
- ${chalk12.gray("# Install to specific directory")}
3341
- $ ${brand.name} skill install --dir ~/.claude
3930
+ # Motion Design Standards
3342
3931
 
3343
- ${chalk12.gray("# Show skill content")}
3344
- $ ${brand.name} skill show
3345
- `
3346
- );
3347
- skillCommand.command("install").description(`Install the ${brand.displayName} skill for AI coding assistants`).option("-d, --dir <path>", "Install to specific directory").option("-g, --global", "Install globally (to home directory)", true).option("-l, --local", "Install locally (to current directory)").option("-f, --force", "Overwrite existing skill files").action(async (options) => {
3348
- const installed = [];
3349
- const skipped = [];
3350
- const errors = [];
3351
- const baseDir = options.local ? process.cwd() : homedir();
3352
- const skillContent = generateSkillContent(brand);
3353
- if (options.dir) {
3354
- const skillPath = join(options.dir, "skills", brand.name);
3355
- try {
3356
- installSkill(skillPath, skillContent, options.force);
3357
- installed.push(options.dir);
3358
- } catch (err) {
3359
- errors.push(`${options.dir}: ${err instanceof Error ? err.message : String(err)}`);
3360
- }
3361
- } else {
3362
- for (const editor of EDITORS) {
3363
- const editorDir = join(baseDir, editor.dir);
3932
+ Generate videos that feel like high-end productions (Apple, Stripe, Linear quality).
3933
+
3934
+ **Follow these standards for every Remotion component.**
3935
+
3936
+ ## STANDARD 01: PHYSICS OVER LINEARITY
3937
+
3938
+ - **Rule:** Never use linear interpolation for movement or scaling
3939
+ - **Implementation:** Use \`spring()\` for ALL entrance/exit animations
3940
+ - **Default config:** \`{ mass: 0.8, stiffness: 150, damping: 15 }\`
3941
+
3942
+ \`\`\`tsx
3943
+ // BAD
3944
+ const opacity = interpolate(frame, [0, 30], [0, 1]);
3945
+
3946
+ // GOOD
3947
+ const progress = spring({ frame, fps, config: { mass: 0.8, stiffness: 150, damping: 15 } });
3948
+ \`\`\`
3949
+
3950
+ ## STANDARD 02: ORCHESTRATION & CASCADE
3951
+
3952
+ - **Rule:** NEVER animate all elements simultaneously
3953
+ - **Implementation:** Staggered entrances with 3-5 frames between items
3954
+
3955
+ \`\`\`tsx
3956
+ // GOOD - cascading entrance
3957
+ <FadeIn delay={0}><Header /></FadeIn>
3958
+ <FadeIn delay={8}><Content /></FadeIn>
3959
+ <FadeIn delay={16}><Footer /></FadeIn>
3960
+
3961
+ // GOOD - staggered list
3962
+ {items.map((item, i) => (
3963
+ <SlideUp key={item.id} delay={i * 4}>
3964
+ <ListItem data={item} />
3965
+ </SlideUp>
3966
+ ))}
3967
+ \`\`\`
3968
+
3969
+ ## STANDARD 03: THE VIRTUAL CAMERA
3970
+
3971
+ - **Rule:** Even when UI is idle, add subtle movement
3972
+ - **Implementation:** Dolly zoom (slow push in)
3973
+
3974
+ \`\`\`tsx
3975
+ const CinematicContainer = ({ children }) => {
3976
+ const frame = useCurrentFrame();
3977
+ const { durationInFrames } = useVideoConfig();
3978
+ const scale = interpolate(frame, [0, durationInFrames], [1, 1.03]);
3979
+
3980
+ return (
3981
+ <AbsoluteFill style={{ transform: \`scale(\${scale})\` }}>
3982
+ {children}
3983
+ </AbsoluteFill>
3984
+ );
3985
+ };
3986
+ \`\`\`
3987
+
3988
+ ## STANDARD 04: HUMAN SIMULATION
3989
+
3990
+ - **Rule:** NEVER move cursor in straight lines
3991
+ - **Implementation:** Use curved/Bezier paths for cursor movement
3992
+
3993
+ ## STANDARD 05: TECHNICAL CONSTRAINTS
3994
+
3995
+ 1. **Styling:** Tailwind CSS or inline styles
3996
+ 2. **Layout:** Use \`AbsoluteFill\` for scene composition
3997
+ 3. **State:** NO \`useState\` or \`useEffect\` - derive from \`useCurrentFrame()\`
3998
+
3999
+ ## Execution Checklist
4000
+
4001
+ 1. Analyze UI hierarchy
4002
+ 2. Choreograph order of appearance
4003
+ 3. Apply \`spring()\` physics
4004
+ 4. Add subtle camera movement
4005
+ 5. Human touches for interactions
4006
+ `;
4007
+ }
4008
+ function generateMicroInteractionsRule() {
4009
+ return `---
4010
+ name: micro-interactions
4011
+ description: Animation components and patterns
4012
+ ---
4013
+
4014
+ # Micro-Interactions
4015
+
4016
+ ## Core Principles
4017
+
4018
+ 1. **Subtle** - Effects enhance, never distract
4019
+ 2. **Purposeful** - Every animation communicates something
4020
+ 3. **Physics-based** - Use \`spring()\`, not linear easing
4021
+ 4. **Continuous** - Always have something moving subtly
4022
+
4023
+ ## Spring Configurations
4024
+
4025
+ \`\`\`tsx
4026
+ const SPRING_CONFIGS = {
4027
+ snappy: { damping: 15, stiffness: 200, mass: 0.5 },
4028
+ smooth: { damping: 20, stiffness: 100, mass: 1 },
4029
+ bouncy: { damping: 8, stiffness: 150, mass: 0.8 },
4030
+ gentle: { damping: 30, stiffness: 50, mass: 1 },
4031
+ };
4032
+ \`\`\`
4033
+
4034
+ ## Entry Animations
4035
+
4036
+ ### Fade + Slide
4037
+
4038
+ \`\`\`tsx
4039
+ const AnimatedEntry = ({ delay = 0, direction = 'up', children }) => {
4040
+ const frame = useCurrentFrame();
4041
+ const { fps } = useVideoConfig();
4042
+
4043
+ const progress = spring({
4044
+ frame: frame - delay,
4045
+ fps,
4046
+ config: { damping: 20, stiffness: 100 }
4047
+ });
4048
+
4049
+ const directions = {
4050
+ up: { x: 0, y: 30 },
4051
+ down: { x: 0, y: -30 },
4052
+ left: { x: 30, y: 0 },
4053
+ right: { x: -30, y: 0 },
4054
+ };
4055
+
4056
+ const { x, y } = directions[direction];
4057
+
4058
+ return (
4059
+ <div style={{
4060
+ opacity: progress,
4061
+ transform: \`translate(\${x * (1 - progress)}px, \${y * (1 - progress)}px)\`,
4062
+ }}>
4063
+ {children}
4064
+ </div>
4065
+ );
4066
+ };
4067
+ \`\`\`
4068
+
4069
+ ### Staggered List
4070
+
4071
+ \`\`\`tsx
4072
+ const StaggeredList = ({ children, itemDelay = 5 }) => (
4073
+ <>
4074
+ {React.Children.map(children, (child, i) => (
4075
+ <AnimatedEntry delay={i * itemDelay}>{child}</AnimatedEntry>
4076
+ ))}
4077
+ </>
4078
+ );
4079
+ \`\`\`
4080
+
4081
+ ## Interaction Simulation
4082
+
4083
+ ### Button Press
4084
+
4085
+ \`\`\`tsx
4086
+ const ButtonPress = ({ pressFrame, children }) => {
4087
+ const frame = useCurrentFrame();
4088
+ const { fps } = useVideoConfig();
4089
+
4090
+ const isPressing = frame >= pressFrame && frame < pressFrame + 3;
4091
+ const isReleasing = frame >= pressFrame + 3;
4092
+
4093
+ const releaseProgress = isReleasing ? spring({
4094
+ frame: frame - pressFrame - 3,
4095
+ fps,
4096
+ config: { damping: 10, stiffness: 300 }
4097
+ }) : 0;
4098
+
4099
+ const scale = isPressing ? 0.95 : (0.95 + releaseProgress * 0.05);
4100
+
4101
+ return <div style={{ transform: \`scale(\${scale})\` }}>{children}</div>;
4102
+ };
4103
+ \`\`\`
4104
+
4105
+ ### Typed Text
4106
+
4107
+ \`\`\`tsx
4108
+ const TypedText = ({ text, startFrame = 0, speed = 2 }) => {
4109
+ const frame = useCurrentFrame();
4110
+ const charsToShow = Math.floor((frame - startFrame) / speed);
4111
+
4112
+ if (frame < startFrame) return null;
4113
+
4114
+ return (
4115
+ <span>
4116
+ {text.slice(0, Math.min(charsToShow, text.length))}
4117
+ {charsToShow < text.length && (
4118
+ <span style={{ opacity: frame % 15 < 8 ? 1 : 0 }}>|</span>
4119
+ )}
4120
+ </span>
4121
+ );
4122
+ };
4123
+ \`\`\`
4124
+
4125
+ ### Counting Number
4126
+
4127
+ \`\`\`tsx
4128
+ const CountingNumber = ({ from = 0, to, startFrame = 0, duration = 30 }) => {
4129
+ const frame = useCurrentFrame();
4130
+ const progress = interpolate(frame - startFrame, [0, duration], [0, 1], {
4131
+ extrapolateLeft: 'clamp', extrapolateRight: 'clamp'
4132
+ });
4133
+ const eased = 1 - Math.pow(1 - progress, 3);
4134
+ return <span>{Math.round(from + (to - from) * eased)}</span>;
4135
+ };
4136
+ \`\`\`
4137
+
4138
+ ## Timing Guidelines
4139
+
4140
+ | Effect | Duration |
4141
+ |--------|----------|
4142
+ | Entry animation | 15-25 frames |
4143
+ | Button press | 10-15 frames |
4144
+ | Highlight/focus | 30-60 frames |
4145
+ | Stagger delay | 3-8 frames |
4146
+ `;
4147
+ }
4148
+ function generateAnimationComponents() {
4149
+ return `/**
4150
+ * Remotion Animation Components
4151
+ * Copy these into your project as needed.
4152
+ */
4153
+
4154
+ import React from 'react';
4155
+ import { useCurrentFrame, useVideoConfig, interpolate, spring, Easing } from 'remotion';
4156
+
4157
+ // Spring configurations
4158
+ export const SPRING_CONFIGS = {
4159
+ snappy: { damping: 15, stiffness: 200, mass: 0.5 },
4160
+ smooth: { damping: 20, stiffness: 100, mass: 1 },
4161
+ bouncy: { damping: 8, stiffness: 150, mass: 0.8 },
4162
+ gentle: { damping: 30, stiffness: 50, mass: 1 },
4163
+ };
4164
+
4165
+ // Animated entry with direction
4166
+ export const AnimatedEntry: React.FC<{
4167
+ children: React.ReactNode;
4168
+ delay?: number;
4169
+ direction?: 'up' | 'down' | 'left' | 'right' | 'none';
4170
+ distance?: number;
4171
+ }> = ({ children, delay = 0, direction = 'up', distance = 30 }) => {
4172
+ const frame = useCurrentFrame();
4173
+ const { fps } = useVideoConfig();
4174
+
4175
+ const progress = spring({
4176
+ frame: frame - delay,
4177
+ fps,
4178
+ config: SPRING_CONFIGS.smooth,
4179
+ });
4180
+
4181
+ const directions = {
4182
+ up: { x: 0, y: distance },
4183
+ down: { x: 0, y: -distance },
4184
+ left: { x: distance, y: 0 },
4185
+ right: { x: -distance, y: 0 },
4186
+ none: { x: 0, y: 0 },
4187
+ };
4188
+
4189
+ const { x, y } = directions[direction];
4190
+
4191
+ return (
4192
+ <div style={{
4193
+ opacity: interpolate(progress, [0, 1], [0, 1]),
4194
+ transform: \`translate(\${x * (1 - progress)}px, \${y * (1 - progress)}px)\`,
4195
+ }}>
4196
+ {children}
4197
+ </div>
4198
+ );
4199
+ };
4200
+
4201
+ // Scale in animation
4202
+ export const ScaleIn: React.FC<{
4203
+ children: React.ReactNode;
4204
+ delay?: number;
4205
+ from?: number;
4206
+ }> = ({ children, delay = 0, from = 0.8 }) => {
4207
+ const frame = useCurrentFrame();
4208
+ const { fps } = useVideoConfig();
4209
+
4210
+ const progress = spring({
4211
+ frame: frame - delay,
4212
+ fps,
4213
+ config: SPRING_CONFIGS.bouncy,
4214
+ });
4215
+
4216
+ return (
4217
+ <div style={{
4218
+ opacity: interpolate(progress, [0, 0.5], [0, 1], { extrapolateRight: 'clamp' }),
4219
+ transform: \`scale(\${interpolate(progress, [0, 1], [from, 1])})\`,
4220
+ }}>
4221
+ {children}
4222
+ </div>
4223
+ );
4224
+ };
4225
+
4226
+ // Staggered list
4227
+ export const StaggeredList: React.FC<{
4228
+ children: React.ReactNode;
4229
+ itemDelay?: number;
4230
+ startFrame?: number;
4231
+ }> = ({ children, itemDelay = 5, startFrame = 0 }) => (
4232
+ <>
4233
+ {React.Children.map(children, (child, i) => (
4234
+ <AnimatedEntry delay={startFrame + i * itemDelay}>{child}</AnimatedEntry>
4235
+ ))}
4236
+ </>
4237
+ );
4238
+
4239
+ // Button press animation
4240
+ export const ButtonPress: React.FC<{
4241
+ children: React.ReactNode;
4242
+ pressFrame: number;
4243
+ }> = ({ children, pressFrame }) => {
4244
+ const frame = useCurrentFrame();
4245
+ const { fps } = useVideoConfig();
4246
+
4247
+ const isPressing = frame >= pressFrame && frame < pressFrame + 3;
4248
+ const isReleasing = frame >= pressFrame + 3;
4249
+
4250
+ const releaseProgress = isReleasing ? spring({
4251
+ frame: frame - pressFrame - 3,
4252
+ fps,
4253
+ config: { damping: 10, stiffness: 300 },
4254
+ }) : 0;
4255
+
4256
+ const scale = isPressing ? 0.95 : 0.95 + releaseProgress * 0.05;
4257
+
4258
+ return <div style={{ transform: \`scale(\${Math.min(1, scale)})\` }}>{children}</div>;
4259
+ };
4260
+
4261
+ // Typed text effect
4262
+ export const TypedText: React.FC<{
4263
+ text: string;
4264
+ startFrame?: number;
4265
+ speed?: number;
4266
+ showCursor?: boolean;
4267
+ }> = ({ text, startFrame = 0, speed = 2, showCursor = true }) => {
4268
+ const frame = useCurrentFrame();
4269
+ const charsToShow = Math.floor((frame - startFrame) / speed);
4270
+
4271
+ if (frame < startFrame) return null;
4272
+
4273
+ const isTyping = charsToShow < text.length;
4274
+
4275
+ return (
4276
+ <span>
4277
+ {text.slice(0, Math.min(charsToShow, text.length))}
4278
+ {showCursor && isTyping && (
4279
+ <span style={{ opacity: frame % 15 < 8 ? 1 : 0 }}>|</span>
4280
+ )}
4281
+ </span>
4282
+ );
4283
+ };
4284
+
4285
+ // Counting number
4286
+ export const CountingNumber: React.FC<{
4287
+ from?: number;
4288
+ to: number;
4289
+ startFrame?: number;
4290
+ duration?: number;
4291
+ format?: (n: number) => string;
4292
+ }> = ({ from = 0, to, startFrame = 0, duration = 30, format = String }) => {
4293
+ const frame = useCurrentFrame();
4294
+
4295
+ const progress = interpolate(frame - startFrame, [0, duration], [0, 1], {
4296
+ extrapolateLeft: 'clamp',
4297
+ extrapolateRight: 'clamp',
4298
+ });
4299
+
4300
+ const eased = 1 - Math.pow(1 - progress, 3);
4301
+ const value = Math.round(from + (to - from) * eased);
4302
+
4303
+ return <span>{format(value)}</span>;
4304
+ };
4305
+
4306
+ // Floating element
4307
+ export const FloatingElement: React.FC<{
4308
+ children: React.ReactNode;
4309
+ amplitude?: number;
4310
+ speed?: number;
4311
+ }> = ({ children, amplitude = 3, speed = 0.05 }) => {
4312
+ const frame = useCurrentFrame();
4313
+ const y = Math.sin(frame * speed) * amplitude;
4314
+
4315
+ return <div style={{ transform: \`translateY(\${y}px)\` }}>{children}</div>;
4316
+ };
4317
+
4318
+ // Highlight effect
4319
+ export const Highlight: React.FC<{
4320
+ children: React.ReactNode;
4321
+ startFrame: number;
4322
+ duration?: number;
4323
+ }> = ({ children, startFrame, duration = 45 }) => {
4324
+ const frame = useCurrentFrame();
4325
+ const { fps } = useVideoConfig();
4326
+
4327
+ const isActive = frame >= startFrame && frame < startFrame + duration;
4328
+ const progress = spring({
4329
+ frame: isActive ? frame - startFrame : 0,
4330
+ fps,
4331
+ config: SPRING_CONFIGS.snappy,
4332
+ });
4333
+
4334
+ const scale = isActive ? 1 + progress * 0.03 : 1;
4335
+
4336
+ return (
4337
+ <div style={{
4338
+ transform: \`scale(\${scale})\`,
4339
+ boxShadow: isActive ? \`0 \${8 + progress * 12}px \${16 + progress * 24}px rgba(0,0,0,0.15)\` : undefined,
4340
+ }}>
4341
+ {children}
4342
+ </div>
4343
+ );
4344
+ };
4345
+
4346
+ // Cursor pointer
4347
+ export const CursorPointer: React.FC<{
4348
+ path: Array<{ x: number; y: number; frame: number }>;
4349
+ size?: number;
4350
+ }> = ({ path, size = 24 }) => {
4351
+ const frame = useCurrentFrame();
4352
+ const { fps } = useVideoConfig();
4353
+
4354
+ let x = path[0].x;
4355
+ let y = path[0].y;
4356
+
4357
+ for (let i = 0; i < path.length - 1; i++) {
4358
+ const from = path[i];
4359
+ const to = path[i + 1];
4360
+
4361
+ if (frame >= from.frame && frame <= to.frame) {
4362
+ const progress = spring({
4363
+ frame: frame - from.frame,
4364
+ fps,
4365
+ config: { damping: 20, stiffness: 80 },
4366
+ });
4367
+
4368
+ x = interpolate(progress, [0, 1], [from.x, to.x]);
4369
+ y = interpolate(progress, [0, 1], [from.y, to.y]);
4370
+ break;
4371
+ } else if (frame > to.frame) {
4372
+ x = to.x;
4373
+ y = to.y;
4374
+ }
4375
+ }
4376
+
4377
+ return (
4378
+ <div style={{
4379
+ position: 'absolute',
4380
+ left: \`\${x}%\`,
4381
+ top: \`\${y}%\`,
4382
+ transform: 'translate(-50%, -50%)',
4383
+ zIndex: 1000,
4384
+ pointerEvents: 'none',
4385
+ }}>
4386
+ <svg width={size} height={size} viewBox="0 0 24 24">
4387
+ <path
4388
+ d="M4 4 L4 20 L9 15 L13 22 L16 20 L12 13 L19 13 Z"
4389
+ fill="white"
4390
+ stroke="black"
4391
+ strokeWidth="1.5"
4392
+ />
4393
+ </svg>
4394
+ </div>
4395
+ );
4396
+ };
4397
+ `;
4398
+ }
4399
+ function generateComponentIntegrationRule(b) {
4400
+ const cmd2 = b.name;
4401
+ return `---
4402
+ name: component-integration
4403
+ description: Integrating app components into Remotion videos
4404
+ ---
4405
+
4406
+ # Integrating App Components into Remotion
4407
+
4408
+ Use your actual React components OR replicate them pixel-perfect in Remotion videos.
4409
+
4410
+ ## Two Approaches
4411
+
4412
+ ### Approach A: Replicate UI (Recommended)
4413
+ Read your app's components, note every visual detail, build identical-looking components in Remotion.
4414
+
4415
+ **Why?** Your app components have hooks, state, and dependencies that don't work in Remotion. Replication is cleaner.
4416
+
4417
+ ### Approach B: Copy Components (When simple enough)
4418
+ For truly simple presentational components, you can copy them directly.
4419
+
4420
+ \`\`\`bash
4421
+ cp -r ../my-app/src/components/Card ./src/app-components/
4422
+ cp ../my-app/tailwind.config.js ./
4423
+ \`\`\`
4424
+
4425
+ ---
4426
+
4427
+ ## Adapting Components
4428
+
4429
+ ### 1. Remove Interactivity
4430
+
4431
+ \`\`\`tsx
4432
+ // BEFORE (interactive app)
4433
+ <Button onClick={handleSubmit}>Submit</Button>
4434
+
4435
+ // AFTER (video-ready)
4436
+ <Button disabled style={{ pointerEvents: 'none' }}>Submit</Button>
4437
+ \`\`\`
4438
+
4439
+ ### 2. Replace Dynamic Data
4440
+
4441
+ \`\`\`tsx
4442
+ // BEFORE (fetches from API)
4443
+ const { data } = useQuery('GET_USERS');
4444
+
4445
+ // AFTER (scripted data)
4446
+ const data = [
4447
+ { id: 1, name: 'Sarah Chen', role: 'Designer' },
4448
+ { id: 2, name: 'Alex Rivera', role: 'Developer' },
4449
+ ];
4450
+ \`\`\`
4451
+
4452
+ ### 3. Wrap with Animation
4453
+
4454
+ \`\`\`tsx
4455
+ import { FadeIn, SlideUp } from '../shared';
4456
+
4457
+ <FadeIn delay={0}>
4458
+ <Navbar />
4459
+ </FadeIn>
4460
+
4461
+ <SlideUp delay={15}>
4462
+ <Sidebar />
4463
+ </SlideUp>
4464
+ \`\`\`
4465
+
4466
+ ---
4467
+
4468
+ ## Common Showcase Patterns
4469
+
4470
+ ### Dashboard with Staggered Widgets
4471
+
4472
+ \`\`\`tsx
4473
+ const DashboardShowcase = () => {
4474
+ return (
4475
+ <DashboardLayout>
4476
+ <FadeIn delay={0}>
4477
+ <Header user={mockUser} />
4478
+ </FadeIn>
4479
+
4480
+ <div className="grid grid-cols-3 gap-4">
4481
+ <SlideUp delay={15}><StatsWidget data={revenueData} /></SlideUp>
4482
+ <SlideUp delay={23}><StatsWidget data={usersData} /></SlideUp>
4483
+ <SlideUp delay={31}><StatsWidget data={ordersData} /></SlideUp>
4484
+ </div>
4485
+
4486
+ <FadeIn delay={45}>
4487
+ <ChartWidget data={chartData} />
4488
+ </FadeIn>
4489
+ </DashboardLayout>
4490
+ );
4491
+ };
4492
+ \`\`\`
4493
+
4494
+ ### Form with Typing Simulation
4495
+
4496
+ \`\`\`tsx
4497
+ const FormShowcase = () => {
4498
+ const frame = useCurrentFrame();
4499
+ const { fps } = useVideoConfig();
4500
+
4501
+ return (
4502
+ <LoginForm>
4503
+ <Input
4504
+ label="Email"
4505
+ value={<TextReveal text="sarah@example.com" startFrame={0} />}
4506
+ />
4507
+ <Input
4508
+ label="Password"
4509
+ type="password"
4510
+ value={frame > fps * 2 ? '\u2022\u2022\u2022\u2022\u2022\u2022\u2022\u2022' : ''}
4511
+ />
4512
+ </LoginForm>
4513
+ );
4514
+ };
4515
+ \`\`\`
4516
+
4517
+ ### Modal Slide-In
4518
+
4519
+ \`\`\`tsx
4520
+ const ModalShowcase = () => {
4521
+ const frame = useCurrentFrame();
4522
+ const showModal = frame > 30;
4523
+
4524
+ return (
4525
+ <>
4526
+ <PageBackground />
4527
+ {showModal && (
4528
+ <>
4529
+ <FadeIn delay={30}>
4530
+ <div className="absolute inset-0 bg-black/50" />
4531
+ </FadeIn>
4532
+ <SlideUp delay={35}>
4533
+ <ConfirmationModal title="Confirm Delete" message="Are you sure?" isOpen />
4534
+ </SlideUp>
4535
+ </>
4536
+ )}
4537
+ </>
4538
+ );
4539
+ };
4540
+ \`\`\`
4541
+
4542
+ ---
4543
+
4544
+ ## Troubleshooting
4545
+
4546
+ ### Component uses hooks that don't work
4547
+ \`\`\`tsx
4548
+ // PROBLEM: useRouter, useAuth won't work
4549
+ // SOLUTION: Pass as props or mock the context
4550
+ const MockAuthProvider = ({ children }) => (
4551
+ <AuthContext.Provider value={{ user: mockUser }}>
4552
+ {children}
4553
+ </AuthContext.Provider>
4554
+ );
4555
+ \`\`\`
4556
+
4557
+ ### Component too large for frame
4558
+ \`\`\`tsx
4559
+ // Use transform scale to fit
4560
+ <div style={{ transform: 'scale(0.8)', transformOrigin: 'top left' }}>
4561
+ <LargeComponent />
4562
+ </div>
4563
+ \`\`\`
4564
+ `;
4565
+ }
4566
+ function generateProjectVideoWorkflowRule(b) {
4567
+ const cmd2 = b.name;
4568
+ return `---
4569
+ name: project-video-workflow
4570
+ description: Create promotional videos using actual project UI
4571
+ ---
4572
+
4573
+ # Project-Based Video Workflow
4574
+
4575
+ Create promotional videos using **your actual project's UI** replicated in Remotion.
4576
+
4577
+ ## When to Use
4578
+
4579
+ - User has existing React/Next.js/Vue project
4580
+ - User wants "product demo", "feature walkthrough", or "promotional video"
4581
+ - User mentions showcasing specific features/UI
4582
+ - User wants to animate their actual app interface
4583
+
4584
+ ## Quick Start
4585
+
4586
+ \`\`\`bash
4587
+ # 1. Scaffold video project
4588
+ ${cmd2} video init my-app-promo
4589
+ cd my-app-promo
4590
+
4591
+ # 2. Generate audio assets
4592
+ ${cmd2} video create \\
4593
+ --script "Introducing our new app..." \\
4594
+ --output ./public
4595
+
4596
+ # 3. Build scenes replicating your app's UI
4597
+
4598
+ # 4. Preview & Render
4599
+ npm run dev
4600
+ npm run render
4601
+ \`\`\`
4602
+
4603
+ ---
4604
+
4605
+ ## Full Workflow
4606
+
4607
+ ### Step 1: Analyze Project
4608
+
4609
+ \`\`\`bash
4610
+ # Check framework
4611
+ cat package.json | grep -E "react|next|vue"
4612
+
4613
+ # List components
4614
+ ls -la src/components/
4615
+
4616
+ # Get colors
4617
+ cat tailwind.config.* | grep -A 30 "colors"
4618
+ \`\`\`
4619
+
4620
+ **Identify:**
4621
+ - Framework: React, Next.js, Vue
4622
+ - Styling: Tailwind, CSS modules, styled-components
4623
+ - Key components: Forms, cards, modals, dashboards
4624
+ - Views to showcase
4625
+
4626
+ ### Step 2: Document Brand
4627
+
4628
+ \`\`\`markdown
4629
+ ## Brand: [App Name]
4630
+
4631
+ ### Colors (from tailwind.config)
4632
+ - Background: #0f172a
4633
+ - Surface: #1e293b
4634
+ - Primary: #14b8a6
4635
+ - Text: #ffffff
4636
+
4637
+ ### Key Components
4638
+ 1. Sidebar - Dark bg, navigation items
4639
+ 2. Dashboard - Stats cards, charts
4640
+ 3. Modal - Overlay, card
4641
+ \`\`\`
4642
+
4643
+ ### Step 3: Plan Scenes
4644
+
4645
+ \`\`\`markdown
4646
+ ## Scene Plan
4647
+
4648
+ ### Scene 1: Intro (3s)
4649
+ - Logo centered
4650
+ - Tagline fades up
4651
+
4652
+ ### Scene 2: Dashboard (5s)
4653
+ - Stats widgets stagger in
4654
+ - Chart animates
4655
+
4656
+ ### Scene 3: Feature Demo (5s)
4657
+ - Sidebar slides in
4658
+ - Selection animates
4659
+
4660
+ ### Scene 4: CTA (3s)
4661
+ - Logo + button
4662
+ \`\`\`
4663
+
4664
+ ### Step 4: Build Scenes
4665
+
4666
+ Create scenes in \`src/remotion/scenes/\` that replicate your UI:
4667
+
4668
+ \`\`\`tsx
4669
+ // src/remotion/scenes/DashboardScene.tsx
4670
+ import { AbsoluteFill, useCurrentFrame, spring, useVideoConfig } from "remotion";
4671
+
4672
+ export const DASHBOARD_SCENE_DURATION = 150;
4673
+
4674
+ const mockData = {
4675
+ revenue: 125000,
4676
+ users: 1234,
4677
+ orders: 567,
4678
+ };
4679
+
4680
+ export const DashboardScene: React.FC = () => {
4681
+ const frame = useCurrentFrame();
4682
+ const { fps } = useVideoConfig();
4683
+
4684
+ return (
4685
+ <AbsoluteFill style={{ backgroundColor: "#0f172a", padding: 40 }}>
4686
+ {/* Replicate your dashboard layout here */}
4687
+ {/* Use EXACT colors from your tailwind.config */}
4688
+ </AbsoluteFill>
4689
+ );
4690
+ };
4691
+ \`\`\`
4692
+
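+ As a rough sketch of how the placeholder body could be filled in (\`StatsCard\` and the mock values are assumptions; swap in pieces replicated from your app):
+
+ \`\`\`tsx
+ // Hypothetical helper: one stat card that springs into place.
+ const AnimatedStat: React.FC<{ label: string; value: number; delay: number }> = ({ label, value, delay }) => {
+   const frame = useCurrentFrame();
+   const { fps } = useVideoConfig();
+   // spring() returns 0..1; offsetting the frame staggers each card.
+   const progress = spring({ frame: frame - delay, fps, config: { damping: 200 } });
+   return (
+     <div style={{ opacity: progress, transform: "translateY(" + (1 - progress) * 30 + "px)" }}>
+       <StatsCard label={label} value={value} />
+     </div>
+   );
+ };
+
+ // Inside the <AbsoluteFill> above:
+ // <AnimatedStat label="Revenue" value={mockData.revenue} delay={0} />
+ // <AnimatedStat label="Users" value={mockData.users} delay={8} />
+ // <AnimatedStat label="Orders" value={mockData.orders} delay={16} />
+ \`\`\`
+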
4693
+ ### Step 5: Generate Audio
4694
+
4695
+ \`\`\`bash
4696
+ ${cmd2} video create \\
4697
+ --script "Introducing [App]. The fastest way to..." \\
4698
+ --music-prompt "modern uplifting tech" \\
4699
+ --output ./public
4700
+ \`\`\`
4701
+
4702
+ ### Step 6: Render
4703
+
4704
+ \`\`\`bash
4705
+ npm run dev # Preview
4706
+ npm run render # Final video
4707
+ \`\`\`
4708
+
4709
+ ---
4710
+
4711
+ ## Tips
4712
+
4713
+ 1. **Start simple** - Get basic scenes working before adding complex animations
4714
+ 2. **Use mock data** - Pre-define realistic demo data
4715
+ 3. **Match voiceover timing** - Sync visual transitions with narration (see the manifest sketch after this list)
4716
+ 4. **Keep scenes focused** - One main idea per scene
4717
+ 5. **Test at 1x speed** - Preview at normal speed to catch timing issues
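+
+ For tip 3, a minimal sketch of manifest-driven timing, assuming the \`manifest.json\` written by \`${cmd2} video create --output ./public\` (it records \`fps\` plus per-section \`startTime\` and \`durationInFrames\`), that JSON imports are enabled in your tsconfig, and a placeholder \`SectionScene\` component:
+
+ \`\`\`tsx
+ import { AbsoluteFill, Audio, Sequence, staticFile } from "remotion";
+ import manifest from "../../public/manifest.json"; // path and file name are assumptions
+
+ export const FullVideo: React.FC = () => (
+   <AbsoluteFill>
+     {/* Adjust if your voiceover path is stored differently in the manifest */}
+     <Audio src={staticFile(manifest.voiceover.path)} />
+     {manifest.sections.map((section) => (
+       <Sequence
+         key={section.id}
+         from={Math.round(section.startTime * manifest.fps)}
+         durationInFrames={section.durationInFrames}
+       >
+         <SectionScene text={section.text} imagePath={section.imagePath} />
+       </Sequence>
+     ))}
+   </AbsoluteFill>
+ );
+ \`\`\`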
4718
+ `;
4719
+ }
4720
+ function generateAllSkillFiles(b) {
4721
+ return {
4722
+ "SKILL.md": generateSkillContent(b),
4723
+ "rules/presentations.md": generatePresentationsRule(b),
4724
+ "rules/video.md": generateVideoRule(b),
4725
+ "rules/motion-standards.md": generateMotionStandardsRule(),
4726
+ "rules/micro-interactions.md": generateMicroInteractionsRule(),
4727
+ "rules/component-integration.md": generateComponentIntegrationRule(b),
4728
+ "rules/project-video-workflow.md": generateProjectVideoWorkflowRule(b),
4729
+ "assets/animation-components.tsx": generateAnimationComponents()
4730
+ };
4731
+ }
4732
+ var EDITORS = [
4733
+ { name: "Claude Code", dir: ".claude" },
4734
+ { name: "Cursor", dir: ".cursor" },
4735
+ { name: "Codex", dir: ".codex" },
4736
+ { name: "OpenCode", dir: ".opencode" },
4737
+ { name: "Windsurf", dir: ".windsurf" },
4738
+ { name: "Agent", dir: ".agent" }
4739
+ ];
4740
+ var skillCommand = new Command14("skill").description(`Manage ${brand.displayName} skill for AI coding assistants`).addHelpText(
4741
+ "after",
4742
+ `
4743
+ ${chalk12.bold("Examples:")}
4744
+ ${chalk12.gray("# Install skill for all detected editors")}
4745
+ $ ${brand.name} skill install
4746
+
4747
+ ${chalk12.gray("# Install to specific directory")}
4748
+ $ ${brand.name} skill install --dir ~/.claude
4749
+
4750
+ ${chalk12.gray("# Install without remotion-best-practices")}
4751
+ $ ${brand.name} skill install --skip-remotion
4752
+
4753
+ ${chalk12.gray("# Show skill content")}
4754
+ $ ${brand.name} skill show
4755
+ `
4756
+ );
4757
+ skillCommand.command("install").description(`Install the ${brand.displayName} skill for AI coding assistants`).option("-d, --dir <path>", "Install to specific directory").option("-g, --global", "Install globally (to home directory)", true).option("-l, --local", "Install locally (to current directory)").option("-f, --force", "Overwrite existing skill files").option("--skip-remotion", "Skip installing remotion-best-practices skill").action(async (options) => {
4758
+ const installed = [];
4759
+ const skipped = [];
4760
+ const errors = [];
4761
+ const baseDir = options.local ? process.cwd() : homedir();
4762
+ const skillFiles = generateAllSkillFiles(brand);
4763
+ if (options.dir) {
4764
+ const skillPath = join(options.dir, "skills", brand.name);
4765
+ try {
4766
+ installSkill(skillPath, skillFiles, options.force);
4767
+ installed.push(options.dir);
4768
+ } catch (err) {
4769
+ errors.push(`${options.dir}: ${err instanceof Error ? err.message : String(err)}`);
4770
+ }
4771
+ } else {
4772
+ for (const editor of EDITORS) {
4773
+ const editorDir = join(baseDir, editor.dir);
3364
4774
  const skillPath = join(editorDir, "skills", brand.name);
3365
4775
  const skillFile = join(skillPath, "SKILL.md");
3366
4776
  if (!existsSync2(editorDir)) {
@@ -3371,7 +4781,7 @@ skillCommand.command("install").description(`Install the ${brand.displayName} sk
3371
4781
  continue;
3372
4782
  }
3373
4783
  try {
3374
- installSkill(skillPath, skillContent, options.force);
4784
+ installSkill(skillPath, skillFiles, options.force);
3375
4785
  installed.push(editor.name);
3376
4786
  } catch (err) {
3377
4787
  errors.push(`${editor.name}: ${err instanceof Error ? err.message : String(err)}`);
@@ -3383,6 +4793,7 @@ skillCommand.command("install").description(`Install the ${brand.displayName} sk
3383
4793
  success("Skill installed successfully");
3384
4794
  console.log();
3385
4795
  keyValue("Installed to", installed.join(", "));
4796
+ keyValue("Files", Object.keys(skillFiles).length.toString());
3386
4797
  }
3387
4798
  if (skipped.length > 0) {
3388
4799
  console.log();
@@ -3401,10 +4812,35 @@ skillCommand.command("install").description(`Install the ${brand.displayName} sk
3401
4812
  console.log(chalk12.gray("Supported editors: " + EDITORS.map((e) => e.name).join(", ")));
3402
4813
  console.log(chalk12.gray("Use --dir <path> to install to a specific directory"));
3403
4814
  }
4815
+ if (installed.length > 0 && !options.skipRemotion) {
4816
+ console.log();
4817
+ info("Installing remotion-best-practices skill...");
4818
+ try {
4819
+ execSync("npx -y skills add https://github.com/remotion-dev/skills --skill remotion-best-practices --all", {
4820
+ stdio: "inherit",
4821
+ timeout: 6e4
4822
+ });
4823
+ success("remotion-best-practices skill installed");
4824
+ } catch (err) {
4825
+ warn("Could not install remotion-best-practices skill automatically");
4826
+ console.log(chalk12.gray(" Run manually: npx skills add remotion-dev/skills"));
4827
+ }
4828
+ }
3404
4829
  console.log();
3405
4830
  });
3406
- skillCommand.command("show").description("Display the skill content").action(() => {
3407
- console.log(generateSkillContent(brand));
4831
+ skillCommand.command("show").description("Display the skill content").option("-a, --all", "Show all files").action((options) => {
4832
+ const files = generateAllSkillFiles(brand);
4833
+ if (options.all) {
4834
+ for (const [path2, content] of Object.entries(files)) {
4835
+ console.log(chalk12.bold.cyan(`
4836
+ === ${path2} ===
4837
+ `));
4838
+ console.log(content);
4839
+ }
4840
+ } else {
4841
+ console.log(files["SKILL.md"]);
4842
+ console.log(chalk12.gray("\nUse --all to show all files"));
4843
+ }
3408
4844
  });
3409
4845
  skillCommand.command("uninstall").description(`Remove the ${brand.displayName} skill from AI coding assistants`).option("-g, --global", "Uninstall globally (from home directory)", true).option("-l, --local", "Uninstall locally (from current directory)").action(async (options) => {
3410
4846
  const { rmSync } = await import("fs");
@@ -3429,10 +4865,13 @@ skillCommand.command("uninstall").description(`Remove the ${brand.displayName} s
3429
4865
  }
3430
4866
  console.log();
3431
4867
  });
3432
- function installSkill(skillPath, content, force) {
3433
- const skillFile = join(skillPath, "SKILL.md");
3434
- mkdirSync(skillPath, { recursive: true });
3435
- writeFileSync(skillFile, content, "utf-8");
4868
+ function installSkill(skillPath, files, force) {
4869
+ mkdirSync(join(skillPath, "rules"), { recursive: true });
4870
+ mkdirSync(join(skillPath, "assets"), { recursive: true });
4871
+ for (const [relativePath, content] of Object.entries(files)) {
4872
+ const filePath = join(skillPath, relativePath);
4873
+ writeFileSync(filePath, content, "utf-8");
4874
+ }
3436
4875
  }
3437
4876
 
3438
4877
  // src/commands/tts.ts
@@ -3860,8 +5299,98 @@ init_output();
3860
5299
  init_types();
3861
5300
  import { Command as Command19 } from "commander";
3862
5301
  import ora12 from "ora";
3863
- import { mkdir, writeFile as writeFile5, readFile as readFile2 } from "fs/promises";
3864
- import { join as join2 } from "path";
5302
+ import { mkdir, writeFile as writeFile5, readFile as readFile2, access, rm } from "fs/promises";
5303
+ import { join as join2, resolve as resolve4 } from "path";
5304
+ import { execSync as execSync2, spawn } from "child_process";
5305
+ var DEFAULT_TEMPLATE = "inizio-inc/remotion-composition";
5306
+ var DEFAULT_FPS = 30;
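+ // Split a narration script into sections: explicit "---" or "[Section N]" markers win;
+ // otherwise split on sentences and fold very short sentences into a neighbor.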
5307
+ function parseScriptIntoSections(script) {
5308
+ if (script.includes("---") || script.includes("[Section")) {
5309
+ const parts = script.split(/---|\[Section \d+\]/i).filter((s) => s.trim());
5310
+ if (parts.length > 1) {
5311
+ return parts.map((p) => p.trim());
5312
+ }
5313
+ }
5314
+ const sentences = script.split(/(?<=[.!?])\s+/).map((s) => s.trim()).filter((s) => s.length > 0);
5315
+ const sections = [];
5316
+ let pendingShort = "";
5317
+ for (const sentence of sentences) {
5318
+ const wordCount = sentence.split(/\s+/).length;
5319
+ if (pendingShort) {
5320
+ sections.push(`${pendingShort} ${sentence}`);
5321
+ pendingShort = "";
5322
+ } else if (wordCount < 5 && sections.length < sentences.length - 1) {
5323
+ pendingShort = sentence;
5324
+ } else {
5325
+ sections.push(sentence);
5326
+ }
5327
+ }
5328
+ if (pendingShort) {
5329
+ if (sections.length > 0) {
5330
+ sections[sections.length - 1] += ` ${pendingShort}`;
5331
+ } else {
5332
+ sections.push(pendingShort);
5333
+ }
5334
+ }
5335
+ return sections;
5336
+ }
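+ // Prefer exact TTS character timestamps when available; otherwise allocate the total
+ // duration to sections in proportion to their word counts.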
5337
+ function calculateSectionTiming(sections, totalDuration, fps = DEFAULT_FPS, timestamps) {
5338
+ if (timestamps && timestamps.characters.length > 0) {
5339
+ return calculateSectionTimingFromTimestamps(sections, timestamps, fps);
5340
+ }
5341
+ const totalWords = sections.reduce((sum, s) => sum + s.split(/\s+/).length, 0);
5342
+ let currentTime = 0;
5343
+ return sections.map((text, index) => {
5344
+ const wordCount = text.split(/\s+/).length;
5345
+ const proportion = wordCount / totalWords;
5346
+ const durationInSeconds = totalDuration * proportion;
5347
+ const durationInFrames = Math.round(durationInSeconds * fps);
5348
+ const section = {
5349
+ id: index + 1,
5350
+ text,
5351
+ wordCount,
5352
+ startTime: currentTime,
5353
+ endTime: currentTime + durationInSeconds,
5354
+ durationInSeconds,
5355
+ durationInFrames
5356
+ };
5357
+ currentTime += durationInSeconds;
5358
+ return section;
5359
+ });
5360
+ }
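+ // Walk the character-level timestamps to locate each section's start and end,
+ // skipping whitespace-only characters at section boundaries.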
5361
+ function calculateSectionTimingFromTimestamps(sections, timestamps, fps) {
5362
+ const { characters, characterStartTimesSeconds, characterEndTimesSeconds } = timestamps;
5363
+ const fullText = characters.join("");
5364
+ const results = [];
5365
+ let charIndex = 0;
5366
+ for (let i = 0; i < sections.length; i++) {
5367
+ const sectionText = sections[i];
5368
+ const sectionLength = sectionText.length;
5369
+ while (charIndex < characters.length && characters[charIndex].match(/^\s*$/)) {
5370
+ charIndex++;
5371
+ }
5372
+ const startCharIndex = charIndex;
5373
+ const startTime = characterStartTimesSeconds[startCharIndex] || 0;
5374
+ charIndex += sectionLength;
5375
+ let endCharIndex = charIndex - 1;
5376
+ while (endCharIndex > startCharIndex && characters[endCharIndex]?.match(/^\s*$/)) {
5377
+ endCharIndex--;
5378
+ }
5379
+ const endTime = characterEndTimesSeconds[Math.min(endCharIndex, characterEndTimesSeconds.length - 1)] || startTime + 1;
5380
+ const durationInSeconds = endTime - startTime;
5381
+ const durationInFrames = Math.round(durationInSeconds * fps);
5382
+ results.push({
5383
+ id: i + 1,
5384
+ text: sectionText,
5385
+ wordCount: sectionText.split(/\s+/).length,
5386
+ startTime,
5387
+ endTime,
5388
+ durationInSeconds,
5389
+ durationInFrames
5390
+ });
5391
+ }
5392
+ return results;
5393
+ }
3865
5394
  async function downloadFile3(url, outputPath) {
3866
5395
  if (url.startsWith("data:")) {
3867
5396
  const matches = url.match(/^data:[^;]+;base64,(.+)$/);
@@ -3937,13 +5466,24 @@ var createCommand2 = new Command19("create").description("Create video assets (v
3937
5466
  duration: ttsResult.duration,
3938
5467
  voice: options.voice,
3939
5468
  provider: ttsResult.provider,
3940
- cost: ttsResult.cost
5469
+ cost: ttsResult.cost,
5470
+ timestamps: ttsResult.timestamps
5471
+ // Include for word-level sync
3941
5472
  };
3942
5473
  if (format === "human") {
3943
5474
  spinner?.stop();
3944
5475
  success(`Voiceover: ${voiceoverPath} (${ttsResult.duration.toFixed(1)}s)`);
3945
5476
  spinner?.start();
3946
5477
  }
5478
+ if (spinner) spinner.text = "Analyzing script sections...";
5479
+ const sectionTexts = parseScriptIntoSections(script);
5480
+ const sections = calculateSectionTiming(sectionTexts, ttsResult.duration, DEFAULT_FPS, ttsResult.timestamps);
5481
+ if (format === "human") {
5482
+ spinner?.stop();
5483
+ const timingSource = ttsResult.timestamps ? "TTS timestamps" : "word estimation";
5484
+ success(`Sections: ${sections.length} sections (timing from ${timingSource})`);
5485
+ spinner?.start();
5486
+ }
3947
5487
  const musicDuration = Math.min(30, Math.ceil(ttsResult.duration) + 5);
3948
5488
  const musicPrompt = options.musicPrompt || "uplifting background music, positive energy";
3949
5489
  if (spinner) spinner.text = "Generating music...";
@@ -4025,13 +5565,24 @@ var createCommand2 = new Command19("create").description("Create video assets (v
4025
5565
  success(`Images: Downloaded ${downloadedImages.length} images to ${imagesDir}`);
4026
5566
  spinner?.start();
4027
5567
  }
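+ // Assign downloaded images to sections round-robin so every section gets a backdrop.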
5568
+ const sectionsWithImages = sections.map((section, index) => {
5569
+ const imageIndex = index % downloadedImages.length;
5570
+ return {
5571
+ ...section,
5572
+ imagePath: downloadedImages[imageIndex]?.path
5573
+ };
5574
+ });
4028
5575
  if (spinner) spinner.text = "Writing manifest...";
5576
+ const totalDurationInFrames = Math.round(ttsResult.duration * DEFAULT_FPS);
4029
5577
  const manifest = {
4030
5578
  topic,
4031
5579
  script,
4032
5580
  voiceover: voiceoverInfo,
4033
5581
  music: musicInfo,
4034
5582
  images: downloadedImages,
5583
+ sections: sectionsWithImages,
5584
+ totalDurationInFrames,
5585
+ fps: DEFAULT_FPS,
4035
5586
  totalCost,
4036
5587
  createdAt: (/* @__PURE__ */ new Date()).toISOString()
4037
5588
  };
@@ -4052,15 +5603,16 @@ var createCommand2 = new Command19("create").description("Create video assets (v
4052
5603
  info(`Topic: ${topic}`);
4053
5604
  info(`Voiceover: ${voiceoverInfo.path} (${voiceoverInfo.duration.toFixed(1)}s, ${voiceoverInfo.voice})`);
4054
5605
  info(`Music: ${musicInfo.path} (${musicInfo.duration}s)`);
5606
+ info(`Sections: ${sections.length} (${totalDurationInFrames} frames at ${DEFAULT_FPS}fps)`);
4055
5607
  info(`Images: ${downloadedImages.length} downloaded`);
4056
5608
  info(`Manifest: ${manifestPath}`);
4057
5609
  console.log();
4058
5610
  info(`Total cost: $${totalCost.toFixed(4)}`);
4059
5611
  console.log();
4060
5612
  info("Next steps:");
4061
- info(" 1. Create Remotion project (see remotion-best-practices skill)");
4062
- info(" 2. Use manifest data to configure video composition");
4063
- info(" 3. Run: npm start (preview) / npm run render (build)");
5613
+ info(" 1. Create Remotion scenes matching section timings in manifest");
5614
+ info(" 2. Each section has exact durationInFrames - use these for sync");
5615
+ info(" 3. Run: npx remotion render FullVideo out/video.mp4");
4064
5616
  } catch (err) {
4065
5617
  spinner?.stop();
4066
5618
  error(err instanceof Error ? err.message : "Unknown error");
@@ -4111,10 +5663,93 @@ var searchCommand2 = new Command19("search").description("Search for stock video
4111
5663
  process.exit(EXIT_CODES.GENERAL_ERROR);
4112
5664
  }
4113
5665
  });
4114
- var videoCommand = new Command19("video").description("Video asset generation commands").addCommand(createCommand2).addCommand(searchCommand2);
5666
+ var initCommand = new Command19("init").description("Create a new Remotion video project from template").argument("<name>", "Project directory name").option("-t, --template <repo>", "GitHub repo (user/repo)", DEFAULT_TEMPLATE).option("--no-install", "Skip pnpm install").option("-f, --format <format>", "Output format: human, json, quiet", "human").action(async (name, options) => {
5667
+ const format = options.format;
5668
+ const spinner = format === "human" ? ora12("Initializing video project...").start() : null;
5669
+ try {
5670
+ const targetDir = resolve4(process.cwd(), name);
5671
+ try {
5672
+ await access(targetDir);
5673
+ spinner?.stop();
5674
+ error(`Directory "${name}" already exists`);
5675
+ process.exit(EXIT_CODES.INVALID_INPUT);
5676
+ } catch {
5677
+ }
5678
+ if (spinner) spinner.text = `Downloading template from ${options.template}...`;
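+ // Prefer degit (no git history); on failure, fall back to a shallow git clone and strip .git.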
5679
+ try {
5680
+ execSync2(`npx --yes degit ${options.template} "${targetDir}"`, {
5681
+ stdio: "pipe"
5682
+ });
5683
+ } catch {
5684
+ if (spinner) spinner.text = "Cloning template...";
5685
+ execSync2(`git clone --depth 1 https://github.com/${options.template}.git "${targetDir}"`, {
5686
+ stdio: "pipe"
5687
+ });
5688
+ await rm(join2(targetDir, ".git"), { recursive: true, force: true });
5689
+ }
5690
+ if (format === "human") {
5691
+ spinner?.stop();
5692
+ success(`Template downloaded to ${name}/`);
5693
+ spinner?.start();
5694
+ }
5695
+ if (options.install) {
5696
+ if (spinner) spinner.text = "Installing dependencies...";
5697
+ await new Promise((resolvePromise, reject) => {
5698
+ const child = spawn("pnpm", ["install"], {
5699
+ cwd: targetDir,
5700
+ stdio: "pipe",
5701
+ shell: true
5702
+ });
5703
+ child.on("close", (code) => {
5704
+ if (code === 0) {
5705
+ resolvePromise();
5706
+ } else {
5707
+ reject(new Error(`pnpm install failed with code ${code}`));
5708
+ }
5709
+ });
5710
+ child.on("error", reject);
5711
+ });
5712
+ if (format === "human") {
5713
+ spinner?.stop();
5714
+ success("Dependencies installed");
5715
+ spinner?.start();
5716
+ }
5717
+ }
5718
+ spinner?.stop();
5719
+ if (format === "json") {
5720
+ printJson({
5721
+ name,
5722
+ path: targetDir,
5723
+ template: options.template,
5724
+ installed: options.install
5725
+ });
5726
+ return;
5727
+ }
5728
+ if (format === "quiet") {
5729
+ console.log(targetDir);
5730
+ return;
5731
+ }
5732
+ console.log();
5733
+ success(`Video project "${name}" created successfully!`);
5734
+ console.log();
5735
+ info("Next steps:");
5736
+ info(` cd ${name}`);
5737
+ if (!options.install) {
5738
+ info(" pnpm install");
5739
+ }
5740
+ info(" pnpm dev # Preview in Remotion Studio");
5741
+ info(" cc video create ... # Generate assets to public/");
5742
+ info(" pnpm render # Render final video");
5743
+ } catch (err) {
5744
+ spinner?.stop();
5745
+ error(err instanceof Error ? err.message : "Unknown error");
5746
+ process.exit(EXIT_CODES.GENERAL_ERROR);
5747
+ }
5748
+ });
5749
+ var videoCommand = new Command19("video").description("Video asset generation commands").addCommand(initCommand).addCommand(createCommand2).addCommand(searchCommand2);
4115
5750
 
4116
5751
  // src/index.ts
4117
- var VERSION = "0.1.5";
5752
+ var VERSION = "0.1.6";
4118
5753
  var program = new Command20();
4119
5754
  var cmdName = brand.commands[0];
4120
5755
  program.name(cmdName).description(brand.description).version(VERSION, "-v, --version", "Show version number").option("--debug", "Enable debug logging").option("--no-color", "Disable colored output").configureOutput({