@conceptcraft/mindframes 0.1.13 → 0.1.14

This diff shows the changes between publicly released package versions as they appear in their respective public registries. It is provided for informational purposes only.
package/dist/index.js CHANGED
@@ -2,7 +2,7 @@
2
2
  import { createRequire } from 'module'; const require = createRequire(import.meta.url);
3
3
 
4
4
  // src/index.ts
5
- import { Command as Command20 } from "commander";
5
+ import { Command as Command21 } from "commander";
6
6
  import chalk13 from "chalk";
7
7
 
8
8
  // src/lib/brand.ts
@@ -1109,6 +1109,12 @@ async function pollForCompletion(checkFn, maxAttempts = 60, intervalMs = 2e3) {
1109
1109
  }
1110
1110
  throw new ApiError("Operation timed out", 408, 1);
1111
1111
  }
1112
+ async function scrapeUrl(url) {
1113
+ return request("/api/cli/scrape", {
1114
+ method: "POST",
1115
+ body: { url }
1116
+ });
1117
+ }
1112
1118
 
1113
1119
  // src/lib/feature-cache.ts
1114
1120
  var cache = new Conf2({
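A minimal sketch of how the new `scrapeUrl` helper can be consumed, assuming `request()` resolves to the same `{ success, error, data }` envelope that the `scrape` command added later in this diff reads (the field names below mirror that command):

```js
// Sketch only, not part of the package; assumes scrapeUrl() is in scope as defined above.
async function printPage(url) {
  const result = await scrapeUrl(url);
  if (!result.success) {
    throw new Error(result.error || "Failed to scrape URL");
  }
  const { title, content } = result.data;
  if (title) console.log(`Title: ${title}`);
  console.log(content); // markdown content of the scraped page
}
```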
@@ -1164,7 +1170,7 @@ function getCachePath() {
1164
1170
  }
1165
1171
 
1166
1172
  // src/commands/login.ts
1167
- var CLI_CLIENT_NAME = "ConceptCraft CLI";
1173
+ var CLI_CLIENT_NAME = `${brand.displayName} CLI`;
1168
1174
  var CALLBACK_PORT_START = 8765;
1169
1175
  var CALLBACK_PORT_END = 8775;
1170
1176
  function generateCodeVerifier() {
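The rebranding changes in this and the following hunks replace hard-coded "ConceptCraft"/"conceptcraft" strings with fields of the `brand` config from src/lib/brand.ts. A hypothetical shape, limited to the fields this diff actually references; the values are illustrative, not taken from the package:

```js
// Hypothetical example values; only the fields referenced in this diff are shown.
const brand = {
  displayName: "Mindframes",              // used in headings and prompts
  commands: ["mindframes"],               // commands[0] is the CLI binary name shown in help text
  apiUrl: "https://www.mindframes.app",   // default API URL (shown with "https://" stripped)
  description: "AI-powered content creation CLI"
};
```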
@@ -1414,11 +1420,11 @@ async function runLoginFlow(options) {
1414
1420
  throw err;
1415
1421
  }
1416
1422
  }
1417
- var loginCommand = new Command("login").description("Authenticate with ConceptCraft (opens browser)").option("--no-browser", "Print URL instead of opening browser").action(async (options) => {
1423
+ var loginCommand = new Command("login").description(`Authenticate with ${brand.displayName} (opens browser)`).option("--no-browser", "Print URL instead of opening browser").action(async (options) => {
1418
1424
  console.log();
1419
1425
  if (hasOAuthTokens()) {
1420
1426
  warn("You are already logged in.");
1421
- info("Run 'conceptcraft logout' to log out first, or continue to re-authenticate.");
1427
+ info(`Run '${brand.commands[0]} logout' to log out first, or continue to re-authenticate.`);
1422
1428
  console.log();
1423
1429
  }
1424
1430
  try {
@@ -1476,7 +1482,7 @@ import ora2 from "ora";
1476
1482
  var configCommand = new Command3("config").description("Manage CLI configuration").addCommand(
1477
1483
  new Command3("init").description("Initialize configuration interactively").action(async () => {
1478
1484
  console.log();
1479
- console.log(chalk4.bold("ConceptCraft CLI Configuration"));
1485
+ console.log(chalk4.bold(`${brand.displayName} CLI Configuration`));
1480
1486
  console.log(chalk4.gray("\u2500".repeat(35)));
1481
1487
  console.log();
1482
1488
  try {
@@ -1495,7 +1501,7 @@ var configCommand = new Command3("config").description("Manage CLI configuration
1495
1501
  });
1496
1502
  setApiKey(apiKey.trim());
1497
1503
  const useCustomUrl = await confirm2({
1498
- message: "Use a custom API URL? (default: www.mindframes.app)",
1504
+ message: `Use a custom API URL? (default: ${brand.apiUrl.replace("https://", "")})`,
1499
1505
  default: false
1500
1506
  });
1501
1507
  if (useCustomUrl) {
@@ -1550,7 +1556,7 @@ var configCommand = new Command3("config").description("Manage CLI configuration
1550
1556
  warn(
1551
1557
  `Could not verify API key: ${apiErr instanceof Error ? apiErr.message : String(apiErr)}`
1552
1558
  );
1553
- warn("You may need to set the team ID manually: mindframes config set team-id <id>");
1559
+ warn(`You may need to set the team ID manually: ${brand.commands[0]} config set team-id <id>`);
1554
1560
  }
1555
1561
  console.log();
1556
1562
  success("Configuration saved!");
@@ -1654,7 +1660,7 @@ var configCommand = new Command3("config").description("Manage CLI configuration
1654
1660
  try {
1655
1661
  await fetchAndCache();
1656
1662
  spinner.succeed("Feature flags refreshed");
1657
- info("Run 'conceptcraft --help' to see updated commands");
1663
+ info(`Run '${brand.commands[0]} --help' to see updated commands`);
1658
1664
  } catch (err) {
1659
1665
  spinner.fail("Failed to refresh");
1660
1666
  error(err instanceof Error ? err.message : String(err));
@@ -2002,29 +2008,29 @@ ${chalk6.bold("Recommended Usage:")}
2002
2008
 
2003
2009
  ${chalk6.bold("Examples:")}
2004
2010
  ${chalk6.gray("# Content from PDF + style from image reference")}
2005
- $ conceptcraft create "Quarterly Report" \\
2011
+ $ ${brand.commands[0]} create "Quarterly Report" \\
2006
2012
  --file ./report.pdf \\
2007
2013
  --reference-url https://example.com/style-template.png \\
2008
2014
  -n 12 -m best --audience "Executive team"
2009
2015
 
2010
2016
  ${chalk6.gray("# Upload content files (PDF, PPTX, DOCX)")}
2011
- $ conceptcraft create "Product Demo" \\
2017
+ $ ${brand.commands[0]} create "Product Demo" \\
2012
2018
  --file ./existing-deck.pptx --file ./specs.pdf \\
2013
2019
  --goal persuade --audience "Enterprise buyers"
2014
2020
 
2015
2021
  ${chalk6.gray("# Inline context with custom styling")}
2016
- $ conceptcraft create "Q4 Business Review" \\
2022
+ $ ${brand.commands[0]} create "Q4 Business Review" \\
2017
2023
  -n 12 -m best --goal inform \\
2018
2024
  --context "Revenue: $50M (+25% YoY), EBITDA: $8M" \\
2019
2025
  --reference-url https://example.com/brand-style.jpg
2020
2026
 
2021
2027
  ${chalk6.gray("# Research presentation from URLs")}
2022
- $ conceptcraft create "AI Industry Trends" \\
2028
+ $ ${brand.commands[0]} create "AI Industry Trends" \\
2023
2029
  -n 10 -m best -t educational \\
2024
2030
  --sources https://example.com/ai-report.pdf
2025
2031
 
2026
2032
  ${chalk6.gray("# Pipe content from another command")}
2027
- $ cat meeting-notes.md | conceptcraft create "Meeting Summary" \\
2033
+ $ cat meeting-notes.md | ${brand.commands[0]} create "Meeting Summary" \\
2028
2034
  -n 6 -m balanced --goal inform
2029
2035
 
2030
2036
  ${chalk6.bold("Content Options (what to include in slides):")}
@@ -2053,7 +2059,7 @@ ${chalk6.bold("Theme Options:")}
2053
2059
  --decorations <style> Background style (none, waves-bottom-left, waves-top-right, blob-corners, minimal)
2054
2060
 
2055
2061
  ${chalk6.gray("Example: Apply corporate colors")}
2056
- $ conceptcraft create "Brand Deck" \\
2062
+ $ ${brand.commands[0]} create "Brand Deck" \\
2057
2063
  --theme blue --primary-color "#1E40AF" --decorations waves-bottom-left
2058
2064
 
2059
2065
  ${chalk6.bold("Mode Reference:")}
@@ -2288,11 +2294,11 @@ Uploading ${options.file.length} file(s)...`));
2288
2294
  console.log(chalk6.gray(" -c, --context <text> Direct text context"));
2289
2295
  console.log(chalk6.gray(" --context-file <path> Read from a file"));
2290
2296
  console.log(chalk6.gray(" --sources <urls...> URLs to scrape"));
2291
- console.log(chalk6.gray(" cat file | conceptcraft Pipe content"));
2297
+ console.log(chalk6.gray(` cat file | ${brand.commands[0]} Pipe content`));
2292
2298
  console.log();
2293
2299
  console.log(chalk6.gray("Example:"));
2294
- console.log(chalk6.cyan(' conceptcraft create "Q4 Report" --file ./report.pdf'));
2295
- console.log(chalk6.cyan(' conceptcraft create "Q4 Report" --context "Revenue: $10M, Growth: 25%"'));
2300
+ console.log(chalk6.cyan(` ${brand.commands[0]} create "Q4 Report" --file ./report.pdf`));
2301
+ console.log(chalk6.cyan(` ${brand.commands[0]} create "Q4 Report" --context "Revenue: $10M, Growth: 25%"`));
2296
2302
  process.exit(6);
2297
2303
  }
2298
2304
  try {
@@ -2433,7 +2439,7 @@ var listCommand = new Command5("list").description("List presentations").option(
2433
2439
  const teamId = options.teamId ?? getDefaultTeamId();
2434
2440
  if (!teamId) {
2435
2441
  error(
2436
- "Team ID required. Set a default with 'mindframes config set team-id <id>' or use --team-id"
2442
+ `Team ID required. Set a default with '${brand.commands[0]} config set team-id <id>' or use --team-id`
2437
2443
  );
2438
2444
  process.exit(6);
2439
2445
  }
@@ -2704,7 +2710,7 @@ var brandingCommand = new Command10("branding").description("Manage brand profil
2704
2710
  console.log();
2705
2711
  console.log(
2706
2712
  chalk8.gray(
2707
- "Create one with: conceptcraft branding extract <url>"
2713
+ `Create one with: ${brand.commands[0]} branding extract <url>`
2708
2714
  )
2709
2715
  );
2710
2716
  }
@@ -2837,7 +2843,7 @@ var brandingCommand = new Command10("branding").description("Manage brand profil
2837
2843
  console.log(` Confidence: ${Math.round(brand2.confidence * 100)}%`);
2838
2844
  console.log();
2839
2845
  }
2840
- info(`Create a presentation with this brand: conceptcraft create "Your Topic" -b ${result.id}`);
2846
+ info(`Create a presentation with this brand: ${brand2.commands[0]} create "Your Topic" -b ${result.id}`);
2841
2847
  console.log();
2842
2848
  } catch (err) {
2843
2849
  spinner.fail("Extraction failed");
@@ -3000,7 +3006,7 @@ var ideasCommand = new Command12("ideas").description("Generate presentation top
3000
3006
  console.log("For now, try these approaches:");
3001
3007
  console.log(
3002
3008
  chalk10.gray(
3003
- " 1. Visit the ConceptCraft dashboard and use the idea generator"
3009
+ ` 1. Visit the ${brand.displayName} dashboard and use the idea generator`
3004
3010
  )
3005
3011
  );
3006
3012
  console.log(
@@ -3073,8 +3079,8 @@ function generateMainSkillContent(context) {
3073
3079
  const { name, cmd: cmd2 } = context;
3074
3080
  const envPrefix = name.toUpperCase().replace(/[^A-Z0-9]/g, "_");
3075
3081
  return `---
3076
- name: ${cmd2}
3077
- description: ${name} CLI for AI-powered content creation. Use when user needs to create presentations, generate video assets (voiceover, music, images, stock videos), use text-to-speech, mix audio, search stock media, or manage branding. This is the main entry point - load specialized skills (${cmd2}-video, ${cmd2}-presentation) for detailed workflows.
3082
+ name: ${name}
3083
+ description: ${name} CLI for AI-powered content creation. Use when user needs to create presentations, generate video assets (voiceover, music, images, stock videos), use text-to-speech, mix audio, search stock media, or manage branding. This is the main entry point - load specialized skills (${name}-video, ${name}-presentation) for detailed workflows.
3078
3084
  ---
3079
3085
 
3080
3086
  # ${name} CLI
@@ -3096,6 +3102,7 @@ A comprehensive CLI for AI-powered content creation. Generate presentations, vid
3096
3102
  | Search images | \`${cmd2} image search -q "mountain landscape"\` |
3097
3103
  | Search videos | \`${cmd2} video search "ocean waves"\` |
3098
3104
  | Mix audio tracks | \`${cmd2} mix create --video v.mp4 --music m.mp3\` |
3105
+ | Scrape URL content | \`${cmd2} scrape https://example.com\` |
3099
3106
 
3100
3107
  ---
3101
3108
 
@@ -3152,7 +3159,7 @@ ${cmd2} create "Pitch Deck" \\
3152
3159
  - \`--mode <instant|ultrafast|fast|balanced|best>\` - Quality/speed tradeoff
3153
3160
  - \`--tone <creative|professional|educational|formal|casual>\`
3154
3161
  - \`--file <paths...>\` - Extract content from files
3155
- - \`--sources <urls...>\` - Scrape URLs for context
3162
+ - \`--sources <urls...>\` - Scrape URLs for context (auto-routes YouTube, Twitter/X)
3156
3163
  - \`--brand <id|url>\` - Apply branding
3157
3164
  - \`--open\` - Open in browser when done
3158
3165
 
@@ -3193,6 +3200,7 @@ cat <<'EOF' | ${cmd2} video create --output ./public
3193
3200
  "imageQuery": "call to action button"
3194
3201
  }
3195
3202
  ],
3203
+ "voiceId": "21m00Tcm4TlvDq8ikWAM",
3196
3204
  "voiceSettings": {
3197
3205
  "speed": 0.95,
3198
3206
  "stability": 0.4,
@@ -3231,10 +3239,10 @@ ${cmd2} tts generate -t "Hello world" -o output.mp3
3231
3239
  # With voice selection
3232
3240
  ${cmd2} tts generate -t "Welcome to the demo" -v Rachel -o welcome.mp3
3233
3241
 
3234
- # With provider and settings (Gemini)
3242
+ # With provider and settings
3235
3243
  ${cmd2} tts generate \\
3236
3244
  -t "Professional narration" \\
3237
- -v Puck \\
3245
+ -v Kore \\
3238
3246
  -p gemini \\
3239
3247
  -s 0.9 \\
3240
3248
  -o narration.mp3
@@ -3244,11 +3252,8 @@ ${cmd2} tts voices
3244
3252
  ${cmd2} tts voices --provider elevenlabs
3245
3253
  \`\`\`
3246
3254
 
3247
- **Providers:** \`elevenlabs\` (default), \`gemini\`, \`openai\`
3248
- **Voices by provider:**
3249
- - ElevenLabs: \`Rachel\`, \`Josh\`, \`Adam\`, \`Bella\` (or voice IDs)
3250
- - Gemini: \`Kore\`, \`Puck\`, \`Charon\`, \`Aoede\`
3251
- - OpenAI: \`alloy\`, \`nova\`, \`echo\`, \`onyx\`
3255
+ **Providers:** \`gemini\`, \`elevenlabs\`, \`openai\`
3256
+ **Popular voices:** \`Kore\`, \`Puck\`, \`Rachel\`, \`alloy\`
3252
3257
  **Speed range:** 0.25 - 4.0 (default: 1.0)
3253
3258
 
3254
3259
  ---
@@ -3341,7 +3346,35 @@ Music automatically loops to match video duration.
3341
3346
 
3342
3347
  ---
3343
3348
 
3344
- ## 8. Branding
3349
+ ## 8. URL Scraping
3350
+
3351
+ Extract content from URLs for analysis, context gathering, or research.
3352
+
3353
+ \`\`\`bash
3354
+ # Basic scrape
3355
+ ${cmd2} scrape https://example.com
3356
+
3357
+ # Save to file
3358
+ ${cmd2} scrape https://company.com/about -o about.md
3359
+
3360
+ # JSON output (for programmatic use)
3361
+ ${cmd2} scrape https://docs.example.com --format json
3362
+
3363
+ # Quiet mode (content only)
3364
+ ${cmd2} scrape https://blog.example.com --format quiet > content.txt
3365
+ \`\`\`
3366
+
3367
+ **Use cases:**
3368
+ - Gather context for presentation creation
3369
+ - Research competitor features
3370
+ - Extract documentation for analysis
3371
+ - Feed content into other workflows
3372
+
3373
+ **Output includes:** title, description, full content (markdown), token count, cost
3374
+
3375
+ ---
3376
+
3377
+ ## 9. Branding
3345
3378
 
3346
3379
  Manage brand profiles for consistent styling.
3347
3380
 
@@ -3364,7 +3397,7 @@ ${cmd2} create "Topic" --brand my-company
3364
3397
 
3365
3398
  ---
3366
3399
 
3367
- ## 9. Configuration
3400
+ ## 10. Configuration
3368
3401
 
3369
3402
  \`\`\`bash
3370
3403
  # Interactive setup
@@ -3432,7 +3465,7 @@ cat scenes.json | ${cmd2} video create -o product-demo/public
3432
3465
 
3433
3466
  # 4. Render and add thumbnail (version files: v1, v2, v3...)
3434
3467
  cd product-demo
3435
- npx remotion render FullVideo out/FullVideo-v1.mp4
3468
+ pnpm exec remotion render FullVideo out/FullVideo-v1.mp4
3436
3469
  ${cmd2} video thumbnail out/FullVideo-v1.mp4 --frame 60
3437
3470
  \`\`\`
3438
3471
 
@@ -3491,236 +3524,636 @@ ${cmd2} --version # Version info
3491
3524
  `;
3492
3525
  }
3493
3526
 
3494
- // src/commands/skill/rules/video/content.ts
3495
- var MOTION_DESIGN_GUIDELINES = `# Motion Design Principles
3527
+ // src/commands/skill/generate-video-skill.ts
3528
+ function generateVideoSkillContent(context) {
3529
+ const { name, cmd: cmd2 } = context;
3530
+ return `---
3531
+ name: ${name}-video
3532
+ description: Orchestrates video asset assembly for marketing videos, product demos, explainers, and promo content. Analyzes projects to extract branding (logos, colors, fonts), identifies reusable UI components, generates voiceovers and music via CLI, searches stock media, and produces a video-manifest.json. Use when user says "create a video", "make a promo", "product demo", "explainer video", "marketing video", "gather video assets", "video for my project", or "TikTok/Reels content".
3533
+ ---
3496
3534
 
3497
- **Core Philosophy:** "Atomic, Kinetic Construction" - nothing is static. Elements arrive and leave via physics-based transitions.
3535
+ # Video Asset Assembler (Phase 1)
3498
3536
 
3499
- ## Design System Approach
3537
+ You are an expert video producer who prepares all assets needed for Remotion video rendering. Your job is to analyze the project, gather resources, generate audio, and create a complete manifest\u2014NOT to render the final video.
3500
3538
 
3501
- **Separate content from logic:**
3502
- - Theme object: colors (primary, accent, background, text), fonts, corner radiuses in config
3503
- - Scene object: define by duration in frames and content type, not timecodes
3504
- - Avoid hardcoding: colors, text, data values can be passed via props or config file
3539
+ ## When to Use This Skill
3505
3540
 
3506
- ## Animation Physics (Spring-Based)
3541
+ \u2705 **USE for:**
3542
+ - Marketing videos, product demos, launch videos
3543
+ - News/trending topic videos
3544
+ - Explainer videos from any source
3545
+ - Social content (TikTok, Reels, YouTube Shorts)
3546
+ - Any video that needs voiceover + visuals + music
3507
3547
 
3508
- **Spring Pop:**
3509
- - UI cards, bubbles, logos "pop" with bounce
3510
- - \`spring()\` function works well: low mass (0.5), moderate damping (10-12), high stiffness (100-200)
3511
- - Spring value can map to scale (0 to 1) and opacity (0 to 1)
3512
- - Consider \`transform-origin\` placement (center for bubbles, top for dropdowns)
3548
+ \u274C **DO NOT use for:**
3549
+ - Editing existing video files (use ffmpeg directly)
3550
+ - Live streaming setup
3551
+ - Video format conversion
3552
+ - Simple image slideshows without voiceover
3513
3553
 
3514
- **Kinetic Typography:**
3515
- - Text entering line-by-line or word-by-word (not all at once)
3516
- - Can split text into arrays, stagger with delays (index * 5 frames)
3517
- - \`interpolate()\` works for opacity [0,1] and translateY [20px, 0px] - slide up
3518
- - Cubic easing works well for slide-up motion
3554
+ ## Critical Rules
3519
3555
 
3520
- **Constructed UI:**
3521
- - Building UI from HTML/CSS divs works better than screenshots
3522
- - If user shares project: study actual UI components (buttons, cards, modals) and implement pixel-perfect recreations - match colors, fonts, shadows, border-radius
3523
- - Bar charts: can animate height/width from 0% to target
3524
- - Line charts: can animate SVG path \`stroke-dashoffset\`
3525
- - Donut charts: can animate \`stroke-dasharray\` of SVG circle
3526
- - Numbers: counter component interpolating from 0 to target over 30-60 frames
3556
+ 1. **NEVER hallucinate file paths.** Before referencing any file, VERIFY it exists with \`ls\` or \`find\`.
3557
+ 2. **NEVER skip the discovery phase.** Always analyze source material before scripting.
3558
+ 3. **ALWAYS use the CLI** for audio generation\u2014don't simulate or skip it.
3559
+ 4. **ALWAYS produce a valid video-manifest.json** at the end.
3560
+ 5. **ALWAYS ask clarifying questions** if video type, duration, or tone is unclear.
3527
3561
 
3528
- ## Visual Composition
3562
+ ## Video Types & Source Detection
3529
3563
 
3530
- **Background Ambience:**
3531
- - Static backgrounds feel flat - consider faint dots/patterns
3532
- - Slow oscillation works well: \`Math.sin(frame / 100)\` applied to position/rotation for "floating" effect
3533
- - Parallax adds depth: background moves slower than foreground
3564
+ Detect the video type and source material FIRST. This determines your asset strategy.
3534
3565
 
3535
- **SVG Handling:**
3536
- - Inline SVGs allow control of fill color via theme (better than img tags)
3537
- - Chat bubbles can be constructed with SVG paths or heavy border-radius
3538
- - Animating bubble "tail" separately adds polish
3566
+ | Video Type | Source | Asset Strategy |
3567
+ |------------|--------|----------------|
3568
+ | **Project Launch** | Local codebase | Extract components, branding, screenshots from code |
3569
+ | **Website Promo** | Live URL | Screenshot pages, extract colors/logos via browser |
3570
+ | **News/Trending** | External articles, URLs | Summarize content, use 100% stock media |
3571
+ | **Explainer/Tutorial** | Topic or concept | Research topic, use diagrams + stock |
3572
+ | **Product Announcement** | Press release, changelog | Extract key points, mix branded + stock |
3539
3573
 
3540
- **Scene Transitions:**
3541
- - Scenes can slide or camera can "pan" to new area
3542
- - Slide-out approach: Scene A \`translateX\` 0% to -100%, Scene B 100% to 0%
3543
- - Spatial pan approach: place scenes on giant canvas, animate parent container transform
3574
+ ### Source Detection Logic
3544
3575
 
3545
- ## Suggested Component Architecture
3576
+ Check what the user provides or what's available:
3546
3577
 
3547
- Consider these reusable patterns:
3548
- - KineticText: text, delay, style props - handles word-splitting and stagger
3549
- - SmartCard: container with Spring Pop entry and glassmorphism styles
3550
- - AnimatedCounter: from, to, duration props - number ticking
3551
- - ProgressBar/ChartElement: percentage, color props - growth animation from 0
3578
+ | User Provides | Action |
3579
+ |---------------|--------|
3580
+ | **Local codebase** (package.json, src/) | Extract components, branding, screenshots |
3581
+ | **URL to website** | Screenshot pages, extract colors/logos |
3582
+ | **URL to article/news** | Fetch content, summarize key points |
3583
+ | **Document** (PDF, markdown, .txt) | Parse and extract key messages |
3584
+ | **Raw text/script** | Use directly for voiceover |
3585
+ | **Images/videos** | Use as-is for scenes |
3586
+ | **Topic/concept** | Research + generate stock queries |
3552
3587
 
3553
- **Motion Blur (optional):** Can simulate by stretching element in direction of movement on fast transitions.
3554
- `;
3555
- var SVG_ANIMATION_GUIDELINES = `# SVG Line Animation (Write-On Effect)
3588
+ **Sources can combine.** Examples:
3589
+ - "Launch video for my project + our blog post" = codebase + URL
3590
+ - "News video about X with our branding" = article + local logo
3591
+ - "Explainer using these screenshots" = user images + generated VO
3556
3592
 
3557
- **Core Concept:** "Invisible Ink Rule" - lines draw in (don't fade in), as if hand-drawn in real-time.
3593
+ ---
3558
3594
 
3559
- **Animation approach:**
3560
- - Setting \`pathLength="1"\` on SVG path elements normalizes length
3561
- - Animating \`strokeDashoffset\` from 1 (hidden) to 0 (drawn) creates write-on effect
3562
- - \`strokeDasharray: 1\` with interpolate \`[1, 0]\` over 20-30 frames works well
3563
- - \`stroke-linecap="round"\` creates friendly hand-drawn look
3595
+ ## Phase 1: Discovery & Analysis
3564
3596
 
3565
- **Draw & vanish sequence:**
3566
- - Draw in: 20 frames (offset 1\u21920)
3567
- - Hold: 10 frames
3568
- - Draw out: fade opacity or continue offset
3597
+ Run the relevant discovery based on detected source type.
3569
3598
 
3570
- **Reusable component pattern:**
3571
- - Props: path data (d), color, width, delay, type (underline/spark/circle/arrow)
3572
- - Pre-defined path dictionaries work better than generating random coordinates
3573
- - Positioning with top/left/scale/rotation props for text accents
3574
- `;
3575
- var ASSET_USAGE_GUIDELINES = `# Asset Usage & Optimization
3599
+ ### 1.1 Codebase Analysis (if local project)
3576
3600
 
3577
- **For project-specific videos:** Study the project first - extract logos, colors, fonts, actual UI components. Recreate components pixel-perfect in Remotion (match exact colors, shadows, border-radius, fonts). Use project's actual branding and design system for authentic look.
3601
+ \`\`\`bash
3602
+ # Find branding assets
3603
+ find . -type f \\( -name "logo*" -o -name "favicon*" -o -name "brand*" \\) 2>/dev/null | head -20
3578
3604
 
3579
- **CLI provides flexible asset search** - images and videos can be used creatively throughout compositions.
3605
+ # Find color configuration
3606
+ find . -type f \\( -name "tailwind.config.*" -o -name "theme.*" -o -name "colors.*" \\) 2>/dev/null
3580
3607
 
3581
- **Video assets (from CLI video search):**
3582
- - Full-screen backgrounds (with overlays/text)
3583
- - Embedded in UI cards or windows alongside text
3584
- - Picture-in-picture style elements
3585
- - Background layers with reduced opacity
3586
- - Transitional footage between scenes
3608
+ # Find impressive UI components (prioritize pages/containers over atoms)
3609
+ find . -path "*/components/*" -name "*.tsx" 2>/dev/null | head -30
3610
+ \`\`\`
3587
3611
 
3588
- **Image assets (from CLI image search):**
3589
- - Scene backgrounds (static or with animation)
3590
- - Embedded elements within compositions
3591
- - UI component content (cards, panels)
3592
- - Layered for depth and parallax effects
3612
+ **Extract:**
3613
+ - [ ] Logo path (verify exists)
3614
+ - [ ] Primary/accent colors from config
3615
+ - [ ] 2-4 "hero" components worth showcasing
3593
3616
 
3594
- **Dynamic backgrounds (Three.js/WebGL):**
3595
- - Three.js/React Three Fiber for performance-optimized animated backgrounds
3596
- - Particle systems, procedural gradients, geometric patterns
3597
- - SVG animations for abstract shapes and patterns
3598
- - WebGL shaders for dynamic effects
3599
- - Combines well with static assets for depth
3617
+ ### 1.2 Website Analysis (if URL provided)
3600
3618
 
3601
- **Best approach:** Mix CLI assets (images/videos) with generated elements (Three.js, SVG) for rich, performant compositions.
3602
- `;
3619
+ \`\`\`bash
3620
+ # Scrape website content for analysis
3621
+ ${cmd2} scrape "https://example.com" --format json
3603
3622
 
3604
- // src/commands/skill/generate-video-skill.ts
3605
- function generateVideoSkillContent(context) {
3606
- const { name, cmd: cmd2 } = context;
3607
- return `---
3608
- name: ${cmd2}-video
3609
- description: Use when user asks to create videos (product demos, explainers, social content, promos). Handles video asset generation, Remotion implementation, and thumbnail embedding.
3610
- ---
3623
+ # Save content to file for reference
3624
+ ${cmd2} scrape "https://example.com" -o ./content/homepage.md
3611
3625
 
3612
- # ${name} Video Creation CLI
3626
+ # Scrape multiple pages
3627
+ ${cmd2} scrape "https://example.com/features" -o ./content/features.md
3628
+ ${cmd2} scrape "https://example.com/pricing" -o ./content/pricing.md
3629
+ \`\`\`
3613
3630
 
3614
- Generate video assets (voiceover, music, images, stock videos) and render with Remotion.
3631
+ **Extract from scraped content:**
3632
+ - [ ] Main value proposition / headline
3633
+ - [ ] Key features and benefits
3634
+ - [ ] Brand name and tagline
3635
+ - [ ] Color mentions (if in content) or use stock visuals
3615
3636
 
3616
- ---
3637
+ ### 1.3 Content Analysis (if document/article)
3617
3638
 
3618
- ## Prerequisites
3639
+ For news articles, blog posts, press releases, changelogs:
3619
3640
 
3620
- **Authenticate:**
3621
3641
  \`\`\`bash
3622
- ${cmd2} login
3642
+ # Scrape and parse article
3643
+ ${cmd2} scrape "https://example.com/article" -o ./content.md
3644
+
3645
+ # Or read local document
3646
+ cat ./CHANGELOG.md | head -100
3623
3647
  \`\`\`
3624
3648
 
3649
+ **Extract:**
3650
+ - [ ] Main headline / title
3651
+ - [ ] 3-5 key points for scenes
3652
+ - [ ] Quotes or stats to highlight
3653
+ - [ ] Related images (if embedded)
3654
+
3655
+ ### 1.4 User-Provided Content
3656
+
3657
+ If user provides text, script, or topic directly:
3658
+
3659
+ **Raw script provided:**
3660
+ - Use as-is for voiceover
3661
+ - Plan visuals based on content
3662
+
3663
+ **Topic/concept provided:**
3664
+ - Research topic using your web search capabilities
3665
+ - Or scrape relevant URLs: \`${cmd2} scrape "https://relevant-article.com"\`
3666
+
3667
+ **Images/videos provided:**
3668
+ - Verify files exist
3669
+ - Note dimensions and quality
3670
+ - Plan scenes around provided assets
3671
+
3672
+ **Extract:**
3673
+ - [ ] Key messages (3-5 per video)
3674
+ - [ ] Visual concepts for each message
3675
+ - [ ] Any specific assets user wants included
3676
+
3677
+ ### 1.5 Content Brief & User Clarification
3678
+
3679
+ **Goal:** Align on video purpose before scripting.
3680
+
3681
+ **Use AskUserQuestion tool:** If your AI environment provides an \`AskUserQuestion\` tool (Claude Code, Agent SDK, etc.), use it to gather user preferences about duration, tone, focus, and CTA before proceeding. This provides a better UX than plain text questions.
3682
+
3683
+ **When to ask vs. proceed:**
3684
+ - **Vague request** ("make a video for my project") \u2192 ASK clarifying questions
3685
+ - **Partial details** ("30-second TikTok for my app") \u2192 ASK remaining questions
3686
+ - **Detailed brief** (duration, tone, CTA specified) \u2192 PROCEED directly
3687
+ - **User says "just do it" or "use defaults"** \u2192 USE defaults below
3688
+
3689
+ **Reasonable defaults (if user opts out of questions):**
3690
+ - Duration: 30-45 seconds
3691
+ - Tone: Professional
3692
+ - Focus: Balanced (show problem + solution)
3693
+ - CTA: "Try it free" or "Learn more"
3694
+ - Structure: Hook \u2192 Main Point \u2192 CTA
3695
+
3696
+ **Validation Gate:** Do NOT proceed to asset generation until you have:
3697
+ - [ ] Clear video type identified
3698
+ - [ ] Source material gathered (screenshots, content, or stock queries planned)
3699
+ - [ ] Duration and tone (confirmed OR defaulted)
3700
+
3625
3701
  ---
3626
3702
 
3627
- ## Video Creation Workflow
3703
+ ## Phase 2: Script & Scene Planning
3704
+
3705
+ ### 2.1 Narrative Structure
3706
+
3707
+ Follow this arc for maximum engagement:
3708
+
3709
+ | Scene | Duration | Purpose | Example |
3710
+ |-------|----------|---------|---------|
3711
+ | **Hook** | 3-5s | Stop the scroll, create curiosity | "What if your dashboard updated itself?" |
3712
+ | **Problem** | 5-8s | Relatable pain point | "Tired of manual data entry?" |
3713
+ | **Solution** | 10-20s | Show the product/concept | Demo of key features |
3714
+ | **Proof** | 5-10s | Social proof, stats, benefits | "10x faster than competitors" |
3715
+ | **CTA** | 3-5s | Clear next step | "Try free at example.com" |
3628
3716
 
3629
- ### 1. Generate Assets
3717
+ **For Short Videos (15-30s):** Hook \u2192 Solution \u2192 CTA (skip Problem/Proof)
3630
3718
 
3631
- Generate voiceover, music, images, and thumbnail:
3719
+ ### 2.2 Scene Definition
3720
+
3721
+ For each scene, define:
3722
+
3723
+ \`\`\`json
3724
+ {
3725
+ "name": "SceneName",
3726
+ "script": "Voiceover text (conversational, 2-3 sentences max)",
3727
+ "imageQuery": "Stock search query if needed",
3728
+ "videoQuery": "Stock video search query if preferred over image",
3729
+ "componentPath": "src/components/Feature.tsx (Project Mode only, VERIFIED path)"
3730
+ }
3731
+ \`\`\`
3732
+
3733
+ **Script Writing Tips:**
3734
+ - Keep sentences SHORT (8-12 words ideal for voiceover)
3735
+ - Use active voice: "Build faster" not "Faster building is possible"
3736
+ - Include natural pauses with periods, not commas
3737
+ - Match script length to desired scene duration (~150 words/minute)
3738
+
3739
+ ---
3740
+
3741
+ ## Phase 3: Asset Generation
3742
+
3743
+ ### 3.1 Generate Voiceovers & Music
3744
+
3745
+ **CRITICAL:** Run this command with your scenes JSON. Music is generated LAST to match total duration.
3632
3746
 
3633
3747
  \`\`\`bash
3634
- cat <<SCENES | ${cmd2} video create --output ./public
3748
+ cat <<'SCENES_JSON' | ${cmd2} video create --output ./public
3635
3749
  {
3636
3750
  "scenes": [
3637
3751
  {
3638
3752
  "name": "Hook",
3639
- "script": "Watch how we transformed this complex workflow into a single click.",
3640
- "imageQuery": "modern dashboard interface dark theme"
3753
+ "script": "Your hook script here.",
3754
+ "imageQuery": "abstract tech gradient particles"
3641
3755
  },
3642
3756
  {
3643
- "name": "Demo",
3644
- "script": "Our AI analyzes your data in real-time, surfacing insights that matter."
3757
+ "name": "Solution",
3758
+ "script": "Your solution script here.",
3759
+ "videoQuery": "modern dashboard interface animation"
3760
+ },
3761
+ {
3762
+ "name": "CTA",
3763
+ "script": "Your call to action here.",
3764
+ "imageQuery": "gradient background call to action"
3645
3765
  }
3646
- ]
3766
+ ],
3767
+ "voiceId": "21m00Tcm4TlvDq8ikWAM",
3768
+ "voiceSettings": {
3769
+ "speed": 1.0,
3770
+ "stability": 0.5,
3771
+ "similarity": 0.75,
3772
+ "style": 0.3
3773
+ },
3774
+ "musicPrompt": "upbeat corporate, modern synth, positive energy"
3647
3775
  }
3648
- SCENES
3776
+ SCENES_JSON
3649
3777
  \`\`\`
3650
3778
 
3651
- **Output:**
3652
- - \`public/audio/*.wav\` - scene voiceovers
3653
- - \`public/audio/music.mp3\` - background music
3654
- - \`public/images/*.jpg\` - scene images (if imageQuery provided)
3655
- - \`public/thumbnail.jpg\` - auto-generated thumbnail
3656
- - \`public/video-manifest.json\` - **complete timeline ready to use**
3779
+ **Voice ID:** Use ElevenLabs voice IDs. Get available voices with \`${cmd2} tts voices\`.
3780
+ Common voice IDs:
3781
+ - \`21m00Tcm4TlvDq8ikWAM\` - Rachel (default, calm female)
3782
+ - \`EXAVITQu4vr4xnSDxMaL\` - Bella (young female)
3783
+ - \`ErXwobaYiN019PkySvjV\` - Antoni (male)
3784
+
3785
+ **Voice Settings Tuning:**
3786
+ | Setting | Low (0.2-0.4) | Medium (0.5) | High (0.6-0.8) |
3787
+ |---------|---------------|--------------|----------------|
3788
+ | stability | More expressive/variable | Balanced | Consistent/monotone |
3789
+ | similarity | Creative interpretation | Balanced | Strict voice match |
3790
+ | style | Subtle emotion | Moderate | Dramatic/animated |
3791
+ | speed | Slower, dramatic | Normal | Faster, energetic |
3657
3792
 
3658
- ### 2. Initialize Remotion (MANDATORY)
3793
+ ### 3.2 Search Additional Stock Media (Optional)
3659
3794
 
3660
- Scaffold the template and copy assets:
3795
+ If scenes need more visuals:
3661
3796
 
3662
3797
  \`\`\`bash
3663
- cd .. && ${cmd2} video init my-video
3664
- cd my-video
3665
- # Assets are now in public/ directory
3798
+ # Search stock images
3799
+ ${cmd2} image search "modern SaaS dashboard dark theme" --count 5
3800
+
3801
+ # Search stock videos
3802
+ ${cmd2} video search "tech particles animation blue" --license free
3666
3803
  \`\`\`
3667
3804
 
3668
- ### 3. Render Video
3805
+ ### 3.3 Verify Generated Assets
3669
3806
 
3670
- **IMPORTANT:**
3671
- - The \`video-manifest.json\` already contains the complete \`timeline\` - **no need to build or transform anything**
3672
- - Just pass \`timeline\` directly to the composition
3673
- - Always version output files: \`out/video-v1.mp4\`, \`out/video-v2.mp4\`, etc.
3807
+ After CLI completes, verify all files exist:
3674
3808
 
3675
- **YouTube (landscape 16:9, NO captions):**
3676
3809
  \`\`\`bash
3677
- npx remotion render YouTubeVideo out/youtube-v1.mp4 \\
3678
- --props='{"timeline":'$(cat public/video-manifest.json | jq -c .timeline)',"showCaptions":false}'
3810
+ ls -la ./public/audio/
3811
+ ls -la ./public/images/
3812
+ ls -la ./public/videos/
3813
+ cat ./public/video-manifest.json
3679
3814
  \`\`\`
3680
3815
 
3681
- **TikTok/Reels/Shorts (vertical 9:16, WITH captions):**
3682
- \`\`\`bash
3683
- npx remotion render TikTokVideo out/tiktok-v1.mp4 \\
3684
- --props='{"timeline":'$(cat public/video-manifest.json | jq -c .timeline)',"showCaptions":true}'
3816
+ **Validation Gate:** Ensure:
3817
+ - [ ] Each scene has an audio file (\`.wav\` or \`.mp3\`)
3818
+ - [ ] Music file exists (\`audio/music.mp3\`)
3819
+ - [ ] video-manifest.json is valid JSON
3820
+ - [ ] Total duration looks reasonable for content
3821
+
3822
+ ---
3823
+
3824
+ ## Phase 4: Manifest Enrichment
3825
+
3826
+ After CLI generates base manifest, enhance it with metadata based on your source type.
3827
+
3828
+ **Add these fields as relevant:**
3829
+
3830
+ \`\`\`json
3831
+ {
3832
+ "source": "codebase|website|article|topic",
3833
+ "sourceUrl": "https://... (if external)",
3834
+
3835
+ "scenes": [
3836
+ {
3837
+ "componentPath": "src/components/X.tsx",
3838
+ "screenshotPath": "screenshots/home.png",
3839
+ "textOverlay": "Key message here"
3840
+ }
3841
+ ],
3842
+
3843
+ "theme": {
3844
+ "primary": "#3b82f6",
3845
+ "accent": "#10b981"
3846
+ },
3847
+
3848
+ "branding": {
3849
+ "logoPath": "public/logo.svg",
3850
+ "companyName": "Name"
3851
+ },
3852
+
3853
+ "attribution": {
3854
+ "source": "SourceName",
3855
+ "date": "2024-01-15"
3856
+ }
3857
+ }
3858
+ \`\`\`
3859
+
3860
+ **Key rules:**
3861
+ - Only add \`componentPath\` for VERIFIED files
3862
+ - Only add \`screenshotPath\` if you captured screenshots
3863
+ - Only add \`attribution\` for news/article sources
3864
+ - \`theme\` and \`branding\` only if extracted from actual source
3865
+
3866
+ ---
3867
+
3868
+ ## Output Checklist
3869
+
3870
+ Before completing Phase 1, verify:
3871
+
3872
+ **Required for ALL videos:**
3873
+ - [ ] \`./public/audio/\` contains voiceover for each scene
3874
+ - [ ] \`./public/audio/music.mp3\` exists (or user opted out)
3875
+ - [ ] \`./public/video-manifest.json\` is valid JSON
3876
+ - [ ] Each scene has visual source (image, video, screenshot, or componentPath)
3877
+
3878
+ **If using local codebase:**
3879
+ - [ ] All \`componentPath\` references VERIFIED to exist
3880
+ - [ ] Theme colors extracted from actual config
3881
+ - [ ] Logo path verified
3882
+
3883
+ **If using website URL:**
3884
+ - [ ] Screenshots captured for key pages
3885
+ - [ ] Colors/branding extracted or noted
3886
+
3887
+ **If news/content video:**
3888
+ - [ ] Source attribution included
3889
+ - [ ] Key points extracted for scenes
3890
+
3891
+ **Success Message:**
3892
+ \`\`\`
3893
+ \u2705 Phase 1 Complete: Video assets assembled
3894
+ - Video type: [project/website/news/explainer]
3895
+ - Scenes: X
3896
+ - Total duration: ~Xs
3897
+ - Audio files: X
3898
+ - Visuals: X images, X videos, X screenshots
3899
+ - Components to adapt: X (if any)
3900
+
3901
+ Ready for Phase 2: Remotion implementation
3685
3902
  \`\`\`
3686
3903
 
3687
- ### Video Compositions
3904
+ ---
3905
+
3906
+ ## Handoff to Phase 2: Remotion Setup
3688
3907
 
3689
- | Composition | Dimensions | Captions | Use Case |
3690
- |-------------|------------|----------|----------|
3691
- | \`YouTubeVideo\` | 1920x1080 (16:9) | No | YouTube, Vimeo, traditional video |
3692
- | \`TikTokVideo\` | 1080x1920 (9:16) | Yes (word-by-word) | TikTok, Reels, Shorts |
3908
+ After Phase 1 is complete, prepare the Remotion project:
3693
3909
 
3694
- **Same timeline, different output formats.** Both use the exact same \`timeline\` from \`video-manifest.json\`.
3910
+ ### Step 1: Initialize Remotion Project
3695
3911
 
3696
- ### 4. Embed Thumbnail
3912
+ \`\`\`bash
3913
+ # Create Remotion project in current directory
3914
+ ${cmd2} video init my-video
3697
3915
 
3698
- Thumbnail is auto-generated during \`video create\`. Inject into final video:
3916
+ # For TikTok/Reels format (9:16)
3917
+ ${cmd2} video init my-video --type tiktok
3918
+ \`\`\`
3919
+
3920
+ This creates \`./my-video/\` with the Remotion template.
3921
+
3922
+ ### Step 2: Copy Assets to Remotion Project
3699
3923
 
3700
3924
  \`\`\`bash
3701
- ${cmd2} video thumbnail out/youtube-v1.mp4 --image public/thumbnail.jpg
3925
+ # Copy all generated assets to Remotion public folder
3926
+ cp -r ./public/audio ./my-video/public/
3927
+ cp -r ./public/images ./my-video/public/
3928
+ cp -r ./public/videos ./my-video/public/
3929
+ cp ./public/video-manifest.json ./my-video/public/
3702
3930
  \`\`\`
3703
3931
 
3704
- **High-CTR thumbnail principles:**
3705
- - Expressive faces boost CTR 20-30%
3706
- - High contrast, bold colors (yellow, orange)
3707
- - Simple: 3 elements max
3708
- - Mobile-first: readable at 320px
3709
- - Specs: 1280x720, <2MB
3932
+ ### Step 3: Verify Transfer
3933
+
3934
+ \`\`\`bash
3935
+ # Confirm all assets are in place
3936
+ ls -la ./my-video/public/
3937
+ cat ./my-video/public/video-manifest.json | jq '.scenes | length'
3938
+ \`\`\`
3939
+
3940
+ **Expected structure in Remotion project:**
3941
+ \`\`\`
3942
+ my-video/
3943
+ \u251C\u2500\u2500 public/
3944
+ \u2502 \u251C\u2500\u2500 audio/
3945
+ \u2502 \u2502 \u251C\u2500\u2500 hook.wav
3946
+ \u2502 \u2502 \u251C\u2500\u2500 solution.wav
3947
+ \u2502 \u2502 \u251C\u2500\u2500 cta.wav
3948
+ \u2502 \u2502 \u2514\u2500\u2500 music.mp3
3949
+ \u2502 \u251C\u2500\u2500 images/
3950
+ \u2502 \u2502 \u2514\u2500\u2500 *.jpg, *.png
3951
+ \u2502 \u251C\u2500\u2500 videos/
3952
+ \u2502 \u2502 \u2514\u2500\u2500 *.mp4
3953
+ \u2502 \u2514\u2500\u2500 video-manifest.json
3954
+ \u251C\u2500\u2500 src/
3955
+ \u2502 \u2514\u2500\u2500 ... (Remotion components)
3956
+ \u2514\u2500\u2500 package.json
3957
+ \`\`\`
3958
+
3959
+ ### Step 4: Hand Off to Phase 2
3960
+
3961
+ \`\`\`
3962
+ \u{1F4E6} Assets transferred to Remotion project: ./my-video/
3963
+
3964
+ Next steps (Phase 2 - Remotion Implementation):
3965
+ cd my-video
3966
+ pnpm dev # Preview in Remotion Studio
3967
+ # Then implement scenes using video-manifest.json
3968
+ \`\`\`
3969
+
3970
+ **Note:** Phase 2 (Remotion implementation) is handled by a separate skill.
3971
+
3972
+ ---
3973
+
3974
+ ## Examples
3975
+
3976
+ ### Example 1: News Video
3977
+ **User says:** "Make a video about the latest AI news"
3978
+
3979
+ **Actions:**
3980
+ 1. Ask: "Any specific article or topic? What duration (30s or 60s)?"
3981
+ 2. User provides article URL or topic
3982
+ 3. Fetch/research content, extract 3-4 key points
3983
+ 4. Write script with Hook \u2192 Key Points \u2192 Takeaway
3984
+ 5. Run CLI with stock video queries for each point
3985
+ 6. Generate manifest with source attribution
3986
+
3987
+ **Result:** 45-second news video with stock footage, voiceover, and music
3988
+
3989
+ ---
3990
+
3991
+ ### Example 2: Product Launch (with codebase)
3992
+ **User says:** "Create a promo video for my app"
3993
+
3994
+ **Actions:**
3995
+ 1. Scan project: find logo, colors, impressive components
3996
+ 2. Ask: "Which features to highlight? Duration preference?"
3997
+ 3. Write script: Hook \u2192 Problem \u2192 Feature Demo \u2192 CTA
3998
+ 4. Run CLI with scenes, use componentPath for demo scenes
3999
+ 5. Enrich manifest with theme colors and branding
4000
+
4001
+ **Result:** 60-second promo with branded visuals and component references
4002
+
4003
+ ---
4004
+
4005
+ ### Example 3: Website Promo (URL only)
4006
+ **User says:** "Make a video for https://example.com"
4007
+
4008
+ **Actions:**
4009
+ 1. Scrape website content: \`${cmd2} scrape "https://example.com" -o content.md\`
4010
+ 2. Extract key messages, features, and value props from content
4011
+ 3. Write script based on scraped content
4012
+ 4. Run CLI with stock media queries matching the product/service
4013
+ 5. Use text overlays for key messages
4014
+
4015
+ **Result:** 30-second website promo with stock visuals and key messaging from site
4016
+
4017
+ ---
4018
+
4019
+ ### Example 4: Explainer (topic only)
4020
+ **User says:** "Explain how blockchain works in 30 seconds"
4021
+
4022
+ **Actions:**
4023
+ 1. Research topic briefly if needed
4024
+ 2. Write simple script: Concept \u2192 How it works \u2192 Why it matters
4025
+ 3. Run CLI with visual metaphor queries ("digital ledger", "chain links")
4026
+ 4. Use 100% stock media
4027
+
4028
+ **Result:** 30-second explainer with abstract visualizations
3710
4029
 
3711
4030
  ---
3712
4031
 
3713
- ${MOTION_DESIGN_GUIDELINES}
4032
+ ## Error Recovery
4033
+
4034
+ Use standalone CLI commands if \`video create\` fails partially.
4035
+
4036
+ ### If TTS Fails for One Scene
4037
+
4038
+ \`\`\`bash
4039
+ # Retry single scene with standalone tts command
4040
+ ${cmd2} tts "Your script text here" --voice Kore --output ./public/audio/scene-name.wav
4041
+ \`\`\`
4042
+
4043
+ If still fails: try different voice, or skip scene.
4044
+
4045
+ ### If Music Generation Fails
4046
+
4047
+ \`\`\`bash
4048
+ # Retry music separately
4049
+ ${cmd2} music generate "upbeat corporate synth" --duration 30 --output ./public/audio/music.mp3
4050
+ \`\`\`
4051
+
4052
+ If still fails: continue without music, notify user.
4053
+
4054
+ ### If Stock Search Returns Nothing
4055
+
4056
+ \`\`\`bash
4057
+ # Try standalone image search with simpler query
4058
+ ${cmd2} image search "dashboard" --count 10
4059
+
4060
+ # Or try video search
4061
+ ${cmd2} video search "tech animation" --license free
4062
+ \`\`\`
4063
+
4064
+ Fallback: use solid color background with text overlay.
4065
+
4066
+ ### If Component Not Found
4067
+ 1. DO NOT guess or hallucinate path
4068
+ 2. Fall back to stock media for that scene
4069
+ 3. Add to manifest: \`"componentPath": null, "fallbackReason": "not found"\`
3714
4070
 
3715
4071
  ---
3716
4072
 
3717
- ${SVG_ANIMATION_GUIDELINES}
4073
+ ## Troubleshooting
4074
+
4075
+ ### CLI Command Fails
4076
+
4077
+ **Error:** \`command not found: ${cmd2}\`
4078
+ **Fix:** Install CLI globally: \`npm install -g @anthropic/${name}\` or use npx: \`npx ${cmd2} video create\`
4079
+
4080
+ **Error:** \`Authentication required\`
4081
+ **Fix:** Run \`${cmd2} login\` to authenticate with API key
4082
+
4083
+ ### Audio Generation Issues
4084
+
4085
+ **Error:** \`Voice not found\`
4086
+ **Fix:** Use one of: Kore, Puck, Rachel, alloy (case-sensitive)
4087
+
4088
+ **Error:** \`Music duration too short\`
4089
+ **Cause:** ElevenLabs requires minimum 3 seconds
4090
+ **Fix:** Ensure total script duration is at least 5 seconds
4091
+
4092
+ ### Stock Search Returns Nothing
4093
+
4094
+ **Cause:** Query too specific or API limit reached
4095
+ **Fix:**
4096
+ 1. Simplify query: "dashboard" instead of "modern SaaS dashboard with dark theme"
4097
+ 2. Try alternative terms: "interface" instead of "UI"
4098
+ 3. Check API quota: \`${cmd2} account\`
4099
+
4100
+ ### Component Path Not Found
4101
+
4102
+ **Cause:** Guessed path without verification
4103
+ **Fix:**
4104
+ 1. ALWAYS run \`ls\` or \`find\` before referencing
4105
+ 2. Use stock media as fallback
4106
+ 3. Add to manifest: \`"componentPath": null, "fallback": "stock"\`
4107
+
4108
+ ### Manifest JSON Invalid
4109
+
4110
+ **Cause:** Syntax error in manual edits
4111
+ **Fix:** Validate JSON before saving: \`cat video-manifest.json | jq .\`
3718
4112
 
3719
4113
  ---
3720
4114
 
3721
- ${ASSET_USAGE_GUIDELINES}
4115
+ ## Quick Reference
4116
+
4117
+ ### Minimum Viable Command
4118
+ \`\`\`bash
4119
+ cat <<'EOF' | ${cmd2} video create --output ./public
4120
+ {"scenes":[{"name":"Main","script":"Your script here."}],"musicPrompt":"ambient background"}
4121
+ EOF
4122
+ \`\`\`
4123
+
4124
+ ### Full Command with All Options
4125
+ \`\`\`bash
4126
+ cat <<'EOF' | ${cmd2} video create --output ./public
4127
+ {
4128
+ "scenes": [
4129
+ {"name": "Hook", "script": "...", "imageQuery": "..."},
4130
+ {"name": "Demo", "script": "...", "videoQuery": "..."},
4131
+ {"name": "CTA", "script": "...", "imageQuery": "..."}
4132
+ ],
4133
+ "voiceId": "21m00Tcm4TlvDq8ikWAM",
4134
+ "voiceSettings": {"speed": 1.0, "stability": 0.5, "similarity": 0.75, "style": 0.3},
4135
+ "musicPrompt": "genre, mood, instruments"
4136
+ }
4137
+ EOF
4138
+ \`\`\`
4139
+
4140
+ ### Voice Settings Presets
4141
+ | Preset | speed | stability | similarity | style | Best For |
4142
+ |--------|-------|-----------|------------|-------|----------|
4143
+ | Professional | 1.0 | 0.6 | 0.8 | 0.2 | Corporate, B2B |
4144
+ | Energetic | 1.1 | 0.4 | 0.7 | 0.5 | Startups, Apps |
4145
+ | Calm | 0.9 | 0.7 | 0.8 | 0.2 | Tutorial, Explainer |
4146
+ | Dramatic | 0.85 | 0.3 | 0.6 | 0.7 | Announcements |
3722
4147
 
3723
4148
  ---
4149
+
4150
+ ## References
4151
+
4152
+ For advanced techniques, consult these files after Phase 1:
4153
+ - \`rules/video/motion-design.md\` - Animation principles
4154
+ - \`rules/video/thumbnails.md\` - CTR optimization
4155
+ - \`rules/video/svg-animations.md\` - Write-on effects
4156
+ - \`rules/video/asset-usage.md\` - Creative asset usage
3724
4157
  `;
3725
4158
  }
3726
4159
 
@@ -3766,8 +4199,12 @@ ${cmd2} create --file product-brief.md
3766
4199
 
3767
4200
  ### From URL
3768
4201
 
4202
+ Supports websites, YouTube videos, and Twitter/X posts:
4203
+
3769
4204
  \`\`\`bash
3770
4205
  ${cmd2} create --url https://company.com/product
4206
+ ${cmd2} create --url https://youtube.com/watch?v=abc123
4207
+ ${cmd2} create --url https://x.com/user/status/123456
3771
4208
  \`\`\`
3772
4209
 
3773
4210
  ### From Piped Content
@@ -4784,6 +5221,7 @@ var createCommand3 = new Command19("create").description("Create video assets (v
4784
5221
  }
4785
5222
  }
4786
5223
  const voice = scenesInput?.voice || options.voice;
5224
+ const voiceId = scenesInput?.voiceId;
4787
5225
  const musicPrompt = scenesInput?.musicPrompt || options.musicPrompt || "uplifting background music, positive energy";
4788
5226
  const audioDir = join2(options.output, "audio");
4789
5227
  const imagesDir = join2(options.output, "images");
@@ -4812,6 +5250,8 @@ var createCommand3 = new Command19("create").description("Create video assets (v
4812
5250
  texts: ttsRequests,
4813
5251
  options: {
4814
5252
  voice,
5253
+ voiceId,
5254
+ // ElevenLabs voice ID takes priority on backend
4815
5255
  voiceSettings: scenesInput.voiceSettings
4816
5256
  }
4817
5257
  });
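For reference, a sketch of the stdin payload fields that `video create` reads around this hunk; per the comment above, the ElevenLabs `voiceId` takes priority over the named `voice` on the backend. Values are illustrative except the voice ID, which appears elsewhere in this diff:

```js
// Fields mirror the scenesInput usage in the command above; values are examples only.
const scenesInput = {
  scenes: [{ name: "Hook", script: "Your hook script here." }],
  voice: "Rachel",                      // named voice, used when no voiceId is given
  voiceId: "21m00Tcm4TlvDq8ikWAM",      // ElevenLabs voice ID, preferred by the backend
  voiceSettings: { speed: 1.0, stability: 0.5, similarity: 0.75, style: 0.3 },
  musicPrompt: "uplifting background music, positive energy"
};
```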
@@ -5426,9 +5866,65 @@ var thumbnailCommand = new Command19("thumbnail").description("Embed a thumbnail
5426
5866
  });
5427
5867
  var videoCommand = new Command19("video").description("Video asset generation commands").addCommand(initCommand).addCommand(createCommand3).addCommand(searchCommand2).addCommand(thumbnailCommand);
5428
5868
 
5869
+ // src/commands/scrape.ts
5870
+ import { Command as Command20 } from "commander";
5871
+ import ora13 from "ora";
5872
+ import { writeFile as writeFile6 } from "fs/promises";
5873
+ var scrapeCommand = new Command20("scrape").description("Extract content from a URL").argument("<url>", "URL to scrape").option("-o, --output <path>", "Save content to file").option("-f, --format <format>", "Output format: human, json, quiet", "human").action(async (url, options) => {
5874
+ const format = options.format;
5875
+ const spinner = format === "human" ? ora13("Scraping URL...").start() : null;
5876
+ try {
5877
+ const result = await scrapeUrl(url);
5878
+ spinner?.stop();
5879
+ if (!result.success) {
5880
+ error(result.error || "Failed to scrape URL");
5881
+ process.exit(EXIT_CODES.GENERAL_ERROR);
5882
+ }
5883
+ const data = result.data;
5884
+ if (options.output) {
5885
+ await writeFile6(options.output, data.content, "utf-8");
5886
+ if (format === "human") {
5887
+ success(`Content saved to: ${options.output}`);
5888
+ }
5889
+ }
5890
+ if (format === "json") {
5891
+ printJson(data);
5892
+ return;
5893
+ }
5894
+ if (format === "quiet") {
5895
+ console.log(data.content);
5896
+ return;
5897
+ }
5898
+ if (data.title) {
5899
+ console.log();
5900
+ console.log(`Title: ${data.title}`);
5901
+ }
5902
+ if (data.metadata?.description) {
5903
+ console.log(`Description: ${data.metadata.description}`);
5904
+ }
5905
+ console.log(`URL: ${data.url}`);
5906
+ console.log(`Tokens: ~${data.metadata?.tokenUsage?.toLocaleString() || "unknown"}`);
5907
+ if (data.warning) {
5908
+ warn(data.warning);
5909
+ }
5910
+ if (!options.output) {
5911
+ console.log();
5912
+ console.log("--- Content ---");
5913
+ console.log(data.content);
5914
+ }
5915
+ if (data.cost && data.cost > 0) {
5916
+ info(`Cost: $${data.cost.toFixed(6)}`);
5917
+ }
5918
+ } catch (err) {
5919
+ spinner?.stop();
5920
+ error(err instanceof Error ? err.message : "Unknown error");
5921
+ process.exit(EXIT_CODES.GENERAL_ERROR);
5922
+ }
5923
+ });
5924
+
5429
5925
  // src/index.ts
5430
- var VERSION = "0.1.13";
5431
- var program = new Command20();
5926
+ var VERSION = "0.1.14";
5927
+ var program = new Command21();
5432
5928
  var cmdName = brand.commands[0];
5433
5929
  program.name(cmdName).description(brand.description).version(VERSION, "-v, --version", "Show version number").option("--debug", "Enable debug logging").option("--no-color", "Disable colored output").configureOutput({
5434
5930
  outputError: (str, write) => {
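Since the new `scrape` command's `--format json` mode prints the scrape payload via `printJson`, its output can be consumed programmatically. A minimal sketch; the binary name comes from `brand.commands[0]` and is assumed here to be `mindframes`:

```js
// Sketch: run the CLI and parse its JSON output. Field names mirror the
// scrape command above (title, url, content, metadata, cost).
import { execFileSync } from "node:child_process";

const raw = execFileSync("mindframes", ["scrape", "https://example.com", "--format", "json"], {
  encoding: "utf-8"
});
const page = JSON.parse(raw);
console.log(page.title, page.url);
console.log(`~${page.metadata?.tokenUsage ?? "unknown"} tokens`);
```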
@@ -5453,6 +5949,7 @@ program.addCommand(musicCommand);
5453
5949
  program.addCommand(mixAudioCommand);
5454
5950
  program.addCommand(imageCommand);
5455
5951
  program.addCommand(videoCommand);
5952
+ program.addCommand(scrapeCommand);
5456
5953
  var deriveCommand = buildDeriveCommand();
5457
5954
  if (deriveCommand.commands.length > 0) {
5458
5955
  program.addCommand(deriveCommand);