vargai 0.3.1 → 0.4.0-alpha.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (73)
  1. package/README.md +1 -38
  2. package/biome.json +6 -1
  3. package/docs/index.html +1130 -0
  4. package/docs/prompting.md +326 -0
  5. package/docs/react.md +834 -0
  6. package/package.json +11 -6
  7. package/src/ai-sdk/index.ts +2 -21
  8. package/src/cli/commands/index.ts +1 -4
  9. package/src/cli/commands/render.tsx +71 -0
  10. package/src/cli/index.ts +2 -0
  11. package/src/react/cli.ts +52 -0
  12. package/src/react/elements.ts +146 -0
  13. package/src/react/examples/branching.tsx +66 -0
  14. package/src/react/examples/captions-demo.tsx +37 -0
  15. package/src/react/examples/character-video.tsx +84 -0
  16. package/src/react/examples/grid.tsx +53 -0
  17. package/src/react/examples/layouts-demo.tsx +57 -0
  18. package/src/react/examples/madi.tsx +60 -0
  19. package/src/react/examples/music-test.tsx +35 -0
  20. package/src/react/examples/onlyfans-1m/workflow.tsx +88 -0
  21. package/src/react/examples/orange-portrait.tsx +41 -0
  22. package/src/react/examples/split-element-demo.tsx +60 -0
  23. package/src/react/examples/split-layout-demo.tsx +60 -0
  24. package/src/react/examples/split.tsx +41 -0
  25. package/src/react/examples/video-grid.tsx +46 -0
  26. package/src/react/index.ts +43 -0
  27. package/src/react/layouts/grid.tsx +28 -0
  28. package/src/react/layouts/index.ts +2 -0
  29. package/src/react/layouts/split.tsx +20 -0
  30. package/src/react/react.test.ts +309 -0
  31. package/src/react/render.ts +21 -0
  32. package/src/react/renderers/animate.ts +59 -0
  33. package/src/react/renderers/captions.ts +297 -0
  34. package/src/react/renderers/clip.ts +248 -0
  35. package/src/react/renderers/context.ts +17 -0
  36. package/src/react/renderers/image.ts +109 -0
  37. package/src/react/renderers/index.ts +22 -0
  38. package/src/react/renderers/music.ts +60 -0
  39. package/src/react/renderers/packshot.ts +84 -0
  40. package/src/react/renderers/progress.ts +173 -0
  41. package/src/react/renderers/render.ts +243 -0
  42. package/src/react/renderers/slider.ts +69 -0
  43. package/src/react/renderers/speech.ts +53 -0
  44. package/src/react/renderers/split.ts +91 -0
  45. package/src/react/renderers/subtitle.ts +16 -0
  46. package/src/react/renderers/swipe.ts +75 -0
  47. package/src/react/renderers/title.ts +17 -0
  48. package/src/react/renderers/utils.ts +124 -0
  49. package/src/react/renderers/video.ts +127 -0
  50. package/src/react/runtime/jsx-dev-runtime.ts +43 -0
  51. package/src/react/runtime/jsx-runtime.ts +35 -0
  52. package/src/react/types.ts +232 -0
  53. package/src/studio/index.ts +26 -0
  54. package/src/studio/scanner.ts +102 -0
  55. package/src/studio/server.ts +554 -0
  56. package/src/studio/stages.ts +251 -0
  57. package/src/studio/step-renderer.ts +279 -0
  58. package/src/studio/types.ts +60 -0
  59. package/src/studio/ui/cache.html +303 -0
  60. package/src/studio/ui/index.html +1820 -0
  61. package/tsconfig.cli.json +8 -0
  62. package/tsconfig.json +3 -1
  63. package/bun.lock +0 -1255
  64. package/docs/plan.md +0 -66
  65. package/docs/todo.md +0 -14
  66. package/src/ai-sdk/middleware/index.ts +0 -25
  67. package/src/ai-sdk/middleware/placeholder.ts +0 -111
  68. package/src/ai-sdk/middleware/wrap-image-model.ts +0 -86
  69. package/src/ai-sdk/middleware/wrap-music-model.ts +0 -108
  70. package/src/ai-sdk/middleware/wrap-video-model.ts +0 -115
  71. /package/docs/{varg-sdk.md → sdk.md} +0 -0
  72. /package/src/ai-sdk/providers/{elevenlabs.ts → elevenlabs-provider.ts} +0 -0
  73. /package/src/ai-sdk/providers/{fal.ts → fal-provider.ts} +0 -0
package/package.json CHANGED
@@ -10,8 +10,9 @@
10
10
  "lint": "biome check .",
11
11
  "format": "biome format --write .",
12
12
  "type-check": "tsc --noEmit",
13
- "prepare": "husky install",
14
- "size": "size-limit"
13
+ "prepare": "husky || true",
14
+ "size": "size-limit",
15
+ "studio": "bun run src/studio/index.ts"
15
16
  },
16
17
  "lint-staged": {
17
18
  "*.{js,ts,tsx}": [
@@ -32,11 +33,12 @@
32
33
  "lint-staged": "^16.2.7"
33
34
  },
34
35
  "peerDependencies": {
35
- "typescript": "^5",
36
- "ai": "^6.0.0"
36
+ "typescript": "^5"
37
37
  },
38
38
  "dependencies": {
39
39
  "@ai-sdk/fal": "^1.0.23",
40
+ "@ai-sdk/fireworks": "^2.0.16",
41
+ "@ai-sdk/groq": "^3.0.12",
40
42
  "@ai-sdk/openai": "^3.0.9",
41
43
  "@ai-sdk/provider": "^3.0.2",
42
44
  "@ai-sdk/replicate": "^2.0.5",
@@ -60,11 +62,14 @@
60
62
  "replicate": "^1.4.0",
61
63
  "zod": "^4.2.1"
62
64
  },
63
- "version": "0.3.1",
65
+ "version": "0.4.0-alpha.1",
64
66
  "exports": {
65
67
  ".": "./src/index.ts",
68
+ "./ai": "./src/ai-sdk/index.ts",
66
69
  "./core": "./src/core/index.ts",
67
70
  "./providers": "./src/providers/index.ts",
68
- "./definitions": "./src/definitions/index.ts"
71
+ "./definitions": "./src/definitions/index.ts",
72
+ "./react": "./src/react/index.ts",
73
+ "./studio": "./src/studio/index.ts"
69
74
  }
70
75
  }
@@ -25,25 +25,6 @@ export {
25
25
  type GenerateVideoResult,
26
26
  generateVideo,
27
27
  } from "./generate-video";
28
- export {
29
- generatePlaceholder,
30
- type ImagePlaceholderFallbackOptions,
31
- imagePlaceholderFallbackMiddleware,
32
- type MusicModelMiddleware,
33
- type MusicPlaceholderFallbackOptions,
34
- musicPlaceholderFallbackMiddleware,
35
- type PlaceholderFallbackOptions,
36
- type PlaceholderOptions,
37
- type PlaceholderResult,
38
- placeholderFallbackMiddleware,
39
- type RenderMode,
40
- type VideoModelMiddleware,
41
- withImagePlaceholderFallback,
42
- withMusicPlaceholderFallback,
43
- withPlaceholderFallback,
44
- wrapMusicModel,
45
- wrapVideoModel,
46
- } from "./middleware";
47
28
  export type {
48
29
  MusicModelV3,
49
30
  MusicModelV3CallOptions,
@@ -61,8 +42,8 @@ export {
61
42
  type ElevenLabsProvider,
62
43
  elevenlabs,
63
44
  VOICES,
64
- } from "./providers/elevenlabs";
65
- export { createFal, type FalProvider, fal } from "./providers/fal";
45
+ } from "./providers/elevenlabs-provider";
46
+ export { createFal, type FalProvider, fal } from "./providers/fal-provider";
66
47
  export {
67
48
  createHiggsfield,
68
49
  type HiggsfieldImageModelSettings,
@@ -1,9 +1,6 @@
1
- /**
2
- * CLI commands exports
3
- */
4
-
5
1
  export { findCmd, showFindHelp } from "./find.tsx";
6
2
  export { helpCmd, showHelp } from "./help.tsx";
7
3
  export { listCmd, showListHelp } from "./list.tsx";
4
+ export { renderCmd } from "./render.tsx";
8
5
  export { runCmd, showRunHelp, showTargetHelp } from "./run.tsx";
9
6
  export { showWhichHelp, whichCmd } from "./which.tsx";
@@ -0,0 +1,71 @@
1
+ import { defineCommand } from "citty";
2
+ import { render } from "../../react/render";
3
+ import type { VargElement } from "../../react/types";
4
+
5
+ export const renderCmd = defineCommand({
6
+ meta: {
7
+ name: "render",
8
+ description: "render a react component to video",
9
+ },
10
+ args: {
11
+ file: {
12
+ type: "positional",
13
+ description: "component file (.tsx)",
14
+ required: true,
15
+ },
16
+ output: {
17
+ type: "string",
18
+ alias: "o",
19
+ description: "output path",
20
+ },
21
+ cache: {
22
+ type: "string",
23
+ alias: "c",
24
+ description: "cache directory",
25
+ default: ".cache/ai",
26
+ },
27
+ quiet: {
28
+ type: "boolean",
29
+ alias: "q",
30
+ description: "minimal output",
31
+ default: false,
32
+ },
33
+ },
34
+ async run({ args }) {
35
+ const file = args.file as string;
36
+
37
+ if (!file) {
38
+ console.error("usage: varg render <component.tsx> [-o output.mp4]");
39
+ process.exit(1);
40
+ }
41
+
42
+ const resolvedPath = Bun.resolveSync(file, process.cwd());
43
+ const mod = await import(resolvedPath);
44
+ const component: VargElement = mod.default;
45
+
46
+ if (!component || component.type !== "render") {
47
+ console.error("error: default export must be a <Render> element");
48
+ process.exit(1);
49
+ }
50
+
51
+ const outputPath =
52
+ args.output ??
53
+ `output/${file
54
+ .replace(/\.tsx?$/, "")
55
+ .split("/")
56
+ .pop()}.mp4`;
57
+
58
+ if (!args.quiet) {
59
+ console.log(`rendering ${file} → ${outputPath}`);
60
+ }
61
+
62
+ const buffer = await render(component, {
63
+ output: outputPath,
64
+ cache: args.cache,
65
+ });
66
+
67
+ if (!args.quiet) {
68
+ console.log(`done! ${buffer.byteLength} bytes → ${outputPath}`);
69
+ }
70
+ },
71
+ });
package/src/cli/index.ts CHANGED
@@ -15,6 +15,7 @@ import {
15
15
  findCmd,
16
16
  helpCmd,
17
17
  listCmd,
18
+ renderCmd,
18
19
  runCmd,
19
20
  showFindHelp,
20
21
  showHelp,
@@ -99,6 +100,7 @@ const main = defineCommand({
99
100
  },
100
101
  subCommands: {
101
102
  run: runCmd,
103
+ render: renderCmd,
102
104
  list: listCmd,
103
105
  ls: listCmd,
104
106
  find: findCmd,
@@ -0,0 +1,52 @@
1
+ #!/usr/bin/env bun
2
+
3
+ import { parseArgs } from "node:util";
4
+ import { render } from "./render";
5
+ import type { VargElement } from "./types";
6
+
7
+ const { values, positionals } = parseArgs({
8
+ args: Bun.argv.slice(2),
9
+ options: {
10
+ output: { type: "string", short: "o" },
11
+ cache: { type: "string", short: "c", default: ".cache/ai" },
12
+ quiet: { type: "boolean", short: "q", default: false },
13
+ },
14
+ allowPositionals: true,
15
+ });
16
+
17
+ const [file] = positionals;
18
+
19
+ if (!file) {
20
+ console.error("usage: bun react/cli.ts <component.tsx> [-o output.mp4]");
21
+ process.exit(1);
22
+ }
23
+
24
+ const resolvedPath = Bun.resolveSync(file, process.cwd());
25
+ const mod = await import(resolvedPath);
26
+ const component: VargElement = mod.default;
27
+
28
+ if (!component || component.type !== "render") {
29
+ console.error("error: default export must be a <Render> element");
30
+ process.exit(1);
31
+ }
32
+
33
+ const outputPath =
34
+ values.output ??
35
+ `output/${file
36
+ .replace(/\.tsx?$/, "")
37
+ .split("/")
38
+ .pop()}.mp4`;
39
+
40
+ if (!values.quiet) {
41
+ console.log(`rendering ${file} → ${outputPath}`);
42
+ }
43
+
44
+ const buffer = await render(component, {
45
+ output: outputPath,
46
+ cache: values.cache,
47
+ quiet: values.quiet,
48
+ });
49
+
50
+ if (!values.quiet) {
51
+ console.log(`done! ${buffer.byteLength} bytes → ${outputPath}`);
52
+ }
@@ -0,0 +1,146 @@
1
+ import type {
2
+ AnimateProps,
3
+ CaptionsProps,
4
+ ClipProps,
5
+ ImageProps,
6
+ MusicProps,
7
+ OverlayProps,
8
+ PackshotProps,
9
+ RenderProps,
10
+ SliderProps,
11
+ SpeechProps,
12
+ SplitProps,
13
+ SubtitleProps,
14
+ SwipeProps,
15
+ TalkingHeadProps,
16
+ TitleProps,
17
+ VargElement,
18
+ VargNode,
19
+ VideoProps,
20
+ } from "./types";
21
+
22
+ function normalizeChildren(children: unknown): VargNode[] {
23
+ if (children === null || children === undefined) return [];
24
+ if (Array.isArray(children))
25
+ return children.flat().flatMap(normalizeChildren);
26
+ return [children as VargNode];
27
+ }
28
+
29
+ function createElement<T extends VargElement["type"]>(
30
+ type: T,
31
+ props: Record<string, unknown>,
32
+ children: unknown,
33
+ ): VargElement<T> {
34
+ const { children: _, ...restProps } = props;
35
+ return {
36
+ type,
37
+ props: restProps,
38
+ children: normalizeChildren(children ?? props.children),
39
+ };
40
+ }
41
+
42
+ export function Render(props: RenderProps): VargElement<"render"> {
43
+ return createElement(
44
+ "render",
45
+ props as Record<string, unknown>,
46
+ props.children,
47
+ );
48
+ }
49
+
50
+ export function Clip(props: ClipProps): VargElement<"clip"> {
51
+ return createElement(
52
+ "clip",
53
+ props as Record<string, unknown>,
54
+ props.children,
55
+ );
56
+ }
57
+
58
+ export function Overlay(props: OverlayProps): VargElement<"overlay"> {
59
+ return createElement(
60
+ "overlay",
61
+ props as Record<string, unknown>,
62
+ props.children,
63
+ );
64
+ }
65
+
66
+ export function Image(props: ImageProps): VargElement<"image"> {
67
+ return createElement("image", props as Record<string, unknown>, undefined);
68
+ }
69
+
70
+ export function Video(props: VideoProps): VargElement<"video"> {
71
+ return createElement("video", props as Record<string, unknown>, undefined);
72
+ }
73
+
74
+ export function Animate(props: AnimateProps): VargElement<"animate"> {
75
+ return createElement("animate", props as Record<string, unknown>, undefined);
76
+ }
77
+
78
+ export function Speech(props: SpeechProps): VargElement<"speech"> {
79
+ return createElement(
80
+ "speech",
81
+ props as Record<string, unknown>,
82
+ props.children,
83
+ );
84
+ }
85
+
86
+ export function TalkingHead(
87
+ props: TalkingHeadProps,
88
+ ): VargElement<"talking-head"> {
89
+ return createElement(
90
+ "talking-head",
91
+ props as Record<string, unknown>,
92
+ props.children,
93
+ );
94
+ }
95
+
96
+ export function Title(props: TitleProps): VargElement<"title"> {
97
+ return createElement(
98
+ "title",
99
+ props as Record<string, unknown>,
100
+ props.children,
101
+ );
102
+ }
103
+
104
+ export function Subtitle(props: SubtitleProps): VargElement<"subtitle"> {
105
+ return createElement(
106
+ "subtitle",
107
+ props as Record<string, unknown>,
108
+ props.children,
109
+ );
110
+ }
111
+
112
+ export function Music(props: MusicProps): VargElement<"music"> {
113
+ return createElement("music", props as Record<string, unknown>, undefined);
114
+ }
115
+
116
+ export function Captions(props: CaptionsProps): VargElement<"captions"> {
117
+ return createElement("captions", props as Record<string, unknown>, undefined);
118
+ }
119
+
120
+ export function Split(props: SplitProps): VargElement<"split"> {
121
+ return createElement(
122
+ "split",
123
+ props as Record<string, unknown>,
124
+ props.children,
125
+ );
126
+ }
127
+
128
+ export function Slider(props: SliderProps): VargElement<"slider"> {
129
+ return createElement(
130
+ "slider",
131
+ props as Record<string, unknown>,
132
+ props.children,
133
+ );
134
+ }
135
+
136
+ export function Swipe(props: SwipeProps): VargElement<"swipe"> {
137
+ return createElement(
138
+ "swipe",
139
+ props as Record<string, unknown>,
140
+ props.children,
141
+ );
142
+ }
143
+
144
+ export function Packshot(props: PackshotProps): VargElement<"packshot"> {
145
+ return createElement("packshot", props as Record<string, unknown>, undefined);
146
+ }
@@ -0,0 +1,66 @@
1
+ import { elevenlabs } from "../../ai-sdk/providers/elevenlabs-provider";
2
+ import { fal } from "../../ai-sdk/providers/fal-provider";
3
+ import { Animate, Clip, Image, Render, Speech, Title } from "..";
4
+
5
+ // Non-linear tree: multiple clips with independent branches
6
+ // Clip 1: TalkingHead (Image -> Animate + Speech)
7
+ // Clip 2: Split comparison (2 independent Images)
8
+ // Clip 3: Product shot with music
9
+
10
+ const character = Image({
11
+ prompt:
12
+ "friendly tech reviewer, young man with glasses, studio lighting, professional headshot",
13
+ model: fal.imageModel("flux-schnell"),
14
+ });
15
+
16
+ const _productBefore = Image({
17
+ prompt:
18
+ "old smartphone, cracked screen, slow, outdated design, on white background",
19
+ model: fal.imageModel("flux-schnell"),
20
+ });
21
+
22
+ const _productAfter = Image({
23
+ prompt:
24
+ "sleek new smartphone, edge-to-edge display, premium design, on white background",
25
+ model: fal.imageModel("flux-schnell"),
26
+ });
27
+
28
+ const packshot = Image({
29
+ prompt:
30
+ "smartphone floating with gradient background, product photography, premium feel",
31
+ model: fal.imageModel("flux-schnell"),
32
+ });
33
+
34
+ export default (
35
+ <Render width={1080} height={1920}>
36
+ {/* Clip 1: Talking head intro */}
37
+ <Clip duration={5}>
38
+ <Animate
39
+ image={character}
40
+ model={fal.videoModel("wan-2.5")}
41
+ motion="talking naturally, slight head movements, friendly expression"
42
+ />
43
+ <Speech voice="adam" model={elevenlabs.speechModel("turbo")}>
44
+ Hey everyone! Today we're looking at the biggest smartphone upgrade of
45
+ the year.
46
+ </Speech>
47
+ </Clip>
48
+
49
+ {/* Clip 2: Before/after comparison - branches into 2 images */}
50
+ <Clip duration={4} transition={{ name: "fade", duration: 0.5 }}>
51
+ <Image
52
+ prompt="split screen comparison layout"
53
+ model={fal.imageModel("flux-schnell")}
54
+ />
55
+ <Title position="top">Before vs After</Title>
56
+ </Clip>
57
+
58
+ {/* Clip 3: Product packshot */}
59
+ <Clip duration={3} transition={{ name: "fade", duration: 0.5 }}>
60
+ {packshot}
61
+ <Title position="bottom" color="#ffffff">
62
+ Available Now
63
+ </Title>
64
+ </Clip>
65
+ </Render>
66
+ );
@@ -0,0 +1,37 @@
1
+ import { elevenlabs } from "../../ai-sdk/providers/elevenlabs-provider";
2
+ import { Captions, Clip, Image, Render, render, Speech } from "..";
3
+
4
+ async function main() {
5
+ const speech = Speech({
6
+ model: elevenlabs.speechModel("eleven_multilingual_v2"),
7
+ voice: "adam",
8
+ children:
9
+ "Hello world! This is a test of the captions system with word level timestamps.",
10
+ });
11
+
12
+ const video = (
13
+ <Render width={1080} height={1920}>
14
+ <Clip duration={5}>
15
+ <Image src="media/cyberpunk-street.png" resize="contain" />
16
+ </Clip>
17
+ <Clip duration={5}>
18
+ <Image src="media/cyberpunk-street.png" resize="cover" />
19
+ </Clip>
20
+ <Clip duration={5}>
21
+ <Image src="media/cyberpunk-street.png" />
22
+ </Clip>
23
+ <Captions src={speech} style="tiktok" />
24
+ </Render>
25
+ );
26
+
27
+ console.log("rendering captions demo with speech transcription...\n");
28
+
29
+ await render(video, {
30
+ output: "output/captions-demo.mp4",
31
+ cache: ".cache/ai",
32
+ });
33
+
34
+ console.log("\ndone! check output/captions-demo.mp4");
35
+ }
36
+
37
+ main().catch(console.error);
@@ -0,0 +1,84 @@
1
+ import { elevenlabs } from "../../ai-sdk/providers/elevenlabs-provider";
2
+ import { fal } from "../../ai-sdk/providers/fal-provider";
3
+ import { Animate, Clip, Image, Music, Render, render } from "..";
4
+
5
+ const MADI_REF =
6
+ "https://s3.varg.ai/fellowers/madi/character_shots/madi_shot_03_closeup.png";
7
+
8
+ // TikTok timeline structure:
9
+ // 0-2s: HOOK - frontal close-up, grab attention
10
+ // 2-4s: 45° medium shot + expression change
11
+ // 4-6s: low angle or extreme close-up
12
+ // 6-8s: high angle + new emotion
13
+ const SCENES = [
14
+ {
15
+ // 0-2s: HOOK - frontal extreme close-up, surprised/curious expression
16
+ prompt:
17
+ "extreme close-up face shot, surprised expression with wide eyes, looking directly at camera, holding peach near lips",
18
+ motion:
19
+ "eyes widen in surprise, eyebrows raise slightly, subtle head tilt forward. Static shot, no camera movement.",
20
+ },
21
+ {
22
+ // 2-4s: 45° medium shot, playful grimace while eating
23
+ prompt:
24
+ "45-degree angle medium shot showing face and hands, biting into peach with exaggerated enjoyment, juice on lips, playful expression",
25
+ motion:
26
+ "turns head 45 degrees, bites into peach, juice drips down chin, hands move expressively. Slow push-in camera movement.",
27
+ },
28
+ {
29
+ // 4-6s: low angle (up shot), confident/powerful vibe
30
+ prompt:
31
+ "low angle shot from below, looking down at camera with confident smirk, holding peach triumphantly, dramatic perspective",
32
+ motion:
33
+ "looks down at camera with growing smile, raises peach slightly, confident head tilt. Static camera, slight lens distortion.",
34
+ },
35
+ {
36
+ // 6-8s: high angle (down shot), playful vulnerability + CTA energy
37
+ prompt:
38
+ "high angle shot from above, looking up at camera with playful smile, arms spread wide, peach in one hand, endearing expression",
39
+ motion:
40
+ "looks up at camera, expression shifts from neutral to excited smile, subtle wink, slight forward lean. Gentle camera tilt down.",
41
+ },
42
+ ];
43
+
44
+ async function main() {
45
+ console.log("creating madi peach video (animated)...\n");
46
+
47
+ const video = (
48
+ <Render width={1080} height={1920}>
49
+ <Music
50
+ prompt="upbeat electronic pop, energetic female vocal chops, modern tiktok vibe, catchy melody"
51
+ model={elevenlabs.musicModel()}
52
+ duration={8}
53
+ />
54
+
55
+ {SCENES.map((scene) => (
56
+ <Clip key={scene.prompt} duration={2}>
57
+ <Animate
58
+ image={Image({
59
+ prompt: { text: scene.prompt, images: [MADI_REF] },
60
+ model: fal.imageModel("nano-banana-pro/edit"),
61
+ aspectRatio: "9:16",
62
+ resize: "cover",
63
+ })}
64
+ motion={scene.motion}
65
+ model={fal.videoModel("wan-2.5")}
66
+ duration={5}
67
+ />
68
+ </Clip>
69
+ ))}
70
+ </Render>
71
+ );
72
+
73
+ console.log("rendering", SCENES.length, "animated clips in parallel...");
74
+
75
+ const buffer = await render(video, {
76
+ output: "output/react-madi.mp4",
77
+ cache: ".cache/ai",
78
+ });
79
+
80
+ console.log(`\ndone! ${buffer.byteLength} bytes`);
81
+ console.log("output: output/react-madi.mp4");
82
+ }
83
+
84
+ main().catch(console.error);
@@ -0,0 +1,53 @@
1
+ import { fal } from "../../ai-sdk/providers/fal-provider";
2
+ import { Clip, Grid, Image, Render, render, Title } from "..";
3
+
4
+ const CHARACTER_PROMPTS = [
5
+ { name: "Warrior", prompt: "fierce warrior with sword, armor" },
6
+ { name: "Mage", prompt: "mystical mage with glowing staff, robes" },
7
+ { name: "Rogue", prompt: "stealthy rogue with daggers, hooded" },
8
+ { name: "Healer", prompt: "gentle healer with staff, white robes" },
9
+ { name: "Archer", prompt: "skilled archer with bow, leather armor" },
10
+ { name: "Knight", prompt: "noble knight with shield, heavy armor" },
11
+ { name: "Necro", prompt: "dark necromancer with skull staff" },
12
+ { name: "Paladin", prompt: "holy paladin with hammer, golden armor" },
13
+ { name: "Bard", prompt: "charismatic bard with lute, colorful" },
14
+ { name: "Druid", prompt: "nature druid with wooden staff, leaves" },
15
+ { name: "Monk", prompt: "disciplined monk with wrapped fists" },
16
+ { name: "Assassin", prompt: "deadly assassin with hidden blades" },
17
+ ];
18
+
19
+ async function main() {
20
+ console.log("creating 3x4 character grid...\n");
21
+
22
+ const baseStyle = "fantasy portrait, stylized art, vibrant colors";
23
+
24
+ const images = CHARACTER_PROMPTS.map(({ prompt }) =>
25
+ Image({
26
+ prompt: `${prompt}, ${baseStyle}`,
27
+ model: fal.imageModel("flux-schnell"),
28
+ }),
29
+ );
30
+
31
+ const video = (
32
+ <Render width={1080} height={1440}>
33
+ <Clip duration={5}>
34
+ <Grid columns={3}>{images}</Grid>
35
+ <Title position="bottom" color="#ffffff">
36
+ Fantasy Characters
37
+ </Title>
38
+ </Clip>
39
+ </Render>
40
+ );
41
+
42
+ console.log("video tree:", JSON.stringify(video, null, 2));
43
+
44
+ const buffer = await render(video, {
45
+ output: "output/react-grid.mp4",
46
+ cache: ".cache/ai",
47
+ });
48
+
49
+ console.log(`\ndone! ${buffer.byteLength} bytes`);
50
+ console.log("output: output/react-grid.mp4");
51
+ }
52
+
53
+ main().catch(console.error);
@@ -0,0 +1,57 @@
1
+ import {
2
+ Clip,
3
+ Image,
4
+ Packshot,
5
+ Render,
6
+ render,
7
+ Slider,
8
+ Split,
9
+ Swipe,
10
+ Title,
11
+ } from "..";
12
+
13
+ async function main() {
14
+ const img1 = Image({ src: "media/cyberpunk-street.png" });
15
+ const img2 = Image({ src: "media/fal-coffee-shop.png" });
16
+ const img3 = Image({ src: "media/kirill.png" });
17
+
18
+ const video = (
19
+ <Render width={1280} height={720}>
20
+ <Clip duration={3}>
21
+ <Split direction="horizontal">{[img1, img2]}</Split>
22
+ <Title position="bottom">Split Layout</Title>
23
+ </Clip>
24
+
25
+ <Clip duration={4} transition={{ name: "fade", duration: 0.5 }}>
26
+ <Slider direction="horizontal">{[img1, img2, img3]}</Slider>
27
+ </Clip>
28
+
29
+ <Clip duration={4} transition={{ name: "fade", duration: 0.5 }}>
30
+ <Swipe direction="left" interval={1.5}>
31
+ {[img1, img2, img3]}
32
+ </Swipe>
33
+ </Clip>
34
+
35
+ <Clip duration={3} transition={{ name: "fade", duration: 0.5 }}>
36
+ <Packshot
37
+ background="#1a1a2e"
38
+ logo="media/cyberpunk-street.png"
39
+ logoPosition="center"
40
+ logoSize="50%"
41
+ cta="Subscribe for more!"
42
+ ctaColor="#FFD700"
43
+ />
44
+ </Clip>
45
+ </Render>
46
+ );
47
+
48
+ console.log("rendering layouts demo...\n");
49
+
50
+ await render(video, {
51
+ output: "output/layouts-demo.mp4",
52
+ });
53
+
54
+ console.log("\ndone! check output/layouts-demo.mp4");
55
+ }
56
+
57
+ main().catch(console.error);