vargai 0.4.0-alpha.1 → 0.4.0-alpha10

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -5,9 +5,46 @@ AI video generation from your terminal.
 ## Quick Start
 
 ```bash
-bun install @vargai/sdk
+bun install vargai ai
 ```
 
+### SDK Usage
+
+```typescript
+import { generateImage } from "ai";
+import { File, fal, generateElement, generateVideo, scene } from "vargai";
+
+// generate a character from reference image
+const { element: character } = await generateElement({
+  model: fal.imageModel("nano-banana-pro/edit"),
+  type: "character",
+  prompt: {
+    text: "cartoon character, simple style",
+    images: [await File.fromPath("media/reference.jpg").arrayBuffer()],
+  },
+});
+
+// generate scene with character
+const { image: frame } = await generateImage({
+  model: fal.imageModel("nano-banana-pro"),
+  prompt: scene`${character} walks through a forest`,
+});
+
+// animate the frame
+const { video } = await generateVideo({
+  model: fal.videoModel("wan-2.5"),
+  prompt: {
+    text: `${character.text} walks through a forest`,
+    images: [frame.base64],
+  },
+  duration: 5,
+});
+
+await Bun.write("output/scene.mp4", video.uint8Array);
+```
+
+### CLI Usage
+
 ```bash
 varg run image --prompt "cyberpunk cityscape at night"
 varg run video --prompt "camera flies through clouds" --duration 5
package/package.json CHANGED
@@ -33,7 +33,9 @@
     "lint-staged": "^16.2.7"
   },
   "peerDependencies": {
-    "typescript": "^5"
+    "typescript": "^5",
+    "ai": "^6.0.0",
+    "zod": "^3.25.76 || ^4.1.8"
   },
   "dependencies": {
     "@ai-sdk/fal": "^1.0.23",
@@ -62,7 +64,7 @@
     "replicate": "^1.4.0",
     "zod": "^4.2.1"
   },
-  "version": "0.4.0-alpha.1",
+  "version": "0.4.0-alpha10",
   "exports": {
     ".": "./src/index.ts",
     "./ai": "./src/ai-sdk/index.ts",
@@ -70,6 +72,8 @@
     "./providers": "./src/providers/index.ts",
     "./definitions": "./src/definitions/index.ts",
     "./react": "./src/react/index.ts",
-    "./studio": "./src/studio/index.ts"
+    "./studio": "./src/studio/index.ts",
+    "./jsx-runtime": "./src/react/runtime/jsx-runtime.ts",
+    "./jsx-dev-runtime": "./src/react/runtime/jsx-dev-runtime.ts"
   }
 }
@@ -25,6 +25,25 @@ export {
   type GenerateVideoResult,
   generateVideo,
 } from "./generate-video";
+export {
+  generatePlaceholder,
+  type ImagePlaceholderFallbackOptions,
+  imagePlaceholderFallbackMiddleware,
+  type MusicModelMiddleware,
+  type MusicPlaceholderFallbackOptions,
+  musicPlaceholderFallbackMiddleware,
+  type PlaceholderFallbackOptions,
+  type PlaceholderOptions,
+  type PlaceholderResult,
+  placeholderFallbackMiddleware,
+  type RenderMode,
+  type VideoModelMiddleware,
+  withImagePlaceholderFallback,
+  withMusicPlaceholderFallback,
+  withPlaceholderFallback,
+  wrapMusicModel,
+  wrapVideoModel,
+} from "./middleware";
 export type {
   MusicModelV3,
   MusicModelV3CallOptions,
@@ -42,8 +61,8 @@ export {
   type ElevenLabsProvider,
   elevenlabs,
   VOICES,
-} from "./providers/elevenlabs-provider";
-export { createFal, type FalProvider, fal } from "./providers/fal-provider";
+} from "./providers/elevenlabs";
+export { createFal, type FalProvider, fal } from "./providers/fal";
 export {
   createHiggsfield,
   type HiggsfieldImageModelSettings,
@@ -0,0 +1,25 @@
+export {
+  generatePlaceholder,
+  type PlaceholderOptions,
+  type PlaceholderResult,
+} from "./placeholder";
+export {
+  type ImagePlaceholderFallbackOptions,
+  imagePlaceholderFallbackMiddleware,
+  withImagePlaceholderFallback,
+} from "./wrap-image-model";
+export {
+  type MusicModelMiddleware,
+  type MusicPlaceholderFallbackOptions,
+  musicPlaceholderFallbackMiddleware,
+  withMusicPlaceholderFallback,
+  wrapMusicModel,
+} from "./wrap-music-model";
+export {
+  type PlaceholderFallbackOptions,
+  placeholderFallbackMiddleware,
+  type RenderMode,
+  type VideoModelMiddleware,
+  withPlaceholderFallback,
+  wrapVideoModel,
+} from "./wrap-video-model";
@@ -0,0 +1,111 @@
+import { unlink } from "node:fs/promises";
+import { tmpdir } from "node:os";
+import { join } from "node:path";
+import { $ } from "bun";
+
+export interface PlaceholderOptions {
+  type: "image" | "video" | "audio";
+  prompt: string;
+  duration?: number;
+  width?: number;
+  height?: number;
+}
+
+export interface PlaceholderResult {
+  data: Uint8Array;
+  placeholder: true;
+}
+
+function promptToColor(prompt: string): string {
+  let hash = 0;
+  for (let i = 0; i < prompt.length; i++) {
+    const char = prompt.charCodeAt(i);
+    hash = (hash << 5) - hash + char;
+    hash = hash & hash;
+  }
+
+  const h = Math.abs(hash) % 360;
+  const s = 40 + (Math.abs(hash >> 8) % 30);
+  const l = 35 + (Math.abs(hash >> 16) % 20);
+
+  return `hsl(${h},${s}%,${l}%)`;
+}
+
+function hslToHex(hsl: string): string {
+  const match = hsl.match(/hsl\((\d+),(\d+)%,(\d+)%\)/);
+  if (!match) return "333333";
+
+  const h = Number.parseInt(match[1]!) / 360;
+  const s = Number.parseInt(match[2]!) / 100;
+  const l = Number.parseInt(match[3]!) / 100;
+
+  const hue2rgb = (p: number, q: number, t: number) => {
+    if (t < 0) t += 1;
+    if (t > 1) t -= 1;
+    if (t < 1 / 6) return p + (q - p) * 6 * t;
+    if (t < 1 / 2) return q;
+    if (t < 2 / 3) return p + (q - p) * (2 / 3 - t) * 6;
+    return p;
+  };
+
+  const q = l < 0.5 ? l * (1 + s) : l + s - l * s;
+  const p = 2 * l - q;
+  const r = Math.round(hue2rgb(p, q, h + 1 / 3) * 255);
+  const g = Math.round(hue2rgb(p, q, h) * 255);
+  const b = Math.round(hue2rgb(p, q, h - 1 / 3) * 255);
+
+  return `${r.toString(16).padStart(2, "0")}${g.toString(16).padStart(2, "0")}${b.toString(16).padStart(2, "0")}`;
+}
+
+function truncatePrompt(text: string, maxLen: number): string {
+  const clean = text.replace(/[^a-zA-Z0-9 .,!?-]/g, "");
+  if (clean.length <= maxLen) return clean;
+  return `${clean.slice(0, maxLen - 3)}...`;
+}
+
+export async function generatePlaceholder(
+  options: PlaceholderOptions,
+): Promise<PlaceholderResult> {
+  const { type, prompt, duration = 3, width = 1080, height = 1920 } = options;
+
+  const color = promptToColor(prompt);
+  const hexColor = hslToHex(color);
+  const labelFontSize = Math.floor(Math.min(width, height) / 20);
+  const promptFontSize = Math.floor(Math.min(width, height) / 35);
+  const maxChars = Math.floor((width * 0.7) / (promptFontSize * 0.5));
+  const typeLabel = type.toUpperCase();
+  const promptText = truncatePrompt(prompt, maxChars);
+
+  const ext = type === "audio" ? "mp3" : type === "image" ? "png" : "mp4";
+  const outputPath = join(
+    tmpdir(),
+    `placeholder_${Date.now()}_${Math.random().toString(36).slice(2)}.${ext}`,
+  );
+
+  try {
+    if (type === "audio") {
+      await $`ffmpeg -y -f lavfi -i anullsrc=r=44100:cl=stereo -t ${duration} -c:a libmp3lame ${outputPath}`.quiet();
+    } else if (type === "image") {
+      const colorInput = `color=c=0x${hexColor}:s=${width}x${height}:d=1`;
+      const labelY = `(h/2)-${labelFontSize}`;
+      const promptY = `(h/2)+${Math.floor(labelFontSize * 0.5)}`;
+      const drawLabel = `drawtext=text='${typeLabel}':fontcolor=white:fontsize=${labelFontSize}:x=(w-text_w)/2:y=${labelY}`;
+      const drawPrompt = `drawtext=text='${promptText}':fontcolor=white@0.7:fontsize=${promptFontSize}:x=(w-text_w)/2:y=${promptY}`;
+      await $`ffmpeg -y -f lavfi -i ${colorInput} -vf ${drawLabel},${drawPrompt} -frames:v 1 -update 1 ${outputPath}`.quiet();
+    } else {
+      const colorInput = `color=c=0x${hexColor}:s=${width}x${height}:d=${duration}:r=30`;
+      const labelY = `(h/2)-${labelFontSize}`;
+      const promptY = `(h/2)+${Math.floor(labelFontSize * 0.5)}`;
+      const drawLabel = `drawtext=text='${typeLabel}':fontcolor=white:fontsize=${labelFontSize}:x=(w-text_w)/2:y=${labelY}`;
+      const drawPrompt = `drawtext=text='${promptText}':fontcolor=white@0.7:fontsize=${promptFontSize}:x=(w-text_w)/2:y=${promptY}`;
+      await $`ffmpeg -y -f lavfi -i ${colorInput} -vf ${drawLabel},${drawPrompt} -c:v libx264 -preset ultrafast -pix_fmt yuv420p ${outputPath}`.quiet();
+    }
+
+    const data = await Bun.file(outputPath).bytes();
+    await unlink(outputPath).catch(() => {});
+    return { data: new Uint8Array(data), placeholder: true };
+  } catch (e) {
+    await unlink(outputPath).catch(() => {});
+    throw e;
+  }
+}
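As a point of reference, a minimal sketch of calling the new helper directly. It assumes the Bun runtime with an `ffmpeg` binary on PATH (the implementation shells out to it) and imports via the `vargai/ai` subpath that maps to the ai-sdk index; the output path is illustrative.

```typescript
import { generatePlaceholder } from "vargai/ai";

// deterministic solid-color placeholder: the fill color is hashed from the
// prompt, and the type label plus prompt text are drawn onto the frame
const { data, placeholder } = await generatePlaceholder({
  type: "image",
  prompt: "cyberpunk cityscape at night",
  width: 1080,
  height: 1920,
});

// `placeholder` is always true; `data` holds the raw PNG bytes
await Bun.write("output/placeholder.png", data);
```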
@@ -0,0 +1,86 @@
+import type {
+  ImageModelV3,
+  ImageModelV3CallOptions,
+  ImageModelV3Middleware,
+} from "@ai-sdk/provider";
+import { wrapImageModel } from "ai";
+import { generatePlaceholder } from "./placeholder";
+import type { RenderMode } from "./wrap-video-model";
+
+export interface ImagePlaceholderFallbackOptions {
+  mode: RenderMode;
+  onFallback?: (error: Error, prompt: string) => void;
+}
+
+export function imagePlaceholderFallbackMiddleware(
+  options: ImagePlaceholderFallbackOptions,
+): ImageModelV3Middleware {
+  const { mode, onFallback } = options;
+
+  return {
+    specificationVersion: "v3",
+    wrapGenerate: async ({ doGenerate, params, model }) => {
+      const createPlaceholderResult = async () => {
+        const [width, height] = (params.size?.split("x").map(Number) ?? [
+          1024, 1024,
+        ]) as [number, number];
+        const prompt =
+          typeof params.prompt === "string"
+            ? params.prompt
+            : ((params.prompt as { text?: string } | undefined)?.text ??
+              "placeholder");
+
+        const placeholder = await generatePlaceholder({
+          type: "image",
+          prompt,
+          width,
+          height,
+        });
+
+        return {
+          images: [placeholder.data],
+          warnings: [
+            {
+              type: "other" as const,
+              message: "placeholder: provider skipped or failed",
+            },
+          ],
+          response: {
+            timestamp: new Date(),
+            modelId: model.modelId,
+            headers: undefined,
+          },
+        };
+      };
+
+      if (mode === "preview") {
+        return createPlaceholderResult();
+      }
+
+      try {
+        return await doGenerate();
+      } catch (e) {
+        if (mode === "strict") throw e;
+
+        const error = e instanceof Error ? e : new Error(String(e));
+        const promptText =
+          typeof params.prompt === "string"
+            ? params.prompt
+            : ((params.prompt as { text?: string } | undefined)?.text ??
+              "placeholder");
+        onFallback?.(error, promptText);
+        return createPlaceholderResult();
+      }
+    },
+  };
+}
+
+export function withImagePlaceholderFallback(
+  model: ImageModelV3,
+  options: ImagePlaceholderFallbackOptions,
+): ImageModelV3 {
+  return wrapImageModel({
+    model,
+    middleware: imagePlaceholderFallbackMiddleware(options),
+  });
+}
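A hedged usage sketch for the image fallback wrapper, combining it with `generateImage` from the AI SDK as the README does. The model id follows the README's examples and the import goes through the `vargai/ai` subpath export; how `fal.imageModel` maps onto the v3 image-model spec is assumed here, not confirmed by the diff.

```typescript
import { generateImage } from "ai";
import { fal, withImagePlaceholderFallback } from "vargai/ai";

// "default": call the provider, fall back to a placeholder on error;
// "preview": skip the provider entirely; "strict": rethrow provider errors
const model = withImagePlaceholderFallback(fal.imageModel("nano-banana-pro"), {
  mode: "default",
  onFallback: (error, prompt) =>
    console.warn(`placeholder used for "${prompt}": ${error.message}`),
});

const { image } = await generateImage({
  model,
  prompt: "cyberpunk cityscape at night",
  size: "1024x1024",
});
```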
@@ -0,0 +1,108 @@
+import type { MusicModelV3, MusicModelV3CallOptions } from "../music-model";
+import { generatePlaceholder } from "./placeholder";
+import type { RenderMode } from "./wrap-video-model";
+
+export interface MusicModelMiddleware {
+  transformParams?: (options: {
+    params: MusicModelV3CallOptions;
+    model: MusicModelV3;
+  }) => PromiseLike<MusicModelV3CallOptions> | MusicModelV3CallOptions;
+
+  wrapGenerate?: (options: {
+    doGenerate: () => PromiseLike<
+      Awaited<ReturnType<MusicModelV3["doGenerate"]>>
+    >;
+    params: MusicModelV3CallOptions;
+    model: MusicModelV3;
+  }) => PromiseLike<Awaited<ReturnType<MusicModelV3["doGenerate"]>>>;
+}
+
+export function wrapMusicModel({
+  model,
+  middleware,
+}: {
+  model: MusicModelV3;
+  middleware: MusicModelMiddleware;
+}): MusicModelV3 {
+  const { transformParams, wrapGenerate } = middleware;
+
+  return {
+    specificationVersion: "v3",
+    provider: model.provider,
+    modelId: model.modelId,
+
+    async doGenerate(params: MusicModelV3CallOptions) {
+      const transformedParams = transformParams
+        ? await transformParams({ params, model })
+        : params;
+
+      const doGenerate = () => model.doGenerate(transformedParams);
+
+      return wrapGenerate
+        ? wrapGenerate({ doGenerate, params: transformedParams, model })
+        : doGenerate();
+    },
+  };
+}
+
+export interface MusicPlaceholderFallbackOptions {
+  mode: RenderMode;
+  onFallback?: (error: Error, prompt: string) => void;
+}
+
+export function musicPlaceholderFallbackMiddleware(
+  options: MusicPlaceholderFallbackOptions,
+): MusicModelMiddleware {
+  const { mode, onFallback } = options;
+
+  return {
+    wrapGenerate: async ({ doGenerate, params, model }) => {
+      const createPlaceholderResult = async () => {
+        const placeholder = await generatePlaceholder({
+          type: "audio",
+          prompt: params.prompt,
+          duration: params.duration ?? 10,
+        });
+
+        return {
+          audio: placeholder.data,
+          warnings: [
+            {
+              type: "other" as const,
+              message: "placeholder: provider skipped or failed",
+            },
+          ],
+          response: {
+            timestamp: new Date(),
+            modelId: model.modelId,
+            headers: undefined,
+          },
+        };
+      };
+
+      if (mode === "preview") {
+        return createPlaceholderResult();
+      }
+
+      try {
+        return await doGenerate();
+      } catch (e) {
+        if (mode === "strict") throw e;
+
+        const error = e instanceof Error ? e : new Error(String(e));
+        onFallback?.(error, params.prompt);
+        return createPlaceholderResult();
+      }
+    },
+  };
+}
+
+export function withMusicPlaceholderFallback(
+  model: MusicModelV3,
+  options: MusicPlaceholderFallbackOptions,
+): MusicModelV3 {
+  return wrapMusicModel({
+    model,
+    middleware: musicPlaceholderFallbackMiddleware(options),
+  });
+}
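The generic `wrapMusicModel` helper also accepts custom middleware beyond the placeholder fallback. A sketch under that assumption; the timing wrapper below is illustrative and not part of the package.

```typescript
import { type MusicModelV3, wrapMusicModel } from "vargai/ai";

// illustrative middleware: log the prompt and how long doGenerate took
function withTiming(model: MusicModelV3): MusicModelV3 {
  return wrapMusicModel({
    model,
    middleware: {
      wrapGenerate: async ({ doGenerate, params }) => {
        const start = Date.now();
        const result = await doGenerate();
        console.log(`music for "${params.prompt}" took ${Date.now() - start}ms`);
        return result;
      },
    },
  });
}
```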
@@ -0,0 +1,115 @@
+import type { VideoModelV3, VideoModelV3CallOptions } from "../video-model";
+import { generatePlaceholder } from "./placeholder";
+
+export type RenderMode = "strict" | "default" | "preview";
+
+export interface VideoModelMiddleware {
+  transformParams?: (options: {
+    params: VideoModelV3CallOptions;
+    model: VideoModelV3;
+  }) => PromiseLike<VideoModelV3CallOptions> | VideoModelV3CallOptions;
+
+  wrapGenerate?: (options: {
+    doGenerate: () => PromiseLike<
+      Awaited<ReturnType<VideoModelV3["doGenerate"]>>
+    >;
+    params: VideoModelV3CallOptions;
+    model: VideoModelV3;
+  }) => PromiseLike<Awaited<ReturnType<VideoModelV3["doGenerate"]>>>;
+}
+
+export function wrapVideoModel({
+  model,
+  middleware,
+}: {
+  model: VideoModelV3;
+  middleware: VideoModelMiddleware;
+}): VideoModelV3 {
+  const { transformParams, wrapGenerate } = middleware;
+
+  return {
+    specificationVersion: "v3",
+    provider: model.provider,
+    modelId: model.modelId,
+    maxVideosPerCall: model.maxVideosPerCall,
+
+    async doGenerate(params: VideoModelV3CallOptions) {
+      const transformedParams = transformParams
+        ? await transformParams({ params, model })
+        : params;
+
+      const doGenerate = () => model.doGenerate(transformedParams);
+
+      return wrapGenerate
+        ? wrapGenerate({ doGenerate, params: transformedParams, model })
+        : doGenerate();
+    },
+  };
+}
+
+export interface PlaceholderFallbackOptions {
+  mode: RenderMode;
+  onFallback?: (error: Error, prompt: string) => void;
+}
+
+export function placeholderFallbackMiddleware(
+  options: PlaceholderFallbackOptions,
+): VideoModelMiddleware {
+  const { mode, onFallback } = options;
+
+  return {
+    wrapGenerate: async ({ doGenerate, params, model }) => {
+      const createPlaceholderResult = async () => {
+        const [width, height] = (params.resolution?.split("x").map(Number) ?? [
+          1080, 1920,
+        ]) as [number, number];
+        const placeholder = await generatePlaceholder({
+          type: "video",
+          prompt: params.prompt,
+          duration: params.duration ?? 3,
+          width,
+          height,
+        });
+
+        return {
+          videos: [placeholder.data],
+          warnings: [
+            {
+              type: "other" as const,
+              message: "placeholder: provider skipped or failed",
+            },
+          ],
+          response: {
+            timestamp: new Date(),
+            modelId: model.modelId,
+            headers: undefined,
+          },
+        };
+      };
+
+      if (mode === "preview") {
+        return createPlaceholderResult();
+      }
+
+      try {
+        return await doGenerate();
+      } catch (e) {
+        if (mode === "strict") throw e;
+
+        const error = e instanceof Error ? e : new Error(String(e));
+        onFallback?.(error, params.prompt);
+        return createPlaceholderResult();
+      }
+    },
+  };
+}
+
+export function withPlaceholderFallback(
+  model: VideoModelV3,
+  options: PlaceholderFallbackOptions,
+): VideoModelV3 {
+  return wrapVideoModel({
+    model,
+    middleware: placeholderFallbackMiddleware(options),
+  });
+}
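And the video counterpart, mirroring the README's `generateVideo` usage. A sketch assuming `fal.videoModel` yields a `VideoModelV3`, with imports via the `vargai/ai` subpath.

```typescript
import { fal, generateVideo, withPlaceholderFallback } from "vargai/ai";

// "preview" never calls the provider: every clip becomes a solid-color
// placeholder video, useful for checking timing and layout cheaply
const model = withPlaceholderFallback(fal.videoModel("wan-2.5"), {
  mode: "preview",
});

const { video } = await generateVideo({
  model,
  prompt: { text: "camera flies through clouds" },
  duration: 5,
});
```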
@@ -1,6 +1,7 @@
 export { findCmd, showFindHelp } from "./find.tsx";
 export { helpCmd, showHelp } from "./help.tsx";
 export { listCmd, showListHelp } from "./list.tsx";
-export { renderCmd } from "./render.tsx";
+export { renderCmd } from "./render.ts";
 export { runCmd, showRunHelp, showTargetHelp } from "./run.tsx";
+export { studioCmd } from "./studio.ts";
 export { showWhichHelp, whichCmd } from "./which.tsx";
@@ -0,0 +1,136 @@
+import { existsSync, mkdirSync } from "node:fs";
+import { dirname, resolve } from "node:path";
+import { defineCommand } from "citty";
+import { render } from "../../react/render";
+import type { RenderMode, VargElement } from "../../react/types";
+
+const AUTO_IMPORTS = `/** @jsxImportSource vargai */
+import { Animate, Captions, Clip, Image, Music, Overlay, Packshot, Render, Slider, Speech, Split, Subtitle, Swipe, TalkingHead, Title, Video, Grid, SplitLayout } from "vargai/react";
+import { fal, elevenlabs, replicate } from "vargai/ai";
+`;
+
+async function loadComponent(filePath: string): Promise<VargElement> {
+  const resolvedPath = resolve(filePath);
+  const source = await Bun.file(resolvedPath).text();
+
+  const hasImports =
+    source.includes("from 'vargai") ||
+    source.includes('from "vargai') ||
+    source.includes("from '@vargai") ||
+    source.includes('from "@vargai');
+
+  const hasJsxPragma =
+    source.includes("@jsxImportSource") || source.includes("@jsx ");
+
+  if (hasImports && hasJsxPragma) {
+    const mod = await import(resolvedPath);
+    return mod.default;
+  }
+
+  const pkgDir = new URL("../../..", import.meta.url).pathname;
+  const tmpDir = `${pkgDir}/.cache/varg-render`;
+  if (!existsSync(tmpDir)) {
+    mkdirSync(tmpDir, { recursive: true });
+  }
+
+  const prepended = hasImports
+    ? `/** @jsxImportSource vargai */\n`
+    : AUTO_IMPORTS;
+  const tmpFile = `${tmpDir}/${Date.now()}.tsx`;
+  await Bun.write(tmpFile, prepended + source);
+
+  try {
+    const mod = await import(tmpFile);
+    return mod.default;
+  } finally {
+    (await Bun.file(tmpFile).exists()) && (await Bun.write(tmpFile, ""));
+  }
+}
+
+export const renderCmd = defineCommand({
+  meta: {
+    name: "render",
+    description: "render a react component to video",
+  },
+  args: {
+    file: {
+      type: "positional",
+      description: "component file (.tsx)",
+      required: true,
+    },
+    output: {
+      type: "string",
+      alias: "o",
+      description: "output path",
+    },
+    cache: {
+      type: "string",
+      alias: "c",
+      description: "cache directory",
+      default: ".cache/ai",
+    },
+    quiet: {
+      type: "boolean",
+      alias: "q",
+      description: "minimal output",
+      default: false,
+    },
+    strict: {
+      type: "boolean",
+      description: "fail on provider errors (no fallback)",
+      default: false,
+    },
+    preview: {
+      type: "boolean",
+      description: "skip all generation, use placeholders only",
+      default: false,
+    },
+  },
+  async run({ args }) {
+    const file = args.file as string;
+
+    if (!file) {
+      console.error("usage: varg render <component.tsx> [-o output.mp4]");
+      process.exit(1);
+    }
+
+    const component = await loadComponent(file);
+
+    if (!component || component.type !== "render") {
+      console.error("error: default export must be a <Render> element");
+      process.exit(1);
+    }
+
+    const basename = file
+      .replace(/\.tsx?$/, "")
+      .split("/")
+      .pop();
+    const outputPath = args.output ?? `output/${basename}.mp4`;
+
+    const mode: RenderMode = args.strict
+      ? "strict"
+      : args.preview
+        ? "preview"
+        : "default";
+
+    if (!args.quiet) {
+      const modeLabel =
+        mode === "preview"
+          ? " (preview)"
+          : mode === "strict"
+            ? " (strict)"
+            : "";
+      console.log(`rendering ${file} → ${outputPath}${modeLabel}`);
+    }
+
+    const buffer = await render(component, {
+      output: outputPath,
+      cache: args.cache,
+      mode,
+    });
+
+    if (!args.quiet) {
+      console.log(`done! ${buffer.byteLength} bytes → ${outputPath}`);
+    }
+  },
+});
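Taken together with the flags defined above, a sketch of invoking the reworked command (file paths illustrative):

```bash
# default mode: real generation, placeholder fallback on provider errors
varg render examples/scene.tsx -o output/scene.mp4

# preview mode: skip all generation, render placeholders only
varg render examples/scene.tsx --preview

# strict mode: fail the render on the first provider error
varg render examples/scene.tsx --strict -q
```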
@@ -0,0 +1,47 @@
+import { resolve } from "node:path";
+import { defineCommand } from "citty";
+import { createStudioServer } from "../../studio/server";
+
+export const studioCmd = defineCommand({
+  meta: {
+    name: "studio",
+    description: "launch varg studio - visual editor for workflows",
+  },
+  args: {
+    file: {
+      type: "positional",
+      description: "initial file to open",
+      required: false,
+    },
+    cache: {
+      type: "string",
+      description: "cache directory",
+      default: ".cache/ai",
+    },
+    port: {
+      type: "string",
+      description: "port to run on",
+      default: "8282",
+    },
+  },
+  run: async ({ args }) => {
+    const initialFile = args.file ? resolve(args.file) : undefined;
+    const cacheDir = args.cache;
+    const port = Number.parseInt(args.port, 10);
+
+    console.log("varg studio starting...");
+    console.log(`cache folder: ${cacheDir}`);
+    if (initialFile) {
+      console.log(`initial file: ${initialFile}`);
+    }
+
+    const server = createStudioServer({ cacheDir, initialFile, port });
+
+    console.log(`\nopen http://localhost:${server.port}`);
+    console.log("  /editor - code editor");
+    console.log("  /cache - cache viewer");
+
+    // Keep process alive
+    await new Promise(() => {});
+  },
+});
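Likely invocation of the new subcommand, using the defaults defined above (file argument illustrative):

```bash
varg studio scene.tsx --port 8282 --cache .cache/ai
# then open http://localhost:8282 (/editor for the code editor, /cache for the cache viewer)
```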
package/src/cli/index.ts CHANGED
@@ -23,6 +23,7 @@ import {
   showRunHelp,
   showTargetHelp,
   showWhichHelp,
+  studioCmd,
   whichCmd,
 } from "./commands";
 
@@ -92,15 +93,18 @@ if (hasHelp) {
   }
 }
 
+const pkg = await import("../../package.json");
+
 const main = defineCommand({
   meta: {
     name: "varg",
-    version: "0.3.0",
+    version: pkg.version,
     description: "ai video infrastructure from your terminal",
   },
   subCommands: {
     run: runCmd,
     render: renderCmd,
+    studio: studioCmd,
     list: listCmd,
     ls: listCmd,
     find: findCmd,
@@ -1,5 +1,5 @@
-import { elevenlabs } from "../../ai-sdk/providers/elevenlabs-provider";
-import { fal } from "../../ai-sdk/providers/fal-provider";
+import { elevenlabs } from "../../ai-sdk/providers/elevenlabs";
+import { fal } from "../../ai-sdk/providers/fal";
 import { Animate, Clip, Image, Render, Speech, Title } from "..";
 
 // Non-linear tree: multiple clips with independent branches
@@ -1,4 +1,4 @@
-import { elevenlabs } from "../../ai-sdk/providers/elevenlabs-provider";
+import { elevenlabs } from "../../ai-sdk/providers/elevenlabs";
 import { Captions, Clip, Image, Render, render, Speech } from "..";
 
 async function main() {
@@ -1,5 +1,5 @@
-import { elevenlabs } from "../../ai-sdk/providers/elevenlabs-provider";
-import { fal } from "../../ai-sdk/providers/fal-provider";
+import { elevenlabs } from "../../ai-sdk/providers/elevenlabs";
+import { fal } from "../../ai-sdk/providers/fal";
 import { Animate, Clip, Image, Music, Render, render } from "..";
 
 const MADI_REF =
@@ -1,4 +1,4 @@
-import { fal } from "../../ai-sdk/providers/fal-provider";
+import { fal } from "../../ai-sdk/providers/fal";
 import { Clip, Grid, Image, Render, render, Title } from "..";
 
 const CHARACTER_PROMPTS = [
@@ -1,5 +1,5 @@
-import { elevenlabs } from "../../ai-sdk/providers/elevenlabs-provider";
-import { fal } from "../../ai-sdk/providers/fal-provider";
+import { elevenlabs } from "../../ai-sdk/providers/elevenlabs";
+import { fal } from "../../ai-sdk/providers/fal";
 import { Animate, Clip, Image, Music, Render } from "..";
 
 const MADI_REF =
@@ -1,4 +1,4 @@
-import { elevenlabs } from "../../ai-sdk/providers/elevenlabs-provider";
+import { elevenlabs } from "../../ai-sdk/providers/elevenlabs";
 import { Clip, Image, Music, Render, render } from "..";
 
 async function main() {
@@ -1,4 +1,4 @@
-import { fal } from "../../ai-sdk/providers/fal-provider";
+import { fal } from "../../ai-sdk/providers/fal";
 import { Clip, Image, Render, Video } from "..";
 
 // character: young woman, short dark brown bob with wispy bangs, oval face, fair skin,
@@ -1,4 +1,4 @@
-import { fal } from "../../ai-sdk/providers/fal-provider";
+import { fal } from "../../ai-sdk/providers/fal";
 import { Clip, Image, Render, render, SplitLayout as Split, Title } from "..";
 
 async function main() {
@@ -1,4 +1,4 @@
-import { fal } from "../../ai-sdk/providers/fal-provider";
+import { fal } from "../../ai-sdk/providers/fal";
 import { Clip, Grid, Render, render, Title, Video } from "..";
 
 async function main() {
@@ -1,6 +1,6 @@
 import { describe, expect, test } from "bun:test";
 import { existsSync, unlinkSync } from "node:fs";
-import { fal } from "../ai-sdk/providers/fal-provider";
+import { fal } from "../ai-sdk/providers/fal";
 import {
   Animate,
   Captions,
@@ -32,9 +32,15 @@ type PendingLayer =
   | { type: "sync"; layer: Layer }
   | { type: "async"; promise: Promise<Layer> };
 
+interface ClipLayerOptions {
+  cutFrom?: number;
+  cutTo?: number;
+}
+
 async function renderClipLayers(
   children: VargNode[],
   ctx: RenderContext,
+  clipOptions?: ClipLayerOptions,
 ): Promise<Layer[]> {
   const pending: PendingLayer[] = [];
 
@@ -86,8 +92,9 @@ async function renderClipLayers(
         type: "video",
         path,
         resizeMode: props.resize,
-        cutFrom: props.cutFrom,
-        cutTo: props.cutTo,
+        // Video-level cutFrom/cutTo take precedence over clip-level
+        cutFrom: props.cutFrom ?? clipOptions?.cutFrom,
+        cutTo: props.cutTo ?? clipOptions?.cutTo,
         mixVolume: props.keepAudio ? (props.volume ?? 1) : 0,
         left: props.left,
         top: props.top,
@@ -220,7 +227,10 @@ export async function renderClip(
   ctx: RenderContext,
 ): Promise<Clip> {
   const props = element.props as ClipProps;
-  const layers = await renderClipLayers(element.children, ctx);
+  const layers = await renderClipLayers(element.children, ctx, {
+    cutFrom: props.cutFrom,
+    cutTo: props.cutTo,
+  });
 
   const isOverlayVideo = (l: Layer) =>
     l.type === "video" &&
@@ -1,7 +1,12 @@
-import { generateImage } from "ai";
+import { generateImage, wrapImageModel } from "ai";
 import { withCache } from "../../ai-sdk/cache";
 import { fileCache } from "../../ai-sdk/file-cache";
 import { generateVideo } from "../../ai-sdk/generate-video";
+import {
+  imagePlaceholderFallbackMiddleware,
+  placeholderFallbackMiddleware,
+  wrapVideoModel,
+} from "../../ai-sdk/middleware";
 import { editly } from "../../ai-sdk/providers/editly";
 import type {
   AudioTrack,
@@ -14,6 +19,7 @@ import type {
   ClipProps,
   MusicProps,
   OverlayProps,
+  RenderMode,
   RenderOptions,
   RenderProps,
   SpeechProps,
@@ -48,17 +54,75 @@ export async function renderRoot(
   const props = element.props as RenderProps;
   const progress = createProgressTracker(options.quiet ?? false);
 
+  const mode: RenderMode = options.mode ?? "default";
+  const placeholderCount = { images: 0, videos: 0, total: 0 };
+
+  const onFallback = (error: Error, prompt: string) => {
+    if (!options.quiet) {
+      console.warn(
+        `\x1b[33m⚠ provider failed: ${error.message} → placeholder\x1b[0m`,
+      );
+    }
+  };
+
+  const trackPlaceholder = (type: "image" | "video") => {
+    placeholderCount[type === "image" ? "images" : "videos"]++;
+    placeholderCount.total++;
+  };
+
+  const wrapGenerateImage: typeof generateImage = async (opts) => {
+    if (
+      typeof opts.model === "string" ||
+      opts.model.specificationVersion !== "v3"
+    ) {
+      return generateImage(opts);
+    }
+    const wrappedModel = wrapImageModel({
+      model: opts.model,
+      middleware: imagePlaceholderFallbackMiddleware({
+        mode,
+        onFallback: (error, prompt) => {
+          trackPlaceholder("image");
+          onFallback(error, prompt);
+        },
+      }),
+    });
+    const result = await generateImage({ ...opts, model: wrappedModel });
+    if (mode === "preview") trackPlaceholder("image");
+    return result;
+  };
+
+  const wrapGenerateVideo: typeof generateVideo = async (opts) => {
+    const wrappedModel = wrapVideoModel({
+      model: opts.model,
+      middleware: placeholderFallbackMiddleware({
+        mode,
+        onFallback: (error, prompt) => {
+          trackPlaceholder("video");
+          onFallback(error, prompt);
+        },
+      }),
+    });
+    const result = await generateVideo({ ...opts, model: wrappedModel });
+    if (mode === "preview") trackPlaceholder("video");
+    return result;
+  };
+
   const ctx: RenderContext = {
     width: props.width ?? 1920,
     height: props.height ?? 1080,
     fps: props.fps ?? 30,
     cache: options.cache ? fileCache({ dir: options.cache }) : undefined,
     generateImage: options.cache
-      ? withCache(generateImage, { storage: fileCache({ dir: options.cache }) })
-      : generateImage,
+      ? withCache(wrapGenerateImage, {
+          storage: fileCache({ dir: options.cache }),
+        })
+      : wrapGenerateImage,
     generateVideo: options.cache
-      ? withCache(generateVideo, { storage: fileCache({ dir: options.cache }) })
-      : generateVideo,
+      ? withCache(wrapGenerateVideo, {
+          storage: fileCache({ dir: options.cache }),
+        })
+      : wrapGenerateVideo,
     tempFiles: [],
     progress,
     pending: new Map(),
@@ -238,6 +302,18 @@ export async function renderRoot(
     completeTask(progress, captionsTaskId);
   }
 
+  if (!options.quiet && placeholderCount.total > 0) {
+    if (mode === "preview") {
+      console.log(
+        `\x1b[36mℹ preview mode: ${placeholderCount.total} placeholders used (${placeholderCount.images} images, ${placeholderCount.videos} videos)\x1b[0m`,
+      );
+    } else {
+      console.warn(
+        `\x1b[33m⚠ ${placeholderCount.total} elements used placeholders - run with --strict for production\x1b[0m`,
+      );
+    }
+  }
+
   const result = await Bun.file(finalOutPath).arrayBuffer();
   return new Uint8Array(result);
 }
@@ -75,6 +75,10 @@ export interface RenderProps extends BaseProps {
 export interface ClipProps extends BaseProps {
   duration?: number | "auto";
   transition?: TransitionOptions;
+  /** Start trim point in seconds (e.g., 1 to start from 1 second) */
+  cutFrom?: number;
+  /** End trim point in seconds (e.g., 3 to end at 3 seconds) */
+  cutTo?: number;
   children?: VargNode;
 }
 
@@ -206,10 +210,13 @@ export interface PackshotProps extends BaseProps {
   duration?: number;
 }
 
+export type RenderMode = "strict" | "default" | "preview";
+
 export interface RenderOptions {
   output?: string;
   cache?: string;
   quiet?: boolean;
+  mode?: RenderMode;
 }
 
 export interface ElementPropsMap {
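A sketch of driving the new `mode` option programmatically, assuming the `vargai/react` subpath export for `render` and a component file whose default export is a `<Render>` element (the "./scene.tsx" path is illustrative):

```typescript
import { render } from "vargai/react";

// load a component file's default export (a <Render> element), as the CLI does
const component = (await import("./scene.tsx")).default;

// mode is the new RenderMode: "preview" renders placeholders for every element,
// "strict" rethrows provider errors, "default" falls back per element
const bytes = await render(component, {
  output: "output/demo.mp4",
  cache: ".cache/ai",
  mode: "preview",
});

console.log(`rendered ${bytes.byteLength} bytes`);
```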
package/tsconfig.json CHANGED
@@ -6,7 +6,7 @@
     "module": "ESNext",
     "moduleDetection": "force",
     "jsx": "react-jsx",
-    "jsxImportSource": "@/react/runtime",
+    "jsxImportSource": "vargai",
     "allowJs": true,
 
     // Bundler mode
@@ -30,7 +30,9 @@
     // Base URL for imports
     "baseUrl": ".",
     "paths": {
-      "@/*": ["./src/*"]
+      "@/*": ["./src/*"],
+      "vargai/jsx-runtime": ["./src/react/runtime/jsx-runtime.ts"],
+      "vargai/jsx-dev-runtime": ["./src/react/runtime/jsx-dev-runtime.ts"]
     }
   },
   "include": ["src/**/*"],
@@ -1,71 +0,0 @@
-import { defineCommand } from "citty";
-import { render } from "../../react/render";
-import type { VargElement } from "../../react/types";
-
-export const renderCmd = defineCommand({
-  meta: {
-    name: "render",
-    description: "render a react component to video",
-  },
-  args: {
-    file: {
-      type: "positional",
-      description: "component file (.tsx)",
-      required: true,
-    },
-    output: {
-      type: "string",
-      alias: "o",
-      description: "output path",
-    },
-    cache: {
-      type: "string",
-      alias: "c",
-      description: "cache directory",
-      default: ".cache/ai",
-    },
-    quiet: {
-      type: "boolean",
-      alias: "q",
-      description: "minimal output",
-      default: false,
-    },
-  },
-  async run({ args }) {
-    const file = args.file as string;
-
-    if (!file) {
-      console.error("usage: varg render <component.tsx> [-o output.mp4]");
-      process.exit(1);
-    }
-
-    const resolvedPath = Bun.resolveSync(file, process.cwd());
-    const mod = await import(resolvedPath);
-    const component: VargElement = mod.default;
-
-    if (!component || component.type !== "render") {
-      console.error("error: default export must be a <Render> element");
-      process.exit(1);
-    }
-
-    const outputPath =
-      args.output ??
-      `output/${file
-        .replace(/\.tsx?$/, "")
-        .split("/")
-        .pop()}.mp4`;
-
-    if (!args.quiet) {
-      console.log(`rendering ${file} → ${outputPath}`);
-    }
-
-    const buffer = await render(component, {
-      output: outputPath,
-      cache: args.cache,
-    });
-
-    if (!args.quiet) {
-      console.log(`done! ${buffer.byteLength} bytes → ${outputPath}`);
-    }
-  },
-});
package/src/react/cli.ts DELETED
@@ -1,52 +0,0 @@
-#!/usr/bin/env bun
-
-import { parseArgs } from "node:util";
-import { render } from "./render";
-import type { VargElement } from "./types";
-
-const { values, positionals } = parseArgs({
-  args: Bun.argv.slice(2),
-  options: {
-    output: { type: "string", short: "o" },
-    cache: { type: "string", short: "c", default: ".cache/ai" },
-    quiet: { type: "boolean", short: "q", default: false },
-  },
-  allowPositionals: true,
-});
-
-const [file] = positionals;
-
-if (!file) {
-  console.error("usage: bun react/cli.ts <component.tsx> [-o output.mp4]");
-  process.exit(1);
-}
-
-const resolvedPath = Bun.resolveSync(file, process.cwd());
-const mod = await import(resolvedPath);
-const component: VargElement = mod.default;
-
-if (!component || component.type !== "render") {
-  console.error("error: default export must be a <Render> element");
-  process.exit(1);
-}
-
-const outputPath =
-  values.output ??
-  `output/${file
-    .replace(/\.tsx?$/, "")
-    .split("/")
-    .pop()}.mp4`;
-
-if (!values.quiet) {
-  console.log(`rendering ${file} → ${outputPath}`);
-}
-
-const buffer = await render(component, {
-  output: outputPath,
-  cache: values.cache,
-  quiet: values.quiet,
-});
-
-if (!values.quiet) {
-  console.log(`done! ${buffer.byteLength} bytes → ${outputPath}`);
-}