vargai 0.3.1 → 0.4.0-alpha.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (73)
  1. package/README.md +1 -38
  2. package/biome.json +6 -1
  3. package/docs/index.html +1130 -0
  4. package/docs/prompting.md +326 -0
  5. package/docs/react.md +834 -0
  6. package/package.json +11 -6
  7. package/src/ai-sdk/index.ts +2 -21
  8. package/src/cli/commands/index.ts +1 -4
  9. package/src/cli/commands/render.tsx +71 -0
  10. package/src/cli/index.ts +2 -0
  11. package/src/react/cli.ts +52 -0
  12. package/src/react/elements.ts +146 -0
  13. package/src/react/examples/branching.tsx +66 -0
  14. package/src/react/examples/captions-demo.tsx +37 -0
  15. package/src/react/examples/character-video.tsx +84 -0
  16. package/src/react/examples/grid.tsx +53 -0
  17. package/src/react/examples/layouts-demo.tsx +57 -0
  18. package/src/react/examples/madi.tsx +60 -0
  19. package/src/react/examples/music-test.tsx +35 -0
  20. package/src/react/examples/onlyfans-1m/workflow.tsx +88 -0
  21. package/src/react/examples/orange-portrait.tsx +41 -0
  22. package/src/react/examples/split-element-demo.tsx +60 -0
  23. package/src/react/examples/split-layout-demo.tsx +60 -0
  24. package/src/react/examples/split.tsx +41 -0
  25. package/src/react/examples/video-grid.tsx +46 -0
  26. package/src/react/index.ts +43 -0
  27. package/src/react/layouts/grid.tsx +28 -0
  28. package/src/react/layouts/index.ts +2 -0
  29. package/src/react/layouts/split.tsx +20 -0
  30. package/src/react/react.test.ts +309 -0
  31. package/src/react/render.ts +21 -0
  32. package/src/react/renderers/animate.ts +59 -0
  33. package/src/react/renderers/captions.ts +297 -0
  34. package/src/react/renderers/clip.ts +248 -0
  35. package/src/react/renderers/context.ts +17 -0
  36. package/src/react/renderers/image.ts +109 -0
  37. package/src/react/renderers/index.ts +22 -0
  38. package/src/react/renderers/music.ts +60 -0
  39. package/src/react/renderers/packshot.ts +84 -0
  40. package/src/react/renderers/progress.ts +173 -0
  41. package/src/react/renderers/render.ts +243 -0
  42. package/src/react/renderers/slider.ts +69 -0
  43. package/src/react/renderers/speech.ts +53 -0
  44. package/src/react/renderers/split.ts +91 -0
  45. package/src/react/renderers/subtitle.ts +16 -0
  46. package/src/react/renderers/swipe.ts +75 -0
  47. package/src/react/renderers/title.ts +17 -0
  48. package/src/react/renderers/utils.ts +124 -0
  49. package/src/react/renderers/video.ts +127 -0
  50. package/src/react/runtime/jsx-dev-runtime.ts +43 -0
  51. package/src/react/runtime/jsx-runtime.ts +35 -0
  52. package/src/react/types.ts +232 -0
  53. package/src/studio/index.ts +26 -0
  54. package/src/studio/scanner.ts +102 -0
  55. package/src/studio/server.ts +554 -0
  56. package/src/studio/stages.ts +251 -0
  57. package/src/studio/step-renderer.ts +279 -0
  58. package/src/studio/types.ts +60 -0
  59. package/src/studio/ui/cache.html +303 -0
  60. package/src/studio/ui/index.html +1820 -0
  61. package/tsconfig.cli.json +8 -0
  62. package/tsconfig.json +3 -1
  63. package/bun.lock +0 -1255
  64. package/docs/plan.md +0 -66
  65. package/docs/todo.md +0 -14
  66. package/src/ai-sdk/middleware/index.ts +0 -25
  67. package/src/ai-sdk/middleware/placeholder.ts +0 -111
  68. package/src/ai-sdk/middleware/wrap-image-model.ts +0 -86
  69. package/src/ai-sdk/middleware/wrap-music-model.ts +0 -108
  70. package/src/ai-sdk/middleware/wrap-video-model.ts +0 -115
  71. /package/docs/{varg-sdk.md → sdk.md} +0 -0
  72. /package/src/ai-sdk/providers/{elevenlabs.ts → elevenlabs-provider.ts} +0 -0
  73. /package/src/ai-sdk/providers/{fal.ts → fal-provider.ts} +0 -0
package/src/react/renderers/clip.ts
@@ -0,0 +1,248 @@
+ import type {
+   AudioLayer,
+   Clip,
+   FillColorLayer,
+   ImageLayer,
+   ImageOverlayLayer,
+   Layer,
+   VideoLayer,
+ } from "../../ai-sdk/providers/editly/types";
+ import type {
+   AnimateProps,
+   ClipProps,
+   ImageProps,
+   SpeechProps,
+   VargElement,
+   VargNode,
+   VideoProps,
+ } from "../types";
+ import { renderAnimate } from "./animate";
+ import type { RenderContext } from "./context";
+ import { renderImage } from "./image";
+ import { renderPackshot } from "./packshot";
+ import { renderSlider } from "./slider";
+ import { renderSpeech } from "./speech";
+ import { renderSplit } from "./split";
+ import { renderSubtitle } from "./subtitle";
+ import { renderSwipe } from "./swipe";
+ import { renderTitle } from "./title";
+ import { renderVideo } from "./video";
+
+ type PendingLayer =
+   | { type: "sync"; layer: Layer }
+   | { type: "async"; promise: Promise<Layer> };
+
+ async function renderClipLayers(
+   children: VargNode[],
+   ctx: RenderContext,
+ ): Promise<Layer[]> {
+   const pending: PendingLayer[] = [];
+
+   for (const child of children) {
+     if (!child || typeof child !== "object" || !("type" in child)) continue;
+
+     const element = child as VargElement;
+
+     switch (element.type) {
+       case "image": {
+         const props = element.props as ImageProps;
+         const hasPosition =
+           props.left !== undefined ||
+           props.top !== undefined ||
+           props.width !== undefined ||
+           props.height !== undefined;
+
+         pending.push({
+           type: "async",
+           promise: renderImage(element as VargElement<"image">, ctx).then(
+             (path) =>
+               hasPosition
+                 ? ({
+                     type: "image-overlay",
+                     path,
+                     zoomDirection: props.zoom,
+                     width: props.width,
+                     height: props.height,
+                     position: { x: props.left ?? 0, y: props.top ?? 0 },
+                   } as ImageOverlayLayer)
+                 : ({
+                     type: "image",
+                     path,
+                     resizeMode: props.resize,
+                     zoomDirection: props.zoom,
+                   } as ImageLayer),
+           ),
+         });
+         break;
+       }
+
+       case "video": {
+         const props = element.props as VideoProps;
+         pending.push({
+           type: "async",
+           promise: renderVideo(element as VargElement<"video">, ctx).then(
+             (path) =>
+               ({
+                 type: "video",
+                 path,
+                 resizeMode: props.resize,
+                 cutFrom: props.cutFrom,
+                 cutTo: props.cutTo,
+                 mixVolume: props.keepAudio ? (props.volume ?? 1) : 0,
+                 left: props.left,
+                 top: props.top,
+                 width: props.width,
+                 height: props.height,
+               }) as VideoLayer,
+           ),
+         });
+         break;
+       }
+
+       case "animate": {
+         const props = element.props as AnimateProps;
+         pending.push({
+           type: "async",
+           promise: renderAnimate(element as VargElement<"animate">, ctx).then(
+             (path) =>
+               ({
+                 type: "video",
+                 path,
+                 left: props.left,
+                 top: props.top,
+                 width: props.width,
+                 height: props.height,
+               }) as VideoLayer,
+           ),
+         });
+         break;
+       }
+
+       case "title": {
+         pending.push({
+           type: "sync",
+           layer: renderTitle(element as VargElement<"title">),
+         });
+         break;
+       }
+
+       case "subtitle": {
+         pending.push({
+           type: "sync",
+           layer: renderSubtitle(element as VargElement<"subtitle">),
+         });
+         break;
+       }
+
+       case "speech": {
+         const props = element.props as SpeechProps;
+         pending.push({
+           type: "async",
+           promise: renderSpeech(element as VargElement<"speech">, ctx).then(
+             (result) =>
+               ({
+                 type: "audio",
+                 path: result.path,
+                 mixVolume: props.volume ?? 1,
+               }) as AudioLayer,
+           ),
+         });
+         break;
+       }
+
+       case "split": {
+         pending.push({
+           type: "async",
+           promise: renderSplit(element as VargElement<"split">, ctx).then(
+             (path) =>
+               ({
+                 type: "video",
+                 path,
+               }) as VideoLayer,
+           ),
+         });
+         break;
+       }
+
+       case "slider": {
+         pending.push({
+           type: "async",
+           promise: renderSlider(element as VargElement<"slider">, ctx).then(
+             (path) =>
+               ({
+                 type: "video",
+                 path,
+               }) as VideoLayer,
+           ),
+         });
+         break;
+       }
+
+       case "swipe": {
+         pending.push({
+           type: "async",
+           promise: renderSwipe(element as VargElement<"swipe">, ctx).then(
+             (path) =>
+               ({
+                 type: "video",
+                 path,
+               }) as VideoLayer,
+           ),
+         });
+         break;
+       }
+
+       case "packshot": {
+         pending.push({
+           type: "async",
+           promise: renderPackshot(element as VargElement<"packshot">, ctx).then(
+             (path) =>
+               ({
+                 type: "video",
+                 path,
+               }) as VideoLayer,
+           ),
+         });
+         break;
+       }
+     }
+   }
+
+   const layers = await Promise.all(
+     pending.map((p) => (p.type === "sync" ? p.layer : p.promise)),
+   );
+
+   return layers;
+ }
+
+ export async function renderClip(
+   element: VargElement<"clip">,
+   ctx: RenderContext,
+ ): Promise<Clip> {
+   const props = element.props as ClipProps;
+   const layers = await renderClipLayers(element.children, ctx);
+
+   const isOverlayVideo = (l: Layer) =>
+     l.type === "video" &&
+     ((l as VideoLayer).left !== undefined ||
+       (l as VideoLayer).top !== undefined ||
+       (l as VideoLayer).width !== undefined ||
+       (l as VideoLayer).height !== undefined);
+
+   const hasBaseLayer = layers.some(
+     (l) =>
+       l.type === "image" ||
+       l.type === "fill-color" ||
+       (l.type === "video" && !isOverlayVideo(l)),
+   );
+
+   if (!hasBaseLayer && layers.length > 0) {
+     layers.unshift({ type: "fill-color", color: "#000000" } as FillColorLayer);
+   }
+
+   return {
+     layers,
+     duration: typeof props.duration === "number" ? props.duration : undefined,
+     transition: props.transition ?? null,
+   };
+ }
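For orientation (not part of the diff): renderClip maps a clip's children onto editly layers, running generated media (image, video, speech, nested compositions) concurrently while title/subtitle layers resolve synchronously, and it prepends a black fill-color base when the clip would otherwise contain only overlay layers. A minimal sketch of what it returns for a title-only clip; the element literal is hypothetical, since real elements are produced by the package's JSX runtime, and ctx is the RenderContext defined in the next hunk.

import { renderClip } from "./clip";
import type { RenderContext } from "./context";
import type { VargElement } from "../types";

declare const ctx: RenderContext; // see context.ts below for the required fields

// Hypothetical element literal; the exact VargElement shape is assumed here.
const element = {
  type: "clip",
  props: { duration: 3 },
  children: [{ type: "title", props: {}, children: ["Hello"] }],
} as unknown as VargElement<"clip">;

const clip = await renderClip(element, ctx);
// Only a title layer is produced, so renderClip prepends a black fill-color
// base layer: clip.layers = [fill-color, title], duration = 3, transition = null.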
package/src/react/renderers/context.ts
@@ -0,0 +1,17 @@
+ import type { generateImage } from "ai";
+ import type { fileCache } from "../../ai-sdk/file-cache";
+ import type { generateVideo } from "../../ai-sdk/generate-video";
+ import type { ProgressTracker } from "./progress";
+
+ export interface RenderContext {
+   width: number;
+   height: number;
+   fps: number;
+   cache?: ReturnType<typeof fileCache>;
+   generateImage: typeof generateImage;
+   generateVideo: typeof generateVideo;
+   tempFiles: string[];
+   progress?: ProgressTracker;
+   /** In-memory deduplication for concurrent renders of the same element */
+   pending: Map<string, Promise<string>>;
+ }
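A minimal sketch of assembling a RenderContext by hand, assuming the generateImage/generateVideo value exports match the type-only imports above; in normal use the render entrypoint in src/react/render.ts presumably owns this wiring, with cache and progress left optional.

import { generateImage } from "ai";
import { generateVideo } from "../../ai-sdk/generate-video";
import type { RenderContext } from "./context";

const ctx: RenderContext = {
  width: 1080,
  height: 1920,
  fps: 30,
  generateImage,      // assumed to be the value behind the type-only import above
  generateVideo,
  tempFiles: [],      // temp paths accumulate here for cleanup by the caller
  pending: new Map(), // shared per render so identical elements are deduplicated
  // cache and progress are optional and omitted in this sketch
};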
package/src/react/renderers/image.ts
@@ -0,0 +1,109 @@
+ import type { generateImage } from "ai";
+ import { File } from "../../ai-sdk/file";
+ import type {
+   ImageInput,
+   ImagePrompt,
+   ImageProps,
+   VargElement,
+ } from "../types";
+ import type { RenderContext } from "./context";
+ import { addTask, completeTask, startTask } from "./progress";
+ import { computeCacheKey, toFileUrl } from "./utils";
+
+ async function resolveImageInput(
+   input: ImageInput,
+   ctx: RenderContext,
+ ): Promise<Uint8Array> {
+   if (input instanceof Uint8Array) {
+     return input;
+   }
+   if (typeof input === "string") {
+     const response = await fetch(toFileUrl(input));
+     return new Uint8Array(await response.arrayBuffer());
+   }
+   const path = await renderImage(input, ctx);
+   const response = await fetch(toFileUrl(path));
+   return new Uint8Array(await response.arrayBuffer());
+ }
+
+ async function resolvePrompt(
+   prompt: ImagePrompt,
+   ctx: RenderContext,
+ ): Promise<string | { text?: string; images: Uint8Array[] }> {
+   if (typeof prompt === "string") {
+     return prompt;
+   }
+   const resolvedImages = await Promise.all(
+     prompt.images.map((img) => resolveImageInput(img, ctx)),
+   );
+   return { text: prompt.text, images: resolvedImages };
+ }
+
+ export async function renderImage(
+   element: VargElement<"image">,
+   ctx: RenderContext,
+ ): Promise<string> {
+   const props = element.props as ImageProps;
+
+   if (props.src) {
+     return props.src;
+   }
+
+   const prompt = props.prompt;
+   if (!prompt) {
+     throw new Error("Image element requires either 'prompt' or 'src'");
+   }
+
+   const model = props.model;
+   if (!model) {
+     throw new Error("Image element requires 'model' prop when using prompt");
+   }
+
+   // Compute cache key for deduplication
+   const cacheKey = computeCacheKey(element);
+   const cacheKeyStr = JSON.stringify(cacheKey);
+
+   // Check if this element is already being rendered (deduplication)
+   const pendingRender = ctx.pending.get(cacheKeyStr);
+   if (pendingRender) {
+     return pendingRender;
+   }
+
+   // Create the render promise and store it for deduplication
+   const renderPromise = (async () => {
+     const resolvedPrompt = await resolvePrompt(prompt, ctx);
+
+     const modelId = typeof model === "string" ? model : model.modelId;
+     const taskId = ctx.progress
+       ? addTask(ctx.progress, "image", modelId)
+       : null;
+     if (taskId && ctx.progress) startTask(ctx.progress, taskId);
+
+     const { images } = await ctx.generateImage({
+       model,
+       prompt: resolvedPrompt,
+       aspectRatio: props.aspectRatio,
+       n: 1,
+       cacheKey,
+     } as Parameters<typeof generateImage>[0]);
+
+     if (taskId && ctx.progress) completeTask(ctx.progress, taskId);
+
+     const firstImage = images[0];
+     if (!firstImage?.uint8Array) {
+       throw new Error("Image generation returned no image data");
+     }
+     const imageData = firstImage.uint8Array;
+     const tempPath = await File.toTemp({
+       uint8Array: imageData,
+       mimeType: "image/png",
+     });
+     ctx.tempFiles.push(tempPath);
+
+     return tempPath;
+   })();
+
+   ctx.pending.set(cacheKeyStr, renderPromise);
+
+   return renderPromise;
+ }
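The pending map gives in-flight deduplication: renderImage registers its promise synchronously before awaiting anything, so a second render of an identical element in the same pass reuses the first call's promise and no second generateImage request is made. A sketch, where logoElement and ctx are stand-ins:

import { renderImage } from "./image";
import type { RenderContext } from "./context";
import type { VargElement } from "../types";

declare const ctx: RenderContext;
declare const logoElement: VargElement<"image">; // hypothetical prompt-based <image> element

// Both calls start in the same render pass; the second finds the first call's
// promise under the same cache key in ctx.pending and returns it directly.
const [a, b] = await Promise.all([
  renderImage(logoElement, ctx),
  renderImage(logoElement, ctx),
]);
// a === b: both resolve to the same temp PNG path, generated once.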
package/src/react/renderers/index.ts
@@ -0,0 +1,22 @@
+ export { renderAnimate } from "./animate";
+ export { renderCaptions } from "./captions";
+ export { renderClip } from "./clip";
+ export type { RenderContext } from "./context";
+ export { renderImage } from "./image";
+ export { renderPackshot } from "./packshot";
+ export {
+   createProgressTracker,
+   type GenerationType,
+   type ProgressTask,
+   type ProgressTracker,
+   TIME_ESTIMATES,
+ } from "./progress";
+ export { renderRoot } from "./render";
+ export { renderSlider } from "./slider";
+ export type { SpeechResult } from "./speech";
+ export { renderSpeech } from "./speech";
+ export { renderSplit } from "./split";
+ export { renderSwipe } from "./swipe";
+ export { renderTitle } from "./title";
+ export { computeCacheKey, getTextContent } from "./utils";
+ export { renderVideo } from "./video";
package/src/react/renderers/music.ts
@@ -0,0 +1,60 @@
+ import { generateMusic } from "../../ai-sdk/generate-music";
+ import type { MusicProps, VargElement } from "../types";
+ import type { RenderContext } from "./context";
+ import { addTask, completeTask, startTask } from "./progress";
+
+ export async function renderMusic(
+   element: VargElement<"music">,
+   ctx: RenderContext,
+ ): Promise<{ path: string }> {
+   const props = element.props as MusicProps;
+
+   const prompt = props.prompt;
+   const model = props.model;
+   if (!prompt || !model) {
+     throw new Error("Music generation requires both prompt and model");
+   }
+
+   const cacheKey = JSON.stringify({
+     type: "music",
+     prompt,
+     model: model.modelId,
+     duration: props.duration,
+   });
+
+   const modelId = model.modelId;
+   const taskId = ctx.progress ? addTask(ctx.progress, "music", modelId) : null;
+
+   const generateFn = async () => {
+     const result = await generateMusic({
+       model,
+       prompt,
+       duration: props.duration,
+     });
+     return result.audio.uint8Array;
+   };
+
+   let audioData: Uint8Array;
+
+   if (ctx.cache) {
+     const cached = await ctx.cache.get(cacheKey);
+     if (cached) {
+       audioData = cached as Uint8Array;
+     } else {
+       if (taskId && ctx.progress) startTask(ctx.progress, taskId);
+       audioData = await generateFn();
+       if (taskId && ctx.progress) completeTask(ctx.progress, taskId);
+       await ctx.cache.set(cacheKey, audioData);
+     }
+   } else {
+     if (taskId && ctx.progress) startTask(ctx.progress, taskId);
+     audioData = await generateFn();
+     if (taskId && ctx.progress) completeTask(ctx.progress, taskId);
+   }
+
+   const tempPath = `/tmp/varg-music-${Date.now()}.mp3`;
+   await Bun.write(tempPath, audioData);
+   ctx.tempFiles.push(tempPath);
+
+   return { path: tempPath };
+ }
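Note when reading the cache branch: unlike renderImage, the persistent-cache key here is a plain JSON.stringify of an object literal (field order included) rather than computeCacheKey, so only the prompt, model id, and duration participate. With a hypothetical prompt and model id it looks like:

const cacheKey = JSON.stringify({
  type: "music",
  prompt: "lofi hip hop, 80 bpm, warm tape hiss", // hypothetical prompt
  model: "music-model-v1",                        // hypothetical model id
  duration: 30,
});
// '{"type":"music","prompt":"lofi hip hop, 80 bpm, warm tape hiss","model":"music-model-v1","duration":30}'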
package/src/react/renderers/packshot.ts
@@ -0,0 +1,84 @@
+ import { editly } from "../../ai-sdk/providers/editly";
+ import type {
+   Clip,
+   ImageOverlayLayer,
+   Layer,
+   Position,
+   TitleLayer,
+ } from "../../ai-sdk/providers/editly/types";
+ import type { PackshotProps, VargElement } from "../types";
+ import type { RenderContext } from "./context";
+ import { renderImage } from "./image";
+
+ function resolvePosition(pos: Position | undefined): Position {
+   return pos ?? "center";
+ }
+
+ export async function renderPackshot(
+   element: VargElement<"packshot">,
+   ctx: RenderContext,
+ ): Promise<string> {
+   const props = element.props as PackshotProps;
+   const duration = props.duration ?? 3;
+
+   const layers: Layer[] = [];
+
+   if (props.background) {
+     if (typeof props.background === "string") {
+       layers.push({
+         type: "fill-color" as const,
+         color: props.background,
+       });
+     } else {
+       const bgPath = await renderImage(props.background, ctx);
+       layers.push({
+         type: "image" as const,
+         path: bgPath,
+         resizeMode: "cover" as const,
+       });
+     }
+   } else {
+     layers.push({
+       type: "fill-color" as const,
+       color: "#000000",
+     });
+   }
+
+   if (props.logo) {
+     const logoLayer: ImageOverlayLayer = {
+       type: "image-overlay",
+       path: props.logo,
+       position: resolvePosition(props.logoPosition),
+       width: props.logoSize ?? "30%",
+     };
+     layers.push(logoLayer);
+   }
+
+   if (props.cta) {
+     const ctaLayer: TitleLayer = {
+       type: "title",
+       text: props.cta,
+       textColor: props.ctaColor ?? "white",
+       position: resolvePosition(props.ctaPosition ?? "bottom"),
+     };
+     layers.push(ctaLayer);
+   }
+
+   const clip: Clip = {
+     layers,
+     duration,
+   };
+
+   const outPath = `/tmp/varg-packshot-${Date.now()}.mp4`;
+
+   await editly({
+     outPath,
+     width: ctx.width,
+     height: ctx.height,
+     fps: ctx.fps,
+     clips: [clip],
+   });
+
+   ctx.tempFiles.push(outPath);
+   return outPath;
+ }
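To make the layer assembly concrete, here is a hypothetical set of packshot props and the layers renderPackshot would build from them; the paths and copy are invented, but the prop names and defaults match what the code above reads.

// Hypothetical props, as read by renderPackshot:
const props = {
  duration: 2,
  background: "#101010",     // string background -> fill-color layer
  logo: "./assets/logo.png", // -> image-overlay, default position "center", width "30%"
  cta: "Shop now",           // -> title layer, default textColor "white", position "bottom"
};
// Resulting clip.layers, rendered to an MP4 at ctx.width x ctx.height:
// [ { type: "fill-color", color: "#101010" },
//   { type: "image-overlay", path: "./assets/logo.png", position: "center", width: "30%" },
//   { type: "title", text: "Shop now", textColor: "white", position: "bottom" } ]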