@codellyson/framely 0.1.0 → 0.1.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/package.json +3 -2
- package/src/AbsoluteFill.tsx +50 -0
- package/src/Audio.tsx +294 -0
- package/src/Composition.tsx +378 -0
- package/src/Easing.ts +294 -0
- package/src/ErrorBoundary.tsx +136 -0
- package/src/Folder.tsx +66 -0
- package/src/Freeze.tsx +63 -0
- package/src/IFrame.tsx +100 -0
- package/src/Img.tsx +146 -0
- package/src/Loop.tsx +139 -0
- package/src/Player.tsx +594 -0
- package/src/Sequence.tsx +80 -0
- package/src/Series.tsx +181 -0
- package/src/Text.tsx +376 -0
- package/src/Video.tsx +247 -0
- package/src/__tests__/Easing.test.js +119 -0
- package/src/__tests__/interpolate.test.js +127 -0
- package/src/config.ts +406 -0
- package/src/context.tsx +241 -0
- package/src/delayRender.ts +278 -0
- package/src/getInputProps.ts +217 -0
- package/src/hooks/useDelayRender.ts +117 -0
- package/src/hooks.ts +28 -0
- package/src/index.d.ts +571 -0
- package/src/index.ts +260 -0
- package/src/interpolate.ts +160 -0
- package/src/interpolateColors.ts +368 -0
- package/src/makeTransform.ts +339 -0
- package/src/measureSpring.ts +152 -0
- package/src/noise.ts +308 -0
- package/src/preload.ts +303 -0
- package/src/registerRoot.ts +346 -0
- package/src/shapes/Circle.tsx +37 -0
- package/src/shapes/Ellipse.tsx +39 -0
- package/src/shapes/Line.tsx +37 -0
- package/src/shapes/Path.tsx +56 -0
- package/src/shapes/Polygon.tsx +39 -0
- package/src/shapes/Rect.tsx +43 -0
- package/src/shapes/Svg.tsx +39 -0
- package/src/shapes/index.ts +16 -0
- package/src/shapes/usePathLength.ts +38 -0
- package/src/staticFile.ts +117 -0
- package/src/templates/api.ts +165 -0
- package/src/templates/index.ts +7 -0
- package/src/templates/mockData.ts +271 -0
- package/src/templates/types.ts +126 -0
- package/src/transitions/TransitionSeries.tsx +399 -0
- package/src/transitions/index.ts +109 -0
- package/src/transitions/presets/fade.ts +89 -0
- package/src/transitions/presets/flip.ts +263 -0
- package/src/transitions/presets/slide.ts +154 -0
- package/src/transitions/presets/wipe.ts +195 -0
- package/src/transitions/presets/zoom.ts +183 -0
- package/src/useAudioData.ts +260 -0
- package/src/useSpring.ts +215 -0
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@codellyson/framely",
-  "version": "0.1.0",
+  "version": "0.1.1",
   "description": "Programmatic video creation with React",
   "type": "module",
   "main": "dist/index.js",
@@ -12,7 +12,8 @@
     }
   },
   "files": [
-    "dist/"
+    "dist/",
+    "src/"
   ],
   "scripts": {
     "build": "tsc",
package/src/AbsoluteFill.tsx
ADDED
@@ -0,0 +1,50 @@
+import React from 'react';
+
+/**
+ * Props for the AbsoluteFill component.
+ * Extends standard HTML div attributes so callers can pass
+ * any valid div prop (className, onClick, aria-*, etc.).
+ */
+export interface AbsoluteFillProps
+  extends React.HTMLAttributes<HTMLDivElement> {
+  /** Optional inline styles merged on top of the base layout styles. */
+  style?: React.CSSProperties;
+  /** Content rendered inside the absolutely-positioned container. */
+  children?: React.ReactNode;
+}
+
+const absoluteFillStyle: React.CSSProperties = {
+  position: 'absolute',
+  top: 0,
+  left: 0,
+  right: 0,
+  bottom: 0,
+  display: 'flex',
+  flexDirection: 'column',
+  alignItems: 'center',
+  justifyContent: 'center',
+};
+
+/**
+ * A full-size, absolutely positioned container.
+ * The building block for layering elements in a composition.
+ */
+export const AbsoluteFill = React.forwardRef<
+  HTMLDivElement,
+  AbsoluteFillProps
+>(({ style, children, ...props }, ref) => {
+  return (
+    <div
+      ref={ref}
+      style={{
+        ...absoluteFillStyle,
+        ...style,
+      }}
+      {...props}
+    >
+      {children}
+    </div>
+  );
+});
+
+AbsoluteFill.displayName = 'AbsoluteFill';
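For reference, a minimal usage sketch of the new AbsoluteFill component (not part of the diff), written against the props shown above. The import from '@codellyson/framely' assumes the component is re-exported from package/src/index.ts, which this diff does not show; the title-card content is purely illustrative.

import React from 'react';
import { AbsoluteFill } from '@codellyson/framely'; // assumed re-export from src/index.ts

// Two stacked full-size layers: a black background and a centered title.
// Later siblings paint on top of earlier ones, which is what makes
// AbsoluteFill the layering primitive its docstring describes.
export const TitleCard: React.FC = () => (
  <AbsoluteFill style={{ backgroundColor: 'black' }}>
    <AbsoluteFill>
      <h1 style={{ color: 'white' }}>Hello Framely</h1>
    </AbsoluteFill>
  </AbsoluteFill>
);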
package/src/Audio.tsx
ADDED
@@ -0,0 +1,294 @@
+import { useEffect, useRef, useState, useMemo } from 'react';
+import { useTimeline } from './context';
+import { delayRender, continueRender, cancelRender } from './delayRender';
+
+/**
+ * Volume can be a static number (0–1) or a function that receives
+ * the current frame and returns a number (0–1).
+ */
+export type VolumeCallback = (frame: number) => number;
+
+/**
+ * Props accepted by the Audio component.
+ *
+ * Extends standard HTML audio attributes so callers can pass any
+ * native audio prop (e.g. `crossOrigin`, `id`, `className`), but
+ * the component itself only uses the subset listed here.
+ */
+export interface AudioProps
+  extends Omit<
+    React.AudioHTMLAttributes<HTMLAudioElement>,
+    'volume' | 'onError'
+  > {
+  /** Audio source URL (use staticFile() for local assets). */
+  src: string;
+  /** Volume level 0–1, or a callback `(frame) => number`. */
+  volume?: number | VolumeCallback;
+  /** Playback speed multiplier. @default 1 */
+  playbackRate?: number;
+  /** Whether to mute audio. @default false */
+  muted?: boolean;
+  /** Loop the audio. @default false */
+  loop?: boolean;
+  /** Frame to start playing from. @default 0 */
+  startFrom?: number;
+  /** Frame to stop playing at (exclusive). */
+  endAt?: number;
+  /** Callback when audio fails to load. */
+  onError?: (error: Error) => void;
+}
+
+/**
+ * Metadata returned by {@link getAudioMetadata}.
+ */
+export interface AudioMetadata {
+  /** Duration of the audio in seconds. */
+  duration: number;
+  /**
+   * Sample rate of the audio.
+   * `null` when using the HTMLAudioElement API (Web Audio API
+   * would be needed for an accurate value).
+   */
+  sampleRate: number | null;
+}
+
+/**
+ * Audio component that syncs with the Framely timeline.
+ *
+ * The audio's playback position is controlled by the current frame,
+ * ensuring perfect sync between audio and your composition.
+ *
+ * Note: During rendering, audio is handled separately by the backend
+ * (FFmpeg muxing). This component is primarily for preview playback.
+ *
+ * Usage:
+ *   import { Audio, staticFile } from './lib';
+ *
+ *   <Audio src={staticFile('audio/background.mp3')} />
+ *   <Audio src={staticFile('audio/sfx.wav')} volume={0.8} />
+ *   <Audio
+ *     src={staticFile('audio/voiceover.mp3')}
+ *     volume={(frame) => interpolate(frame, [0, 30], [0, 1])}
+ *   />
+ */
+export function Audio({
+  src,
+  volume = 1,
+  playbackRate = 1,
+  muted = false,
+  loop = false,
+  startFrom = 0,
+  endAt,
+  onError,
+}: AudioProps): null {
+  const { frame, fps, playing, renderMode } = useTimeline();
+  const audioRef = useRef<HTMLAudioElement | null>(null);
+  const handleRef = useRef<number | null>(null);
+  const [ready, setReady] = useState<boolean>(false);
+
+  // Calculate the actual volume (support callback)
+  const actualVolume: number = useMemo(() => {
+    if (typeof volume === 'function') {
+      return Math.max(0, Math.min(1, volume(frame)));
+    }
+    return Math.max(0, Math.min(1, volume));
+  }, [volume, frame]);
+
+  // In render mode, register audio track metadata for FFmpeg mixing
+  useEffect(() => {
+    if (!renderMode) return;
+
+    const trackInfo = {
+      src,
+      startFrame: startFrom,
+      volume: typeof volume === 'number' ? volume : 1,
+    };
+
+    if (!window.__FRAMELY_AUDIO_TRACKS) {
+      window.__FRAMELY_AUDIO_TRACKS = [];
+    }
+    window.__FRAMELY_AUDIO_TRACKS.push(trackInfo);
+
+    return () => {
+      if (window.__FRAMELY_AUDIO_TRACKS) {
+        const idx = window.__FRAMELY_AUDIO_TRACKS.indexOf(trackInfo);
+        if (idx !== -1) {
+          window.__FRAMELY_AUDIO_TRACKS.splice(idx, 1);
+        }
+      }
+    };
+  }, [renderMode, src, startFrom, volume]);
+
+  // Calculate target time based on frame
+  const targetTime: number = useMemo(() => {
+    const relativeFrame: number = frame - startFrom;
+    if (relativeFrame < 0) return 0;
+    return relativeFrame / fps;
+  }, [frame, startFrom, fps]);
+
+  // Check if we're within the audio's active range
+  const isActive: boolean = useMemo(() => {
+    if (frame < startFrom) return false;
+    if (endAt !== undefined && frame >= endAt) return false;
+    return true;
+  }, [frame, startFrom, endAt]);
+
+  // Delay render until audio is ready (skip in render mode)
+  useEffect(() => {
+    if (renderMode) return;
+    handleRef.current = delayRender(`Loading audio: ${src}`, {
+      timeoutInMilliseconds: 30000,
+    });
+
+    return () => {
+      if (handleRef.current !== null) {
+        continueRender(handleRef.current);
+        handleRef.current = null;
+      }
+    };
+  }, [src, renderMode]);
+
+  // Create audio element (skip in render mode — audio is handled by FFmpeg)
+  useEffect(() => {
+    if (renderMode) return;
+    const audio: HTMLAudioElement = new window.Audio();
+    audio.preload = 'auto';
+    audio.src = src;
+    audioRef.current = audio;
+
+    const handleCanPlay = (): void => {
+      setReady(true);
+      if (handleRef.current !== null) {
+        continueRender(handleRef.current);
+        handleRef.current = null;
+      }
+    };
+
+    const handleError = (): void => {
+      const err = new Error(`Failed to load audio: ${src}`);
+      onError?.(err);
+
+      if (handleRef.current !== null) {
+        cancelRender(err);
+        handleRef.current = null;
+      }
+    };
+
+    audio.addEventListener('canplaythrough', handleCanPlay);
+    audio.addEventListener('error', handleError);
+
+    return () => {
+      audio.removeEventListener('canplaythrough', handleCanPlay);
+      audio.removeEventListener('error', handleError);
+      audio.pause();
+      audio.src = '';
+      audioRef.current = null;
+    };
+  }, [src, onError, renderMode]);
+
+  // Sync audio time with frame
+  useEffect(() => {
+    if (renderMode) return;
+    const audio: HTMLAudioElement | null = audioRef.current;
+    if (!audio || !ready) return;
+
+    // Only seek if we're more than half a frame off
+    const tolerance: number = 0.5 / fps;
+    if (Math.abs(audio.currentTime - targetTime) > tolerance) {
+      audio.currentTime = targetTime;
+    }
+  }, [targetTime, ready, fps, renderMode]);
+
+  // Sync volume
+  useEffect(() => {
+    if (renderMode) return;
+    const audio: HTMLAudioElement | null = audioRef.current;
+    if (!audio) return;
+    audio.volume = actualVolume;
+  }, [actualVolume, renderMode]);
+
+  // Sync playback rate
+  useEffect(() => {
+    if (renderMode) return;
+    const audio: HTMLAudioElement | null = audioRef.current;
+    if (!audio) return;
+    audio.playbackRate = playbackRate;
+  }, [playbackRate, renderMode]);
+
+  // Sync muted state
+  useEffect(() => {
+    if (renderMode) return;
+    const audio: HTMLAudioElement | null = audioRef.current;
+    if (!audio) return;
+    audio.muted = muted;
+  }, [muted, renderMode]);
+
+  // Sync loop state
+  useEffect(() => {
+    if (renderMode) return;
+    const audio: HTMLAudioElement | null = audioRef.current;
+    if (!audio) return;
+    audio.loop = loop;
+  }, [loop, renderMode]);
+
+  // Handle play/pause (for preview mode)
+  useEffect(() => {
+    if (renderMode) return;
+    const audio: HTMLAudioElement | null = audioRef.current;
+    if (!audio || !ready) return;
+
+    if (playing && isActive) {
+      audio.play().catch(() => {
+        // Autoplay might be blocked, that's okay
+      });
+    } else {
+      audio.pause();
+    }
+  }, [playing, ready, isActive, renderMode]);
+
+  // Audio doesn't render anything visible
+  return null;
+}
+
+/**
+ * Get audio metadata from a source.
+ *
+ * @param src - Audio URL
+ * @returns A promise that resolves with duration and sampleRate info
+ */
+export async function getAudioMetadata(src: string): Promise<AudioMetadata> {
+  return new Promise<AudioMetadata>((resolve, reject) => {
+    const audio: HTMLAudioElement = new window.Audio();
+
+    audio.addEventListener('loadedmetadata', () => {
+      resolve({
+        duration: audio.duration,
+        // Note: Web Audio API would be needed for sampleRate
+        sampleRate: null,
+      });
+    });
+
+    audio.addEventListener('error', () => {
+      reject(new Error(`Failed to load audio metadata: ${src}`));
+    });
+
+    audio.src = src;
+  });
+}
+
+/**
+ * Calculate the duration in frames for an audio file.
+ *
+ * @param src - Audio URL
+ * @param fps - Frames per second
+ * @returns Duration in frames (rounded up)
+ */
+export async function getAudioDurationInFrames(
+  src: string,
+  fps: number,
+): Promise<number> {
+  const metadata: AudioMetadata = await getAudioMetadata(src);
+  return Math.ceil(metadata.duration * fps);
+}
+
+export default Audio;
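To close, a small sketch (not part of the diff) of how the Audio pieces compose in a composition, following the usage already shown in the component's docstring. The interpolate-based fade mirrors the docstring's own example; the '@codellyson/framely' import path and the 30 fps value are assumptions for illustration, not something this diff confirms.

import React from 'react';
import {
  AbsoluteFill,
  Audio,
  staticFile,
  interpolate,
  getAudioDurationInFrames,
} from '@codellyson/framely'; // assumed re-exports from src/index.ts

// Background music that fades in over the first 30 frames and loops.
// The volume callback receives the current frame, per AudioProps above.
export const ScoredScene: React.FC = () => (
  <AbsoluteFill>
    <Audio
      src={staticFile('audio/background.mp3')}
      volume={(frame) => interpolate(frame, [0, 30], [0, 1])}
      loop
    />
  </AbsoluteFill>
);

// Sizing something to the length of a track with the helper added above.
// 30 here is an example fps value, not a package default.
export async function voiceoverDurationInFrames(): Promise<number> {
  return getAudioDurationInFrames(staticFile('audio/voiceover.mp3'), 30);
}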