@remotion/studio 4.0.441 → 4.0.443

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (40)
  1. package/dist/components/AudioWaveform.js +67 -61
  2. package/dist/components/Editor.js +7 -4
  3. package/dist/components/EditorGuides/Guide.js +2 -1
  4. package/dist/components/EditorRuler/Ruler.js +2 -1
  5. package/dist/components/EditorRuler/index.js +4 -7
  6. package/dist/components/ForceSpecificCursor.d.ts +3 -0
  7. package/dist/components/ForceSpecificCursor.js +68 -0
  8. package/dist/components/MenuBuildIndicator.js +58 -3
  9. package/dist/components/MenuCompositionName.d.ts +2 -0
  10. package/dist/components/MenuCompositionName.js +80 -0
  11. package/dist/components/NewComposition/InputDragger.js +3 -0
  12. package/dist/components/RenderModal/RenderModalJSONPropsEditor.js +10 -3
  13. package/dist/components/Splitter/SplitterHandle.js +3 -0
  14. package/dist/components/Timeline/Timeline.js +3 -3
  15. package/dist/components/Timeline/TimelineDragHandler.js +4 -0
  16. package/dist/components/Timeline/TimelineRotationField.js +11 -0
  17. package/dist/components/Timeline/TimelineSequence.js +1 -1
  18. package/dist/components/Timeline/TimelineStack/index.js +2 -1
  19. package/dist/components/Timeline/TimelineVideoInfo.d.ts +4 -0
  20. package/dist/components/Timeline/TimelineVideoInfo.js +31 -12
  21. package/dist/components/Timeline/TimelineWidthProvider.js +16 -1
  22. package/dist/components/draw-peaks.d.ts +1 -0
  23. package/dist/components/draw-peaks.js +61 -0
  24. package/dist/components/load-waveform-peaks.d.ts +3 -0
  25. package/dist/components/load-waveform-peaks.js +67 -0
  26. package/dist/components/parse-color.d.ts +1 -0
  27. package/dist/components/parse-color.js +17 -0
  28. package/dist/esm/{chunk-1x2ychmc.js → chunk-cpv44d93.js} +3613 -3340
  29. package/dist/esm/internals.mjs +3613 -3340
  30. package/dist/esm/previewEntry.mjs +3405 -3132
  31. package/dist/esm/renderEntry.mjs +1 -1
  32. package/dist/helpers/calculate-timeline.d.ts +1 -2
  33. package/dist/helpers/calculate-timeline.js +2 -23
  34. package/dist/helpers/timeline-layout.d.ts +4 -1
  35. package/dist/helpers/timeline-layout.js +10 -4
  36. package/package.json +9 -9
  37. package/dist/components/AudioWaveformBar.d.ts +0 -14
  38. package/dist/components/AudioWaveformBar.js +0 -33
  39. package/dist/components/OpenEditorButton.d.ts +0 -3
  40. package/dist/components/OpenEditorButton.js +0 -67
@@ -11,6 +11,7 @@ const in_out_1 = require("../../state/in-out");
11
11
  const timeline_zoom_1 = require("../../state/timeline-zoom");
12
12
  const z_index_1 = require("../../state/z-index");
13
13
  const ContextMenu_1 = require("../ContextMenu");
14
+ const ForceSpecificCursor_1 = require("../ForceSpecificCursor");
14
15
  const is_menu_item_1 = require("../Menu/is-menu-item");
15
16
  const TimelineInOutToggle_1 = require("../TimelineInOutToggle");
16
17
  const timeline_refs_1 = require("./timeline-refs");
@@ -115,6 +116,7 @@ const Inner = () => {
115
116
  }
116
117
  const inMarker = get(inFrame);
117
118
  const outMarker = outFrame === null ? Infinity : get(outFrame - 1);
119
+ (0, ForceSpecificCursor_1.forceSpecificCursor)('ew-resize');
118
120
  setInOutDragging({
119
121
  dragging: 'in',
120
122
  initialOffset: getClientXWithScroll(e.clientX),
@@ -128,6 +130,7 @@ const Inner = () => {
128
130
  }
129
131
  const outMarker = get(outFrame);
130
132
  const inMarker = inFrame === null ? -Infinity : get(inFrame + 1);
133
+ (0, ForceSpecificCursor_1.forceSpecificCursor)('ew-resize');
131
134
  setInOutDragging({
132
135
  dragging: 'out',
133
136
  initialOffset: getClientXWithScroll(e.clientX),
@@ -311,6 +314,7 @@ const Inner = () => {
311
314
  const onPointerUpInOut = (0, react_1.useCallback)((e) => {
312
315
  document.body.style.userSelect = '';
313
316
  document.body.style.webkitUserSelect = '';
317
+ (0, ForceSpecificCursor_1.stopForcingSpecificCursor)();
314
318
  if (!videoConfig) {
315
319
  return;
316
320
  }
@@ -5,7 +5,18 @@ const jsx_runtime_1 = require("react/jsx-runtime");
5
5
  const react_1 = require("react");
6
6
  const InputDragger_1 = require("../NewComposition/InputDragger");
7
7
  const timeline_field_utils_1 = require("./timeline-field-utils");
8
+ const unitPattern = /^([+-]?(?:\d+\.?\d*|\.\d+))(deg|rad|turn|grad)$/;
9
+ const unitToDegrees = {
10
+ deg: 1,
11
+ rad: 180 / Math.PI,
12
+ turn: 360,
13
+ grad: 360 / 400,
14
+ };
8
15
  const parseCssRotationToDegrees = (value) => {
16
+ const match = value.trim().match(unitPattern);
17
+ if (match) {
18
+ return Number(match[1]) * unitToDegrees[match[2]];
19
+ }
9
20
  try {
10
21
  const m = new DOMMatrix(`rotate(${value})`);
11
22
  return Math.round(Math.atan2(m.b, m.a) * (180 / Math.PI) * 1e6) / 1e6;
@@ -107,7 +107,7 @@ const Inner = ({ s, windowWidth }) => {
107
107
  )`,
108
108
  position: 'absolute',
109
109
  right: 0,
110
- } })) : null, s.type === 'audio' ? (jsx_runtime_1.jsx(AudioWaveform_1.AudioWaveform, { src: s.src, doesVolumeChange: s.doesVolumeChange, visualizationWidth: width, startFrom: s.startMediaFrom, durationInFrames: s.duration, volume: s.volume, playbackRate: s.playbackRate })) : null, s.type === 'video' ? (jsx_runtime_1.jsx(TimelineVideoInfo_1.TimelineVideoInfo, { src: s.src, visualizationWidth: width, naturalWidth: naturalWidth, trimBefore: s.startMediaFrom, durationInFrames: s.duration, playbackRate: s.playbackRate })) : null, s.type === 'image' ? (jsx_runtime_1.jsx(TimelineImageInfo_1.TimelineImageInfo, { src: s.src, visualizationWidth: width })) : null, s.loopDisplay === undefined ? null : (jsx_runtime_1.jsx(LoopedTimelineIndicators_1.LoopedTimelineIndicator, { loops: s.loopDisplay.numberOfTimes })), s.type !== 'audio' &&
110
+ } })) : null, s.type === 'audio' ? (jsx_runtime_1.jsx(AudioWaveform_1.AudioWaveform, { src: s.src, doesVolumeChange: s.doesVolumeChange, visualizationWidth: width, startFrom: s.startMediaFrom, durationInFrames: s.duration, volume: s.volume, playbackRate: s.playbackRate })) : null, s.type === 'video' ? (jsx_runtime_1.jsx(TimelineVideoInfo_1.TimelineVideoInfo, { src: s.src, visualizationWidth: width, naturalWidth: naturalWidth, trimBefore: s.startMediaFrom, durationInFrames: s.duration, playbackRate: s.playbackRate, volume: s.volume, doesVolumeChange: s.doesVolumeChange, premountWidth: premountWidth !== null && premountWidth !== void 0 ? premountWidth : 0, postmountWidth: postmountWidth !== null && postmountWidth !== void 0 ? postmountWidth : 0 })) : null, s.type === 'image' ? (jsx_runtime_1.jsx(TimelineImageInfo_1.TimelineImageInfo, { src: s.src, visualizationWidth: width })) : null, s.loopDisplay === undefined ? null : (jsx_runtime_1.jsx(LoopedTimelineIndicators_1.LoopedTimelineIndicator, { loops: s.loopDisplay.numberOfTimes })), s.type !== 'audio' &&
111
111
  s.type !== 'video' &&
112
112
  s.type !== 'image' &&
113
113
  s.loopDisplay === undefined &&
@@ -149,7 +149,8 @@ const TimelineStack = ({ isCompact, sequence, originalLocation }) => {
149
149
  color: opening && isCompact ? colors_1.VERY_LIGHT_TEXT : colors_1.LIGHT_COLOR,
150
150
  userSelect: 'none',
151
151
  WebkitUserSelect: 'none',
152
- borderBottom: hoverEffect ? '1px solid #fff' : 'none',
152
+ textDecoration: hoverEffect ? 'underline' : 'none',
153
+ textUnderlineOffset: 2,
153
154
  cursor: hoverEffect ? 'pointer' : undefined,
154
155
  };
155
156
  }, [titleHoverable, isCompact, opening, titleHovered]);
@@ -6,4 +6,8 @@ export declare const TimelineVideoInfo: React.FC<{
6
6
  readonly trimBefore: number;
7
7
  readonly durationInFrames: number;
8
8
  readonly playbackRate: number;
9
+ readonly volume: string | number;
10
+ readonly doesVolumeChange: boolean;
11
+ readonly premountWidth: number;
12
+ readonly postmountWidth: number;
9
13
  }>;
@@ -8,21 +8,27 @@ const extract_frames_1 = require("../../helpers/extract-frames");
8
8
  const frame_database_1 = require("../../helpers/frame-database");
9
9
  const resize_video_frame_1 = require("../../helpers/resize-video-frame");
10
10
  const timeline_layout_1 = require("../../helpers/timeline-layout");
11
- const HEIGHT = (0, timeline_layout_1.getTimelineLayerHeight)('video') - 2;
12
- const containerStyle = {
13
- height: HEIGHT,
11
+ const AudioWaveform_1 = require("../AudioWaveform");
12
+ const FILMSTRIP_HEIGHT = timeline_layout_1.TIMELINE_LAYER_HEIGHT_IMAGE - 2;
13
+ const outerStyle = {
14
+ width: '100%',
15
+ height: '100%',
16
+ display: 'flex',
17
+ flexDirection: 'column',
18
+ };
19
+ const filmstripContainerStyle = {
20
+ height: FILMSTRIP_HEIGHT,
14
21
  width: '100%',
15
22
  backgroundColor: 'rgba(0, 0, 0, 0.3)',
16
23
  display: 'flex',
17
24
  borderTopLeftRadius: 2,
18
- borderBottomLeftRadius: 2,
19
25
  fontSize: 10,
20
26
  fontFamily: 'Arial, Helvetica',
21
27
  };
22
28
  const WEBCODECS_TIMESCALE = 1000000;
23
29
  const MAX_TIME_DEVIATION = WEBCODECS_TIMESCALE * 0.05;
24
30
  const getDurationOfOneFrame = ({ visualizationWidth, aspectRatio, segmentDuration, }) => {
25
- const framesFitInWidthUnrounded = visualizationWidth / (HEIGHT * aspectRatio);
31
+ const framesFitInWidthUnrounded = visualizationWidth / (FILMSTRIP_HEIGHT * aspectRatio);
26
32
  return (segmentDuration / framesFitInWidthUnrounded) * WEBCODECS_TIMESCALE;
27
33
  };
28
34
  const fixRounding = (value) => {
@@ -32,7 +38,7 @@ const fixRounding = (value) => {
32
38
  return Math.floor(value);
33
39
  };
34
40
  const calculateTimestampSlots = ({ visualizationWidth, fromSeconds, segmentDuration, aspectRatio, }) => {
35
- const framesFitInWidthUnrounded = visualizationWidth / (HEIGHT * aspectRatio);
41
+ const framesFitInWidthUnrounded = visualizationWidth / (FILMSTRIP_HEIGHT * aspectRatio);
36
42
  const framesFitInWidth = Math.ceil(framesFitInWidthUnrounded);
37
43
  const durationOfOneFrame = getDurationOfOneFrame({
38
44
  visualizationWidth,
@@ -70,8 +76,10 @@ const drawSlot = ({ frame, ctx, filledSlots, visualizationWidth, timestamp, segm
70
76
  });
71
77
  const relativeTimestamp = timestamp - fromSeconds * WEBCODECS_TIMESCALE;
72
78
  const frameIndex = relativeTimestamp / durationOfOneFrame;
73
- const left = Math.floor((frameIndex * frame.displayWidth) / window.devicePixelRatio); // round to avoid antialiasing
74
- ctx.drawImage(frame, left, 0, frame.displayWidth / window.devicePixelRatio, frame.displayHeight / window.devicePixelRatio);
79
+ const thumbnailWidth = frame.displayWidth / window.devicePixelRatio;
80
+ const left = Math.floor(frameIndex * thumbnailWidth);
81
+ const right = Math.ceil((frameIndex + 1) * thumbnailWidth);
82
+ ctx.drawImage(frame, left, 0, right - left, frame.displayHeight / window.devicePixelRatio);
75
83
  filledSlots.set(timestamp, frame.timestamp);
76
84
  };
77
85
  const fillWithCachedFrames = ({ ctx, naturalWidth, filledSlots, src, segmentDuration, fromSeconds, }) => {
@@ -138,7 +146,7 @@ const fillFrameWhereItFits = ({ frame, filledSlots, ctx, visualizationWidth, seg
138
146
  });
139
147
  }
140
148
  };
141
- const TimelineVideoInfo = ({ src, visualizationWidth, naturalWidth, trimBefore, durationInFrames, playbackRate, }) => {
149
+ const TimelineVideoInfo = ({ src, visualizationWidth, naturalWidth, trimBefore, durationInFrames, playbackRate, volume, doesVolumeChange, premountWidth, postmountWidth, }) => {
142
150
  const { fps } = (0, remotion_1.useVideoConfig)();
143
151
  const ref = (0, react_1.useRef)(null);
144
152
  const [error, setError] = (0, react_1.useState)(null);
@@ -160,7 +168,7 @@ const TimelineVideoInfo = ({ src, visualizationWidth, naturalWidth, trimBefore,
160
168
  const controller = new AbortController();
161
169
  const canvas = document.createElement('canvas');
162
170
  canvas.width = visualizationWidth;
163
- canvas.height = HEIGHT;
171
+ canvas.height = FILMSTRIP_HEIGHT;
164
172
  const ctx = canvas.getContext('2d');
165
173
  if (!ctx) {
166
174
  return;
@@ -216,7 +224,7 @@ const TimelineVideoInfo = ({ src, visualizationWidth, naturalWidth, trimBefore,
216
224
  let frame;
217
225
  try {
218
226
  frame = sample.toVideoFrame();
219
- const scale = (HEIGHT / frame.displayHeight) * window.devicePixelRatio;
227
+ const scale = (FILMSTRIP_HEIGHT / frame.displayHeight) * window.devicePixelRatio;
220
228
  const transformed = (0, resize_video_frame_1.resizeVideoFrame)({
221
229
  frame,
222
230
  scale,
@@ -298,6 +306,17 @@ const TimelineVideoInfo = ({ src, visualizationWidth, naturalWidth, trimBefore,
298
306
  trimBefore,
299
307
  visualizationWidth,
300
308
  ]);
301
- return jsx_runtime_1.jsx("div", { ref: ref, style: containerStyle });
309
+ const audioWidth = visualizationWidth - premountWidth - postmountWidth;
310
+ const audioStyle = (0, react_1.useMemo)(() => {
311
+ return {
312
+ height: timeline_layout_1.TIMELINE_LAYER_HEIGHT_AUDIO,
313
+ width: audioWidth,
314
+ position: 'relative',
315
+ marginLeft: premountWidth,
316
+ };
317
+ }, [audioWidth, premountWidth]);
318
+ return (jsx_runtime_1.jsxs("div", { style: outerStyle, children: [
319
+ jsx_runtime_1.jsx("div", { ref: ref, style: filmstripContainerStyle }), jsx_runtime_1.jsx("div", { style: audioStyle, children: jsx_runtime_1.jsx(AudioWaveform_1.AudioWaveform, { src: src, visualizationWidth: audioWidth, startFrom: trimBefore, durationInFrames: durationInFrames, volume: volume, doesVolumeChange: doesVolumeChange, playbackRate: playbackRate }) })
320
+ ] }));
302
321
  };
303
322
  exports.TimelineVideoInfo = TimelineVideoInfo;
@@ -4,6 +4,7 @@ exports.TimelineWidthProvider = exports.TimelineWidthContext = void 0;
4
4
  const jsx_runtime_1 = require("react/jsx-runtime");
5
5
  const player_1 = require("@remotion/player");
6
6
  const react_1 = require("react");
7
+ const timeline_zoom_1 = require("../../state/timeline-zoom");
7
8
  const timeline_refs_1 = require("./timeline-refs");
8
9
  exports.TimelineWidthContext = (0, react_1.createContext)(null);
9
10
  const TimelineWidthProvider = ({ children }) => {
@@ -12,6 +13,20 @@ const TimelineWidthProvider = ({ children }) => {
12
13
  triggerOnWindowResize: false,
13
14
  shouldApplyCssTransforms: true,
14
15
  });
15
- return (jsx_runtime_1.jsx(exports.TimelineWidthContext.Provider, { value: (_a = size === null || size === void 0 ? void 0 : size.width) !== null && _a !== void 0 ? _a : null, children: children }));
16
+ const { zoom: zoomMap } = (0, react_1.useContext)(timeline_zoom_1.TimelineZoomCtx);
17
+ const [widthOverride, setWidthOverride] = (0, react_1.useState)(null);
18
+ const observedWidth = (_a = size === null || size === void 0 ? void 0 : size.width) !== null && _a !== void 0 ? _a : null;
19
+ (0, react_1.useLayoutEffect)(() => {
20
+ var _a;
21
+ var _b;
22
+ const actual = (_b = (_a = timeline_refs_1.sliderAreaRef.current) === null || _a === void 0 ? void 0 : _a.clientWidth) !== null && _b !== void 0 ? _b : null;
23
+ if (actual !== null && actual !== observedWidth) {
24
+ setWidthOverride(actual);
25
+ }
26
+ else {
27
+ setWidthOverride(null);
28
+ }
29
+ }, [observedWidth, zoomMap]);
30
+ return (jsx_runtime_1.jsx(exports.TimelineWidthContext.Provider, { value: widthOverride !== null && widthOverride !== void 0 ? widthOverride : observedWidth, children: children }));
16
31
  };
17
32
  exports.TimelineWidthProvider = TimelineWidthProvider;
@@ -0,0 +1 @@
1
+ export declare const drawBars: (canvas: HTMLCanvasElement, peaks: Float32Array<ArrayBufferLike>, color: string, volume: number, width: number) => void;
@@ -0,0 +1,61 @@
1
+ "use strict";
2
+ Object.defineProperty(exports, "__esModule", { value: true });
3
+ exports.drawBars = void 0;
4
+ const parse_color_1 = require("./parse-color");
5
+ const CLIPPING_COLOR = '#FF7F50';
6
+ const drawBars = (canvas, peaks, color, volume, width) => {
7
+ const ctx = canvas.getContext('2d');
8
+ if (!ctx) {
9
+ throw new Error('Failed to get canvas context');
10
+ }
11
+ const { height } = canvas;
12
+ const w = canvas.width;
13
+ ctx.clearRect(0, 0, w, height);
14
+ if (volume === 0)
15
+ return;
16
+ const [r, g, b, a] = (0, parse_color_1.parseColor)(color);
17
+ const [cr, cg, cb, ca] = (0, parse_color_1.parseColor)(CLIPPING_COLOR);
18
+ const imageData = ctx.createImageData(w, height);
19
+ const { data } = imageData;
20
+ const numBars = width;
21
+ for (let barIndex = 0; barIndex < numBars; barIndex++) {
22
+ const x = barIndex;
23
+ if (x >= w)
24
+ break;
25
+ const peakIndex = Math.floor((barIndex / numBars) * peaks.length);
26
+ const peak = peaks[peakIndex] || 0;
27
+ const scaledPeak = peak * volume;
28
+ const halfBar = Math.max(0, Math.min(height / 2, (scaledPeak * height) / 2));
29
+ if (halfBar === 0)
30
+ continue;
31
+ const mid = height / 2;
32
+ const barY = Math.round(mid - halfBar);
33
+ const barEnd = Math.round(mid + halfBar);
34
+ const isClipping = scaledPeak > 1;
35
+ const clipTopEnd = isClipping ? Math.min(barY + 2, barEnd) : barY;
36
+ const clipBotStart = isClipping ? Math.max(barEnd - 2, barY) : barEnd;
37
+ for (let y = barY; y < clipTopEnd; y++) {
38
+ const idx = (y * w + x) * 4;
39
+ data[idx] = cr;
40
+ data[idx + 1] = cg;
41
+ data[idx + 2] = cb;
42
+ data[idx + 3] = ca;
43
+ }
44
+ for (let y = clipTopEnd; y < clipBotStart; y++) {
45
+ const idx = (y * w + x) * 4;
46
+ data[idx] = r;
47
+ data[idx + 1] = g;
48
+ data[idx + 2] = b;
49
+ data[idx + 3] = a;
50
+ }
51
+ for (let y = clipBotStart; y < barEnd; y++) {
52
+ const idx = (y * w + x) * 4;
53
+ data[idx] = cr;
54
+ data[idx + 1] = cg;
55
+ data[idx + 2] = cb;
56
+ data[idx + 3] = ca;
57
+ }
58
+ }
59
+ ctx.putImageData(imageData, 0, 0);
60
+ };
61
+ exports.drawBars = drawBars;
@@ -0,0 +1,3 @@
1
+ declare const TARGET_SAMPLE_RATE = 100;
2
+ export { TARGET_SAMPLE_RATE };
3
+ export declare function loadWaveformPeaks(url: string, signal: AbortSignal): Promise<Float32Array>;
@@ -0,0 +1,67 @@
1
+ "use strict";
2
+ Object.defineProperty(exports, "__esModule", { value: true });
3
+ exports.TARGET_SAMPLE_RATE = void 0;
4
+ exports.loadWaveformPeaks = loadWaveformPeaks;
5
+ const mediabunny_1 = require("mediabunny");
6
+ const TARGET_SAMPLE_RATE = 100;
7
+ exports.TARGET_SAMPLE_RATE = TARGET_SAMPLE_RATE;
8
+ const peaksCache = new Map();
9
+ async function loadWaveformPeaks(url, signal) {
10
+ const cached = peaksCache.get(url);
11
+ if (cached)
12
+ return cached;
13
+ const input = new mediabunny_1.Input({
14
+ formats: mediabunny_1.ALL_FORMATS,
15
+ source: new mediabunny_1.UrlSource(url),
16
+ });
17
+ try {
18
+ const audioTrack = await input.getPrimaryAudioTrack();
19
+ if (!audioTrack) {
20
+ return new Float32Array(0);
21
+ }
22
+ const { sampleRate } = audioTrack;
23
+ const durationInSeconds = await audioTrack.computeDuration();
24
+ const totalPeaks = Math.ceil(durationInSeconds * TARGET_SAMPLE_RATE);
25
+ const samplesPerPeak = Math.floor(sampleRate / TARGET_SAMPLE_RATE);
26
+ const peaks = new Float32Array(totalPeaks);
27
+ let peakIndex = 0;
28
+ let peakMax = 0;
29
+ let sampleInPeak = 0;
30
+ const sink = new mediabunny_1.AudioSampleSink(audioTrack);
31
+ for await (const sample of sink.samples()) {
32
+ if (signal.aborted) {
33
+ sample.close();
34
+ return new Float32Array(0);
35
+ }
36
+ const bytesNeeded = sample.allocationSize({
37
+ format: 'f32',
38
+ planeIndex: 0,
39
+ });
40
+ const floats = new Float32Array(bytesNeeded / 4);
41
+ sample.copyTo(floats, { format: 'f32', planeIndex: 0 });
42
+ sample.close();
43
+ for (let i = 0; i < floats.length; i++) {
44
+ const abs = Math.abs(floats[i]);
45
+ if (abs > peakMax)
46
+ peakMax = abs;
47
+ sampleInPeak++;
48
+ if (sampleInPeak >= samplesPerPeak) {
49
+ if (peakIndex < totalPeaks) {
50
+ peaks[peakIndex] = peakMax;
51
+ }
52
+ peakIndex++;
53
+ peakMax = 0;
54
+ sampleInPeak = 0;
55
+ }
56
+ }
57
+ }
58
+ if (sampleInPeak > 0 && peakIndex < totalPeaks) {
59
+ peaks[peakIndex] = peakMax;
60
+ }
61
+ peaksCache.set(url, peaks);
62
+ return peaks;
63
+ }
64
+ finally {
65
+ input.dispose();
66
+ }
67
+ }
@@ -0,0 +1 @@
1
+ export declare const parseColor: (color: string) => [number, number, number, number];
@@ -0,0 +1,17 @@
1
+ "use strict";
2
+ Object.defineProperty(exports, "__esModule", { value: true });
3
+ exports.parseColor = void 0;
4
+ const colorCache = new Map();
5
+ const parseColor = (color) => {
6
+ const cached = colorCache.get(color);
7
+ if (cached)
8
+ return cached;
9
+ const ctx = new OffscreenCanvas(1, 1).getContext('2d');
10
+ ctx.fillStyle = color;
11
+ ctx.fillRect(0, 0, 1, 1);
12
+ const [r, g, b, a] = ctx.getImageData(0, 0, 1, 1).data;
13
+ const result = [r, g, b, a];
14
+ colorCache.set(color, result);
15
+ return result;
16
+ };
17
+ exports.parseColor = parseColor;