@waveform-playlist/browser 11.2.0 → 11.3.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.js CHANGED
@@ -2741,6 +2741,7 @@ function useTrackDynamicEffects() {
2741
2741
 
2742
2742
  // src/hooks/useExportWav.ts
2743
2743
  var import_react19 = require("react");
2744
+ var import_core4 = require("@waveform-playlist/core");
2744
2745
  var import_playout2 = require("@waveform-playlist/playout");
2745
2746
 
2746
2747
  // src/utils/wavEncoder.ts
@@ -2851,47 +2852,24 @@ function useExportWav() {
2851
2852
  totalDurationSamples += Math.round(sampleRate * 0.1);
2852
2853
  const duration = totalDurationSamples / sampleRate;
2853
2854
  const tracksToRender = mode === "individual" ? [{ track: tracks[trackIndex], state: trackStates[trackIndex], index: trackIndex }] : tracks.map((track, index) => ({ track, state: trackStates[index], index }));
2854
- const hasSolo = trackStates.some((state) => state.soloed);
2855
- const hasOfflineTrackEffects = !!createOfflineTrackEffects;
2856
- let renderedBuffer;
2857
- if ((effectsFunction || hasOfflineTrackEffects) && applyEffects) {
2858
- renderedBuffer = yield renderWithToneEffects(
2859
- tracksToRender,
2860
- trackStates,
2861
- hasSolo,
2862
- duration,
2863
- sampleRate,
2864
- effectsFunction,
2865
- createOfflineTrackEffects,
2866
- (p) => {
2867
- setProgress(p);
2868
- onProgress == null ? void 0 : onProgress(p);
2869
- }
2870
- );
2871
- } else {
2872
- const offlineCtx = new OfflineAudioContext(2, totalDurationSamples, sampleRate);
2873
- let scheduledClips = 0;
2874
- const totalClips = tracksToRender.reduce((sum, { track }) => sum + track.clips.length, 0);
2875
- for (const { track, state } of tracksToRender) {
2876
- if (state.muted && !state.soloed) continue;
2877
- if (hasSolo && !state.soloed) continue;
2878
- for (const clip of track.clips) {
2879
- yield scheduleClip(offlineCtx, clip, state, sampleRate, applyEffects);
2880
- scheduledClips++;
2881
- const currentProgress = scheduledClips / totalClips * 0.5;
2882
- setProgress(currentProgress);
2883
- onProgress == null ? void 0 : onProgress(currentProgress);
2884
- }
2885
- }
2886
- setProgress(0.5);
2887
- onProgress == null ? void 0 : onProgress(0.5);
2888
- renderedBuffer = yield offlineCtx.startRendering();
2889
- }
2890
- setProgress(0.9);
2891
- onProgress == null ? void 0 : onProgress(0.9);
2855
+ const hasSolo = mode === "master" && trackStates.some((state) => state.soloed);
2856
+ const reportProgress = (p) => {
2857
+ setProgress(p);
2858
+ onProgress == null ? void 0 : onProgress(p);
2859
+ };
2860
+ const renderedBuffer = yield renderOffline(
2861
+ tracksToRender,
2862
+ hasSolo,
2863
+ duration,
2864
+ sampleRate,
2865
+ applyEffects,
2866
+ effectsFunction,
2867
+ createOfflineTrackEffects,
2868
+ reportProgress
2869
+ );
2870
+ reportProgress(0.9);
2892
2871
  const blob = encodeWav(renderedBuffer, { bitDepth });
2893
- setProgress(1);
2894
- onProgress == null ? void 0 : onProgress(1);
2872
+ reportProgress(1);
2895
2873
  if (autoDownload) {
2896
2874
 const exportFilename = mode === "individual" ? `${filename}_${tracks[trackIndex].name}` : filename;
2897
2875
  downloadBlob(blob, `${exportFilename}.wav`);
@@ -2918,29 +2896,35 @@ function useExportWav() {
2918
2896
  error
2919
2897
  };
2920
2898
  }
2921
- function renderWithToneEffects(tracksToRender, _trackStates, hasSolo, duration, sampleRate, effectsFunction, createOfflineTrackEffects, onProgress) {
2899
+ function renderOffline(tracksToRender, hasSolo, duration, sampleRate, applyEffects, effectsFunction, createOfflineTrackEffects, onProgress) {
2922
2900
  return __async(this, null, function* () {
2923
2901
  const { Offline, Volume: Volume2, Gain, Panner, Player, ToneAudioBuffer } = yield import("tone");
2924
2902
  onProgress(0.1);
2903
+ const audibleTracks = tracksToRender.filter(({ state }) => {
2904
+ if (state.muted && !state.soloed) return false;
2905
+ if (hasSolo && !state.soloed) return false;
2906
+ return true;
2907
+ });
2908
+ const outputChannels = audibleTracks.reduce(
2909
+ (max, { track }) => Math.max(max, (0, import_core4.trackChannelCount)(track)),
2910
+ 1
2911
+ );
2925
2912
  let buffer;
2926
2913
  try {
2927
2914
  buffer = yield Offline(
2928
2915
  (_0) => __async(null, [_0], function* ({ transport, destination }) {
2929
2916
  const masterVolume = new Volume2(0);
2930
- let cleanup = void 0;
2931
- if (effectsFunction) {
2932
- cleanup = effectsFunction(masterVolume, destination, true);
2917
+ if (effectsFunction && applyEffects) {
2918
+ effectsFunction(masterVolume, destination, true);
2933
2919
  } else {
2934
2920
  masterVolume.connect(destination);
2935
2921
  }
2936
- for (const { track, state } of tracksToRender) {
2937
- if (state.muted && !state.soloed) continue;
2938
- if (hasSolo && !state.soloed) continue;
2939
- const trackVolume = new Volume2(gainToDb(state.volume));
2940
- const trackPan = new Panner(state.pan);
2922
+ for (const { track, state } of audibleTracks) {
2923
+ const trackVolume = new Volume2((0, import_core4.gainToDb)(state.volume));
2924
+ const trackPan = new Panner({ pan: state.pan, channelCount: (0, import_core4.trackChannelCount)(track) });
2941
2925
  const trackMute = new Gain(state.muted ? 0 : 1);
2942
2926
  const trackEffects = createOfflineTrackEffects == null ? void 0 : createOfflineTrackEffects(track.id);
2943
- if (trackEffects) {
2927
+ if (trackEffects && applyEffects) {
2944
2928
  trackEffects(trackMute, masterVolume, true);
2945
2929
  } else {
2946
2930
  trackMute.connect(masterVolume);
@@ -2957,6 +2941,12 @@ function renderWithToneEffects(tracksToRender, _trackStates, hasSolo, duration,
2957
2941
  fadeIn,
2958
2942
  fadeOut
2959
2943
  } = clip;
2944
+ if (!audioBuffer) {
2945
+ console.warn(
2946
+ '[waveform-playlist] Skipping clip "' + (clip.name || clip.id) + '" - no audioBuffer for export'
2947
+ );
2948
+ continue;
2949
+ }
2960
2950
  const startTime = startSample / sampleRate;
2961
2951
  const clipDuration = durationSamples / sampleRate;
2962
2952
  const offset = offsetSamples / sampleRate;
@@ -2965,167 +2955,56 @@ function renderWithToneEffects(tracksToRender, _trackStates, hasSolo, duration,
2965
2955
  const fadeGain = new Gain(clipGain);
2966
2956
  player.connect(fadeGain);
2967
2957
  fadeGain.connect(trackVolume);
2968
- if (fadeIn) {
2969
- const fadeInStart = startTime;
2970
- const fadeInEnd = startTime + fadeIn.duration;
2958
+ if (applyEffects) {
2971
2959
  const audioParam = (0, import_playout2.getUnderlyingAudioParam)(fadeGain.gain);
2972
2960
  if (audioParam) {
2973
- audioParam.setValueAtTime(0, fadeInStart);
2974
- audioParam.linearRampToValueAtTime(clipGain, fadeInEnd);
2975
- }
2976
- }
2977
- if (fadeOut) {
2978
- const fadeOutStart = startTime + clipDuration - fadeOut.duration;
2979
- const fadeOutEnd = startTime + clipDuration;
2980
- const audioParam = (0, import_playout2.getUnderlyingAudioParam)(fadeGain.gain);
2981
- if (audioParam) {
2982
- audioParam.setValueAtTime(clipGain, fadeOutStart);
2983
- audioParam.linearRampToValueAtTime(0, fadeOutEnd);
2961
+ applyClipFades(audioParam, clipGain, startTime, clipDuration, fadeIn, fadeOut);
2962
+ } else if (fadeIn || fadeOut) {
2963
+ console.warn(
2964
+ '[waveform-playlist] Cannot apply fades for clip "' + (clip.name || clip.id) + '" - AudioParam not accessible'
2965
+ );
2984
2966
  }
2985
2967
  }
2986
2968
  player.start(startTime, offset, clipDuration);
2987
2969
  }
2988
2970
  }
2989
2971
  transport.start(0);
2990
- if (cleanup) {
2991
- }
2992
2972
  }),
2993
2973
  duration,
2994
- 2,
2995
- // stereo
2974
+ outputChannels,
2996
2975
  sampleRate
2997
2976
  );
2998
2977
  } catch (err) {
2999
2978
  if (err instanceof Error) {
3000
2979
  throw err;
3001
2980
  } else {
3002
- throw new Error(`Tone.Offline rendering failed: ${String(err)}`);
2981
+ throw new Error("Tone.Offline rendering failed: " + String(err));
3003
2982
  }
3004
2983
  }
3005
2984
  onProgress(0.9);
3006
- return buffer.get();
3007
- });
3008
- }
3009
- function gainToDb(gain) {
3010
- return 20 * Math.log10(Math.max(gain, 1e-4));
3011
- }
3012
- function scheduleClip(ctx, clip, trackState, sampleRate, applyEffects) {
3013
- return __async(this, null, function* () {
3014
- const {
3015
- audioBuffer,
3016
- startSample,
3017
- durationSamples,
3018
- offsetSamples,
3019
- gain: clipGain,
3020
- fadeIn,
3021
- fadeOut
3022
- } = clip;
3023
- if (!audioBuffer) {
3024
- console.warn(`Skipping clip "${clip.name || clip.id}" - no audioBuffer for export`);
3025
- return;
2985
+ const result = buffer.get();
2986
+ if (!result) {
2987
+ throw new Error("Offline rendering produced no audio buffer");
3026
2988
  }
3027
- const startTime = startSample / sampleRate;
3028
- const duration = durationSamples / sampleRate;
3029
- const offset = offsetSamples / sampleRate;
3030
- const source = ctx.createBufferSource();
3031
- source.buffer = audioBuffer;
3032
- const gainNode = ctx.createGain();
3033
- const baseGain = clipGain * trackState.volume;
3034
- const pannerNode = ctx.createStereoPanner();
3035
- pannerNode.pan.value = trackState.pan;
3036
- source.connect(gainNode);
3037
- gainNode.connect(pannerNode);
3038
- pannerNode.connect(ctx.destination);
3039
- if (applyEffects) {
3040
- if (fadeIn) {
3041
- gainNode.gain.setValueAtTime(0, startTime);
3042
- } else {
3043
- gainNode.gain.setValueAtTime(baseGain, startTime);
3044
- }
3045
- if (fadeIn) {
3046
- const fadeInStart = startTime;
3047
- const fadeInEnd = startTime + fadeIn.duration;
3048
- applyFadeEnvelope(
3049
- gainNode.gain,
3050
- fadeInStart,
3051
- fadeInEnd,
3052
- 0,
3053
- baseGain,
3054
- fadeIn.type || "linear"
3055
- );
3056
- }
3057
- if (fadeOut) {
3058
- const fadeOutStart = startTime + duration - fadeOut.duration;
3059
- const fadeOutEnd = startTime + duration;
3060
- if (!fadeIn || fadeIn.duration < duration - fadeOut.duration) {
3061
- gainNode.gain.setValueAtTime(baseGain, fadeOutStart);
3062
- }
3063
- applyFadeEnvelope(
3064
- gainNode.gain,
3065
- fadeOutStart,
3066
- fadeOutEnd,
3067
- baseGain,
3068
- 0,
3069
- fadeOut.type || "linear"
3070
- );
3071
- }
3072
- } else {
3073
- gainNode.gain.setValueAtTime(baseGain, startTime);
3074
- }
3075
- source.start(startTime, offset, duration);
2989
+ return result;
3076
2990
  });
3077
2991
  }
3078
- function applyFadeEnvelope(gainParam, startTime, endTime, startValue, endValue, fadeType) {
3079
- const duration = endTime - startTime;
3080
- if (duration <= 0) return;
3081
- switch (fadeType) {
3082
- case "linear":
3083
- gainParam.setValueAtTime(startValue, startTime);
3084
- gainParam.linearRampToValueAtTime(endValue, endTime);
3085
- break;
3086
- case "exponential": {
3087
- const expStart = Math.max(startValue, 1e-4);
3088
- const expEnd = Math.max(endValue, 1e-4);
3089
- gainParam.setValueAtTime(expStart, startTime);
3090
- gainParam.exponentialRampToValueAtTime(expEnd, endTime);
3091
- if (endValue === 0) {
3092
- gainParam.setValueAtTime(0, endTime);
3093
- }
3094
- break;
3095
- }
3096
- case "logarithmic": {
3097
- const logCurve = generateFadeCurve(startValue, endValue, 256, "logarithmic");
3098
- gainParam.setValueCurveAtTime(logCurve, startTime, duration);
3099
- break;
3100
- }
3101
- case "sCurve": {
3102
- const sCurve = generateFadeCurve(startValue, endValue, 256, "sCurve");
3103
- gainParam.setValueCurveAtTime(sCurve, startTime, duration);
3104
- break;
3105
- }
3106
- default:
3107
- gainParam.setValueAtTime(startValue, startTime);
3108
- gainParam.linearRampToValueAtTime(endValue, endTime);
2992
+ function applyClipFades(gainParam, clipGain, startTime, clipDuration, fadeIn, fadeOut) {
2993
+ if (fadeIn) {
2994
+ gainParam.setValueAtTime(0, startTime);
2995
+ } else {
2996
+ gainParam.setValueAtTime(clipGain, startTime);
3109
2997
  }
3110
- }
3111
- function generateFadeCurve(startValue, endValue, numPoints, curveType) {
3112
- const curve = new Float32Array(numPoints);
3113
- const range = endValue - startValue;
3114
- for (let i = 0; i < numPoints; i++) {
3115
- const t = i / (numPoints - 1);
3116
- let curveValue;
3117
- if (curveType === "logarithmic") {
3118
- if (range > 0) {
3119
- curveValue = Math.log10(1 + t * 9) / Math.log10(10);
3120
- } else {
3121
- curveValue = 1 - Math.log10(1 + (1 - t) * 9) / Math.log10(10);
3122
- }
3123
- } else {
3124
- curveValue = t * t * (3 - 2 * t);
2998
+ if (fadeIn) {
2999
+ (0, import_core4.applyFadeIn)(gainParam, startTime, fadeIn.duration, fadeIn.type || "linear", 0, clipGain);
3000
+ }
3001
+ if (fadeOut) {
3002
+ const fadeOutStart = startTime + clipDuration - fadeOut.duration;
3003
+ if (!fadeIn || fadeIn.duration < clipDuration - fadeOut.duration) {
3004
+ gainParam.setValueAtTime(clipGain, fadeOutStart);
3125
3005
  }
3126
- curve[i] = startValue + range * curveValue;
3006
+ (0, import_core4.applyFadeOut)(gainParam, fadeOutStart, fadeOut.duration, fadeOut.type || "linear", clipGain, 0);
3127
3007
  }
3128
- return curve;
3129
3008
  }
3130
3009
 
3131
3010
  // src/hooks/useAnimationFrameLoop.ts
@@ -3520,7 +3399,7 @@ function useWaveformDataCache(tracks, baseScale) {
3520
3399
 
3521
3400
  // src/hooks/useDynamicTracks.ts
3522
3401
  var import_react22 = require("react");
3523
- var import_core4 = require("@waveform-playlist/core");
3402
+ var import_core5 = require("@waveform-playlist/core");
3524
3403
  var import_playout3 = require("@waveform-playlist/playout");
3525
3404
  function getSourceName(source) {
3526
3405
  var _a, _b, _c, _d, _e;
@@ -3574,7 +3453,7 @@ function useDynamicTracks() {
3574
3453
  if (sources.length === 0) return;
3575
3454
  const audioContext = (0, import_playout3.getGlobalAudioContext)();
3576
3455
  const placeholders = sources.map((source) => ({
3577
- track: (0, import_core4.createTrack)({ name: `${getSourceName(source)} (loading...)`, clips: [] }),
3456
+ track: (0, import_core5.createTrack)({ name: `${getSourceName(source)} (loading...)`, clips: [] }),
3578
3457
  source
3579
3458
  }));
3580
3459
  setTracks((prev) => [...prev, ...placeholders.map((p) => p.track)]);
@@ -3586,7 +3465,7 @@ function useDynamicTracks() {
3586
3465
  (() => __async(null, null, function* () {
3587
3466
  try {
3588
3467
  const { audioBuffer, name } = yield decodeSource(source, audioContext, controller.signal);
3589
- const clip = (0, import_core4.createClipFromSeconds)({
3468
+ const clip = (0, import_core5.createClipFromSeconds)({
3590
3469
  audioBuffer,
3591
3470
  startTime: 0,
3592
3471
  duration: audioBuffer.duration,
@@ -3644,7 +3523,7 @@ function useDynamicTracks() {
3644
3523
  // src/hooks/useOutputMeter.ts
3645
3524
  var import_react23 = require("react");
3646
3525
  var import_playout4 = require("@waveform-playlist/playout");
3647
- var import_core5 = require("@waveform-playlist/core");
3526
+ var import_core6 = require("@waveform-playlist/core");
3648
3527
  var import_worklets = require("@waveform-playlist/worklets");
3649
3528
  var PEAK_DECAY = 0.98;
3650
3529
  function useOutputMeter(options = {}) {
@@ -3699,8 +3578,8 @@ function useOutputMeter(options = {}) {
3699
3578
  const rmsValues = [];
3700
3579
  for (let ch = 0; ch < peak.length; ch++) {
3701
3580
  smoothed[ch] = Math.max(peak[ch], ((_a = smoothed[ch]) != null ? _a : 0) * PEAK_DECAY);
3702
- peakValues.push((0, import_core5.gainToNormalized)(smoothed[ch]));
3703
- rmsValues.push((0, import_core5.gainToNormalized)(rms[ch]));
3581
+ peakValues.push((0, import_core6.gainToNormalized)(smoothed[ch]));
3582
+ rmsValues.push((0, import_core6.gainToNormalized)(rms[ch]));
3704
3583
  }
3705
3584
  setLevels(peakValues);
3706
3585
  setRmsLevels(rmsValues);
@@ -3822,6 +3701,7 @@ var WaveformPlaylistProvider = ({
3822
3701
  const playbackEndTimeRef = (0, import_react24.useRef)(null);
3823
3702
  const scrollContainerRef = (0, import_react24.useRef)(null);
3824
3703
  const isAutomaticScrollRef = (0, import_react24.useRef)(false);
3704
+ const frameCallbacksRef = (0, import_react24.useRef)(/* @__PURE__ */ new Map());
3825
3705
  const continuousPlayRef = (0, import_react24.useRef)((_d = annotationList == null ? void 0 : annotationList.isContinuousPlay) != null ? _d : false);
3826
3706
  const activeAnnotationIdRef = (0, import_react24.useRef)(null);
3827
3707
  const engineTracksRef = (0, import_react24.useRef)(null);
@@ -4282,10 +4162,30 @@ var WaveformPlaylistProvider = ({
4282
4162
  const elapsed = (0, import_tone4.getContext)().currentTime - ((_a2 = playbackStartTimeRef.current) != null ? _a2 : 0);
4283
4163
  return ((_b2 = audioStartPositionRef.current) != null ? _b2 : 0) + elapsed;
4284
4164
  }, []);
4165
+ const registerFrameCallback = (0, import_react24.useCallback)((id, cb) => {
4166
+ frameCallbacksRef.current.set(id, cb);
4167
+ }, []);
4168
+ const unregisterFrameCallback = (0, import_react24.useCallback)((id) => {
4169
+ frameCallbacksRef.current.delete(id);
4170
+ }, []);
4285
4171
  const startAnimationLoop = (0, import_react24.useCallback)(() => {
4172
+ const audioCtx = (0, import_playout5.getGlobalAudioContext)();
4286
4173
  const updateTime = () => {
4287
4174
  const time = getPlaybackTime();
4288
4175
  currentTimeRef.current = time;
4176
+ const latency = "outputLatency" in audioCtx ? audioCtx.outputLatency : 0;
4177
+ const visualTime = Math.max(0, time - latency);
4178
+ const sr = sampleRateRef.current;
4179
+ const spp = samplesPerPixelRef.current;
4180
+ const frameData = {
4181
+ time,
4182
+ visualTime,
4183
+ sampleRate: sr,
4184
+ samplesPerPixel: spp
4185
+ };
4186
+ for (const cb of frameCallbacksRef.current.values()) {
4187
+ cb(frameData);
4188
+ }
4289
4189
  const currentAnnotations = annotationsRef.current;
4290
4190
  if (currentAnnotations.length > 0) {
4291
4191
  const currentAnnotation = currentAnnotations.find(
@@ -4320,10 +4220,9 @@ var WaveformPlaylistProvider = ({
4320
4220
  }
4321
4221
  if (isAutomaticScrollRef.current && scrollContainerRef.current && duration > 0) {
4322
4222
  const container = scrollContainerRef.current;
4323
- const sr = sampleRateRef.current;
4324
- const pixelPosition = time * sr / samplesPerPixelRef.current;
4223
+ const pixelPosition = visualTime * sr / spp;
4325
4224
  const containerWidth = container.clientWidth;
4326
- const targetScrollLeft = Math.max(0, pixelPosition - containerWidth / 2);
4225
+ const targetScrollLeft = Math.round(Math.max(0, pixelPosition - containerWidth / 2));
4327
4226
  container.scrollLeft = targetScrollLeft;
4328
4227
  }
4329
4228
  if (playbackEndTimeRef.current !== null && time >= playbackEndTimeRef.current) {
@@ -4559,7 +4458,9 @@ var WaveformPlaylistProvider = ({
4559
4458
  currentTimeRef,
4560
4459
  playbackStartTimeRef,
4561
4460
  audioStartPositionRef,
4562
- getPlaybackTime
4461
+ getPlaybackTime,
4462
+ registerFrameCallback,
4463
+ unregisterFrameCallback
4563
4464
  }),
4564
4465
  [
4565
4466
  isPlaying,
@@ -4567,7 +4468,9 @@ var WaveformPlaylistProvider = ({
4567
4468
  currentTimeRef,
4568
4469
  playbackStartTimeRef,
4569
4470
  audioStartPositionRef,
4570
- getPlaybackTime
4471
+ getPlaybackTime,
4472
+ registerFrameCallback,
4473
+ unregisterFrameCallback
4571
4474
  ]
4572
4475
  );
4573
4476
  const stateValue = (0, import_react24.useMemo)(
@@ -5348,32 +5251,19 @@ var PositionDisplay = import_styled_components3.default.span`
5348
5251
  var AudioPosition = ({ className }) => {
5349
5252
  var _a;
5350
5253
  const timeRef = (0, import_react27.useRef)(null);
5351
- const animationFrameRef = (0, import_react27.useRef)(null);
5352
- const { isPlaying, currentTimeRef, getPlaybackTime } = usePlaybackAnimation();
5254
+ const { isPlaying, currentTimeRef, registerFrameCallback, unregisterFrameCallback } = usePlaybackAnimation();
5353
5255
  const { timeFormat: format } = usePlaylistData();
5354
5256
  (0, import_react27.useEffect)(() => {
5355
- const updateTime = () => {
5356
- var _a2;
5357
- if (timeRef.current) {
5358
- const time = isPlaying ? getPlaybackTime() : (_a2 = currentTimeRef.current) != null ? _a2 : 0;
5359
- timeRef.current.textContent = (0, import_ui_components6.formatTime)(time, format);
5360
- }
5361
- if (isPlaying) {
5362
- animationFrameRef.current = requestAnimationFrame(updateTime);
5363
- }
5364
- };
5257
+ const id = "audio-position";
5365
5258
  if (isPlaying) {
5366
- animationFrameRef.current = requestAnimationFrame(updateTime);
5367
- } else {
5368
- updateTime();
5259
+ registerFrameCallback(id, ({ time }) => {
5260
+ if (timeRef.current) {
5261
+ timeRef.current.textContent = (0, import_ui_components6.formatTime)(time, format);
5262
+ }
5263
+ });
5369
5264
  }
5370
- return () => {
5371
- if (animationFrameRef.current) {
5372
- cancelAnimationFrame(animationFrameRef.current);
5373
- animationFrameRef.current = null;
5374
- }
5375
- };
5376
- }, [isPlaying, format, currentTimeRef, getPlaybackTime]);
5265
+ return () => unregisterFrameCallback(id);
5266
+ }, [isPlaying, format, registerFrameCallback, unregisterFrameCallback]);
5377
5267
  (0, import_react27.useEffect)(() => {
5378
5268
  var _a2;
5379
5269
  if (!isPlaying && timeRef.current) {
@@ -5533,33 +5423,20 @@ var PlayheadLine = import_styled_components4.default.div.attrs((props) => ({
5533
5423
  `;
5534
5424
  var AnimatedPlayhead = ({ color = "#ff0000" }) => {
5535
5425
  const playheadRef = (0, import_react30.useRef)(null);
5536
- const animationFrameRef = (0, import_react30.useRef)(null);
5537
- const { isPlaying, currentTimeRef, getPlaybackTime } = usePlaybackAnimation();
5426
+ const { isPlaying, currentTimeRef, registerFrameCallback, unregisterFrameCallback } = usePlaybackAnimation();
5538
5427
  const { samplesPerPixel, sampleRate, progressBarWidth } = usePlaylistData();
5539
5428
  (0, import_react30.useEffect)(() => {
5540
- const updatePosition = () => {
5541
- var _a;
5542
- if (playheadRef.current) {
5543
- const time = isPlaying ? getPlaybackTime() : (_a = currentTimeRef.current) != null ? _a : 0;
5544
- const position = time * sampleRate / samplesPerPixel;
5545
- playheadRef.current.style.transform = `translate3d(${position}px, 0, 0)`;
5546
- }
5547
- if (isPlaying) {
5548
- animationFrameRef.current = requestAnimationFrame(updatePosition);
5549
- }
5550
- };
5429
+ const id = "playhead";
5551
5430
  if (isPlaying) {
5552
- animationFrameRef.current = requestAnimationFrame(updatePosition);
5553
- } else {
5554
- updatePosition();
5431
+ registerFrameCallback(id, ({ visualTime, sampleRate: sr, samplesPerPixel: spp }) => {
5432
+ if (playheadRef.current) {
5433
+ const px = visualTime * sr / spp;
5434
+ playheadRef.current.style.transform = `translate3d(${px}px, 0, 0)`;
5435
+ }
5436
+ });
5555
5437
  }
5556
- return () => {
5557
- if (animationFrameRef.current) {
5558
- cancelAnimationFrame(animationFrameRef.current);
5559
- animationFrameRef.current = null;
5560
- }
5561
- };
5562
- }, [isPlaying, sampleRate, samplesPerPixel, currentTimeRef, getPlaybackTime]);
5438
+ return () => unregisterFrameCallback(id);
5439
+ }, [isPlaying, registerFrameCallback, unregisterFrameCallback]);
5563
5440
  (0, import_react30.useEffect)(() => {
5564
5441
  var _a;
5565
5442
  if (!isPlaying && playheadRef.current) {
@@ -5574,7 +5451,7 @@ var AnimatedPlayhead = ({ color = "#ff0000" }) => {
5574
5451
  // src/components/ChannelWithProgress.tsx
5575
5452
  var import_react31 = require("react");
5576
5453
  var import_styled_components5 = __toESM(require("styled-components"));
5577
- var import_core6 = require("@waveform-playlist/core");
5454
+ var import_core7 = require("@waveform-playlist/core");
5578
5455
  var import_ui_components8 = require("@waveform-playlist/ui-components");
5579
5456
  var import_jsx_runtime9 = require("react/jsx-runtime");
5580
5457
  var ChannelWrapper = import_styled_components5.default.div`
@@ -5631,58 +5508,44 @@ var ChannelWithProgress = (_a) => {
5631
5508
  "clipOffsetSeconds"
5632
5509
  ]);
5633
5510
  const progressRef = (0, import_react31.useRef)(null);
5634
- const animationFrameRef = (0, import_react31.useRef)(null);
5511
+ const callbackId = (0, import_react31.useId)();
5635
5512
  const theme = (0, import_ui_components8.useTheme)();
5636
5513
  const { waveHeight } = (0, import_ui_components8.usePlaylistInfo)();
5637
- const { isPlaying, currentTimeRef, getPlaybackTime } = usePlaybackAnimation();
5514
+ const { isPlaying, currentTimeRef, registerFrameCallback, unregisterFrameCallback } = usePlaybackAnimation();
5638
5515
  const { samplesPerPixel, sampleRate } = usePlaylistData();
5639
5516
  const progressColor = (theme == null ? void 0 : theme.waveProgressColor) || "rgba(0, 0, 0, 0.1)";
5640
- const clipPixelWidth = (0, import_core6.clipPixelWidth)(
5517
+ const clipPixelWidth = (0, import_core7.clipPixelWidth)(
5641
5518
  clipStartSample,
5642
5519
  clipDurationSamples,
5643
5520
  samplesPerPixel
5644
5521
  );
5645
5522
  (0, import_react31.useEffect)(() => {
5646
- const updateProgress = () => {
5647
- var _a2;
5648
- if (progressRef.current) {
5649
- const currentTime = isPlaying ? getPlaybackTime() : (_a2 = currentTimeRef.current) != null ? _a2 : 0;
5650
- const currentSample = currentTime * sampleRate;
5651
- const clipEndSample = clipStartSample + clipDurationSamples;
5652
- let ratio = 0;
5653
- if (currentSample <= clipStartSample) {
5654
- ratio = 0;
5655
- } else if (currentSample >= clipEndSample) {
5656
- ratio = 1;
5657
- } else {
5658
- const playedSamples = currentSample - clipStartSample;
5659
- ratio = playedSamples / clipDurationSamples;
5660
- }
5661
- progressRef.current.style.transform = `scaleX(${ratio})`;
5662
- }
5663
- if (isPlaying) {
5664
- animationFrameRef.current = requestAnimationFrame(updateProgress);
5665
- }
5666
- };
5667
5523
  if (isPlaying) {
5668
- animationFrameRef.current = requestAnimationFrame(updateProgress);
5669
- } else {
5670
- updateProgress();
5524
+ registerFrameCallback(callbackId, ({ visualTime, sampleRate: sr }) => {
5525
+ if (progressRef.current) {
5526
+ const currentSample = visualTime * sr;
5527
+ const clipEndSample = clipStartSample + clipDurationSamples;
5528
+ let ratio = 0;
5529
+ if (currentSample <= clipStartSample) {
5530
+ ratio = 0;
5531
+ } else if (currentSample >= clipEndSample) {
5532
+ ratio = 1;
5533
+ } else {
5534
+ const playedSamples = currentSample - clipStartSample;
5535
+ ratio = playedSamples / clipDurationSamples;
5536
+ }
5537
+ progressRef.current.style.transform = `scaleX(${ratio})`;
5538
+ }
5539
+ });
5671
5540
  }
5672
- return () => {
5673
- if (animationFrameRef.current) {
5674
- cancelAnimationFrame(animationFrameRef.current);
5675
- animationFrameRef.current = null;
5676
- }
5677
- };
5541
+ return () => unregisterFrameCallback(callbackId);
5678
5542
  }, [
5679
5543
  isPlaying,
5680
- sampleRate,
5681
5544
  clipStartSample,
5682
5545
  clipDurationSamples,
5683
- clipPixelWidth,
5684
- currentTimeRef,
5685
- getPlaybackTime
5546
+ callbackId,
5547
+ registerFrameCallback,
5548
+ unregisterFrameCallback
5686
5549
  ]);
5687
5550
  (0, import_react31.useEffect)(() => {
5688
5551
  var _a2;
@@ -7161,7 +7024,7 @@ var KeyboardShortcuts = ({
7161
7024
  var import_react40 = __toESM(require("react"));
7162
7025
  var import_react41 = require("@dnd-kit/react");
7163
7026
  var import_modifiers2 = require("@dnd-kit/abstract/modifiers");
7164
- var import_core8 = require("@waveform-playlist/core");
7027
+ var import_core9 = require("@waveform-playlist/core");
7165
7028
  var import_ui_components12 = require("@waveform-playlist/ui-components");
7166
7029
 
7167
7030
  // src/modifiers/ClipCollisionModifier.ts
@@ -7190,7 +7053,7 @@ var ClipCollisionModifier = _ClipCollisionModifier;
7190
7053
 
7191
7054
  // src/modifiers/SnapToGridModifier.ts
7192
7055
  var import_abstract2 = require("@dnd-kit/abstract");
7193
- var import_core7 = require("@waveform-playlist/core");
7056
+ var import_core8 = require("@waveform-playlist/core");
7194
7057
  var _SnapToGridModifier = class _SnapToGridModifier extends import_abstract2.Modifier {
7195
7058
  apply(operation) {
7196
7059
  const { transform, source } = operation;
@@ -7211,18 +7074,18 @@ var _SnapToGridModifier = class _SnapToGridModifier extends import_abstract2.Mod
7211
7074
  }
7212
7075
  const { snapTo, bpm, timeSignature, sampleRate } = this.options;
7213
7076
  if (snapTo === "off") return transform;
7214
- const gridTicks = snapTo === "bar" ? (0, import_core7.ticksPerBar)(timeSignature) : (0, import_core7.ticksPerBeat)(timeSignature);
7077
+ const gridTicks = snapTo === "bar" ? (0, import_core8.ticksPerBar)(timeSignature) : (0, import_core8.ticksPerBeat)(timeSignature);
7215
7078
  if (startSample !== void 0) {
7216
7079
  const proposedSamples = startSample + transform.x * samplesPerPixel;
7217
- const proposedTicks = (0, import_core7.samplesToTicks)(proposedSamples, bpm, sampleRate);
7218
- const snappedTicks2 = (0, import_core7.snapToGrid)(proposedTicks, gridTicks);
7219
- const snappedSamples2 = (0, import_core7.ticksToSamples)(snappedTicks2, bpm, sampleRate);
7080
+ const proposedTicks = (0, import_core8.samplesToTicks)(proposedSamples, bpm, sampleRate);
7081
+ const snappedTicks2 = (0, import_core8.snapToGrid)(proposedTicks, gridTicks);
7082
+ const snappedSamples2 = (0, import_core8.ticksToSamples)(snappedTicks2, bpm, sampleRate);
7220
7083
  return { x: (snappedSamples2 - startSample) / samplesPerPixel, y: 0 };
7221
7084
  }
7222
7085
  const deltaSamples = transform.x * samplesPerPixel;
7223
- const deltaTicks = (0, import_core7.samplesToTicks)(deltaSamples, bpm, sampleRate);
7224
- const snappedTicks = (0, import_core7.snapToGrid)(deltaTicks, gridTicks);
7225
- const snappedSamples = (0, import_core7.ticksToSamples)(snappedTicks, bpm, sampleRate);
7086
+ const deltaTicks = (0, import_core8.samplesToTicks)(deltaSamples, bpm, sampleRate);
7087
+ const snappedTicks = (0, import_core8.snapToGrid)(deltaTicks, gridTicks);
7088
+ const snappedSamples = (0, import_core8.ticksToSamples)(snappedTicks, bpm, sampleRate);
7226
7089
  return { x: snappedSamples / samplesPerPixel, y: 0 };
7227
7090
  }
7228
7091
  };
@@ -7253,11 +7116,11 @@ var ClipInteractionProvider = ({
7253
7116
  const snapSamplePosition = (0, import_react40.useMemo)(() => {
7254
7117
  if (useBeatsSnap && beatsAndBars) {
7255
7118
  const { bpm, timeSignature, snapTo } = beatsAndBars;
7256
- const gridTicks = snapTo === "bar" ? (0, import_core8.ticksPerBar)(timeSignature) : (0, import_core8.ticksPerBeat)(timeSignature);
7119
+ const gridTicks = snapTo === "bar" ? (0, import_core9.ticksPerBar)(timeSignature) : (0, import_core9.ticksPerBeat)(timeSignature);
7257
7120
  return (samplePos) => {
7258
- const ticks = (0, import_core8.samplesToTicks)(samplePos, bpm, sampleRate);
7259
- const snapped = (0, import_core8.snapToGrid)(ticks, gridTicks);
7260
- return (0, import_core8.ticksToSamples)(snapped, bpm, sampleRate);
7121
+ const ticks = (0, import_core9.samplesToTicks)(samplePos, bpm, sampleRate);
7122
+ const snapped = (0, import_core9.snapToGrid)(ticks, gridTicks);
7123
+ return (0, import_core9.ticksToSamples)(snapped, bpm, sampleRate);
7261
7124
  };
7262
7125
  }
7263
7126
  if (useTimescaleSnap) {