@waveform-playlist/browser 11.3.0 → 11.3.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.d.mts CHANGED
@@ -962,7 +962,7 @@ type TrackEffectsFunction = (graphEnd: unknown, destination: unknown, isOffline:
962
962
  interface ExportOptions extends WavEncoderOptions {
963
963
  /** Filename for download (without extension) */
964
964
  filename?: string;
965
- /** Export mode: 'master' for stereo mix, 'individual' for single track */
965
+ /** Export mode: 'master' for full mixdown, 'individual' for single track */
966
966
  mode?: 'master' | 'individual';
967
967
  /** Track index for individual export (only used when mode is 'individual') */
968
968
  trackIndex?: number;
@@ -971,8 +971,8 @@ interface ExportOptions extends WavEncoderOptions {
971
971
  /** Whether to apply effects (fades, etc.) - defaults to true */
972
972
  applyEffects?: boolean;
973
973
  /**
974
- * Optional Tone.js effects function for master effects. When provided, export will use Tone.Offline
975
- * to render through the effects chain. The function receives isOffline=true.
974
+ * Optional Tone.js effects function for master effects. When provided, export renders
975
+ * through the effects chain. The function receives isOffline=true.
976
976
  */
977
977
  effectsFunction?: EffectsFunction;
978
978
  /**
@@ -1009,8 +1009,8 @@ interface TrackState {
1009
1009
  pan: number;
1010
1010
  }
1011
1011
  /**
1012
- * Hook for exporting the waveform playlist to WAV format
1013
- * Uses OfflineAudioContext for fast, non-real-time rendering
1012
+ * Hook for exporting the waveform playlist to WAV format.
1013
+ * Uses Tone.Offline for non-real-time rendering, mirroring the live playback graph.
1014
1014
  */
1015
1015
  declare function useExportWav(): UseExportWavReturn;
1016
1016
 
package/dist/index.d.ts CHANGED
@@ -962,7 +962,7 @@ type TrackEffectsFunction = (graphEnd: unknown, destination: unknown, isOffline:
962
962
  interface ExportOptions extends WavEncoderOptions {
963
963
  /** Filename for download (without extension) */
964
964
  filename?: string;
965
- /** Export mode: 'master' for stereo mix, 'individual' for single track */
965
+ /** Export mode: 'master' for full mixdown, 'individual' for single track */
966
966
  mode?: 'master' | 'individual';
967
967
  /** Track index for individual export (only used when mode is 'individual') */
968
968
  trackIndex?: number;
@@ -971,8 +971,8 @@ interface ExportOptions extends WavEncoderOptions {
971
971
  /** Whether to apply effects (fades, etc.) - defaults to true */
972
972
  applyEffects?: boolean;
973
973
  /**
974
- * Optional Tone.js effects function for master effects. When provided, export will use Tone.Offline
975
- * to render through the effects chain. The function receives isOffline=true.
974
+ * Optional Tone.js effects function for master effects. When provided, export renders
975
+ * through the effects chain. The function receives isOffline=true.
976
976
  */
977
977
  effectsFunction?: EffectsFunction;
978
978
  /**
@@ -1009,8 +1009,8 @@ interface TrackState {
1009
1009
  pan: number;
1010
1010
  }
1011
1011
  /**
1012
- * Hook for exporting the waveform playlist to WAV format
1013
- * Uses OfflineAudioContext for fast, non-real-time rendering
1012
+ * Hook for exporting the waveform playlist to WAV format.
1013
+ * Uses Tone.Offline for non-real-time rendering, mirroring the live playback graph.
1014
1014
  */
1015
1015
  declare function useExportWav(): UseExportWavReturn;
1016
1016
 
package/dist/index.js CHANGED
@@ -2741,6 +2741,7 @@ function useTrackDynamicEffects() {
2741
2741
 
2742
2742
  // src/hooks/useExportWav.ts
2743
2743
  var import_react19 = require("react");
2744
+ var import_core4 = require("@waveform-playlist/core");
2744
2745
  var import_playout2 = require("@waveform-playlist/playout");
2745
2746
 
2746
2747
  // src/utils/wavEncoder.ts
@@ -2851,47 +2852,24 @@ function useExportWav() {
2851
2852
  totalDurationSamples += Math.round(sampleRate * 0.1);
2852
2853
  const duration = totalDurationSamples / sampleRate;
2853
2854
  const tracksToRender = mode === "individual" ? [{ track: tracks[trackIndex], state: trackStates[trackIndex], index: trackIndex }] : tracks.map((track, index) => ({ track, state: trackStates[index], index }));
2854
- const hasSolo = trackStates.some((state) => state.soloed);
2855
- const hasOfflineTrackEffects = !!createOfflineTrackEffects;
2856
- let renderedBuffer;
2857
- if ((effectsFunction || hasOfflineTrackEffects) && applyEffects) {
2858
- renderedBuffer = yield renderWithToneEffects(
2859
- tracksToRender,
2860
- trackStates,
2861
- hasSolo,
2862
- duration,
2863
- sampleRate,
2864
- effectsFunction,
2865
- createOfflineTrackEffects,
2866
- (p) => {
2867
- setProgress(p);
2868
- onProgress == null ? void 0 : onProgress(p);
2869
- }
2870
- );
2871
- } else {
2872
- const offlineCtx = new OfflineAudioContext(2, totalDurationSamples, sampleRate);
2873
- let scheduledClips = 0;
2874
- const totalClips = tracksToRender.reduce((sum, { track }) => sum + track.clips.length, 0);
2875
- for (const { track, state } of tracksToRender) {
2876
- if (state.muted && !state.soloed) continue;
2877
- if (hasSolo && !state.soloed) continue;
2878
- for (const clip of track.clips) {
2879
- yield scheduleClip(offlineCtx, clip, state, sampleRate, applyEffects);
2880
- scheduledClips++;
2881
- const currentProgress = scheduledClips / totalClips * 0.5;
2882
- setProgress(currentProgress);
2883
- onProgress == null ? void 0 : onProgress(currentProgress);
2884
- }
2885
- }
2886
- setProgress(0.5);
2887
- onProgress == null ? void 0 : onProgress(0.5);
2888
- renderedBuffer = yield offlineCtx.startRendering();
2889
- }
2890
- setProgress(0.9);
2891
- onProgress == null ? void 0 : onProgress(0.9);
2855
+ const hasSolo = mode === "master" && trackStates.some((state) => state.soloed);
2856
+ const reportProgress = (p) => {
2857
+ setProgress(p);
2858
+ onProgress == null ? void 0 : onProgress(p);
2859
+ };
2860
+ const renderedBuffer = yield renderOffline(
2861
+ tracksToRender,
2862
+ hasSolo,
2863
+ duration,
2864
+ sampleRate,
2865
+ applyEffects,
2866
+ effectsFunction,
2867
+ createOfflineTrackEffects,
2868
+ reportProgress
2869
+ );
2870
+ reportProgress(0.9);
2892
2871
  const blob = encodeWav(renderedBuffer, { bitDepth });
2893
- setProgress(1);
2894
- onProgress == null ? void 0 : onProgress(1);
2872
+ reportProgress(1);
2895
2873
  if (autoDownload) {
2896
2874
  const exportFilename = mode === "individual" ? `${filename}_${tracks[trackIndex].name}` : filename;
2897
2875
  downloadBlob(blob, `${exportFilename}.wav`);
@@ -2918,29 +2896,35 @@ function useExportWav() {
2918
2896
  error
2919
2897
  };
2920
2898
  }
2921
- function renderWithToneEffects(tracksToRender, _trackStates, hasSolo, duration, sampleRate, effectsFunction, createOfflineTrackEffects, onProgress) {
2899
+ function renderOffline(tracksToRender, hasSolo, duration, sampleRate, applyEffects, effectsFunction, createOfflineTrackEffects, onProgress) {
2922
2900
  return __async(this, null, function* () {
2923
2901
  const { Offline, Volume: Volume2, Gain, Panner, Player, ToneAudioBuffer } = yield import("tone");
2924
2902
  onProgress(0.1);
2903
+ const audibleTracks = tracksToRender.filter(({ state }) => {
2904
+ if (state.muted && !state.soloed) return false;
2905
+ if (hasSolo && !state.soloed) return false;
2906
+ return true;
2907
+ });
2908
+ const outputChannels = audibleTracks.reduce(
2909
+ (max, { track }) => Math.max(max, (0, import_core4.trackChannelCount)(track)),
2910
+ 1
2911
+ );
2925
2912
  let buffer;
2926
2913
  try {
2927
2914
  buffer = yield Offline(
2928
2915
  (_0) => __async(null, [_0], function* ({ transport, destination }) {
2929
2916
  const masterVolume = new Volume2(0);
2930
- let cleanup = void 0;
2931
- if (effectsFunction) {
2932
- cleanup = effectsFunction(masterVolume, destination, true);
2917
+ if (effectsFunction && applyEffects) {
2918
+ effectsFunction(masterVolume, destination, true);
2933
2919
  } else {
2934
2920
  masterVolume.connect(destination);
2935
2921
  }
2936
- for (const { track, state } of tracksToRender) {
2937
- if (state.muted && !state.soloed) continue;
2938
- if (hasSolo && !state.soloed) continue;
2939
- const trackVolume = new Volume2(gainToDb(state.volume));
2940
- const trackPan = new Panner(state.pan);
2922
+ for (const { track, state } of audibleTracks) {
2923
+ const trackVolume = new Volume2((0, import_core4.gainToDb)(state.volume));
2924
+ const trackPan = new Panner({ pan: state.pan, channelCount: (0, import_core4.trackChannelCount)(track) });
2941
2925
  const trackMute = new Gain(state.muted ? 0 : 1);
2942
2926
  const trackEffects = createOfflineTrackEffects == null ? void 0 : createOfflineTrackEffects(track.id);
2943
- if (trackEffects) {
2927
+ if (trackEffects && applyEffects) {
2944
2928
  trackEffects(trackMute, masterVolume, true);
2945
2929
  } else {
2946
2930
  trackMute.connect(masterVolume);
@@ -2957,6 +2941,12 @@ function renderWithToneEffects(tracksToRender, _trackStates, hasSolo, duration,
2957
2941
  fadeIn,
2958
2942
  fadeOut
2959
2943
  } = clip;
2944
+ if (!audioBuffer) {
2945
+ console.warn(
2946
+ '[waveform-playlist] Skipping clip "' + (clip.name || clip.id) + '" - no audioBuffer for export'
2947
+ );
2948
+ continue;
2949
+ }
2960
2950
  const startTime = startSample / sampleRate;
2961
2951
  const clipDuration = durationSamples / sampleRate;
2962
2952
  const offset = offsetSamples / sampleRate;
@@ -2965,167 +2955,56 @@ function renderWithToneEffects(tracksToRender, _trackStates, hasSolo, duration,
2965
2955
  const fadeGain = new Gain(clipGain);
2966
2956
  player.connect(fadeGain);
2967
2957
  fadeGain.connect(trackVolume);
2968
- if (fadeIn) {
2969
- const fadeInStart = startTime;
2970
- const fadeInEnd = startTime + fadeIn.duration;
2958
+ if (applyEffects) {
2971
2959
  const audioParam = (0, import_playout2.getUnderlyingAudioParam)(fadeGain.gain);
2972
2960
  if (audioParam) {
2973
- audioParam.setValueAtTime(0, fadeInStart);
2974
- audioParam.linearRampToValueAtTime(clipGain, fadeInEnd);
2975
- }
2976
- }
2977
- if (fadeOut) {
2978
- const fadeOutStart = startTime + clipDuration - fadeOut.duration;
2979
- const fadeOutEnd = startTime + clipDuration;
2980
- const audioParam = (0, import_playout2.getUnderlyingAudioParam)(fadeGain.gain);
2981
- if (audioParam) {
2982
- audioParam.setValueAtTime(clipGain, fadeOutStart);
2983
- audioParam.linearRampToValueAtTime(0, fadeOutEnd);
2961
+ applyClipFades(audioParam, clipGain, startTime, clipDuration, fadeIn, fadeOut);
2962
+ } else if (fadeIn || fadeOut) {
2963
+ console.warn(
2964
+ '[waveform-playlist] Cannot apply fades for clip "' + (clip.name || clip.id) + '" - AudioParam not accessible'
2965
+ );
2984
2966
  }
2985
2967
  }
2986
2968
  player.start(startTime, offset, clipDuration);
2987
2969
  }
2988
2970
  }
2989
2971
  transport.start(0);
2990
- if (cleanup) {
2991
- }
2992
2972
  }),
2993
2973
  duration,
2994
- 2,
2995
- // stereo
2974
+ outputChannels,
2996
2975
  sampleRate
2997
2976
  );
2998
2977
  } catch (err) {
2999
2978
  if (err instanceof Error) {
3000
2979
  throw err;
3001
2980
  } else {
3002
- throw new Error(`Tone.Offline rendering failed: ${String(err)}`);
2981
+ throw new Error("Tone.Offline rendering failed: " + String(err));
3003
2982
  }
3004
2983
  }
3005
2984
  onProgress(0.9);
3006
- return buffer.get();
3007
- });
3008
- }
3009
- function gainToDb(gain) {
3010
- return 20 * Math.log10(Math.max(gain, 1e-4));
3011
- }
3012
- function scheduleClip(ctx, clip, trackState, sampleRate, applyEffects) {
3013
- return __async(this, null, function* () {
3014
- const {
3015
- audioBuffer,
3016
- startSample,
3017
- durationSamples,
3018
- offsetSamples,
3019
- gain: clipGain,
3020
- fadeIn,
3021
- fadeOut
3022
- } = clip;
3023
- if (!audioBuffer) {
3024
- console.warn(`Skipping clip "${clip.name || clip.id}" - no audioBuffer for export`);
3025
- return;
2985
+ const result = buffer.get();
2986
+ if (!result) {
2987
+ throw new Error("Offline rendering produced no audio buffer");
3026
2988
  }
3027
- const startTime = startSample / sampleRate;
3028
- const duration = durationSamples / sampleRate;
3029
- const offset = offsetSamples / sampleRate;
3030
- const source = ctx.createBufferSource();
3031
- source.buffer = audioBuffer;
3032
- const gainNode = ctx.createGain();
3033
- const baseGain = clipGain * trackState.volume;
3034
- const pannerNode = ctx.createStereoPanner();
3035
- pannerNode.pan.value = trackState.pan;
3036
- source.connect(gainNode);
3037
- gainNode.connect(pannerNode);
3038
- pannerNode.connect(ctx.destination);
3039
- if (applyEffects) {
3040
- if (fadeIn) {
3041
- gainNode.gain.setValueAtTime(0, startTime);
3042
- } else {
3043
- gainNode.gain.setValueAtTime(baseGain, startTime);
3044
- }
3045
- if (fadeIn) {
3046
- const fadeInStart = startTime;
3047
- const fadeInEnd = startTime + fadeIn.duration;
3048
- applyFadeEnvelope(
3049
- gainNode.gain,
3050
- fadeInStart,
3051
- fadeInEnd,
3052
- 0,
3053
- baseGain,
3054
- fadeIn.type || "linear"
3055
- );
3056
- }
3057
- if (fadeOut) {
3058
- const fadeOutStart = startTime + duration - fadeOut.duration;
3059
- const fadeOutEnd = startTime + duration;
3060
- if (!fadeIn || fadeIn.duration < duration - fadeOut.duration) {
3061
- gainNode.gain.setValueAtTime(baseGain, fadeOutStart);
3062
- }
3063
- applyFadeEnvelope(
3064
- gainNode.gain,
3065
- fadeOutStart,
3066
- fadeOutEnd,
3067
- baseGain,
3068
- 0,
3069
- fadeOut.type || "linear"
3070
- );
3071
- }
3072
- } else {
3073
- gainNode.gain.setValueAtTime(baseGain, startTime);
3074
- }
3075
- source.start(startTime, offset, duration);
2989
+ return result;
3076
2990
  });
3077
2991
  }
3078
- function applyFadeEnvelope(gainParam, startTime, endTime, startValue, endValue, fadeType) {
3079
- const duration = endTime - startTime;
3080
- if (duration <= 0) return;
3081
- switch (fadeType) {
3082
- case "linear":
3083
- gainParam.setValueAtTime(startValue, startTime);
3084
- gainParam.linearRampToValueAtTime(endValue, endTime);
3085
- break;
3086
- case "exponential": {
3087
- const expStart = Math.max(startValue, 1e-4);
3088
- const expEnd = Math.max(endValue, 1e-4);
3089
- gainParam.setValueAtTime(expStart, startTime);
3090
- gainParam.exponentialRampToValueAtTime(expEnd, endTime);
3091
- if (endValue === 0) {
3092
- gainParam.setValueAtTime(0, endTime);
3093
- }
3094
- break;
3095
- }
3096
- case "logarithmic": {
3097
- const logCurve = generateFadeCurve(startValue, endValue, 256, "logarithmic");
3098
- gainParam.setValueCurveAtTime(logCurve, startTime, duration);
3099
- break;
3100
- }
3101
- case "sCurve": {
3102
- const sCurve = generateFadeCurve(startValue, endValue, 256, "sCurve");
3103
- gainParam.setValueCurveAtTime(sCurve, startTime, duration);
3104
- break;
3105
- }
3106
- default:
3107
- gainParam.setValueAtTime(startValue, startTime);
3108
- gainParam.linearRampToValueAtTime(endValue, endTime);
2992
+ function applyClipFades(gainParam, clipGain, startTime, clipDuration, fadeIn, fadeOut) {
2993
+ if (fadeIn) {
2994
+ gainParam.setValueAtTime(0, startTime);
2995
+ } else {
2996
+ gainParam.setValueAtTime(clipGain, startTime);
3109
2997
  }
3110
- }
3111
- function generateFadeCurve(startValue, endValue, numPoints, curveType) {
3112
- const curve = new Float32Array(numPoints);
3113
- const range = endValue - startValue;
3114
- for (let i = 0; i < numPoints; i++) {
3115
- const t = i / (numPoints - 1);
3116
- let curveValue;
3117
- if (curveType === "logarithmic") {
3118
- if (range > 0) {
3119
- curveValue = Math.log10(1 + t * 9) / Math.log10(10);
3120
- } else {
3121
- curveValue = 1 - Math.log10(1 + (1 - t) * 9) / Math.log10(10);
3122
- }
3123
- } else {
3124
- curveValue = t * t * (3 - 2 * t);
2998
+ if (fadeIn) {
2999
+ (0, import_core4.applyFadeIn)(gainParam, startTime, fadeIn.duration, fadeIn.type || "linear", 0, clipGain);
3000
+ }
3001
+ if (fadeOut) {
3002
+ const fadeOutStart = startTime + clipDuration - fadeOut.duration;
3003
+ if (!fadeIn || fadeIn.duration < clipDuration - fadeOut.duration) {
3004
+ gainParam.setValueAtTime(clipGain, fadeOutStart);
3125
3005
  }
3126
- curve[i] = startValue + range * curveValue;
3006
+ (0, import_core4.applyFadeOut)(gainParam, fadeOutStart, fadeOut.duration, fadeOut.type || "linear", clipGain, 0);
3127
3007
  }
3128
- return curve;
3129
3008
  }
3130
3009
 
3131
3010
  // src/hooks/useAnimationFrameLoop.ts
@@ -3520,7 +3399,7 @@ function useWaveformDataCache(tracks, baseScale) {
3520
3399
 
3521
3400
  // src/hooks/useDynamicTracks.ts
3522
3401
  var import_react22 = require("react");
3523
- var import_core4 = require("@waveform-playlist/core");
3402
+ var import_core5 = require("@waveform-playlist/core");
3524
3403
  var import_playout3 = require("@waveform-playlist/playout");
3525
3404
  function getSourceName(source) {
3526
3405
  var _a, _b, _c, _d, _e;
@@ -3574,7 +3453,7 @@ function useDynamicTracks() {
3574
3453
  if (sources.length === 0) return;
3575
3454
  const audioContext = (0, import_playout3.getGlobalAudioContext)();
3576
3455
  const placeholders = sources.map((source) => ({
3577
- track: (0, import_core4.createTrack)({ name: `${getSourceName(source)} (loading...)`, clips: [] }),
3456
+ track: (0, import_core5.createTrack)({ name: `${getSourceName(source)} (loading...)`, clips: [] }),
3578
3457
  source
3579
3458
  }));
3580
3459
  setTracks((prev) => [...prev, ...placeholders.map((p) => p.track)]);
@@ -3586,7 +3465,7 @@ function useDynamicTracks() {
3586
3465
  (() => __async(null, null, function* () {
3587
3466
  try {
3588
3467
  const { audioBuffer, name } = yield decodeSource(source, audioContext, controller.signal);
3589
- const clip = (0, import_core4.createClipFromSeconds)({
3468
+ const clip = (0, import_core5.createClipFromSeconds)({
3590
3469
  audioBuffer,
3591
3470
  startTime: 0,
3592
3471
  duration: audioBuffer.duration,
@@ -3644,7 +3523,7 @@ function useDynamicTracks() {
3644
3523
  // src/hooks/useOutputMeter.ts
3645
3524
  var import_react23 = require("react");
3646
3525
  var import_playout4 = require("@waveform-playlist/playout");
3647
- var import_core5 = require("@waveform-playlist/core");
3526
+ var import_core6 = require("@waveform-playlist/core");
3648
3527
  var import_worklets = require("@waveform-playlist/worklets");
3649
3528
  var PEAK_DECAY = 0.98;
3650
3529
  function useOutputMeter(options = {}) {
@@ -3699,8 +3578,8 @@ function useOutputMeter(options = {}) {
3699
3578
  const rmsValues = [];
3700
3579
  for (let ch = 0; ch < peak.length; ch++) {
3701
3580
  smoothed[ch] = Math.max(peak[ch], ((_a = smoothed[ch]) != null ? _a : 0) * PEAK_DECAY);
3702
- peakValues.push((0, import_core5.gainToNormalized)(smoothed[ch]));
3703
- rmsValues.push((0, import_core5.gainToNormalized)(rms[ch]));
3581
+ peakValues.push((0, import_core6.gainToNormalized)(smoothed[ch]));
3582
+ rmsValues.push((0, import_core6.gainToNormalized)(rms[ch]));
3704
3583
  }
3705
3584
  setLevels(peakValues);
3706
3585
  setRmsLevels(rmsValues);
@@ -5572,7 +5451,7 @@ var AnimatedPlayhead = ({ color = "#ff0000" }) => {
5572
5451
  // src/components/ChannelWithProgress.tsx
5573
5452
  var import_react31 = require("react");
5574
5453
  var import_styled_components5 = __toESM(require("styled-components"));
5575
- var import_core6 = require("@waveform-playlist/core");
5454
+ var import_core7 = require("@waveform-playlist/core");
5576
5455
  var import_ui_components8 = require("@waveform-playlist/ui-components");
5577
5456
  var import_jsx_runtime9 = require("react/jsx-runtime");
5578
5457
  var ChannelWrapper = import_styled_components5.default.div`
@@ -5635,7 +5514,7 @@ var ChannelWithProgress = (_a) => {
5635
5514
  const { isPlaying, currentTimeRef, registerFrameCallback, unregisterFrameCallback } = usePlaybackAnimation();
5636
5515
  const { samplesPerPixel, sampleRate } = usePlaylistData();
5637
5516
  const progressColor = (theme == null ? void 0 : theme.waveProgressColor) || "rgba(0, 0, 0, 0.1)";
5638
- const clipPixelWidth = (0, import_core6.clipPixelWidth)(
5517
+ const clipPixelWidth = (0, import_core7.clipPixelWidth)(
5639
5518
  clipStartSample,
5640
5519
  clipDurationSamples,
5641
5520
  samplesPerPixel
@@ -7145,7 +7024,7 @@ var KeyboardShortcuts = ({
7145
7024
  var import_react40 = __toESM(require("react"));
7146
7025
  var import_react41 = require("@dnd-kit/react");
7147
7026
  var import_modifiers2 = require("@dnd-kit/abstract/modifiers");
7148
- var import_core8 = require("@waveform-playlist/core");
7027
+ var import_core9 = require("@waveform-playlist/core");
7149
7028
  var import_ui_components12 = require("@waveform-playlist/ui-components");
7150
7029
 
7151
7030
  // src/modifiers/ClipCollisionModifier.ts
@@ -7174,7 +7053,7 @@ var ClipCollisionModifier = _ClipCollisionModifier;
7174
7053
 
7175
7054
  // src/modifiers/SnapToGridModifier.ts
7176
7055
  var import_abstract2 = require("@dnd-kit/abstract");
7177
- var import_core7 = require("@waveform-playlist/core");
7056
+ var import_core8 = require("@waveform-playlist/core");
7178
7057
  var _SnapToGridModifier = class _SnapToGridModifier extends import_abstract2.Modifier {
7179
7058
  apply(operation) {
7180
7059
  const { transform, source } = operation;
@@ -7195,18 +7074,18 @@ var _SnapToGridModifier = class _SnapToGridModifier extends import_abstract2.Mod
7195
7074
  }
7196
7075
  const { snapTo, bpm, timeSignature, sampleRate } = this.options;
7197
7076
  if (snapTo === "off") return transform;
7198
- const gridTicks = snapTo === "bar" ? (0, import_core7.ticksPerBar)(timeSignature) : (0, import_core7.ticksPerBeat)(timeSignature);
7077
+ const gridTicks = snapTo === "bar" ? (0, import_core8.ticksPerBar)(timeSignature) : (0, import_core8.ticksPerBeat)(timeSignature);
7199
7078
  if (startSample !== void 0) {
7200
7079
  const proposedSamples = startSample + transform.x * samplesPerPixel;
7201
- const proposedTicks = (0, import_core7.samplesToTicks)(proposedSamples, bpm, sampleRate);
7202
- const snappedTicks2 = (0, import_core7.snapToGrid)(proposedTicks, gridTicks);
7203
- const snappedSamples2 = (0, import_core7.ticksToSamples)(snappedTicks2, bpm, sampleRate);
7080
+ const proposedTicks = (0, import_core8.samplesToTicks)(proposedSamples, bpm, sampleRate);
7081
+ const snappedTicks2 = (0, import_core8.snapToGrid)(proposedTicks, gridTicks);
7082
+ const snappedSamples2 = (0, import_core8.ticksToSamples)(snappedTicks2, bpm, sampleRate);
7204
7083
  return { x: (snappedSamples2 - startSample) / samplesPerPixel, y: 0 };
7205
7084
  }
7206
7085
  const deltaSamples = transform.x * samplesPerPixel;
7207
- const deltaTicks = (0, import_core7.samplesToTicks)(deltaSamples, bpm, sampleRate);
7208
- const snappedTicks = (0, import_core7.snapToGrid)(deltaTicks, gridTicks);
7209
- const snappedSamples = (0, import_core7.ticksToSamples)(snappedTicks, bpm, sampleRate);
7086
+ const deltaTicks = (0, import_core8.samplesToTicks)(deltaSamples, bpm, sampleRate);
7087
+ const snappedTicks = (0, import_core8.snapToGrid)(deltaTicks, gridTicks);
7088
+ const snappedSamples = (0, import_core8.ticksToSamples)(snappedTicks, bpm, sampleRate);
7210
7089
  return { x: snappedSamples / samplesPerPixel, y: 0 };
7211
7090
  }
7212
7091
  };
@@ -7237,11 +7116,11 @@ var ClipInteractionProvider = ({
7237
7116
  const snapSamplePosition = (0, import_react40.useMemo)(() => {
7238
7117
  if (useBeatsSnap && beatsAndBars) {
7239
7118
  const { bpm, timeSignature, snapTo } = beatsAndBars;
7240
- const gridTicks = snapTo === "bar" ? (0, import_core8.ticksPerBar)(timeSignature) : (0, import_core8.ticksPerBeat)(timeSignature);
7119
+ const gridTicks = snapTo === "bar" ? (0, import_core9.ticksPerBar)(timeSignature) : (0, import_core9.ticksPerBeat)(timeSignature);
7241
7120
  return (samplePos) => {
7242
- const ticks = (0, import_core8.samplesToTicks)(samplePos, bpm, sampleRate);
7243
- const snapped = (0, import_core8.snapToGrid)(ticks, gridTicks);
7244
- return (0, import_core8.ticksToSamples)(snapped, bpm, sampleRate);
7121
+ const ticks = (0, import_core9.samplesToTicks)(samplePos, bpm, sampleRate);
7122
+ const snapped = (0, import_core9.snapToGrid)(ticks, gridTicks);
7123
+ return (0, import_core9.ticksToSamples)(snapped, bpm, sampleRate);
7245
7124
  };
7246
7125
  }
7247
7126
  if (useTimescaleSnap) {