@dawcore/transport 0.0.2 → 0.0.4

This diff compares the contents of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
package/dist/index.js CHANGED
@@ -86,12 +86,15 @@ var Clock = class {
86
86
 
87
87
  // src/core/scheduler.ts
88
88
  var Scheduler = class {
89
- constructor(options = {}) {
89
+ constructor(tempoMap, options = {}) {
90
90
  this._rightEdge = 0;
91
+ // integer ticks
91
92
  this._listeners = /* @__PURE__ */ new Set();
92
93
  this._loopEnabled = false;
93
94
  this._loopStart = 0;
95
+ // integer ticks
94
96
  this._loopEnd = 0;
97
+ this._tempoMap = tempoMap;
95
98
  this._lookahead = options.lookahead ?? 0.2;
96
99
  this._onLoop = options.onLoop;
97
100
  }
@@ -101,25 +104,40 @@ var Scheduler = class {
101
104
  removeListener(listener) {
102
105
  this._listeners.delete(listener);
103
106
  }
104
- setLoop(enabled, start, end) {
105
- if (enabled && start >= end) {
107
+ /** Primary API — ticks as source of truth */
108
+ setLoop(enabled, startTick, endTick) {
109
+ if (enabled && (!Number.isFinite(startTick) || !Number.isFinite(endTick))) {
106
110
  console.warn(
107
- "[waveform-playlist] Scheduler.setLoop: start (" + start + ") must be less than end (" + end + ")"
111
+ "[waveform-playlist] Scheduler.setLoop: non-finite tick values (" + startTick + ", " + endTick + ")"
112
+ );
113
+ return;
114
+ }
115
+ if (enabled && startTick >= endTick) {
116
+ console.warn(
117
+ "[waveform-playlist] Scheduler.setLoop: startTick (" + startTick + ") must be less than endTick (" + endTick + ")"
108
118
  );
109
119
  return;
110
120
  }
111
121
  this._loopEnabled = enabled;
112
- this._loopStart = start;
113
- this._loopEnd = end;
114
- }
115
- reset(time) {
116
- this._rightEdge = time;
117
- }
118
- advance(currentTime) {
119
- const targetEdge = currentTime + this._lookahead;
122
+ this._loopStart = Math.round(startTick);
123
+ this._loopEnd = Math.round(endTick);
124
+ }
125
+ /** Convenience — converts seconds to ticks via TempoMap */
126
+ setLoopSeconds(enabled, startSec, endSec) {
127
+ const startTick = this._tempoMap.secondsToTicks(startSec);
128
+ const endTick = this._tempoMap.secondsToTicks(endSec);
129
+ this.setLoop(enabled, startTick, endTick);
130
+ }
131
+ /** Reset scheduling cursor. Takes seconds (from Clock), converts to ticks. */
132
+ reset(timeSeconds) {
133
+ this._rightEdge = this._tempoMap.secondsToTicks(timeSeconds);
134
+ }
135
+ /** Advance the scheduling window. Takes seconds (from Clock), converts to ticks. */
136
+ advance(currentTimeSeconds) {
137
+ const targetTick = this._tempoMap.secondsToTicks(currentTimeSeconds + this._lookahead);
120
138
  if (this._loopEnabled && this._loopEnd > this._loopStart) {
121
139
  const loopDuration = this._loopEnd - this._loopStart;
122
- let remaining = targetEdge - this._rightEdge;
140
+ let remaining = targetTick - this._rightEdge;
123
141
  while (remaining > 0) {
124
142
  const distToEnd = this._loopEnd - this._rightEdge;
125
143
  if (distToEnd <= 0 || distToEnd > remaining) {
@@ -132,21 +150,25 @@ var Scheduler = class {
132
150
  for (const listener of this._listeners) {
133
151
  listener.onPositionJump(this._loopStart);
134
152
  }
135
- this._onLoop?.(this._loopStart);
153
+ this._onLoop?.(
154
+ this._tempoMap.ticksToSeconds(this._loopStart),
155
+ this._tempoMap.ticksToSeconds(this._loopEnd),
156
+ currentTimeSeconds
157
+ );
136
158
  this._rightEdge = this._loopStart;
137
159
  if (loopDuration <= 0) break;
138
160
  }
139
161
  return;
140
162
  }
141
- if (targetEdge > this._rightEdge) {
142
- this._generateAndConsume(this._rightEdge, targetEdge);
143
- this._rightEdge = targetEdge;
163
+ if (targetTick > this._rightEdge) {
164
+ this._generateAndConsume(this._rightEdge, targetTick);
165
+ this._rightEdge = targetTick;
144
166
  }
145
167
  }
146
- _generateAndConsume(from, to) {
168
+ _generateAndConsume(fromTick, toTick) {
147
169
  for (const listener of this._listeners) {
148
170
  try {
149
- const events = listener.generate(from, to);
171
+ const events = listener.generate(fromTick, toTick);
150
172
  for (const event of events) {
151
173
  try {
152
174
  listener.consume(event);
@@ -196,42 +218,77 @@ var Timer = class {
196
218
  // src/timeline/sample-timeline.ts
197
219
  var SampleTimeline = class {
198
220
  constructor(sampleRate) {
221
+ this._tempoMap = null;
199
222
  this._sampleRate = sampleRate;
200
223
  }
201
224
  get sampleRate() {
202
225
  return this._sampleRate;
203
226
  }
227
+ setTempoMap(tempoMap) {
228
+ this._tempoMap = tempoMap;
229
+ }
204
230
  samplesToSeconds(samples) {
205
231
  return samples / this._sampleRate;
206
232
  }
207
233
  secondsToSamples(seconds) {
208
234
  return Math.round(seconds * this._sampleRate);
209
235
  }
236
+ ticksToSamples(ticks) {
237
+ if (!this._tempoMap) {
238
+ throw new Error(
239
+ "[waveform-playlist] SampleTimeline: tempoMap not set \u2014 call setTempoMap() first"
240
+ );
241
+ }
242
+ return Math.round(this._tempoMap.ticksToSeconds(ticks) * this._sampleRate);
243
+ }
244
+ samplesToTicks(samples) {
245
+ if (!this._tempoMap) {
246
+ throw new Error(
247
+ "[waveform-playlist] SampleTimeline: tempoMap not set \u2014 call setTempoMap() first"
248
+ );
249
+ }
250
+ return this._tempoMap.secondsToTicks(samples / this._sampleRate);
251
+ }
210
252
  };
211
253
 
212
254
  // src/timeline/tempo-map.ts
255
+ var CURVE_EPSILON = 1e-15;
256
+ var CURVE_SUBDIVISIONS = 64;
257
+ function curveNormalizedAt(x, slope) {
258
+ if (slope > 0.499999 && slope < 0.500001) return x;
259
+ const p = Math.max(CURVE_EPSILON, Math.min(1 - CURVE_EPSILON, slope));
260
+ return p * p / (1 - p * 2) * (Math.pow((1 - p) / p, 2 * x) - 1);
261
+ }
213
262
  var TempoMap = class {
214
263
  constructor(ppqn = 960, initialBpm = 120) {
215
264
  this._ppqn = ppqn;
216
- this._entries = [{ tick: 0, bpm: initialBpm, secondsAtTick: 0 }];
265
+ this._entries = [{ tick: 0, bpm: initialBpm, interpolation: "step", secondsAtTick: 0 }];
217
266
  }
218
267
  getTempo(atTick = 0) {
219
- const entry = this._entryAt(atTick);
220
- return entry.bpm;
221
- }
222
- setTempo(bpm, atTick = 0) {
268
+ return this._getTempoAt(atTick);
269
+ }
270
+ setTempo(bpm, atTick = 0, options) {
271
+ const interpolation = options?.interpolation ?? "step";
272
+ if (typeof interpolation === "object" && interpolation.type === "curve") {
273
+ const s = interpolation.slope;
274
+ if (!Number.isFinite(s) || s <= 0 || s >= 1) {
275
+ throw new Error(
276
+ "[waveform-playlist] TempoMap: curve slope must be between 0 and 1 (exclusive), got " + s
277
+ );
278
+ }
279
+ }
223
280
  if (atTick === 0) {
224
- this._entries[0] = { ...this._entries[0], bpm };
281
+ this._entries[0] = { ...this._entries[0], bpm, interpolation: "step" };
225
282
  this._recomputeCache(0);
226
283
  return;
227
284
  }
228
285
  let i = this._entries.length - 1;
229
286
  while (i > 0 && this._entries[i].tick > atTick) i--;
230
287
  if (this._entries[i].tick === atTick) {
231
- this._entries[i] = { ...this._entries[i], bpm };
288
+ this._entries[i] = { ...this._entries[i], bpm, interpolation };
232
289
  } else {
233
290
  const secondsAtTick = this._ticksToSecondsInternal(atTick);
234
- this._entries.splice(i + 1, 0, { tick: atTick, bpm, secondsAtTick });
291
+ this._entries.splice(i + 1, 0, { tick: atTick, bpm, interpolation, secondsAtTick });
235
292
  i = i + 1;
236
293
  }
237
294
  this._recomputeCache(i);
@@ -252,8 +309,30 @@ var TempoMap = class {
252
309
  }
253
310
  const entry = this._entries[lo];
254
311
  const secondsIntoSegment = seconds - entry.secondsAtTick;
312
+ const nextEntry = lo < this._entries.length - 1 ? this._entries[lo + 1] : null;
313
+ if (nextEntry && nextEntry.interpolation === "linear") {
314
+ return Math.round(
315
+ entry.tick + this._secondsToTicksLinear(
316
+ secondsIntoSegment,
317
+ entry.bpm,
318
+ nextEntry.bpm,
319
+ nextEntry.tick - entry.tick
320
+ )
321
+ );
322
+ }
323
+ if (nextEntry && typeof nextEntry.interpolation === "object") {
324
+ return Math.round(
325
+ entry.tick + this._secondsToTicksCurve(
326
+ secondsIntoSegment,
327
+ entry.bpm,
328
+ nextEntry.bpm,
329
+ nextEntry.tick - entry.tick,
330
+ nextEntry.interpolation.slope
331
+ )
332
+ );
333
+ }
255
334
  const ticksPerSecond = entry.bpm / 60 * this._ppqn;
256
- return entry.tick + secondsIntoSegment * ticksPerSecond;
335
+ return Math.round(entry.tick + secondsIntoSegment * ticksPerSecond);
257
336
  }
258
337
  beatsToSeconds(beats) {
259
338
  return this.ticksToSeconds(beats * this._ppqn);
@@ -263,15 +342,122 @@ var TempoMap = class {
263
342
  }
264
343
  clearTempos() {
265
344
  const first = this._entries[0];
266
- this._entries = [{ tick: 0, bpm: first.bpm, secondsAtTick: 0 }];
345
+ this._entries = [{ tick: 0, bpm: first.bpm, interpolation: "step", secondsAtTick: 0 }];
346
+ }
347
+ /** Get the interpolated BPM at a tick position */
348
+ _getTempoAt(atTick) {
349
+ const entryIndex = this._entryIndexAt(atTick);
350
+ const entry = this._entries[entryIndex];
351
+ const nextEntry = entryIndex < this._entries.length - 1 ? this._entries[entryIndex + 1] : null;
352
+ if (nextEntry && nextEntry.interpolation !== "step") {
353
+ const segmentTicks = nextEntry.tick - entry.tick;
354
+ const ticksInto = atTick - entry.tick;
355
+ if (segmentTicks > 0) {
356
+ const progress = ticksInto / segmentTicks;
357
+ if (nextEntry.interpolation === "linear") {
358
+ return entry.bpm + (nextEntry.bpm - entry.bpm) * progress;
359
+ }
360
+ const t = curveNormalizedAt(progress, nextEntry.interpolation.slope);
361
+ return entry.bpm + (nextEntry.bpm - entry.bpm) * t;
362
+ }
363
+ }
364
+ return entry.bpm;
267
365
  }
268
366
  _ticksToSecondsInternal(ticks) {
269
- const entry = this._entryAt(ticks);
367
+ const entryIndex = this._entryIndexAt(ticks);
368
+ const entry = this._entries[entryIndex];
270
369
  const ticksIntoSegment = ticks - entry.tick;
370
+ const nextEntry = entryIndex < this._entries.length - 1 ? this._entries[entryIndex + 1] : null;
371
+ if (nextEntry && nextEntry.interpolation === "linear") {
372
+ const segmentTicks = nextEntry.tick - entry.tick;
373
+ return entry.secondsAtTick + this._ticksToSecondsLinear(ticksIntoSegment, entry.bpm, nextEntry.bpm, segmentTicks);
374
+ }
375
+ if (nextEntry && typeof nextEntry.interpolation === "object") {
376
+ const segmentTicks = nextEntry.tick - entry.tick;
377
+ return entry.secondsAtTick + this._ticksToSecondsCurve(
378
+ ticksIntoSegment,
379
+ entry.bpm,
380
+ nextEntry.bpm,
381
+ segmentTicks,
382
+ nextEntry.interpolation.slope
383
+ );
384
+ }
271
385
  const secondsPerTick = 60 / (entry.bpm * this._ppqn);
272
386
  return entry.secondsAtTick + ticksIntoSegment * secondsPerTick;
273
387
  }
274
- _entryAt(tick) {
388
+ /**
389
+ * Exact integration for a linear BPM ramp using the logarithmic formula.
390
+ * For bpm(t) = bpm0 + r*t where r = (bpm1-bpm0)/T:
391
+ * seconds = (T * 60) / (ppqn * (bpm1-bpm0)) * ln(bpmAtTick / bpm0)
392
+ */
393
+ _ticksToSecondsLinear(ticks, bpm0, bpm1, totalSegmentTicks) {
394
+ if (totalSegmentTicks === 0) return 0;
395
+ const bpmAtTick = bpm0 + (bpm1 - bpm0) * (ticks / totalSegmentTicks);
396
+ if (Math.abs(bpm1 - bpm0) < 1e-10) {
397
+ return ticks * 60 / (bpm0 * this._ppqn);
398
+ }
399
+ const deltaBpm = bpm1 - bpm0;
400
+ return totalSegmentTicks * 60 / (this._ppqn * deltaBpm) * Math.log(bpmAtTick / bpm0);
401
+ }
402
+ /**
403
+ * Inverse of _ticksToSecondsLinear: given seconds, return ticks.
404
+ * Closed-form via exponential: bpmAtTick = bpm0 * exp(seconds * deltaBpm * ppqn / (60 * T))
405
+ * then ticks = (bpmAtTick - bpm0) * T / deltaBpm
406
+ *
407
+ * Note: exp(log(x)) has ~1 ULP floating-point error, so round-trips depend on
408
+ * Math.round() in the caller (secondsToTicks). This is sufficient for all tested
409
+ * BPM ranges (10–300 BPM) but is not algebraically exact like the previous
410
+ * trapezoidal/quadratic approach was.
411
+ */
412
+ _secondsToTicksLinear(seconds, bpm0, bpm1, totalSegmentTicks) {
413
+ if (totalSegmentTicks === 0 || seconds === 0) return 0;
414
+ if (Math.abs(bpm1 - bpm0) < 1e-10) {
415
+ return seconds * bpm0 * this._ppqn / 60;
416
+ }
417
+ const deltaBpm = bpm1 - bpm0;
418
+ const bpmAtTick = bpm0 * Math.exp(seconds * deltaBpm * this._ppqn / (60 * totalSegmentTicks));
419
+ return (bpmAtTick - bpm0) / deltaBpm * totalSegmentTicks;
420
+ }
421
+ /**
422
+ * Subdivided trapezoidal integration for a Möbius-Ease tempo curve.
423
+ * The BPM at progress p is: bpm0 + curveNormalizedAt(p, slope) * (bpm1 - bpm0).
424
+ * We subdivide into CURVE_SUBDIVISIONS intervals and apply trapezoidal rule.
425
+ */
426
+ _ticksToSecondsCurve(ticks, bpm0, bpm1, totalSegmentTicks, slope) {
427
+ if (totalSegmentTicks === 0 || ticks === 0) return 0;
428
+ const n = CURVE_SUBDIVISIONS;
429
+ const dt = ticks / n;
430
+ let seconds = 0;
431
+ let prevBpm = bpm0;
432
+ for (let i = 1; i <= n; i++) {
433
+ const progress = dt * i / totalSegmentTicks;
434
+ const curBpm = bpm0 + curveNormalizedAt(progress, slope) * (bpm1 - bpm0);
435
+ seconds += dt * 60 / this._ppqn * (1 / prevBpm + 1 / curBpm) / 2;
436
+ prevBpm = curBpm;
437
+ }
438
+ return seconds;
439
+ }
440
+ /**
441
+ * Inverse of _ticksToSecondsCurve: given seconds into a curved segment,
442
+ * return ticks. Uses binary search since there's no closed-form inverse.
443
+ */
444
+ _secondsToTicksCurve(seconds, bpm0, bpm1, totalSegmentTicks, slope) {
445
+ if (totalSegmentTicks === 0 || seconds === 0) return 0;
446
+ const iterations = Math.min(40, Math.max(1, Math.ceil(Math.log2(2 * totalSegmentTicks))));
447
+ let lo = 0;
448
+ let hi = totalSegmentTicks;
449
+ for (let i = 0; i < iterations; i++) {
450
+ const mid = (lo + hi) / 2;
451
+ const midSeconds = this._ticksToSecondsCurve(mid, bpm0, bpm1, totalSegmentTicks, slope);
452
+ if (midSeconds < seconds) {
453
+ lo = mid;
454
+ } else {
455
+ hi = mid;
456
+ }
457
+ }
458
+ return (lo + hi) / 2;
459
+ }
460
+ _entryIndexAt(tick) {
275
461
  let lo = 0;
276
462
  let hi = this._entries.length - 1;
277
463
  while (lo < hi) {
@@ -282,16 +468,31 @@ var TempoMap = class {
282
468
  hi = mid - 1;
283
469
  }
284
470
  }
285
- return this._entries[lo];
471
+ return lo;
286
472
  }
287
473
  _recomputeCache(fromIndex) {
288
474
  for (let i = Math.max(1, fromIndex); i < this._entries.length; i++) {
289
475
  const prev = this._entries[i - 1];
290
476
  const tickDelta = this._entries[i].tick - prev.tick;
291
- const secondsPerTick = 60 / (prev.bpm * this._ppqn);
477
+ const entry = this._entries[i];
478
+ let segmentSeconds;
479
+ if (entry.interpolation === "linear") {
480
+ segmentSeconds = this._ticksToSecondsLinear(tickDelta, prev.bpm, entry.bpm, tickDelta);
481
+ } else if (typeof entry.interpolation === "object") {
482
+ segmentSeconds = this._ticksToSecondsCurve(
483
+ tickDelta,
484
+ prev.bpm,
485
+ entry.bpm,
486
+ tickDelta,
487
+ entry.interpolation.slope
488
+ );
489
+ } else {
490
+ const secondsPerTick = 60 / (prev.bpm * this._ppqn);
491
+ segmentSeconds = tickDelta * secondsPerTick;
492
+ }
292
493
  this._entries[i] = {
293
- ...this._entries[i],
294
- secondsAtTick: prev.secondsAtTick + tickDelta * secondsPerTick
494
+ ...entry,
495
+ secondsAtTick: prev.secondsAtTick + segmentSeconds
295
496
  };
296
497
  }
297
498
  }
@@ -555,14 +756,15 @@ var TrackNode = class {
555
756
 
556
757
  // src/audio/clip-player.ts
557
758
  var ClipPlayer = class {
558
- constructor(audioContext, sampleTimeline, toAudioTime) {
759
+ constructor(audioContext, sampleTimeline, tempoMap, toAudioTime) {
559
760
  this._tracks = /* @__PURE__ */ new Map();
560
761
  this._trackNodes = /* @__PURE__ */ new Map();
561
762
  this._activeSources = /* @__PURE__ */ new Map();
562
763
  this._loopEnabled = false;
563
- this._loopEnd = 0;
764
+ this._loopEndSamples = 0;
564
765
  this._audioContext = audioContext;
565
766
  this._sampleTimeline = sampleTimeline;
767
+ this._tempoMap = tempoMap;
566
768
  this._toAudioTime = toAudioTime;
567
769
  }
568
770
  setTracks(tracks, trackNodes) {
@@ -572,41 +774,50 @@ var ClipPlayer = class {
572
774
  this._tracks.set(track.id, { track, clips: track.clips });
573
775
  }
574
776
  }
575
- setLoop(enabled, _start, end) {
777
+ /** Set loop region using ticks. startTick is unused — loop clamping only needs
778
+ * the end boundary; mid-clip restart at loopStart is handled by onPositionJump. */
779
+ setLoop(enabled, _startTick, endTick) {
780
+ this._loopEnabled = enabled;
781
+ this._loopEndSamples = this._sampleTimeline.ticksToSamples(endTick);
782
+ }
783
+ /** Set loop region using samples directly */
784
+ setLoopSamples(enabled, _startSample, endSample) {
576
785
  this._loopEnabled = enabled;
577
- this._loopEnd = end;
786
+ this._loopEndSamples = endSample;
578
787
  }
579
788
  updateTrack(trackId, track) {
580
789
  this._tracks.set(trackId, { track, clips: track.clips });
581
790
  this._silenceTrack(trackId);
582
791
  }
583
- generate(fromTime, toTime) {
792
+ generate(fromTick, toTick) {
584
793
  const events = [];
794
+ const fromSample = this._sampleTimeline.ticksToSamples(fromTick);
795
+ const toSample = this._sampleTimeline.ticksToSamples(toTick);
585
796
  for (const [trackId, state] of this._tracks) {
586
797
  for (const clip of state.clips) {
587
798
  if (clip.durationSamples === 0) continue;
588
799
  if (!clip.audioBuffer) continue;
589
- const clipStartTime = this._sampleTimeline.samplesToSeconds(clip.startSample);
590
- const clipDuration = this._sampleTimeline.samplesToSeconds(clip.durationSamples);
591
- const clipOffsetTime = this._sampleTimeline.samplesToSeconds(clip.offsetSamples);
592
- if (clipStartTime < fromTime) continue;
593
- if (clipStartTime >= toTime) continue;
594
- const fadeInDuration = clip.fadeIn ? this._sampleTimeline.samplesToSeconds(clip.fadeIn.duration ?? 0) : 0;
595
- const fadeOutDuration = clip.fadeOut ? this._sampleTimeline.samplesToSeconds(clip.fadeOut.duration ?? 0) : 0;
596
- let duration = clipDuration;
597
- if (this._loopEnabled && clipStartTime + duration > this._loopEnd) {
598
- duration = this._loopEnd - clipStartTime;
800
+ const clipStartSample = clip.startSample;
801
+ if (clipStartSample < fromSample) continue;
802
+ if (clipStartSample >= toSample) continue;
803
+ const fadeInDurationSamples = clip.fadeIn ? clip.fadeIn.duration ?? 0 : 0;
804
+ const fadeOutDurationSamples = clip.fadeOut ? clip.fadeOut.duration ?? 0 : 0;
805
+ let durationSamples = clip.durationSamples;
806
+ if (this._loopEnabled && clipStartSample + durationSamples > this._loopEndSamples) {
807
+ durationSamples = this._loopEndSamples - clipStartSample;
599
808
  }
809
+ const clipTick = this._sampleTimeline.samplesToTicks(clipStartSample);
600
810
  events.push({
601
811
  trackId,
602
812
  clipId: clip.id,
603
813
  audioBuffer: clip.audioBuffer,
604
- transportTime: clipStartTime,
605
- offset: clipOffsetTime,
606
- duration,
814
+ tick: clipTick,
815
+ startSample: clipStartSample,
816
+ offsetSamples: clip.offsetSamples,
817
+ durationSamples,
607
818
  gain: clip.gain,
608
- fadeInDuration,
609
- fadeOutDuration
819
+ fadeInDurationSamples,
820
+ fadeOutDurationSamples
610
821
  });
611
822
  }
612
823
  }
@@ -620,18 +831,25 @@ var ClipPlayer = class {
620
831
  );
621
832
  return;
622
833
  }
623
- if (event.offset >= event.audioBuffer.duration) {
834
+ const sampleRate = this._sampleTimeline.sampleRate;
835
+ const offsetSeconds = event.offsetSamples / sampleRate;
836
+ const durationSeconds = event.durationSamples / sampleRate;
837
+ if (offsetSeconds >= event.audioBuffer.duration) {
838
+ console.warn(
839
+ "[waveform-playlist] ClipPlayer.consume: offset (" + offsetSeconds + "s) exceeds audioBuffer.duration (" + event.audioBuffer.duration + 's) for clipId "' + event.clipId + '" \u2014 clip will not play'
840
+ );
624
841
  return;
625
842
  }
626
843
  const source = this._audioContext.createBufferSource();
627
844
  source.buffer = event.audioBuffer;
628
- const when = this._toAudioTime(event.transportTime);
845
+ const transportSeconds = this._tempoMap.ticksToSeconds(event.tick);
846
+ const when = this._toAudioTime(transportSeconds);
629
847
  const gainNode = this._audioContext.createGain();
630
848
  gainNode.gain.value = event.gain;
631
- let fadeIn = event.fadeInDuration;
632
- let fadeOut = event.fadeOutDuration;
633
- if (fadeIn + fadeOut > event.duration) {
634
- const ratio = event.duration / (fadeIn + fadeOut);
849
+ let fadeIn = event.fadeInDurationSamples / sampleRate;
850
+ let fadeOut = event.fadeOutDurationSamples / sampleRate;
851
+ if (fadeIn + fadeOut > durationSeconds) {
852
+ const ratio = durationSeconds / (fadeIn + fadeOut);
635
853
  fadeIn *= ratio;
636
854
  fadeOut *= ratio;
637
855
  }
@@ -640,9 +858,9 @@ var ClipPlayer = class {
640
858
  gainNode.gain.linearRampToValueAtTime(event.gain, when + fadeIn);
641
859
  }
642
860
  if (fadeOut > 0) {
643
- const fadeOutStart = when + event.duration - fadeOut;
861
+ const fadeOutStart = when + durationSeconds - fadeOut;
644
862
  gainNode.gain.setValueAtTime(event.gain, fadeOutStart);
645
- gainNode.gain.linearRampToValueAtTime(0, when + event.duration);
863
+ gainNode.gain.linearRampToValueAtTime(0, when + durationSeconds);
646
864
  }
647
865
  source.connect(gainNode);
648
866
  gainNode.connect(trackNode.input);
@@ -658,33 +876,37 @@ var ClipPlayer = class {
658
876
  console.warn("[waveform-playlist] ClipPlayer: error disconnecting gain node:", String(err));
659
877
  }
660
878
  });
661
- source.start(when, event.offset, event.duration);
879
+ source.start(when, offsetSeconds, durationSeconds);
662
880
  }
663
- onPositionJump(newTime) {
881
+ onPositionJump(newTick) {
664
882
  this.silence();
883
+ const newSample = this._sampleTimeline.ticksToSamples(newTick);
665
884
  for (const [trackId, state] of this._tracks) {
666
885
  for (const clip of state.clips) {
667
886
  if (clip.durationSamples === 0) continue;
668
887
  if (!clip.audioBuffer) continue;
669
- const clipStartTime = this._sampleTimeline.samplesToSeconds(clip.startSample);
670
- const clipDuration = this._sampleTimeline.samplesToSeconds(clip.durationSamples);
671
- const clipEndTime = clipStartTime + clipDuration;
672
- const clipOffsetTime = this._sampleTimeline.samplesToSeconds(clip.offsetSamples);
673
- if (clipStartTime <= newTime && clipEndTime > newTime) {
674
- const offsetIntoClip = newTime - clipStartTime;
675
- const offset = clipOffsetTime + offsetIntoClip;
676
- const duration = clipEndTime - newTime;
677
- const fadeOutDuration = clip.fadeOut ? this._sampleTimeline.samplesToSeconds(clip.fadeOut.duration ?? 0) : 0;
888
+ const clipStartSample = clip.startSample;
889
+ const clipEndSample = clipStartSample + clip.durationSamples;
890
+ if (clipStartSample <= newSample && clipEndSample > newSample) {
891
+ const offsetIntoClipSamples = newSample - clipStartSample;
892
+ const offsetSamples = clip.offsetSamples + offsetIntoClipSamples;
893
+ let durationSamples = clipEndSample - newSample;
894
+ if (this._loopEnabled && newSample + durationSamples > this._loopEndSamples) {
895
+ durationSamples = this._loopEndSamples - newSample;
896
+ }
897
+ if (durationSamples <= 0) continue;
898
+ const fadeOutDurationSamples = clip.fadeOut ? clip.fadeOut.duration ?? 0 : 0;
678
899
  this.consume({
679
900
  trackId,
680
901
  clipId: clip.id,
681
902
  audioBuffer: clip.audioBuffer,
682
- transportTime: newTime,
683
- offset,
684
- duration,
903
+ tick: newTick,
904
+ startSample: newSample,
905
+ offsetSamples,
906
+ durationSamples,
685
907
  gain: clip.gain,
686
- fadeInDuration: 0,
687
- fadeOutDuration
908
+ fadeInDurationSamples: 0,
909
+ fadeOutDurationSamples
688
910
  });
689
911
  }
690
912
  }
@@ -757,31 +979,29 @@ var MetronomePlayer = class {
757
979
  this._accentBuffer = accent;
758
980
  this._normalBuffer = normal;
759
981
  }
760
- generate(fromTime, toTime) {
982
+ generate(fromTick, toTick) {
761
983
  if (!this._enabled || !this._accentBuffer || !this._normalBuffer) {
762
984
  return [];
763
985
  }
764
986
  const events = [];
765
- const fromTicks = this._tempoMap.secondsToTicks(fromTime);
766
- const toTicks = this._tempoMap.secondsToTicks(toTime);
767
- let entry = this._meterMap.getEntryAt(fromTicks);
768
- let beatSize = this._meterMap.ticksPerBeat(fromTicks);
769
- const tickIntoSection = fromTicks - entry.tick;
987
+ let entry = this._meterMap.getEntryAt(fromTick);
988
+ let beatSize = this._meterMap.ticksPerBeat(fromTick);
989
+ const tickIntoSection = fromTick - entry.tick;
770
990
  let tick = entry.tick + Math.ceil(tickIntoSection / beatSize) * beatSize;
771
- while (tick < toTicks) {
772
- const currentEntry = this._meterMap.getEntryAt(tick);
991
+ while (tick < toTick) {
992
+ const tickPos = tick;
993
+ const currentEntry = this._meterMap.getEntryAt(tickPos);
773
994
  if (currentEntry.tick !== entry.tick) {
774
995
  entry = currentEntry;
775
- beatSize = this._meterMap.ticksPerBeat(tick);
996
+ beatSize = this._meterMap.ticksPerBeat(tickPos);
776
997
  }
777
- const isAccent = this._meterMap.isBarBoundary(tick);
778
- const transportTime = this._tempoMap.ticksToSeconds(tick);
998
+ const isAccent = this._meterMap.isBarBoundary(tickPos);
779
999
  events.push({
780
- transportTime,
1000
+ tick: tickPos,
781
1001
  isAccent,
782
1002
  buffer: isAccent ? this._accentBuffer : this._normalBuffer
783
1003
  });
784
- beatSize = this._meterMap.ticksPerBeat(tick);
1004
+ beatSize = this._meterMap.ticksPerBeat(tickPos);
785
1005
  tick += beatSize;
786
1006
  }
787
1007
  return events;
@@ -802,10 +1022,10 @@ var MetronomePlayer = class {
802
1022
  );
803
1023
  }
804
1024
  });
805
- source.start(this._toAudioTime(event.transportTime));
1025
+ const transportTime = this._tempoMap.ticksToSeconds(event.tick);
1026
+ source.start(this._toAudioTime(transportTime));
806
1027
  }
807
- onPositionJump(_newTime) {
808
- this.silence();
1028
+ onPositionJump(_newTick) {
809
1029
  }
810
1030
  silence() {
811
1031
  for (const source of this._activeSources) {
@@ -838,6 +1058,9 @@ var Transport = class _Transport {
838
1058
  this._soloedTrackIds = /* @__PURE__ */ new Set();
839
1059
  this._mutedTrackIds = /* @__PURE__ */ new Set();
840
1060
  this._playing = false;
1061
+ this._loopEnabled = false;
1062
+ this._loopStartTick = 0;
1063
+ this._loopStartSeconds = 0;
841
1064
  this._listeners = /* @__PURE__ */ new Map();
842
1065
  this._audioContext = audioContext;
843
1066
  const sampleRate = options.sampleRate ?? audioContext.sampleRate;
@@ -848,15 +1071,17 @@ var Transport = class _Transport {
848
1071
  const lookahead = options.schedulerLookahead ?? 0.2;
849
1072
  _Transport._validateOptions(sampleRate, ppqn, tempo, numerator, denominator, lookahead);
850
1073
  this._clock = new Clock(audioContext);
851
- this._scheduler = new Scheduler({
852
- lookahead,
853
- onLoop: (loopStartTime) => {
854
- this._clock.seekTo(loopStartTime);
855
- }
856
- });
857
1074
  this._sampleTimeline = new SampleTimeline(sampleRate);
858
1075
  this._meterMap = new MeterMap(ppqn, numerator, denominator);
859
1076
  this._tempoMap = new TempoMap(ppqn, tempo);
1077
+ this._scheduler = new Scheduler(this._tempoMap, {
1078
+ lookahead,
1079
+ onLoop: (loopStartSeconds, loopEndSeconds, currentTimeSeconds) => {
1080
+ const timeToBoundary = loopEndSeconds - currentTimeSeconds;
1081
+ this._clock.seekTo(loopStartSeconds - timeToBoundary);
1082
+ }
1083
+ });
1084
+ this._sampleTimeline.setTempoMap(this._tempoMap);
860
1085
  this._initAudioGraph(audioContext);
861
1086
  this._timer = new Timer(() => {
862
1087
  const time = this._clock.getTime();
@@ -880,7 +1105,8 @@ var Transport = class _Transport {
880
1105
  this._scheduler.reset(currentTime);
881
1106
  this._endTime = endTime;
882
1107
  this._clock.start();
883
- this._clipPlayer.onPositionJump(currentTime);
1108
+ const currentTick = this._tempoMap.secondsToTicks(currentTime);
1109
+ this._clipPlayer.onPositionJump(currentTick);
884
1110
  this._timer.start();
885
1111
  this._playing = true;
886
1112
  this._emit("play");
@@ -916,12 +1142,17 @@ var Transport = class _Transport {
916
1142
  this._endTime = void 0;
917
1143
  if (wasPlaying) {
918
1144
  this._clock.start();
919
- this._clipPlayer.onPositionJump(time);
1145
+ const seekTick = this._tempoMap.secondsToTicks(time);
1146
+ this._clipPlayer.onPositionJump(seekTick);
920
1147
  this._timer.start();
921
1148
  }
922
1149
  }
923
1150
  getCurrentTime() {
924
- return this._clock.getTime();
1151
+ const t = this._clock.getTime();
1152
+ if (this._loopEnabled && t < this._loopStartSeconds) {
1153
+ return this._loopStartSeconds;
1154
+ }
1155
+ return t;
925
1156
  }
926
1157
  isPlaying() {
927
1158
  return this._playing;
@@ -1037,20 +1268,56 @@ var Transport = class _Transport {
1037
1268
  this._masterNode.setVolume(volume);
1038
1269
  }
1039
1270
  // --- Loop ---
1040
- setLoop(enabled, start, end) {
1041
- if (enabled && start >= end) {
1271
+ /** Primary loop API — ticks as source of truth */
1272
+ setLoop(enabled, startTick, endTick) {
1273
+ if (enabled && startTick >= endTick) {
1274
+ console.warn(
1275
+ "[waveform-playlist] Transport.setLoop: startTick (" + startTick + ") must be less than endTick (" + endTick + ")"
1276
+ );
1277
+ return;
1278
+ }
1279
+ this._loopEnabled = enabled;
1280
+ this._loopStartTick = startTick;
1281
+ this._loopStartSeconds = this._tempoMap.ticksToSeconds(startTick);
1282
+ this._scheduler.setLoop(enabled, startTick, endTick);
1283
+ this._clipPlayer.setLoop(enabled, startTick, endTick);
1284
+ this._emit("loop");
1285
+ }
1286
+ /** Convenience — converts seconds to ticks */
1287
+ setLoopSeconds(enabled, startSec, endSec) {
1288
+ const startTick = this._tempoMap.secondsToTicks(startSec);
1289
+ const endTick = this._tempoMap.secondsToTicks(endSec);
1290
+ this.setLoop(enabled, startTick, endTick);
1291
+ }
1292
+ /** Convenience — sets loop in samples */
1293
+ setLoopSamples(enabled, startSample, endSample) {
1294
+ if (enabled && (!Number.isFinite(startSample) || !Number.isFinite(endSample))) {
1295
+ console.warn(
1296
+ "[waveform-playlist] Transport.setLoopSamples: non-finite sample values (" + startSample + ", " + endSample + ")"
1297
+ );
1298
+ return;
1299
+ }
1300
+ if (enabled && startSample >= endSample) {
1042
1301
  console.warn(
1043
- "[waveform-playlist] Transport.setLoop: start (" + start + ") must be less than end (" + end + ")"
1302
+ "[waveform-playlist] Transport.setLoopSamples: startSample (" + startSample + ") must be less than endSample (" + endSample + ")"
1044
1303
  );
1045
1304
  return;
1046
1305
  }
1047
- this._scheduler.setLoop(enabled, start, end);
1048
- this._clipPlayer.setLoop(enabled, start, end);
1306
+ const startTick = this._sampleTimeline.samplesToTicks(startSample);
1307
+ const endTick = this._sampleTimeline.samplesToTicks(endSample);
1308
+ this._loopEnabled = enabled;
1309
+ this._loopStartTick = startTick;
1310
+ this._loopStartSeconds = this._tempoMap.ticksToSeconds(startTick);
1311
+ this._clipPlayer.setLoopSamples(enabled, startSample, endSample);
1312
+ this._scheduler.setLoop(enabled, startTick, endTick);
1049
1313
  this._emit("loop");
1050
1314
  }
1051
1315
  // --- Tempo ---
1052
- setTempo(bpm, atTick) {
1053
- this._tempoMap.setTempo(bpm, atTick);
1316
+ setTempo(bpm, atTick, options) {
1317
+ this._tempoMap.setTempo(bpm, atTick, options);
1318
+ if (this._loopEnabled) {
1319
+ this._loopStartSeconds = this._tempoMap.ticksToSeconds(this._loopStartTick);
1320
+ }
1054
1321
  this._emit("tempochange");
1055
1322
  }
1056
1323
  getTempo(atTick) {
@@ -1074,6 +1341,9 @@ var Transport = class _Transport {
1074
1341
  }
1075
1342
  clearTempos() {
1076
1343
  this._tempoMap.clearTempos();
1344
+ if (this._loopEnabled) {
1345
+ this._loopStartSeconds = this._tempoMap.ticksToSeconds(this._loopStartTick);
1346
+ }
1077
1347
  this._emit("tempochange");
1078
1348
  }
1079
1349
  barToTick(bar) {
@@ -1169,7 +1439,12 @@ var Transport = class _Transport {
1169
1439
  this._masterNode = new MasterNode(audioContext);
1170
1440
  this._masterNode.output.connect(audioContext.destination);
1171
1441
  const toAudioTime = (transportTime) => this._clock.toAudioTime(transportTime);
1172
- this._clipPlayer = new ClipPlayer(audioContext, this._sampleTimeline, toAudioTime);
1442
+ this._clipPlayer = new ClipPlayer(
1443
+ audioContext,
1444
+ this._sampleTimeline,
1445
+ this._tempoMap,
1446
+ toAudioTime
1447
+ );
1173
1448
  this._metronomePlayer = new MetronomePlayer(
1174
1449
  audioContext,
1175
1450
  this._tempoMap,
@@ -1272,7 +1547,7 @@ var NativePlayoutAdapter = class {
1272
1547
  this._transport.setTrackPan(trackId, pan);
1273
1548
  }
1274
1549
  setLoop(enabled, start, end) {
1275
- this._transport.setLoop(enabled, start, end);
1550
+ this._transport.setLoopSeconds(enabled, start, end);
1276
1551
  }
1277
1552
  dispose() {
1278
1553
  this._transport.dispose();