@waveform-playlist/playout 9.0.3 → 9.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.d.mts +270 -4
- package/dist/index.d.ts +270 -4
- package/dist/index.js +850 -51
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +845 -37
- package/dist/index.mjs.map +1 -1
- package/package.json +6 -5
package/dist/index.js
CHANGED
|
@@ -20,6 +20,9 @@ var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: tru
|
|
|
20
20
|
// src/index.ts
|
|
21
21
|
var index_exports = {};
|
|
22
22
|
__export(index_exports, {
|
|
23
|
+
MidiToneTrack: () => MidiToneTrack,
|
|
24
|
+
SoundFontCache: () => SoundFontCache,
|
|
25
|
+
SoundFontToneTrack: () => SoundFontToneTrack,
|
|
23
26
|
TonePlayout: () => TonePlayout,
|
|
24
27
|
ToneTrack: () => ToneTrack,
|
|
25
28
|
applyFadeIn: () => applyFadeIn,
|
|
@@ -39,7 +42,7 @@ __export(index_exports, {
|
|
|
39
42
|
module.exports = __toCommonJS(index_exports);
|
|
40
43
|
|
|
41
44
|
// src/TonePlayout.ts
|
|
42
|
-
var
|
|
45
|
+
var import_tone4 = require("tone");
|
|
43
46
|
|
|
44
47
|
// src/ToneTrack.ts
|
|
45
48
|
var import_tone = require("tone");
|
|
@@ -449,6 +452,561 @@ var ToneTrack = class {
|
|
|
449
452
|
}
|
|
450
453
|
};
|
|
451
454
|
|
|
455
|
+
// src/MidiToneTrack.ts
|
|
456
|
+
var import_tone2 = require("tone");
|
|
457
|
+
function getDrumCategory(midiNote) {
|
|
458
|
+
if (midiNote === 35 || midiNote === 36) return "kick";
|
|
459
|
+
if (midiNote >= 37 && midiNote <= 40) return "snare";
|
|
460
|
+
if (midiNote === 41 || midiNote === 43 || midiNote === 45 || midiNote === 47 || midiNote === 48 || midiNote === 50)
|
|
461
|
+
return "tom";
|
|
462
|
+
return "cymbal";
|
|
463
|
+
}
|
|
464
|
+
var MidiToneTrack = class {
|
|
465
|
+
constructor(options) {
|
|
466
|
+
this.track = options.track;
|
|
467
|
+
this.volumeNode = new import_tone2.Volume(this.gainToDb(options.track.gain));
|
|
468
|
+
this.panNode = new import_tone2.Panner(options.track.stereoPan);
|
|
469
|
+
this.muteGain = new import_tone2.Gain(options.track.muted ? 0 : 1);
|
|
470
|
+
this.volumeNode.chain(this.panNode, this.muteGain);
|
|
471
|
+
this.synth = new import_tone2.PolySynth(import_tone2.Synth, options.synthOptions);
|
|
472
|
+
this.synth.connect(this.volumeNode);
|
|
473
|
+
this.kickSynth = new import_tone2.PolySynth(import_tone2.MembraneSynth, {
|
|
474
|
+
voice: import_tone2.MembraneSynth,
|
|
475
|
+
options: {
|
|
476
|
+
pitchDecay: 0.05,
|
|
477
|
+
octaves: 6,
|
|
478
|
+
envelope: { attack: 1e-3, decay: 0.4, sustain: 0, release: 0.1 }
|
|
479
|
+
}
|
|
480
|
+
});
|
|
481
|
+
this.snareSynth = new import_tone2.NoiseSynth({
|
|
482
|
+
noise: { type: "white" },
|
|
483
|
+
envelope: { attack: 1e-3, decay: 0.15, sustain: 0, release: 0.05 }
|
|
484
|
+
});
|
|
485
|
+
this.cymbalSynth = new import_tone2.PolySynth(import_tone2.MetalSynth, {
|
|
486
|
+
voice: import_tone2.MetalSynth,
|
|
487
|
+
options: {
|
|
488
|
+
envelope: { attack: 1e-3, decay: 0.3, release: 0.1 },
|
|
489
|
+
harmonicity: 5.1,
|
|
490
|
+
modulationIndex: 32,
|
|
491
|
+
resonance: 4e3,
|
|
492
|
+
octaves: 1.5
|
|
493
|
+
}
|
|
494
|
+
});
|
|
495
|
+
this.tomSynth = new import_tone2.PolySynth(import_tone2.MembraneSynth, {
|
|
496
|
+
voice: import_tone2.MembraneSynth,
|
|
497
|
+
options: {
|
|
498
|
+
pitchDecay: 0.08,
|
|
499
|
+
octaves: 4,
|
|
500
|
+
envelope: { attack: 1e-3, decay: 0.3, sustain: 0, release: 0.1 }
|
|
501
|
+
}
|
|
502
|
+
});
|
|
503
|
+
this.kickSynth.connect(this.volumeNode);
|
|
504
|
+
this.snareSynth.connect(this.volumeNode);
|
|
505
|
+
this.cymbalSynth.connect(this.volumeNode);
|
|
506
|
+
this.tomSynth.connect(this.volumeNode);
|
|
507
|
+
const destination = options.destination || (0, import_tone2.getDestination)();
|
|
508
|
+
if (options.effects) {
|
|
509
|
+
const cleanup = options.effects(this.muteGain, destination, false);
|
|
510
|
+
if (cleanup) {
|
|
511
|
+
this.effectsCleanup = cleanup;
|
|
512
|
+
}
|
|
513
|
+
} else {
|
|
514
|
+
this.muteGain.connect(destination);
|
|
515
|
+
}
|
|
516
|
+
this.scheduledClips = options.clips.map((clipInfo) => {
|
|
517
|
+
const visibleNotes = clipInfo.notes.filter((note) => {
|
|
518
|
+
const noteEnd = note.time + note.duration;
|
|
519
|
+
return note.time < clipInfo.offset + clipInfo.duration && noteEnd > clipInfo.offset;
|
|
520
|
+
});
|
|
521
|
+
const absClipStart = this.track.startTime + clipInfo.startTime;
|
|
522
|
+
const partEvents = visibleNotes.map((note) => {
|
|
523
|
+
const adjustedTime = note.time - clipInfo.offset;
|
|
524
|
+
const clampedStart = Math.max(0, adjustedTime);
|
|
525
|
+
const clampedDuration = Math.min(
|
|
526
|
+
note.duration - Math.max(0, clipInfo.offset - note.time),
|
|
527
|
+
clipInfo.duration - clampedStart
|
|
528
|
+
);
|
|
529
|
+
return {
|
|
530
|
+
time: absClipStart + clampedStart,
|
|
531
|
+
note: note.name,
|
|
532
|
+
midi: note.midi,
|
|
533
|
+
duration: Math.max(0, clampedDuration),
|
|
534
|
+
velocity: note.velocity,
|
|
535
|
+
channel: note.channel
|
|
536
|
+
};
|
|
537
|
+
});
|
|
538
|
+
const part = new import_tone2.Part((time, event) => {
|
|
539
|
+
if (event.duration > 0) {
|
|
540
|
+
this.triggerNote(
|
|
541
|
+
event.midi,
|
|
542
|
+
event.note,
|
|
543
|
+
event.duration,
|
|
544
|
+
time,
|
|
545
|
+
event.velocity,
|
|
546
|
+
event.channel
|
|
547
|
+
);
|
|
548
|
+
}
|
|
549
|
+
}, partEvents);
|
|
550
|
+
part.start(0);
|
|
551
|
+
return { clipInfo, part };
|
|
552
|
+
});
|
|
553
|
+
}
|
|
554
|
+
/**
|
|
555
|
+
* Trigger a note using the appropriate synth.
|
|
556
|
+
* Routes per-note: channel 9 → percussion synths, others → melodic PolySynth.
|
|
557
|
+
*/
|
|
558
|
+
triggerNote(midiNote, noteName, duration, time, velocity, channel) {
|
|
559
|
+
if (channel === 9) {
|
|
560
|
+
const category = getDrumCategory(midiNote);
|
|
561
|
+
switch (category) {
|
|
562
|
+
case "kick":
|
|
563
|
+
this.kickSynth.triggerAttackRelease("C1", duration, time, velocity);
|
|
564
|
+
break;
|
|
565
|
+
case "snare":
|
|
566
|
+
try {
|
|
567
|
+
this.snareSynth.triggerAttackRelease(duration, time, velocity);
|
|
568
|
+
} catch (err) {
|
|
569
|
+
console.warn(
|
|
570
|
+
"[waveform-playlist] Snare overlap \u2014 previous hit still decaying, skipped:",
|
|
571
|
+
err
|
|
572
|
+
);
|
|
573
|
+
}
|
|
574
|
+
break;
|
|
575
|
+
case "tom": {
|
|
576
|
+
const tomPitches = {
|
|
577
|
+
41: "G1",
|
|
578
|
+
43: "A1",
|
|
579
|
+
45: "C2",
|
|
580
|
+
47: "D2",
|
|
581
|
+
48: "E2",
|
|
582
|
+
50: "G2"
|
|
583
|
+
};
|
|
584
|
+
this.tomSynth.triggerAttackRelease(
|
|
585
|
+
tomPitches[midiNote] || "C2",
|
|
586
|
+
duration,
|
|
587
|
+
time,
|
|
588
|
+
velocity
|
|
589
|
+
);
|
|
590
|
+
break;
|
|
591
|
+
}
|
|
592
|
+
case "cymbal":
|
|
593
|
+
this.cymbalSynth.triggerAttackRelease("C4", duration, time, velocity);
|
|
594
|
+
break;
|
|
595
|
+
}
|
|
596
|
+
} else {
|
|
597
|
+
this.synth.triggerAttackRelease(noteName, duration, time, velocity);
|
|
598
|
+
}
|
|
599
|
+
}
|
|
600
|
+
gainToDb(gain) {
|
|
601
|
+
return 20 * Math.log10(gain);
|
|
602
|
+
}
|
|
603
|
+
/**
|
|
604
|
+
* No-op for MIDI — schedule guard is for AudioBufferSourceNode ghost tick prevention.
|
|
605
|
+
* Tone.Part handles its own scheduling relative to Transport.
|
|
606
|
+
*/
|
|
607
|
+
setScheduleGuardOffset(_offset) {
|
|
608
|
+
}
|
|
609
|
+
/**
|
|
610
|
+
* For MIDI, mid-clip sources are notes that should already be sounding.
|
|
611
|
+
* We trigger them with their remaining duration.
|
|
612
|
+
*/
|
|
613
|
+
startMidClipSources(transportOffset, audioContextTime) {
|
|
614
|
+
for (const { clipInfo } of this.scheduledClips) {
|
|
615
|
+
const absClipStart = this.track.startTime + clipInfo.startTime;
|
|
616
|
+
const absClipEnd = absClipStart + clipInfo.duration;
|
|
617
|
+
if (absClipStart < transportOffset && absClipEnd > transportOffset) {
|
|
618
|
+
for (const note of clipInfo.notes) {
|
|
619
|
+
const adjustedTime = note.time - clipInfo.offset;
|
|
620
|
+
const noteAbsStart = absClipStart + Math.max(0, adjustedTime);
|
|
621
|
+
const noteAbsEnd = noteAbsStart + note.duration;
|
|
622
|
+
if (noteAbsStart < transportOffset && noteAbsEnd > transportOffset) {
|
|
623
|
+
const remainingDuration = noteAbsEnd - transportOffset;
|
|
624
|
+
try {
|
|
625
|
+
this.triggerNote(
|
|
626
|
+
note.midi,
|
|
627
|
+
note.name,
|
|
628
|
+
remainingDuration,
|
|
629
|
+
audioContextTime,
|
|
630
|
+
note.velocity,
|
|
631
|
+
note.channel
|
|
632
|
+
);
|
|
633
|
+
} catch (err) {
|
|
634
|
+
console.warn(
|
|
635
|
+
`[waveform-playlist] Failed to start mid-clip MIDI note "${note.name}" on track "${this.id}":`,
|
|
636
|
+
err
|
|
637
|
+
);
|
|
638
|
+
}
|
|
639
|
+
}
|
|
640
|
+
}
|
|
641
|
+
}
|
|
642
|
+
}
|
|
643
|
+
}
|
|
644
|
+
/**
|
|
645
|
+
* Stop all sounding notes and cancel scheduled Part events.
|
|
646
|
+
*/
|
|
647
|
+
stopAllSources() {
|
|
648
|
+
const now3 = (0, import_tone2.getContext)().rawContext.currentTime;
|
|
649
|
+
try {
|
|
650
|
+
this.synth.releaseAll(now3);
|
|
651
|
+
this.kickSynth.releaseAll(now3);
|
|
652
|
+
this.cymbalSynth.releaseAll(now3);
|
|
653
|
+
this.tomSynth.releaseAll(now3);
|
|
654
|
+
} catch (err) {
|
|
655
|
+
console.warn(`[waveform-playlist] Error releasing synth on track "${this.id}":`, err);
|
|
656
|
+
}
|
|
657
|
+
}
|
|
658
|
+
/**
|
|
659
|
+
* No-op for MIDI — MIDI uses note velocity, not gain fades.
|
|
660
|
+
*/
|
|
661
|
+
prepareFades(_when, _offset) {
|
|
662
|
+
}
|
|
663
|
+
/**
|
|
664
|
+
* No-op for MIDI — no fade automation to cancel.
|
|
665
|
+
*/
|
|
666
|
+
cancelFades() {
|
|
667
|
+
}
|
|
668
|
+
setVolume(gain) {
|
|
669
|
+
this.track.gain = gain;
|
|
670
|
+
this.volumeNode.volume.value = this.gainToDb(gain);
|
|
671
|
+
}
|
|
672
|
+
setPan(pan) {
|
|
673
|
+
this.track.stereoPan = pan;
|
|
674
|
+
this.panNode.pan.value = pan;
|
|
675
|
+
}
|
|
676
|
+
setMute(muted) {
|
|
677
|
+
this.track.muted = muted;
|
|
678
|
+
const value = muted ? 0 : 1;
|
|
679
|
+
const audioParam = getUnderlyingAudioParam(this.muteGain.gain);
|
|
680
|
+
audioParam?.setValueAtTime(value, 0);
|
|
681
|
+
this.muteGain.gain.value = value;
|
|
682
|
+
}
|
|
683
|
+
setSolo(soloed) {
|
|
684
|
+
this.track.soloed = soloed;
|
|
685
|
+
}
|
|
686
|
+
dispose() {
|
|
687
|
+
if (this.effectsCleanup) {
|
|
688
|
+
try {
|
|
689
|
+
this.effectsCleanup();
|
|
690
|
+
} catch (err) {
|
|
691
|
+
console.warn(
|
|
692
|
+
`[waveform-playlist] Error during MIDI track "${this.id}" effects cleanup:`,
|
|
693
|
+
err
|
|
694
|
+
);
|
|
695
|
+
}
|
|
696
|
+
}
|
|
697
|
+
this.stopAllSources();
|
|
698
|
+
this.scheduledClips.forEach(({ part }, index) => {
|
|
699
|
+
try {
|
|
700
|
+
part.dispose();
|
|
701
|
+
} catch (err) {
|
|
702
|
+
console.warn(
|
|
703
|
+
`[waveform-playlist] Error disposing Part ${index} on MIDI track "${this.id}":`,
|
|
704
|
+
err
|
|
705
|
+
);
|
|
706
|
+
}
|
|
707
|
+
});
|
|
708
|
+
const synthsToDispose = [
|
|
709
|
+
this.synth,
|
|
710
|
+
this.kickSynth,
|
|
711
|
+
this.snareSynth,
|
|
712
|
+
this.cymbalSynth,
|
|
713
|
+
this.tomSynth
|
|
714
|
+
];
|
|
715
|
+
for (const s of synthsToDispose) {
|
|
716
|
+
try {
|
|
717
|
+
s?.dispose();
|
|
718
|
+
} catch (err) {
|
|
719
|
+
console.warn(`[waveform-playlist] Error disposing synth on MIDI track "${this.id}":`, err);
|
|
720
|
+
}
|
|
721
|
+
}
|
|
722
|
+
try {
|
|
723
|
+
this.volumeNode.dispose();
|
|
724
|
+
} catch (err) {
|
|
725
|
+
console.warn(
|
|
726
|
+
`[waveform-playlist] Error disposing volumeNode on MIDI track "${this.id}":`,
|
|
727
|
+
err
|
|
728
|
+
);
|
|
729
|
+
}
|
|
730
|
+
try {
|
|
731
|
+
this.panNode.dispose();
|
|
732
|
+
} catch (err) {
|
|
733
|
+
console.warn(`[waveform-playlist] Error disposing panNode on MIDI track "${this.id}":`, err);
|
|
734
|
+
}
|
|
735
|
+
try {
|
|
736
|
+
this.muteGain.dispose();
|
|
737
|
+
} catch (err) {
|
|
738
|
+
console.warn(`[waveform-playlist] Error disposing muteGain on MIDI track "${this.id}":`, err);
|
|
739
|
+
}
|
|
740
|
+
}
|
|
741
|
+
get id() {
|
|
742
|
+
return this.track.id;
|
|
743
|
+
}
|
|
744
|
+
get duration() {
|
|
745
|
+
if (this.scheduledClips.length === 0) return 0;
|
|
746
|
+
const lastClip = this.scheduledClips[this.scheduledClips.length - 1];
|
|
747
|
+
return lastClip.clipInfo.startTime + lastClip.clipInfo.duration;
|
|
748
|
+
}
|
|
749
|
+
get muted() {
|
|
750
|
+
return this.track.muted;
|
|
751
|
+
}
|
|
752
|
+
get startTime() {
|
|
753
|
+
return this.track.startTime;
|
|
754
|
+
}
|
|
755
|
+
};
|
|
756
|
+
|
|
757
|
+
// src/SoundFontToneTrack.ts
|
|
758
|
+
var import_tone3 = require("tone");
|
|
759
|
+
var _SoundFontToneTrack = class _SoundFontToneTrack {
|
|
760
|
+
constructor(options) {
|
|
761
|
+
this.activeSources = /* @__PURE__ */ new Set();
|
|
762
|
+
this.track = options.track;
|
|
763
|
+
this.soundFontCache = options.soundFontCache;
|
|
764
|
+
this.programNumber = options.programNumber ?? 0;
|
|
765
|
+
this.bankNumber = options.isPercussion ? 128 : 0;
|
|
766
|
+
this.volumeNode = new import_tone3.Volume(this.gainToDb(options.track.gain));
|
|
767
|
+
this.panNode = new import_tone3.Panner(options.track.stereoPan);
|
|
768
|
+
this.muteGain = new import_tone3.Gain(options.track.muted ? 0 : 1);
|
|
769
|
+
this.volumeNode.chain(this.panNode, this.muteGain);
|
|
770
|
+
const destination = options.destination || (0, import_tone3.getDestination)();
|
|
771
|
+
if (options.effects) {
|
|
772
|
+
const cleanup = options.effects(this.muteGain, destination, false);
|
|
773
|
+
if (cleanup) {
|
|
774
|
+
this.effectsCleanup = cleanup;
|
|
775
|
+
}
|
|
776
|
+
} else {
|
|
777
|
+
this.muteGain.connect(destination);
|
|
778
|
+
}
|
|
779
|
+
this.scheduledClips = options.clips.map((clipInfo) => {
|
|
780
|
+
const visibleNotes = clipInfo.notes.filter((note) => {
|
|
781
|
+
const noteEnd = note.time + note.duration;
|
|
782
|
+
return note.time < clipInfo.offset + clipInfo.duration && noteEnd > clipInfo.offset;
|
|
783
|
+
});
|
|
784
|
+
const absClipStart = this.track.startTime + clipInfo.startTime;
|
|
785
|
+
const partEvents = visibleNotes.map((note) => {
|
|
786
|
+
const adjustedTime = note.time - clipInfo.offset;
|
|
787
|
+
const clampedStart = Math.max(0, adjustedTime);
|
|
788
|
+
const clampedDuration = Math.min(
|
|
789
|
+
note.duration - Math.max(0, clipInfo.offset - note.time),
|
|
790
|
+
clipInfo.duration - clampedStart
|
|
791
|
+
);
|
|
792
|
+
return {
|
|
793
|
+
time: absClipStart + clampedStart,
|
|
794
|
+
note: note.name,
|
|
795
|
+
midi: note.midi,
|
|
796
|
+
duration: Math.max(0, clampedDuration),
|
|
797
|
+
velocity: note.velocity,
|
|
798
|
+
channel: note.channel
|
|
799
|
+
};
|
|
800
|
+
});
|
|
801
|
+
const part = new import_tone3.Part((time, event) => {
|
|
802
|
+
if (event.duration > 0) {
|
|
803
|
+
this.triggerNote(event.midi, event.duration, time, event.velocity, event.channel);
|
|
804
|
+
}
|
|
805
|
+
}, partEvents);
|
|
806
|
+
part.start(0);
|
|
807
|
+
return { clipInfo, part };
|
|
808
|
+
});
|
|
809
|
+
}
|
|
810
|
+
/**
|
|
811
|
+
* Trigger a note by creating a native AudioBufferSourceNode from the SoundFont cache.
|
|
812
|
+
*
|
|
813
|
+
* Per-note routing: channel 9 → bank 128 (drums), others → bank 0 with programNumber.
|
|
814
|
+
*/
|
|
815
|
+
triggerNote(midiNote, duration, time, velocity, channel) {
|
|
816
|
+
const bank = channel === 9 ? 128 : this.bankNumber;
|
|
817
|
+
const preset = channel === 9 ? 0 : this.programNumber;
|
|
818
|
+
const sfSample = this.soundFontCache.getAudioBuffer(midiNote, bank, preset);
|
|
819
|
+
if (!sfSample) {
|
|
820
|
+
if (!_SoundFontToneTrack._missingSampleWarned) {
|
|
821
|
+
console.warn(
|
|
822
|
+
`[waveform-playlist] SoundFont sample not found for MIDI note ${midiNote} (bank ${bank}, preset ${preset}). Subsequent missing samples will be silent.`
|
|
823
|
+
);
|
|
824
|
+
_SoundFontToneTrack._missingSampleWarned = true;
|
|
825
|
+
}
|
|
826
|
+
return;
|
|
827
|
+
}
|
|
828
|
+
const rawContext = (0, import_tone3.getContext)().rawContext;
|
|
829
|
+
const source = rawContext.createBufferSource();
|
|
830
|
+
source.buffer = sfSample.buffer;
|
|
831
|
+
source.playbackRate.value = sfSample.playbackRate;
|
|
832
|
+
if (sfSample.loopMode === 1 || sfSample.loopMode === 3) {
|
|
833
|
+
source.loop = true;
|
|
834
|
+
source.loopStart = sfSample.loopStart;
|
|
835
|
+
source.loopEnd = sfSample.loopEnd;
|
|
836
|
+
}
|
|
837
|
+
const sampleDuration = sfSample.buffer.duration / sfSample.playbackRate;
|
|
838
|
+
const effectiveDuration = sfSample.loopMode === 0 ? Math.max(duration, sampleDuration) : duration;
|
|
839
|
+
const peakGain = velocity * velocity;
|
|
840
|
+
const gainNode = rawContext.createGain();
|
|
841
|
+
const { attackVolEnv, holdVolEnv, decayVolEnv, sustainVolEnv, releaseVolEnv } = sfSample;
|
|
842
|
+
const sustainGain = peakGain * sustainVolEnv;
|
|
843
|
+
gainNode.gain.setValueAtTime(0, time);
|
|
844
|
+
gainNode.gain.linearRampToValueAtTime(peakGain, time + attackVolEnv);
|
|
845
|
+
if (holdVolEnv > 1e-3) {
|
|
846
|
+
gainNode.gain.setValueAtTime(peakGain, time + attackVolEnv + holdVolEnv);
|
|
847
|
+
}
|
|
848
|
+
const decayStart = time + attackVolEnv + holdVolEnv;
|
|
849
|
+
gainNode.gain.linearRampToValueAtTime(sustainGain, decayStart + decayVolEnv);
|
|
850
|
+
gainNode.gain.setValueAtTime(sustainGain, time + effectiveDuration);
|
|
851
|
+
gainNode.gain.linearRampToValueAtTime(0, time + effectiveDuration + releaseVolEnv);
|
|
852
|
+
source.connect(gainNode);
|
|
853
|
+
gainNode.connect(this.volumeNode.input.input);
|
|
854
|
+
this.activeSources.add(source);
|
|
855
|
+
source.onended = () => {
|
|
856
|
+
this.activeSources.delete(source);
|
|
857
|
+
try {
|
|
858
|
+
gainNode.disconnect();
|
|
859
|
+
} catch (err) {
|
|
860
|
+
console.warn("[waveform-playlist] GainNode already disconnected:", err);
|
|
861
|
+
}
|
|
862
|
+
};
|
|
863
|
+
source.start(time);
|
|
864
|
+
source.stop(time + effectiveDuration + releaseVolEnv);
|
|
865
|
+
}
|
|
866
|
+
gainToDb(gain) {
|
|
867
|
+
return 20 * Math.log10(gain);
|
|
868
|
+
}
|
|
869
|
+
/**
|
|
870
|
+
* No-op — Tone.Part handles scheduling internally, no ghost tick guard needed.
|
|
871
|
+
*/
|
|
872
|
+
setScheduleGuardOffset(_offset) {
|
|
873
|
+
}
|
|
874
|
+
/**
|
|
875
|
+
* Start notes that should already be sounding at the current transport offset.
|
|
876
|
+
*/
|
|
877
|
+
startMidClipSources(transportOffset, audioContextTime) {
|
|
878
|
+
for (const { clipInfo } of this.scheduledClips) {
|
|
879
|
+
const absClipStart = this.track.startTime + clipInfo.startTime;
|
|
880
|
+
const absClipEnd = absClipStart + clipInfo.duration;
|
|
881
|
+
if (absClipStart < transportOffset && absClipEnd > transportOffset) {
|
|
882
|
+
for (const note of clipInfo.notes) {
|
|
883
|
+
const adjustedTime = note.time - clipInfo.offset;
|
|
884
|
+
const noteAbsStart = absClipStart + Math.max(0, adjustedTime);
|
|
885
|
+
const noteAbsEnd = noteAbsStart + note.duration;
|
|
886
|
+
if (noteAbsStart < transportOffset && noteAbsEnd > transportOffset) {
|
|
887
|
+
const remainingDuration = noteAbsEnd - transportOffset;
|
|
888
|
+
try {
|
|
889
|
+
this.triggerNote(
|
|
890
|
+
note.midi,
|
|
891
|
+
remainingDuration,
|
|
892
|
+
audioContextTime,
|
|
893
|
+
note.velocity,
|
|
894
|
+
note.channel
|
|
895
|
+
);
|
|
896
|
+
} catch (err) {
|
|
897
|
+
console.warn(
|
|
898
|
+
`[waveform-playlist] Failed to start mid-clip SoundFont note on track "${this.id}":`,
|
|
899
|
+
err
|
|
900
|
+
);
|
|
901
|
+
}
|
|
902
|
+
}
|
|
903
|
+
}
|
|
904
|
+
}
|
|
905
|
+
}
|
|
906
|
+
}
|
|
907
|
+
/**
|
|
908
|
+
* Stop all active AudioBufferSourceNodes.
|
|
909
|
+
*/
|
|
910
|
+
stopAllSources() {
|
|
911
|
+
for (const source of this.activeSources) {
|
|
912
|
+
try {
|
|
913
|
+
source.stop();
|
|
914
|
+
} catch (err) {
|
|
915
|
+
console.warn("[waveform-playlist] Error stopping AudioBufferSourceNode:", err);
|
|
916
|
+
}
|
|
917
|
+
}
|
|
918
|
+
this.activeSources.clear();
|
|
919
|
+
}
|
|
920
|
+
/** No-op for MIDI — MIDI uses note velocity, not gain fades. */
|
|
921
|
+
prepareFades(_when, _offset) {
|
|
922
|
+
}
|
|
923
|
+
/** No-op for MIDI — no fade automation to cancel. */
|
|
924
|
+
cancelFades() {
|
|
925
|
+
}
|
|
926
|
+
setVolume(gain) {
|
|
927
|
+
this.track.gain = gain;
|
|
928
|
+
this.volumeNode.volume.value = this.gainToDb(gain);
|
|
929
|
+
}
|
|
930
|
+
setPan(pan) {
|
|
931
|
+
this.track.stereoPan = pan;
|
|
932
|
+
this.panNode.pan.value = pan;
|
|
933
|
+
}
|
|
934
|
+
setMute(muted) {
|
|
935
|
+
this.track.muted = muted;
|
|
936
|
+
const value = muted ? 0 : 1;
|
|
937
|
+
const audioParam = getUnderlyingAudioParam(this.muteGain.gain);
|
|
938
|
+
audioParam?.setValueAtTime(value, 0);
|
|
939
|
+
this.muteGain.gain.value = value;
|
|
940
|
+
}
|
|
941
|
+
setSolo(soloed) {
|
|
942
|
+
this.track.soloed = soloed;
|
|
943
|
+
}
|
|
944
|
+
dispose() {
|
|
945
|
+
if (this.effectsCleanup) {
|
|
946
|
+
try {
|
|
947
|
+
this.effectsCleanup();
|
|
948
|
+
} catch (err) {
|
|
949
|
+
console.warn(
|
|
950
|
+
`[waveform-playlist] Error during SoundFont track "${this.id}" effects cleanup:`,
|
|
951
|
+
err
|
|
952
|
+
);
|
|
953
|
+
}
|
|
954
|
+
}
|
|
955
|
+
this.stopAllSources();
|
|
956
|
+
this.scheduledClips.forEach(({ part }, index) => {
|
|
957
|
+
try {
|
|
958
|
+
part.dispose();
|
|
959
|
+
} catch (err) {
|
|
960
|
+
console.warn(
|
|
961
|
+
`[waveform-playlist] Error disposing Part ${index} on SoundFont track "${this.id}":`,
|
|
962
|
+
err
|
|
963
|
+
);
|
|
964
|
+
}
|
|
965
|
+
});
|
|
966
|
+
try {
|
|
967
|
+
this.volumeNode.dispose();
|
|
968
|
+
} catch (err) {
|
|
969
|
+
console.warn(
|
|
970
|
+
`[waveform-playlist] Error disposing volumeNode on SoundFont track "${this.id}":`,
|
|
971
|
+
err
|
|
972
|
+
);
|
|
973
|
+
}
|
|
974
|
+
try {
|
|
975
|
+
this.panNode.dispose();
|
|
976
|
+
} catch (err) {
|
|
977
|
+
console.warn(
|
|
978
|
+
`[waveform-playlist] Error disposing panNode on SoundFont track "${this.id}":`,
|
|
979
|
+
err
|
|
980
|
+
);
|
|
981
|
+
}
|
|
982
|
+
try {
|
|
983
|
+
this.muteGain.dispose();
|
|
984
|
+
} catch (err) {
|
|
985
|
+
console.warn(
|
|
986
|
+
`[waveform-playlist] Error disposing muteGain on SoundFont track "${this.id}":`,
|
|
987
|
+
err
|
|
988
|
+
);
|
|
989
|
+
}
|
|
990
|
+
}
|
|
991
|
+
get id() {
|
|
992
|
+
return this.track.id;
|
|
993
|
+
}
|
|
994
|
+
get duration() {
|
|
995
|
+
if (this.scheduledClips.length === 0) return 0;
|
|
996
|
+
const lastClip = this.scheduledClips[this.scheduledClips.length - 1];
|
|
997
|
+
return lastClip.clipInfo.startTime + lastClip.clipInfo.duration;
|
|
998
|
+
}
|
|
999
|
+
get muted() {
|
|
1000
|
+
return this.track.muted;
|
|
1001
|
+
}
|
|
1002
|
+
get startTime() {
|
|
1003
|
+
return this.track.startTime;
|
|
1004
|
+
}
|
|
1005
|
+
};
|
|
1006
|
+
/** Rate-limit missing sample warnings — one per class lifetime */
|
|
1007
|
+
_SoundFontToneTrack._missingSampleWarned = false;
|
|
1008
|
+
var SoundFontToneTrack = _SoundFontToneTrack;
|
|
1009
|
+
|
|
452
1010
|
// src/TonePlayout.ts
|
|
453
1011
|
var TonePlayout = class {
|
|
454
1012
|
constructor(options = {}) {
|
|
@@ -461,9 +1019,9 @@ var TonePlayout = class {
|
|
|
461
1019
|
this._loopEnabled = false;
|
|
462
1020
|
this._loopStart = 0;
|
|
463
1021
|
this._loopEnd = 0;
|
|
464
|
-
this.masterVolume = new
|
|
1022
|
+
this.masterVolume = new import_tone4.Volume(this.gainToDb(options.masterGain ?? 1));
|
|
465
1023
|
if (options.effects) {
|
|
466
|
-
const cleanup = options.effects(this.masterVolume, (0,
|
|
1024
|
+
const cleanup = options.effects(this.masterVolume, (0, import_tone4.getDestination)(), false);
|
|
467
1025
|
if (cleanup) {
|
|
468
1026
|
this.effectsCleanup = cleanup;
|
|
469
1027
|
}
|
|
@@ -483,7 +1041,7 @@ var TonePlayout = class {
|
|
|
483
1041
|
clearCompletionEvent() {
|
|
484
1042
|
if (this._completionEventId !== null) {
|
|
485
1043
|
try {
|
|
486
|
-
(0,
|
|
1044
|
+
(0, import_tone4.getTransport)().clear(this._completionEventId);
|
|
487
1045
|
} catch (err) {
|
|
488
1046
|
console.warn("[waveform-playlist] Error clearing Transport completion event:", err);
|
|
489
1047
|
}
|
|
@@ -492,7 +1050,7 @@ var TonePlayout = class {
|
|
|
492
1050
|
}
|
|
493
1051
|
async init() {
|
|
494
1052
|
if (this.isInitialized) return;
|
|
495
|
-
await (0,
|
|
1053
|
+
await (0, import_tone4.start)();
|
|
496
1054
|
this.isInitialized = true;
|
|
497
1055
|
}
|
|
498
1056
|
addTrack(trackOptions) {
|
|
@@ -508,6 +1066,32 @@ var TonePlayout = class {
|
|
|
508
1066
|
}
|
|
509
1067
|
return toneTrack;
|
|
510
1068
|
}
|
|
1069
|
+
addMidiTrack(trackOptions) {
|
|
1070
|
+
const optionsWithDestination = {
|
|
1071
|
+
...trackOptions,
|
|
1072
|
+
destination: this.masterVolume
|
|
1073
|
+
};
|
|
1074
|
+
const midiTrack = new MidiToneTrack(optionsWithDestination);
|
|
1075
|
+
this.tracks.set(midiTrack.id, midiTrack);
|
|
1076
|
+
this.manualMuteState.set(midiTrack.id, trackOptions.track.muted ?? false);
|
|
1077
|
+
if (trackOptions.track.soloed) {
|
|
1078
|
+
this.soloedTracks.add(midiTrack.id);
|
|
1079
|
+
}
|
|
1080
|
+
return midiTrack;
|
|
1081
|
+
}
|
|
1082
|
+
addSoundFontTrack(trackOptions) {
|
|
1083
|
+
const optionsWithDestination = {
|
|
1084
|
+
...trackOptions,
|
|
1085
|
+
destination: this.masterVolume
|
|
1086
|
+
};
|
|
1087
|
+
const sfTrack = new SoundFontToneTrack(optionsWithDestination);
|
|
1088
|
+
this.tracks.set(sfTrack.id, sfTrack);
|
|
1089
|
+
this.manualMuteState.set(sfTrack.id, trackOptions.track.muted ?? false);
|
|
1090
|
+
if (trackOptions.track.soloed) {
|
|
1091
|
+
this.soloedTracks.add(sfTrack.id);
|
|
1092
|
+
}
|
|
1093
|
+
return sfTrack;
|
|
1094
|
+
}
|
|
511
1095
|
/**
|
|
512
1096
|
* Apply solo muting after all tracks have been added.
|
|
513
1097
|
* Call this after adding all tracks to ensure solo logic is applied correctly.
|
|
@@ -531,8 +1115,8 @@ var TonePlayout = class {
|
|
|
531
1115
|
if (!this.isInitialized) {
|
|
532
1116
|
throw new Error("[waveform-playlist] TonePlayout not initialized. Call init() first.");
|
|
533
1117
|
}
|
|
534
|
-
const startTime = when ?? (0,
|
|
535
|
-
const transport = (0,
|
|
1118
|
+
const startTime = when ?? (0, import_tone4.now)();
|
|
1119
|
+
const transport = (0, import_tone4.getTransport)();
|
|
536
1120
|
this.clearCompletionEvent();
|
|
537
1121
|
const transportOffset = offset ?? 0;
|
|
538
1122
|
this.tracks.forEach((track) => {
|
|
@@ -578,7 +1162,7 @@ var TonePlayout = class {
|
|
|
578
1162
|
}
|
|
579
1163
|
}
|
|
580
1164
|
pause() {
|
|
581
|
-
const transport = (0,
|
|
1165
|
+
const transport = (0, import_tone4.getTransport)();
|
|
582
1166
|
try {
|
|
583
1167
|
transport.pause();
|
|
584
1168
|
} catch (err) {
|
|
@@ -589,7 +1173,7 @@ var TonePlayout = class {
|
|
|
589
1173
|
this.clearCompletionEvent();
|
|
590
1174
|
}
|
|
591
1175
|
stop() {
|
|
592
|
-
const transport = (0,
|
|
1176
|
+
const transport = (0, import_tone4.getTransport)();
|
|
593
1177
|
try {
|
|
594
1178
|
transport.stop();
|
|
595
1179
|
} catch (err) {
|
|
@@ -649,7 +1233,7 @@ var TonePlayout = class {
|
|
|
649
1233
|
this._loopEnabled = enabled;
|
|
650
1234
|
this._loopStart = loopStart;
|
|
651
1235
|
this._loopEnd = loopEnd;
|
|
652
|
-
const transport = (0,
|
|
1236
|
+
const transport = (0, import_tone4.getTransport)();
|
|
653
1237
|
try {
|
|
654
1238
|
transport.loopStart = loopStart;
|
|
655
1239
|
transport.loopEnd = loopEnd;
|
|
@@ -660,7 +1244,7 @@ var TonePlayout = class {
|
|
|
660
1244
|
}
|
|
661
1245
|
if (enabled && !this._loopHandler) {
|
|
662
1246
|
this._loopHandler = () => {
|
|
663
|
-
const currentTime = (0,
|
|
1247
|
+
const currentTime = (0, import_tone4.now)();
|
|
664
1248
|
this.tracks.forEach((track) => {
|
|
665
1249
|
try {
|
|
666
1250
|
track.stopAllSources();
|
|
@@ -683,16 +1267,16 @@ var TonePlayout = class {
|
|
|
683
1267
|
}
|
|
684
1268
|
}
|
|
685
1269
|
getCurrentTime() {
|
|
686
|
-
return (0,
|
|
1270
|
+
return (0, import_tone4.getTransport)().seconds;
|
|
687
1271
|
}
|
|
688
1272
|
seekTo(time) {
|
|
689
|
-
(0,
|
|
1273
|
+
(0, import_tone4.getTransport)().seconds = time;
|
|
690
1274
|
}
|
|
691
1275
|
dispose() {
|
|
692
1276
|
this.clearCompletionEvent();
|
|
693
1277
|
if (this._loopHandler) {
|
|
694
1278
|
try {
|
|
695
|
-
(0,
|
|
1279
|
+
(0, import_tone4.getTransport)().off("loop", this._loopHandler);
|
|
696
1280
|
} catch (err) {
|
|
697
1281
|
console.warn("[waveform-playlist] Error removing Transport loop handler:", err);
|
|
698
1282
|
}
|
|
@@ -720,23 +1304,187 @@ var TonePlayout = class {
|
|
|
720
1304
|
}
|
|
721
1305
|
}
|
|
722
1306
|
get context() {
|
|
723
|
-
return (0,
|
|
1307
|
+
return (0, import_tone4.getContext)();
|
|
724
1308
|
}
|
|
725
1309
|
get sampleRate() {
|
|
726
|
-
return (0,
|
|
1310
|
+
return (0, import_tone4.getContext)().sampleRate;
|
|
727
1311
|
}
|
|
728
1312
|
setOnPlaybackComplete(callback) {
|
|
729
1313
|
this.onPlaybackCompleteCallback = callback;
|
|
730
1314
|
}
|
|
731
1315
|
};
|
|
732
1316
|
|
|
1317
|
+
// src/SoundFontCache.ts
|
|
1318
|
+
var import_soundfont2 = require("soundfont2");
|
|
1319
|
+
function timecentsToSeconds(tc) {
|
|
1320
|
+
return Math.pow(2, tc / 1200);
|
|
1321
|
+
}
|
|
1322
|
+
var MAX_RELEASE_SECONDS = 5;
|
|
1323
|
+
function getGeneratorValue(generators, type) {
|
|
1324
|
+
return generators[type]?.value;
|
|
1325
|
+
}
|
|
1326
|
+
var SoundFontCache = class {
|
|
1327
|
+
/**
|
|
1328
|
+
* @param context Optional AudioContext for createBuffer(). If omitted, uses
|
|
1329
|
+
* an OfflineAudioContext which doesn't require user gesture — safe to
|
|
1330
|
+
* construct before user interaction (avoids Firefox autoplay warnings).
|
|
1331
|
+
*/
|
|
1332
|
+
constructor(context) {
|
|
1333
|
+
this.sf2 = null;
|
|
1334
|
+
this.audioBufferCache = /* @__PURE__ */ new Map();
|
|
1335
|
+
this.context = context ?? new OfflineAudioContext(1, 1, 44100);
|
|
1336
|
+
}
|
|
1337
|
+
/**
|
|
1338
|
+
* Load and parse an SF2 file from a URL.
|
|
1339
|
+
*/
|
|
1340
|
+
async load(url, signal) {
|
|
1341
|
+
const response = await fetch(url, { signal });
|
|
1342
|
+
if (!response.ok) {
|
|
1343
|
+
throw new Error(`Failed to fetch SoundFont ${url}: ${response.statusText}`);
|
|
1344
|
+
}
|
|
1345
|
+
const arrayBuffer = await response.arrayBuffer();
|
|
1346
|
+
try {
|
|
1347
|
+
this.sf2 = new import_soundfont2.SoundFont2(new Uint8Array(arrayBuffer));
|
|
1348
|
+
} catch (err) {
|
|
1349
|
+
throw new Error(
|
|
1350
|
+
`Failed to parse SoundFont ${url}: ${err instanceof Error ? err.message : String(err)}`
|
|
1351
|
+
);
|
|
1352
|
+
}
|
|
1353
|
+
}
|
|
1354
|
+
/**
|
|
1355
|
+
* Load from an already-fetched ArrayBuffer.
|
|
1356
|
+
*/
|
|
1357
|
+
loadFromBuffer(data) {
|
|
1358
|
+
try {
|
|
1359
|
+
this.sf2 = new import_soundfont2.SoundFont2(new Uint8Array(data));
|
|
1360
|
+
} catch (err) {
|
|
1361
|
+
throw new Error(
|
|
1362
|
+
`Failed to parse SoundFont from buffer: ${err instanceof Error ? err.message : String(err)}`
|
|
1363
|
+
);
|
|
1364
|
+
}
|
|
1365
|
+
}
|
|
1366
|
+
get isLoaded() {
|
|
1367
|
+
return this.sf2 !== null;
|
|
1368
|
+
}
|
|
1369
|
+
/**
|
|
1370
|
+
* Look up a MIDI note and return the AudioBuffer + playbackRate.
|
|
1371
|
+
*
|
|
1372
|
+
* @param midiNote - MIDI note number (0-127)
|
|
1373
|
+
* @param bankNumber - Bank number (0 for melodic, 128 for percussion/drums)
|
|
1374
|
+
* @param presetNumber - GM program number (0-127)
|
|
1375
|
+
* @returns SoundFontSample or null if no sample found for this note
|
|
1376
|
+
*/
|
|
1377
|
+
getAudioBuffer(midiNote, bankNumber = 0, presetNumber = 0) {
|
|
1378
|
+
if (!this.sf2) return null;
|
|
1379
|
+
const keyData = this.sf2.getKeyData(midiNote, bankNumber, presetNumber);
|
|
1380
|
+
if (!keyData) return null;
|
|
1381
|
+
const sample = keyData.sample;
|
|
1382
|
+
const sampleIndex = this.sf2.samples.indexOf(sample);
|
|
1383
|
+
let buffer = this.audioBufferCache.get(sampleIndex);
|
|
1384
|
+
if (!buffer) {
|
|
1385
|
+
buffer = this.int16ToAudioBuffer(sample.data, sample.header.sampleRate);
|
|
1386
|
+
this.audioBufferCache.set(sampleIndex, buffer);
|
|
1387
|
+
}
|
|
1388
|
+
const playbackRate = this.calculatePlaybackRate(midiNote, keyData);
|
|
1389
|
+
const loopAndEnvelope = this.extractLoopAndEnvelope(keyData);
|
|
1390
|
+
return { buffer, playbackRate, ...loopAndEnvelope };
|
|
1391
|
+
}
|
|
1392
|
+
/**
|
|
1393
|
+
* Extract loop points and volume envelope data from per-zone generators.
|
|
1394
|
+
*
|
|
1395
|
+
* Loop points are stored as absolute indices into the SF2 sample pool.
|
|
1396
|
+
* We convert to AudioBuffer-relative seconds by subtracting header.start
|
|
1397
|
+
* and dividing by sampleRate.
|
|
1398
|
+
*
|
|
1399
|
+
* Volume envelope times are in SF2 timecents; sustain is centibels attenuation.
|
|
1400
|
+
*/
|
|
1401
|
+
extractLoopAndEnvelope(keyData) {
|
|
1402
|
+
const { generators } = keyData;
|
|
1403
|
+
const header = keyData.sample.header;
|
|
1404
|
+
const loopMode = getGeneratorValue(generators, import_soundfont2.GeneratorType.SampleModes) ?? 0;
|
|
1405
|
+
const rawLoopStart = header.startLoop + (getGeneratorValue(generators, import_soundfont2.GeneratorType.StartLoopAddrsOffset) ?? 0) + (getGeneratorValue(generators, import_soundfont2.GeneratorType.StartLoopAddrsCoarseOffset) ?? 0) * 32768;
|
|
1406
|
+
const rawLoopEnd = header.endLoop + (getGeneratorValue(generators, import_soundfont2.GeneratorType.EndLoopAddrsOffset) ?? 0) + (getGeneratorValue(generators, import_soundfont2.GeneratorType.EndLoopAddrsCoarseOffset) ?? 0) * 32768;
|
|
1407
|
+
const loopStart = rawLoopStart / header.sampleRate;
|
|
1408
|
+
const loopEnd = rawLoopEnd / header.sampleRate;
|
|
1409
|
+
const attackVolEnv = timecentsToSeconds(
|
|
1410
|
+
getGeneratorValue(generators, import_soundfont2.GeneratorType.AttackVolEnv) ?? -12e3
|
|
1411
|
+
);
|
|
1412
|
+
const holdVolEnv = timecentsToSeconds(
|
|
1413
|
+
getGeneratorValue(generators, import_soundfont2.GeneratorType.HoldVolEnv) ?? -12e3
|
|
1414
|
+
);
|
|
1415
|
+
const decayVolEnv = timecentsToSeconds(
|
|
1416
|
+
getGeneratorValue(generators, import_soundfont2.GeneratorType.DecayVolEnv) ?? -12e3
|
|
1417
|
+
);
|
|
1418
|
+
const releaseVolEnv = Math.min(
|
|
1419
|
+
timecentsToSeconds(getGeneratorValue(generators, import_soundfont2.GeneratorType.ReleaseVolEnv) ?? -12e3),
|
|
1420
|
+
MAX_RELEASE_SECONDS
|
|
1421
|
+
);
|
|
1422
|
+
const sustainCb = getGeneratorValue(generators, import_soundfont2.GeneratorType.SustainVolEnv) ?? 0;
|
|
1423
|
+
const sustainVolEnv = Math.pow(10, -sustainCb / 200);
|
|
1424
|
+
return {
|
|
1425
|
+
loopMode,
|
|
1426
|
+
loopStart,
|
|
1427
|
+
loopEnd,
|
|
1428
|
+
attackVolEnv,
|
|
1429
|
+
holdVolEnv,
|
|
1430
|
+
decayVolEnv,
|
|
1431
|
+
sustainVolEnv,
|
|
1432
|
+
releaseVolEnv
|
|
1433
|
+
};
|
|
1434
|
+
}
|
|
1435
|
+
/**
|
|
1436
|
+
* Calculate playback rate for a MIDI note using the SF2 generator chain.
|
|
1437
|
+
*
|
|
1438
|
+
* SF2 root key resolution priority:
|
|
1439
|
+
* 1. OverridingRootKey generator (per-zone, most specific)
|
|
1440
|
+
* 2. sample.header.originalPitch (sample header)
|
|
1441
|
+
* 3. MIDI note 60 (middle C fallback)
|
|
1442
|
+
*
|
|
1443
|
+
* Tuning adjustments:
|
|
1444
|
+
* - CoarseTune generator (semitones, additive)
|
|
1445
|
+
* - FineTune generator (cents, additive)
|
|
1446
|
+
* - sample.header.pitchCorrection (cents, additive)
|
|
1447
|
+
*/
|
|
1448
|
+
calculatePlaybackRate(midiNote, keyData) {
|
|
1449
|
+
const sample = keyData.sample;
|
|
1450
|
+
const generators = keyData.generators;
|
|
1451
|
+
const overrideRootKey = getGeneratorValue(generators, import_soundfont2.GeneratorType.OverridingRootKey);
|
|
1452
|
+
const originalPitch = sample.header.originalPitch;
|
|
1453
|
+
const rootKey = overrideRootKey !== void 0 ? overrideRootKey : originalPitch !== 255 ? originalPitch : 60;
|
|
1454
|
+
const coarseTune = getGeneratorValue(generators, import_soundfont2.GeneratorType.CoarseTune) ?? 0;
|
|
1455
|
+
const fineTune = getGeneratorValue(generators, import_soundfont2.GeneratorType.FineTune) ?? 0;
|
|
1456
|
+
const pitchCorrection = sample.header.pitchCorrection ?? 0;
|
|
1457
|
+
const totalSemitones = midiNote - rootKey + coarseTune + (fineTune + pitchCorrection) / 100;
|
|
1458
|
+
return Math.pow(2, totalSemitones / 12);
|
|
1459
|
+
}
|
|
1460
|
+
/**
|
|
1461
|
+
* Convert Int16Array sample data to an AudioBuffer.
|
|
1462
|
+
* SF2 samples are 16-bit signed integers; Web Audio needs Float32 [-1, 1].
|
|
1463
|
+
*/
|
|
1464
|
+
int16ToAudioBuffer(data, sampleRate) {
|
|
1465
|
+
const buffer = this.context.createBuffer(1, data.length, sampleRate);
|
|
1466
|
+
const channel = buffer.getChannelData(0);
|
|
1467
|
+
for (let i = 0; i < data.length; i++) {
|
|
1468
|
+
channel[i] = data[i] / 32768;
|
|
1469
|
+
}
|
|
1470
|
+
return buffer;
|
|
1471
|
+
}
|
|
1472
|
+
/**
|
|
1473
|
+
* Clear all cached AudioBuffers and release the parsed SF2.
|
|
1474
|
+
*/
|
|
1475
|
+
dispose() {
|
|
1476
|
+
this.audioBufferCache.clear();
|
|
1477
|
+
this.sf2 = null;
|
|
1478
|
+
}
|
|
1479
|
+
};
|
|
1480
|
+
|
|
733
1481
|
// src/audioContext.ts
var import_tone5 = require("tone");
// Lazily created singleton Tone.js Context shared by the whole playout package.
var globalToneContext = null;
// Returns the shared Tone.js Context, creating it on first use and installing
// it as Tone's active context via setContext() so subsequent Tone calls use it.
function getGlobalContext() {
  if (!globalToneContext) {
    globalToneContext = new import_tone5.Context();
    (0, import_tone5.setContext)(globalToneContext);
  }
  return globalToneContext;
}
|
|
@@ -763,14 +1511,14 @@ async function closeGlobalAudioContext() {
|
|
|
763
1511
|
}
|
|
764
1512
|
|
|
765
1513
|
// src/mediaStreamSourceManager.ts
|
|
766
|
-
var
|
|
1514
|
+
var import_tone6 = require("tone");
|
|
767
1515
|
var streamSources = /* @__PURE__ */ new Map();
|
|
768
1516
|
var streamCleanupHandlers = /* @__PURE__ */ new Map();
|
|
769
1517
|
function getMediaStreamSource(stream) {
|
|
770
1518
|
if (streamSources.has(stream)) {
|
|
771
1519
|
return streamSources.get(stream);
|
|
772
1520
|
}
|
|
773
|
-
const context = (0,
|
|
1521
|
+
const context = (0, import_tone6.getContext)();
|
|
774
1522
|
const source = context.createMediaStreamSource(stream);
|
|
775
1523
|
streamSources.set(stream, source);
|
|
776
1524
|
const cleanup = () => {
|
|
@@ -797,7 +1545,7 @@ function hasMediaStreamSource(stream) {
|
|
|
797
1545
|
|
|
798
1546
|
// src/TonePlayoutAdapter.ts
|
|
799
1547
|
var import_core = require("@waveform-playlist/core");
|
|
800
|
-
var
|
|
1548
|
+
var import_tone7 = require("tone");
|
|
801
1549
|
function createToneAdapter(options) {
|
|
802
1550
|
let playout = null;
|
|
803
1551
|
let _isPlaying = false;
|
|
@@ -830,34 +1578,82 @@ function createToneAdapter(options) {
|
|
|
830
1578
|
});
|
|
831
1579
|
}
|
|
832
1580
|
for (const track of tracks) {
|
|
833
|
-
const
|
|
834
|
-
|
|
835
|
-
|
|
836
|
-
|
|
837
|
-
|
|
838
|
-
|
|
839
|
-
|
|
840
|
-
|
|
841
|
-
|
|
842
|
-
|
|
843
|
-
|
|
844
|
-
|
|
845
|
-
|
|
846
|
-
|
|
847
|
-
|
|
848
|
-
|
|
849
|
-
|
|
850
|
-
|
|
851
|
-
|
|
852
|
-
|
|
853
|
-
|
|
854
|
-
|
|
855
|
-
|
|
856
|
-
|
|
857
|
-
|
|
858
|
-
|
|
859
|
-
|
|
860
|
-
|
|
1581
|
+
const audioClips = track.clips.filter((c) => c.audioBuffer && !c.midiNotes);
|
|
1582
|
+
const midiClips = track.clips.filter((c) => c.midiNotes && c.midiNotes.length > 0);
|
|
1583
|
+
if (audioClips.length > 0) {
|
|
1584
|
+
const startTime = Math.min(...audioClips.map(import_core.clipStartTime));
|
|
1585
|
+
const endTime = Math.max(...audioClips.map(import_core.clipEndTime));
|
|
1586
|
+
const trackObj = {
|
|
1587
|
+
id: track.id,
|
|
1588
|
+
name: track.name,
|
|
1589
|
+
gain: track.volume,
|
|
1590
|
+
muted: track.muted,
|
|
1591
|
+
soloed: track.soloed,
|
|
1592
|
+
stereoPan: track.pan,
|
|
1593
|
+
startTime,
|
|
1594
|
+
endTime
|
|
1595
|
+
};
|
|
1596
|
+
const clipInfos = audioClips.map((clip) => ({
|
|
1597
|
+
buffer: clip.audioBuffer,
|
|
1598
|
+
startTime: (0, import_core.clipStartTime)(clip) - startTime,
|
|
1599
|
+
duration: (0, import_core.clipDurationTime)(clip),
|
|
1600
|
+
offset: (0, import_core.clipOffsetTime)(clip),
|
|
1601
|
+
fadeIn: clip.fadeIn,
|
|
1602
|
+
fadeOut: clip.fadeOut,
|
|
1603
|
+
gain: clip.gain
|
|
1604
|
+
}));
|
|
1605
|
+
playout.addTrack({
|
|
1606
|
+
clips: clipInfos,
|
|
1607
|
+
track: trackObj,
|
|
1608
|
+
effects: track.effects
|
|
1609
|
+
});
|
|
1610
|
+
}
|
|
1611
|
+
if (midiClips.length > 0) {
|
|
1612
|
+
const startTime = Math.min(...midiClips.map(import_core.clipStartTime));
|
|
1613
|
+
const endTime = Math.max(...midiClips.map(import_core.clipEndTime));
|
|
1614
|
+
const trackId = audioClips.length > 0 ? `${track.id}:midi` : track.id;
|
|
1615
|
+
const trackObj = {
|
|
1616
|
+
id: trackId,
|
|
1617
|
+
name: track.name,
|
|
1618
|
+
gain: track.volume,
|
|
1619
|
+
muted: track.muted,
|
|
1620
|
+
soloed: track.soloed,
|
|
1621
|
+
stereoPan: track.pan,
|
|
1622
|
+
startTime,
|
|
1623
|
+
endTime
|
|
1624
|
+
};
|
|
1625
|
+
const midiClipInfos = midiClips.map((clip) => ({
|
|
1626
|
+
notes: clip.midiNotes,
|
|
1627
|
+
startTime: (0, import_core.clipStartTime)(clip) - startTime,
|
|
1628
|
+
duration: (0, import_core.clipDurationTime)(clip),
|
|
1629
|
+
offset: (0, import_core.clipOffsetTime)(clip)
|
|
1630
|
+
}));
|
|
1631
|
+
if (options?.soundFontCache?.isLoaded) {
|
|
1632
|
+
const firstClip = midiClips[0];
|
|
1633
|
+
const midiChannel = firstClip.midiChannel;
|
|
1634
|
+
const isPercussion = midiChannel === 9;
|
|
1635
|
+
const programNumber = firstClip.midiProgram ?? 0;
|
|
1636
|
+
playout.addSoundFontTrack({
|
|
1637
|
+
clips: midiClipInfos,
|
|
1638
|
+
track: trackObj,
|
|
1639
|
+
soundFontCache: options.soundFontCache,
|
|
1640
|
+
programNumber,
|
|
1641
|
+
isPercussion,
|
|
1642
|
+
effects: track.effects
|
|
1643
|
+
});
|
|
1644
|
+
} else {
|
|
1645
|
+
if (options?.soundFontCache) {
|
|
1646
|
+
console.warn(
|
|
1647
|
+
`[waveform-playlist] SoundFont not loaded for track "${track.name}" \u2014 falling back to PolySynth.`
|
|
1648
|
+
);
|
|
1649
|
+
}
|
|
1650
|
+
playout.addMidiTrack({
|
|
1651
|
+
clips: midiClipInfos,
|
|
1652
|
+
track: trackObj,
|
|
1653
|
+
effects: track.effects
|
|
1654
|
+
});
|
|
1655
|
+
}
|
|
1656
|
+
}
|
|
861
1657
|
}
|
|
862
1658
|
playout.applyInitialSoloState();
|
|
863
1659
|
playout.setLoop(_loopEnabled, _loopStart, _loopEnd);
|
|
@@ -885,7 +1681,7 @@ function createToneAdapter(options) {
|
|
|
885
1681
|
return;
|
|
886
1682
|
}
|
|
887
1683
|
const duration = endTime !== void 0 ? endTime - startTime : void 0;
|
|
888
|
-
playout.play((0,
|
|
1684
|
+
playout.play((0, import_tone7.now)(), startTime, duration);
|
|
889
1685
|
_isPlaying = true;
|
|
890
1686
|
},
|
|
891
1687
|
pause() {
|
|
@@ -939,6 +1735,9 @@ function createToneAdapter(options) {
|
|
|
939
1735
|
}
|
|
940
1736
|
// Annotate the CommonJS export names for ESM import in node:
|
|
941
1737
|
0 && (module.exports = {
|
|
1738
|
+
MidiToneTrack,
|
|
1739
|
+
SoundFontCache,
|
|
1740
|
+
SoundFontToneTrack,
|
|
942
1741
|
TonePlayout,
|
|
943
1742
|
ToneTrack,
|
|
944
1743
|
applyFadeIn,
|