@marmooo/midy 0.2.6 → 0.2.8
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +18 -13
- package/esm/midy-GM1.d.ts +73 -74
- package/esm/midy-GM1.d.ts.map +1 -1
- package/esm/midy-GM1.js +207 -218
- package/esm/midy-GM2.d.ts +125 -127
- package/esm/midy-GM2.d.ts.map +1 -1
- package/esm/midy-GM2.js +358 -418
- package/esm/midy-GMLite.d.ts +69 -70
- package/esm/midy-GMLite.d.ts.map +1 -1
- package/esm/midy-GMLite.js +195 -207
- package/esm/midy.d.ts +148 -150
- package/esm/midy.d.ts.map +1 -1
- package/esm/midy.js +404 -500
- package/package.json +1 -1
- package/script/midy-GM1.d.ts +73 -74
- package/script/midy-GM1.d.ts.map +1 -1
- package/script/midy-GM1.js +207 -218
- package/script/midy-GM2.d.ts +125 -127
- package/script/midy-GM2.d.ts.map +1 -1
- package/script/midy-GM2.js +358 -418
- package/script/midy-GMLite.d.ts +69 -70
- package/script/midy-GMLite.d.ts.map +1 -1
- package/script/midy-GMLite.js +195 -207
- package/script/midy.d.ts +148 -150
- package/script/midy.d.ts.map +1 -1
- package/script/midy.js +404 -500
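
The change running through the diff below is that the playback API now threads an explicit `scheduleTime` argument through `noteOn`, `noteOff`, `handleMIDIMessage`, and the control-change, RPN, and SysEx handlers, falling back to `audioContext.currentTime` when it is omitted, and that each channel gains a `sustainNotes` list for sustain-pedal bookkeeping. A minimal sketch of how a caller might use the new optional argument; the constructor call and setup flow are assumptions for illustration, only the method signatures come from the diff:

```js
import { MidyGM1 } from "@marmooo/midy";

// Hypothetical setup: how the synth is constructed and loaded is an
// assumption here; only the noteOn/noteOff/handleMIDIMessage signatures
// below are taken from this diff.
const audioContext = new AudioContext();
const midy = new MidyGM1(audioContext);

// Without a scheduleTime, the methods fall back to audioContext.currentTime.
midy.noteOn(0, 60, 100);

// With an explicit scheduleTime, events can be queued ahead of the clock.
const t = audioContext.currentTime + 0.5;
midy.noteOn(0, 64, 100, t);    // start E4 half a second from now
midy.noteOff(0, 64, 0, t + 1); // release it one second after that

// Raw MIDI messages take the same optional trailing argument.
midy.handleMIDIMessage(0x90, 67, 100, t); // Note On, channel 1, G4
```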
package/esm/midy-GM1.js
CHANGED
@@ -355,6 +355,7 @@ export class MidyGM1 {
  state: new ControllerState(),
  ...this.setChannelAudioNodes(audioContext),
  scheduledNotes: new SparseMap(128),
+ sustainNotes: [],
  };
  });
  return channels;
@@ -412,7 +413,7 @@ export class MidyGM1 {
  }
  /* falls through */
  case "noteOff": {
- const notePromise = this.
+ const notePromise = this.scheduleNoteOff(event.channel, event.noteNumber, event.velocity, startTime, false);
  if (notePromise) {
  this.notePromises.push(notePromise);
  }
@@ -459,10 +460,11 @@ export class MidyGM1 {
  resolve();
  return;
  }
- const
+ const now = this.audioContext.currentTime;
+ const t = now + offset;
  queueIndex = await this.scheduleTimelineEvents(t, offset, queueIndex);
  if (this.isPausing) {
- await this.stopNotes(0, true);
+ await this.stopNotes(0, true, now);
  this.notePromises = [];
  resolve();
  this.isPausing = false;
@@ -470,7 +472,7 @@ export class MidyGM1 {
  return;
  }
  else if (this.isStopping) {
- await this.stopNotes(0, true);
+ await this.stopNotes(0, true, now);
  this.notePromises = [];
  this.exclusiveClassMap.clear();
  this.audioBufferCache.clear();
@@ -480,7 +482,7 @@ export class MidyGM1 {
  return;
  }
  else if (this.isSeeking) {
- this.stopNotes(0, true);
+ this.stopNotes(0, true, now);
  this.exclusiveClassMap.clear();
  this.startTime = this.audioContext.currentTime;
  queueIndex = this.getQueueIndex(this.resumeTime);
@@ -489,7 +491,6 @@ export class MidyGM1 {
  await schedulePlayback();
  }
  else {
- const now = this.audioContext.currentTime;
  const waitTime = now + this.noteCheckInterval;
  await this.scheduleTask(() => { }, waitTime);
  await schedulePlayback();
@@ -573,24 +574,21 @@ export class MidyGM1 {
  }
  return { instruments, timeline };
  }
-
- const now = this.audioContext.currentTime;
+ stopChannelNotes(channelNumber, velocity, force, scheduleTime) {
  const channel = this.channels[channelNumber];
-
-
-
-
-
- const promise = this.scheduleNoteRelease(channelNumber, note.noteNumber, velocity, now, force);
- this.notePromises.push(promise);
- }
+ const promises = [];
+ this.processScheduledNotes(channel, (note) => {
+ const promise = this.scheduleNoteOff(channelNumber, note.noteNumber, velocity, scheduleTime, force);
+ this.notePromises.push(promise);
+ promises.push(promise);
  });
  channel.scheduledNotes.clear();
-
+ return Promise.all(promises);
  }
- stopNotes(velocity, force) {
+ stopNotes(velocity, force, scheduleTime) {
+ const promises = [];
  for (let i = 0; i < this.channels.length; i++) {
- this.stopChannelNotes(i, velocity, force);
+ promises.push(this.stopChannelNotes(i, velocity, force, scheduleTime));
  }
  return Promise.all(this.notePromises);
  }
@@ -638,34 +636,32 @@ export class MidyGM1 {
  const now = this.audioContext.currentTime;
  return this.resumeTime + now - this.startTime - this.startDelay;
  }
- processScheduledNotes(channel,
+ processScheduledNotes(channel, callback) {
  channel.scheduledNotes.forEach((noteList) => {
  for (let i = 0; i < noteList.length; i++) {
  const note = noteList[i];
  if (!note)
  continue;
- if (scheduleTime < note.startTime)
- continue;
  callback(note);
  }
  });
  }
- getActiveNotes(channel,
+ getActiveNotes(channel, scheduleTime) {
  const activeNotes = new SparseMap(128);
  channel.scheduledNotes.forEach((noteList) => {
- const activeNote = this.getActiveNote(noteList,
+ const activeNote = this.getActiveNote(noteList, scheduleTime);
  if (activeNote) {
  activeNotes.set(activeNote.noteNumber, activeNote);
  }
  });
  return activeNotes;
  }
- getActiveNote(noteList,
+ getActiveNote(noteList, scheduleTime) {
  for (let i = noteList.length - 1; i >= 0; i--) {
  const note = noteList[i];
  if (!note)
  return;
- if (
+ if (scheduleTime < note.startTime)
  continue;
  return (note.ending) ? null : note;
  }
@@ -690,24 +686,17 @@ export class MidyGM1 {
  const pitch = pitchWheel * pitchWheelSensitivity;
  return tuning + pitch;
  }
- updateChannelDetune(channel) {
-
-
- const note = noteList[i];
- if (!note)
- continue;
- this.updateDetune(channel, note);
- }
+ updateChannelDetune(channel, scheduleTime) {
+ this.processScheduledNotes(channel, (note) => {
+ this.updateDetune(channel, note, scheduleTime);
  });
  }
- updateDetune(channel, note) {
- const now = this.audioContext.currentTime;
+ updateDetune(channel, note, scheduleTime) {
  note.bufferSource.detune
- .cancelScheduledValues(
- .setValueAtTime(channel.detune,
+ .cancelScheduledValues(scheduleTime)
+ .setValueAtTime(channel.detune, scheduleTime);
  }
- setVolumeEnvelope(note) {
- const now = this.audioContext.currentTime;
+ setVolumeEnvelope(note, scheduleTime) {
  const { voiceParams, startTime } = note;
  const attackVolume = this.cbToRatio(-voiceParams.initialAttenuation);
  const sustainVolume = attackVolume * (1 - voiceParams.volSustain);
@@ -716,7 +705,7 @@ export class MidyGM1 {
  const volHold = volAttack + voiceParams.volHold;
  const volDecay = volHold + voiceParams.volDecay;
  note.volumeEnvelopeNode.gain
- .cancelScheduledValues(
+ .cancelScheduledValues(scheduleTime)
  .setValueAtTime(0, startTime)
  .setValueAtTime(1e-6, volDelay) // exponentialRampToValueAtTime() requires a non-zero value
  .exponentialRampToValueAtTime(attackVolume, volAttack)
@@ -724,7 +713,6 @@ export class MidyGM1 {
  .linearRampToValueAtTime(sustainVolume, volDecay);
  }
  setPitchEnvelope(note, scheduleTime) {
- scheduleTime ??= this.audioContext.currentTime;
  const { voiceParams } = note;
  const baseRate = voiceParams.playbackRate;
  note.bufferSource.playbackRate
@@ -751,8 +739,7 @@ export class MidyGM1 {
  const maxFrequency = 20000; // max Hz of initialFilterFc
  return Math.max(minFrequency, Math.min(frequency, maxFrequency));
  }
- setFilterEnvelope(note) {
- const now = this.audioContext.currentTime;
+ setFilterEnvelope(note, scheduleTime) {
  const { voiceParams, startTime } = note;
  const baseFreq = this.centToHz(voiceParams.initialFilterFc);
  const peekFreq = this.centToHz(voiceParams.initialFilterFc + voiceParams.modEnvToFilterFc);
@@ -766,14 +753,14 @@ export class MidyGM1 {
  const modHold = modAttack + voiceParams.modHold;
  const modDecay = modHold + voiceParams.modDecay;
  note.filterNode.frequency
- .cancelScheduledValues(
+ .cancelScheduledValues(scheduleTime)
  .setValueAtTime(adjustedBaseFreq, startTime)
  .setValueAtTime(adjustedBaseFreq, modDelay)
  .exponentialRampToValueAtTime(adjustedPeekFreq, modAttack)
  .setValueAtTime(adjustedPeekFreq, modHold)
  .linearRampToValueAtTime(adjustedSustainFreq, modDecay);
  }
- startModulation(channel, note,
+ startModulation(channel, note, scheduleTime) {
  const { voiceParams } = note;
  note.modulationLFO = new OscillatorNode(this.audioContext, {
  frequency: this.centToHz(voiceParams.freqModLFO),
@@ -782,10 +769,10 @@ export class MidyGM1 {
  gain: voiceParams.modLfoToFilterFc,
  });
  note.modulationDepth = new GainNode(this.audioContext);
- this.setModLfoToPitch(channel, note);
+ this.setModLfoToPitch(channel, note, scheduleTime);
  note.volumeDepth = new GainNode(this.audioContext);
- this.setModLfoToVolume(note);
- note.modulationLFO.start(startTime + voiceParams.delayModLFO);
+ this.setModLfoToVolume(note, scheduleTime);
+ note.modulationLFO.start(note.startTime + voiceParams.delayModLFO);
  note.modulationLFO.connect(note.filterDepth);
  note.filterDepth.connect(note.filterNode.frequency);
  note.modulationLFO.connect(note.modulationDepth);
@@ -812,6 +799,7 @@ export class MidyGM1 {
  }
  }
  async createNote(channel, voice, noteNumber, velocity, startTime, isSF3) {
+ const now = this.audioContext.currentTime;
  const state = channel.state;
  const controllerState = this.getControllerState(channel, noteNumber, velocity);
  const voiceParams = voice.getAllParams(controllerState);
@@ -823,11 +811,11 @@ export class MidyGM1 {
  type: "lowpass",
  Q: voiceParams.initialFilterQ / 10, // dB
  });
- this.setVolumeEnvelope(note);
- this.setFilterEnvelope(note);
- this.setPitchEnvelope(note);
+ this.setVolumeEnvelope(note, now);
+ this.setFilterEnvelope(note, now);
+ this.setPitchEnvelope(note, now);
  if (0 < state.modulationDepth) {
- this.startModulation(channel, note,
+ this.startModulation(channel, note, now);
  }
  note.bufferSource.connect(note.filterNode);
  note.filterNode.connect(note.volumeEnvelopeNode);
@@ -848,15 +836,17 @@ export class MidyGM1 {
  const note = await this.createNote(channel, voice, noteNumber, velocity, startTime, isSF3);
  note.volumeEnvelopeNode.connect(channel.gainL);
  note.volumeEnvelopeNode.connect(channel.gainR);
+ if (0.5 <= channel.state.sustainPedal) {
+ channel.sustainNotes.push(note);
+ }
  const exclusiveClass = note.voiceParams.exclusiveClass;
  if (exclusiveClass !== 0) {
  if (this.exclusiveClassMap.has(exclusiveClass)) {
  const prevEntry = this.exclusiveClassMap.get(exclusiveClass);
  const [prevNote, prevChannelNumber] = prevEntry;
  if (!prevNote.ending) {
- this.
- startTime,
- true);
+ this.scheduleNoteOff(prevChannelNumber, prevNote.noteNumber, 0, // velocity,
+ startTime, true);
  }
  }
  this.exclusiveClassMap.set(exclusiveClass, [note, channelNumber]);
@@ -869,9 +859,9 @@ export class MidyGM1 {
  scheduledNotes.set(noteNumber, [note]);
  }
  }
- noteOn(channelNumber, noteNumber, velocity) {
-
- return this.scheduleNoteOn(channelNumber, noteNumber, velocity,
+ noteOn(channelNumber, noteNumber, velocity, scheduleTime) {
+ scheduleTime ??= this.audioContext.currentTime;
+ return this.scheduleNoteOn(channelNumber, noteNumber, velocity, scheduleTime);
  }
  stopNote(endTime, stopTime, scheduledNotes, index) {
  const note = scheduledNotes[index];
@@ -898,9 +888,9 @@ export class MidyGM1 {
  note.bufferSource.stop(stopTime);
  });
  }
-
+ scheduleNoteOff(channelNumber, noteNumber, _velocity, endTime, force) {
  const channel = this.channels[channelNumber];
- if (!force && 0.5
+ if (!force && 0.5 <= channel.state.sustainPedal)
  return;
  if (!channel.scheduledNotes.has(noteNumber))
  return;
@@ -920,127 +910,119 @@ export class MidyGM1 {
  return this.stopNote(endTime, stopTime, scheduledNotes, i);
  }
  }
-
-
- return this.
+ noteOff(channelNumber, noteNumber, velocity, scheduleTime) {
+ scheduleTime ??= this.audioContext.currentTime;
+ return this.scheduleNoteOff(channelNumber, noteNumber, velocity, scheduleTime, false);
  }
- releaseSustainPedal(channelNumber, halfVelocity) {
+ releaseSustainPedal(channelNumber, halfVelocity, scheduleTime) {
  const velocity = halfVelocity * 2;
  const channel = this.channels[channelNumber];
  const promises = [];
- channel.
-
-
-
-
- continue;
- const { noteNumber } = note;
- const promise = this.releaseNote(channelNumber, noteNumber, velocity);
- promises.push(promise);
- }
- });
+ for (let i = 0; i < channel.sustainNotes.length; i++) {
+ const promise = this.noteOff(channelNumber, channel.sustainNotes[i].noteNumber, velocity, scheduleTime);
+ promises.push(promise);
+ }
+ channel.sustainNotes = [];
  return promises;
  }
- handleMIDIMessage(statusByte, data1, data2) {
+ handleMIDIMessage(statusByte, data1, data2, scheduleTime) {
  const channelNumber = statusByte & 0x0F;
  const messageType = statusByte & 0xF0;
  switch (messageType) {
  case 0x80:
- return this.
+ return this.noteOff(channelNumber, data1, data2, scheduleTime);
  case 0x90:
- return this.noteOn(channelNumber, data1, data2);
+ return this.noteOn(channelNumber, data1, data2, scheduleTime);
  case 0xB0:
- return this.handleControlChange(channelNumber, data1, data2);
+ return this.handleControlChange(channelNumber, data1, data2, scheduleTime);
  case 0xC0:
- return this.handleProgramChange(channelNumber, data1);
+ return this.handleProgramChange(channelNumber, data1, scheduleTime);
  case 0xE0:
- return this.handlePitchBendMessage(channelNumber, data1, data2);
+ return this.handlePitchBendMessage(channelNumber, data1, data2, scheduleTime);
  default:
  console.warn(`Unsupported MIDI message: ${messageType.toString(16)}`);
  }
  }
- handleProgramChange(channelNumber, program) {
+ handleProgramChange(channelNumber, program, _scheduleTime) {
  const channel = this.channels[channelNumber];
  channel.program = program;
  }
- handlePitchBendMessage(channelNumber, lsb, msb) {
+ handlePitchBendMessage(channelNumber, lsb, msb, scheduleTime) {
  const pitchBend = msb * 128 + lsb;
- this.setPitchBend(channelNumber, pitchBend);
+ this.setPitchBend(channelNumber, pitchBend, scheduleTime);
  }
- setPitchBend(channelNumber, value) {
+ setPitchBend(channelNumber, value, scheduleTime) {
+ scheduleTime ??= this.audioContext.currentTime;
  const channel = this.channels[channelNumber];
  const state = channel.state;
  const prev = state.pitchWheel * 2 - 1;
  const next = (value - 8192) / 8192;
  state.pitchWheel = value / 16383;
  channel.detune += (next - prev) * state.pitchWheelSensitivity * 12800;
- this.updateChannelDetune(channel);
- this.applyVoiceParams(channel, 14);
+ this.updateChannelDetune(channel, scheduleTime);
+ this.applyVoiceParams(channel, 14, scheduleTime);
  }
- setModLfoToPitch(channel, note) {
- const now = this.audioContext.currentTime;
+ setModLfoToPitch(channel, note, scheduleTime) {
  const modLfoToPitch = note.voiceParams.modLfoToPitch;
  const baseDepth = Math.abs(modLfoToPitch) +
  channel.state.modulationDepth;
  const modulationDepth = baseDepth * Math.sign(modLfoToPitch);
  note.modulationDepth.gain
- .cancelScheduledValues(
- .setValueAtTime(modulationDepth,
+ .cancelScheduledValues(scheduleTime)
+ .setValueAtTime(modulationDepth, scheduleTime);
  }
- setModLfoToFilterFc(note) {
- const now = this.audioContext.currentTime;
+ setModLfoToFilterFc(note, scheduleTime) {
  const modLfoToFilterFc = note.voiceParams.modLfoToFilterFc;
  note.filterDepth.gain
- .cancelScheduledValues(
- .setValueAtTime(modLfoToFilterFc,
+ .cancelScheduledValues(scheduleTime)
+ .setValueAtTime(modLfoToFilterFc, scheduleTime);
  }
- setModLfoToVolume(note) {
- const now = this.audioContext.currentTime;
+ setModLfoToVolume(note, scheduleTime) {
  const modLfoToVolume = note.voiceParams.modLfoToVolume;
  const baseDepth = this.cbToRatio(Math.abs(modLfoToVolume)) - 1;
  const volumeDepth = baseDepth * Math.sign(modLfoToVolume);
  note.volumeDepth.gain
- .cancelScheduledValues(
- .setValueAtTime(volumeDepth,
+ .cancelScheduledValues(scheduleTime)
+ .setValueAtTime(volumeDepth, scheduleTime);
  }
- setDelayModLFO(note) {
- const now = this.audioContext.currentTime;
+ setDelayModLFO(note, scheduleTime) {
  const startTime = note.startTime;
- if (startTime <
+ if (startTime < scheduleTime)
  return;
- note.modulationLFO.stop(
+ note.modulationLFO.stop(scheduleTime);
  note.modulationLFO.start(startTime + note.voiceParams.delayModLFO);
  note.modulationLFO.connect(note.filterDepth);
  }
- setFreqModLFO(note) {
- const now = this.audioContext.currentTime;
+ setFreqModLFO(note, scheduleTime) {
  const freqModLFO = note.voiceParams.freqModLFO;
  note.modulationLFO.frequency
- .cancelScheduledValues(
- .setValueAtTime(freqModLFO,
+ .cancelScheduledValues(scheduleTime)
+ .setValueAtTime(freqModLFO, scheduleTime);
  }
  createVoiceParamsHandlers() {
  return {
- modLfoToPitch: (channel, note, _prevValue) => {
+ modLfoToPitch: (channel, note, _prevValue, scheduleTime) => {
  if (0 < channel.state.modulationDepth) {
- this.setModLfoToPitch(channel, note);
+ this.setModLfoToPitch(channel, note, scheduleTime);
  }
  },
- vibLfoToPitch: (_channel, _note, _prevValue) => { },
- modLfoToFilterFc: (channel, note, _prevValue) => {
- if (0 < channel.state.modulationDepth)
- this.setModLfoToFilterFc(note);
+ vibLfoToPitch: (_channel, _note, _prevValue, _scheduleTime) => { },
+ modLfoToFilterFc: (channel, note, _prevValue, scheduleTime) => {
+ if (0 < channel.state.modulationDepth) {
+ this.setModLfoToFilterFc(note, scheduleTime);
+ }
  },
- modLfoToVolume: (channel, note, _prevValue) => {
- if (0 < channel.state.modulationDepth)
- this.setModLfoToVolume(note);
+ modLfoToVolume: (channel, note, _prevValue, scheduleTime) => {
+ if (0 < channel.state.modulationDepth) {
+ this.setModLfoToVolume(note, scheduleTime);
+ }
  },
- chorusEffectsSend: (_channel, _note, _prevValue) => { },
- reverbEffectsSend: (_channel, _note, _prevValue) => { },
- delayModLFO: (_channel, note, _prevValue) => this.setDelayModLFO(note),
- freqModLFO: (_channel, note, _prevValue) => this.setFreqModLFO(note),
- delayVibLFO: (_channel, _note, _prevValue) => { },
- freqVibLFO: (_channel, _note, _prevValue) => { },
+ chorusEffectsSend: (_channel, _note, _prevValue, _scheduleTime) => { },
+ reverbEffectsSend: (_channel, _note, _prevValue, _scheduleTime) => { },
+ delayModLFO: (_channel, note, _prevValue, scheduleTime) => this.setDelayModLFO(note, scheduleTime),
+ freqModLFO: (_channel, note, _prevValue, scheduleTime) => this.setFreqModLFO(note, scheduleTime),
+ delayVibLFO: (_channel, _note, _prevValue, _scheduleTime) => { },
+ freqVibLFO: (_channel, _note, _prevValue, _scheduleTime) => { },
  };
  }
  getControllerState(channel, noteNumber, velocity) {
@@ -1050,49 +1032,44 @@ export class MidyGM1 {
  state[3] = noteNumber / 127;
  return state;
  }
- applyVoiceParams(channel, controllerType) {
-
-
-
-
+ applyVoiceParams(channel, controllerType, scheduleTime) {
+ this.processScheduledNotes(channel, (note) => {
+ const controllerState = this.getControllerState(channel, note.noteNumber, note.velocity);
+ const voiceParams = note.voice.getParams(controllerType, controllerState);
+ let appliedFilterEnvelope = false;
+ let appliedVolumeEnvelope = false;
+ for (const [key, value] of Object.entries(voiceParams)) {
+ const prevValue = note.voiceParams[key];
+ if (value === prevValue)
  continue;
-
-
-
-
-
-
- if (value === prevValue)
+ note.voiceParams[key] = value;
+ if (key in this.voiceParamsHandlers) {
+ this.voiceParamsHandlers[key](channel, note, prevValue, scheduleTime);
+ }
+ else if (filterEnvelopeKeySet.has(key)) {
+ if (appliedFilterEnvelope)
  continue;
-
-
-
-
-
-
- continue;
- appliedFilterEnvelope = true;
- const noteVoiceParams = note.voiceParams;
- for (let i = 0; i < filterEnvelopeKeys.length; i++) {
- const key = filterEnvelopeKeys[i];
- if (key in voiceParams)
- noteVoiceParams[key] = voiceParams[key];
- }
- this.setFilterEnvelope(note);
- this.setPitchEnvelope(note);
+ appliedFilterEnvelope = true;
+ const noteVoiceParams = note.voiceParams;
+ for (let i = 0; i < filterEnvelopeKeys.length; i++) {
+ const key = filterEnvelopeKeys[i];
+ if (key in voiceParams)
+ noteVoiceParams[key] = voiceParams[key];
  }
-
-
-
-
-
-
-
-
-
-
-
+ this.setFilterEnvelope(note, scheduleTime);
+ this.setPitchEnvelope(note, scheduleTime);
+ }
+ else if (volumeEnvelopeKeySet.has(key)) {
+ if (appliedVolumeEnvelope)
+ continue;
+ appliedVolumeEnvelope = true;
+ const noteVoiceParams = note.voiceParams;
+ for (let i = 0; i < volumeEnvelopeKeys.length; i++) {
+ const key = volumeEnvelopeKeys[i];
+ if (key in voiceParams)
+ noteVoiceParams[key] = voiceParams[key];
  }
+ this.setVolumeEnvelope(note, scheduleTime);
  }
  }
  });
@@ -1113,21 +1090,20 @@ export class MidyGM1 {
  123: this.allNotesOff,
  };
  }
- handleControlChange(channelNumber, controllerType, value,
+ handleControlChange(channelNumber, controllerType, value, scheduleTime) {
  const handler = this.controlChangeHandlers[controllerType];
  if (handler) {
- handler.call(this, channelNumber, value,
+ handler.call(this, channelNumber, value, scheduleTime);
  const channel = this.channels[channelNumber];
- this.applyVoiceParams(channel, controllerType + 128);
+ this.applyVoiceParams(channel, controllerType + 128, scheduleTime);
  }
  else {
  console.warn(`Unsupported Control change: controllerType=${controllerType} value=${value}`);
  }
  }
  updateModulation(channel, scheduleTime) {
- scheduleTime ??= this.audioContext.currentTime;
  const depth = channel.state.modulationDepth * channel.modulationDepthRange;
- this.processScheduledNotes(channel,
+ this.processScheduledNotes(channel, (note) => {
  if (note.modulationDepth) {
  note.modulationDepth.gain.setValueAtTime(depth, scheduleTime);
  }
@@ -1138,11 +1114,13 @@ export class MidyGM1 {
  });
  }
  setModulationDepth(channelNumber, modulation, scheduleTime) {
+ scheduleTime ??= this.audioContext.currentTime;
  const channel = this.channels[channelNumber];
  channel.state.modulationDepth = modulation / 127;
  this.updateModulation(channel, scheduleTime);
  }
  setVolume(channelNumber, volume, scheduleTime) {
+ scheduleTime ??= this.audioContext.currentTime;
  const channel = this.channels[channelNumber];
  channel.state.volume = volume / 127;
  this.updateChannelVolume(channel, scheduleTime);
@@ -1155,35 +1133,43 @@ export class MidyGM1 {
  };
  }
  setPan(channelNumber, pan, scheduleTime) {
+ scheduleTime ??= this.audioContext.currentTime;
  const channel = this.channels[channelNumber];
  channel.state.pan = pan / 127;
  this.updateChannelVolume(channel, scheduleTime);
  }
  setExpression(channelNumber, expression, scheduleTime) {
+ scheduleTime ??= this.audioContext.currentTime;
  const channel = this.channels[channelNumber];
  channel.state.expression = expression / 127;
  this.updateChannelVolume(channel, scheduleTime);
  }
- dataEntryLSB(channelNumber, value) {
+ dataEntryLSB(channelNumber, value, scheduleTime) {
  this.channels[channelNumber].dataLSB = value;
- this.handleRPN(channelNumber,
+ this.handleRPN(channelNumber, scheduleTime);
  }
  updateChannelVolume(channel, scheduleTime) {
- scheduleTime ??= this.audioContext.currentTime;
  const state = channel.state;
  const volume = state.volume * state.expression;
  const { gainLeft, gainRight } = this.panToGain(state.pan);
  channel.gainL.gain
- .cancelScheduledValues(
+ .cancelScheduledValues(scheduleTime)
  .setValueAtTime(volume * gainLeft, scheduleTime);
  channel.gainR.gain
- .cancelScheduledValues(
+ .cancelScheduledValues(scheduleTime)
  .setValueAtTime(volume * gainRight, scheduleTime);
  }
- setSustainPedal(channelNumber, value) {
- this.
-
-
+ setSustainPedal(channelNumber, value, scheduleTime) {
+ scheduleTime ??= this.audioContext.currentTime;
+ const channel = this.channels[channelNumber];
+ channel.state.sustainPedal = value / 127;
+ if (64 <= value) {
+ this.processScheduledNotes(channel, (note) => {
+ channel.sustainNotes.push(note);
+ });
+ }
+ else {
+ this.releaseSustainPedal(channelNumber, value, scheduleTime);
  }
  }
  limitData(channel, minMSB, maxMSB, minLSB, maxLSB) {
@@ -1212,18 +1198,18 @@ export class MidyGM1 {
  channel.dataMSB = minMSB;
  }
  }
- handleRPN(channelNumber) {
+ handleRPN(channelNumber, scheduleTime) {
  const channel = this.channels[channelNumber];
  const rpn = channel.rpnMSB * 128 + channel.rpnLSB;
  switch (rpn) {
  case 0:
- this.handlePitchBendRangeRPN(channelNumber);
+ this.handlePitchBendRangeRPN(channelNumber, scheduleTime);
  break;
  case 1:
- this.handleFineTuningRPN(channelNumber);
+ this.handleFineTuningRPN(channelNumber, scheduleTime);
  break;
  case 2:
- this.handleCoarseTuningRPN(channelNumber);
+ this.handleCoarseTuningRPN(channelNumber, scheduleTime);
  break;
  default:
  console.warn(`Channel ${channelNumber}: Unsupported RPN MSB=${channel.rpnMSB} LSB=${channel.rpnLSB}`);
@@ -1235,56 +1221,60 @@ export class MidyGM1 {
  setRPNLSB(channelNumber, value) {
  this.channels[channelNumber].rpnLSB = value;
  }
- dataEntryMSB(channelNumber, value) {
+ dataEntryMSB(channelNumber, value, scheduleTime) {
  this.channels[channelNumber].dataMSB = value;
- this.handleRPN(channelNumber);
+ this.handleRPN(channelNumber, scheduleTime);
  }
- handlePitchBendRangeRPN(channelNumber) {
+ handlePitchBendRangeRPN(channelNumber, scheduleTime) {
  const channel = this.channels[channelNumber];
  this.limitData(channel, 0, 127, 0, 99);
  const pitchBendRange = channel.dataMSB + channel.dataLSB / 100;
- this.setPitchBendRange(channelNumber, pitchBendRange);
+ this.setPitchBendRange(channelNumber, pitchBendRange, scheduleTime);
  }
- setPitchBendRange(channelNumber, value) {
+ setPitchBendRange(channelNumber, value, scheduleTime) {
+ scheduleTime ??= this.audioContext.currentTime;
  const channel = this.channels[channelNumber];
  const state = channel.state;
  const prev = state.pitchWheelSensitivity;
  const next = value / 128;
  state.pitchWheelSensitivity = next;
  channel.detune += (state.pitchWheel * 2 - 1) * (next - prev) * 12800;
- this.updateChannelDetune(channel);
- this.applyVoiceParams(channel, 16);
+ this.updateChannelDetune(channel, scheduleTime);
+ this.applyVoiceParams(channel, 16, scheduleTime);
  }
- handleFineTuningRPN(channelNumber) {
+ handleFineTuningRPN(channelNumber, scheduleTime) {
  const channel = this.channels[channelNumber];
  this.limitData(channel, 0, 127, 0, 127);
  const fineTuning = channel.dataMSB * 128 + channel.dataLSB;
- this.setFineTuning(channelNumber, fineTuning);
+ this.setFineTuning(channelNumber, fineTuning, scheduleTime);
  }
- setFineTuning(channelNumber, value) {
+ setFineTuning(channelNumber, value, scheduleTime) {
+ scheduleTime ??= this.audioContext.currentTime;
  const channel = this.channels[channelNumber];
  const prev = channel.fineTuning;
  const next = (value - 8192) / 8.192; // cent
  channel.fineTuning = next;
  channel.detune += next - prev;
- this.updateChannelDetune(channel);
+ this.updateChannelDetune(channel, scheduleTime);
  }
- handleCoarseTuningRPN(channelNumber) {
+ handleCoarseTuningRPN(channelNumber, scheduleTime) {
  const channel = this.channels[channelNumber];
  this.limitDataMSB(channel, 0, 127);
  const coarseTuning = channel.dataMSB;
- this.setCoarseTuning(channelNumber, coarseTuning);
+ this.setCoarseTuning(channelNumber, coarseTuning, scheduleTime);
  }
- setCoarseTuning(channelNumber, value) {
+ setCoarseTuning(channelNumber, value, scheduleTime) {
+ scheduleTime ??= this.audioContext.currentTime;
  const channel = this.channels[channelNumber];
  const prev = channel.coarseTuning;
  const next = (value - 64) * 100; // cent
  channel.coarseTuning = next;
  channel.detune += next - prev;
- this.updateChannelDetune(channel);
+ this.updateChannelDetune(channel, scheduleTime);
  }
- allSoundOff(channelNumber) {
-
+ allSoundOff(channelNumber, _value, scheduleTime) {
+ scheduleTime ??= this.audioContext.currentTime;
+ return this.stopChannelNotes(channelNumber, 0, true, scheduleTime);
  }
  resetAllControllers(channelNumber) {
  const stateTypes = [
@@ -1308,10 +1298,11 @@ export class MidyGM1 {
  channel[type] = this.constructor.channelSettings[type];
  }
  }
- allNotesOff(channelNumber) {
-
+ allNotesOff(channelNumber, _value, scheduleTime) {
+ scheduleTime ??= this.audioContext.currentTime;
+ return this.stopChannelNotes(channelNumber, 0, false, scheduleTime);
  }
- handleUniversalNonRealTimeExclusiveMessage(data) {
+ handleUniversalNonRealTimeExclusiveMessage(data, _scheduleTime) {
  switch (data[2]) {
  case 9:
  switch (data[3]) {
@@ -1335,12 +1326,12 @@ export class MidyGM1 {
  }
  this.channels[9].bank = 128;
  }
- handleUniversalRealTimeExclusiveMessage(data) {
+ handleUniversalRealTimeExclusiveMessage(data, scheduleTime) {
  switch (data[2]) {
  case 4:
  switch (data[3]) {
  case 1:
- return this.handleMasterVolumeSysEx(data);
+ return this.handleMasterVolumeSysEx(data, scheduleTime);
  default:
  console.warn(`Unsupported Exclusive Message: ${data}`);
@@ -1349,42 +1340,40 @@ export class MidyGM1 {
  console.warn(`Unsupported Exclusive Message: ${data}`);
  }
  }
- handleMasterVolumeSysEx(data) {
+ handleMasterVolumeSysEx(data, scheduleTime) {
  const volume = (data[5] * 128 + data[4]) / 16383;
- this.setMasterVolume(volume);
+ this.setMasterVolume(volume, scheduleTime);
  }
- setMasterVolume(volume) {
+ setMasterVolume(volume, scheduleTime) {
+ scheduleTime ??= this.audioContext.currentTime;
  if (volume < 0 && 1 < volume) {
  console.error("Master Volume is out of range");
  }
  else {
-
-
-
+ this.masterVolume.gain
+ .cancelScheduledValues(scheduleTime)
+ .setValueAtTime(volume * volume, scheduleTime);
  }
  }
-
- console.warn(`Unsupported Exclusive Message: ${data}`);
- }
- handleSysEx(data) {
+ handleSysEx(data, scheduleTime) {
  switch (data[0]) {
  case 126:
- return this.handleUniversalNonRealTimeExclusiveMessage(data);
+ return this.handleUniversalNonRealTimeExclusiveMessage(data, scheduleTime);
  case 127:
- return this.handleUniversalRealTimeExclusiveMessage(data);
+ return this.handleUniversalRealTimeExclusiveMessage(data, scheduleTime);
  default:
-
+ console.warn(`Unsupported Exclusive Message: ${data}`);
  }
  }
- scheduleTask(callback,
+ scheduleTask(callback, scheduleTime) {
  return new Promise((resolve) => {
  const bufferSource = new AudioBufferSourceNode(this.audioContext);
  bufferSource.onended = () => {
  callback();
  resolve();
  };
- bufferSource.start(
- bufferSource.stop(
+ bufferSource.start(scheduleTime);
+ bufferSource.stop(scheduleTime);
  });
  }
  }