@coderline/alphatab 1.6.0-alpha.1442 → 1.6.0-alpha.1448
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/alphaTab.core.min.mjs +2 -2
- package/dist/alphaTab.core.mjs +496 -32
- package/dist/alphaTab.d.ts +184 -4
- package/dist/alphaTab.js +496 -32
- package/dist/alphaTab.min.js +2 -2
- package/dist/alphaTab.min.mjs +1 -1
- package/dist/alphaTab.mjs +1 -1
- package/dist/alphaTab.vite.js +1 -1
- package/dist/alphaTab.vite.mjs +1 -1
- package/dist/alphaTab.webpack.js +1 -1
- package/dist/alphaTab.webpack.mjs +1 -1
- package/dist/alphaTab.worker.min.mjs +1 -1
- package/dist/alphaTab.worker.mjs +1 -1
- package/dist/alphaTab.worklet.min.mjs +1 -1
- package/dist/alphaTab.worklet.mjs +1 -1
- package/package.json +3 -2
package/dist/alphaTab.core.mjs
CHANGED
@@ -1,5 +1,5 @@
 /*!
- * alphaTab v1.6.0-alpha.
+ * alphaTab v1.6.0-alpha.1448 (develop, build 1448)
 *
 * Copyright © 2025, Daniel Kuschny and Contributors, All rights reserved.
 *
@@ -22533,7 +22533,6 @@ class MidiSequencerState {
 this.division = MidiUtils.QuarterTime;
 this.eventIndex = 0;
 this.currentTime = 0;
- this.currentTick = 0;
 this.syncPointIndex = 0;
 this.playbackRange = null;
 this.playbackRangeStartTime = 0;
@@ -22541,7 +22540,7 @@ class MidiSequencerState {
 this.endTick = 0;
 this.endTime = 0;
 this.currentTempo = 0;
- this.
+ this.syncPointTempo = 0;
 }
 }
 /**
@@ -22598,7 +22597,13 @@ class MidiFileSequencer {
 return this._currentState.currentTempo;
 }
 get modifiedTempo() {
- return this._currentState.
+ return this._currentState.syncPointTempo * this.playbackSpeed;
+ }
+ get syncPointTempo() {
+ return this._currentState.syncPointTempo;
+ }
+ get currentSyncPoints() {
+ return this._currentState.syncPoints;
 }
 mainSeek(timePosition) {
 // map to speed=1
@@ -22622,7 +22627,7 @@ class MidiFileSequencer {
 this._mainState.syncPointIndex = 0;
 this._mainState.tempoChangeIndex = 0;
 this._mainState.currentTempo = this._mainState.tempoChanges[0].bpm;
- this._mainState.
+ this._mainState.syncPointTempo =
 this._mainState.syncPoints.length > 0
 ? this._mainState.syncPoints[0].syncBpm
 : this._mainState.currentTempo;
@@ -22735,7 +22740,7 @@ class MidiFileSequencer {
 }
 }
 state.currentTempo = state.tempoChanges.length > 0 ? state.tempoChanges[0].bpm : bpm;
- state.
+ state.syncPointTempo = state.currentTempo;
 state.synthData.sort((a, b) => {
 if (a.time > b.time) {
 return 1;
@@ -22852,7 +22857,7 @@ class MidiFileSequencer {
 }
 }
 state.syncPointIndex = 0;
- state.
+ state.syncPointTempo = state.syncPoints.length > 0 ? state.syncPoints[0].syncBpm : state.currentTempo;
 }
 currentTimePositionToTickPosition(timePosition) {
 const state = this._currentState;
@@ -22867,6 +22872,9 @@ class MidiFileSequencer {
 // we add 1 for possible rounding errors.(floating point issuses)
 return lastTempoChange.ticks + ticks + 1;
 }
+ currentUpdateCurrentTempo(timePosition) {
+ this.updateCurrentTempo(this._mainState, timePosition * this.playbackSpeed);
+ }
 updateCurrentTempo(state, timePosition) {
 let tempoChangeIndex = state.tempoChangeIndex;
 if (timePosition < state.tempoChanges[tempoChangeIndex].time) {
@@ -22881,6 +22889,9 @@ class MidiFileSequencer {
 state.currentTempo = state.tempoChanges[state.tempoChangeIndex].bpm;
 }
 }
+ currentUpdateSyncPoints(timePosition) {
+ this.updateSyncPoints(this._mainState, timePosition);
+ }
 updateSyncPoints(state, timePosition) {
 const syncPoints = state.syncPoints;
 if (syncPoints.length > 0) {
@@ -22893,11 +22904,11 @@ class MidiFileSequencer {
 }
 if (syncPointIndex !== state.syncPointIndex) {
 state.syncPointIndex = syncPointIndex;
- state.
+ state.syncPointTempo = syncPoints[syncPointIndex].syncBpm;
 }
 }
 else {
- state.
+ state.syncPointTempo = state.currentTempo;
 }
 }
 mainTimePositionFromBackingTrack(timePosition, backingTrackLength) {
@@ -23044,7 +23055,7 @@ class MidiFileSequencer {
 state.endTime = metronomeTime;
 state.endTick = metronomeTick;
 state.currentTempo = bpm;
- state.
+ state.syncPointTempo = bpm;
 this._countInState = state;
 }
 }
@@ -28373,6 +28384,82 @@ class PlaybackRangeChangedEventArgs {
 }
 }
 
+ /**
+ * The options controlling how to export the audio.
+ */
+ class AudioExportOptions {
+ constructor() {
+ /**
+ * The output sample rate.
+ * @default `44100`
+ */
+ this.sampleRate = 44100;
+ /**
+ * Whether to respect sync point information during export.
+ * @default `true`
+ * @remarks
+ * If the song contains sync point information for synchronization with an external media,
+ * this option allows controlling whether the synthesized audio is aligned with these points.
+ *
+ * This is useful when mixing the exported audio together with external media, keeping the same timing.
+ *
+ * Disable this option if you want the original/exact timing as per music sheet in the exported audio.
+ */
+ this.useSyncPoints = false;
+ /**
+ * The current master volume as percentage. (range: 0.0-3.0, default 1.0)
+ */
+ this.masterVolume = 1;
+ /**
+ * The metronome volume. (range: 0.0-3.0, default 0.0)
+ */
+ this.metronomeVolume = 0;
+ /**
+ * The volume for individual tracks as percentage (range: 0.0-3.0).
+ * @remarks
+ * The key is the track index, and the value is the relative volume.
+ * The configured volume (as per data model) still applies, this is an additional volume control.
+ * If no custom value is set, 100% is used.
+ * No values from the currently active synthesizer are applied.
+ *
+ * The meaning of the key changes when used with AlphaSynth directly, in this case the key is the midi channel .
+ */
+ this.trackVolume = new Map();
+ /**
+ * The additional semitone pitch transpose to apply for individual tracks.
+ * @remarks
+ * The key is the track index, and the value is the number of semitones to apply.
+ * No values from the currently active synthesizer are applied.
+ *
+ * The meaning of the key changes when used with AlphaSynth directly, in this case the key is the midi channel .
+ */
+ this.trackTranspositionPitches = new Map();
+ }
+ }
+ /**
+ * Represents a single chunk of audio produced.
+ */
+ class AudioExportChunk {
+ constructor() {
+ /**
+ * The current time position within the song in milliseconds.
+ */
+ this.currentTime = 0;
+ /**
+ * The total length of the song in milliseconds.
+ */
+ this.endTime = 0;
+ /**
+ * The current time position within the song in midi ticks.
+ */
+ this.currentTick = 0;
+ /**
+ * The total length of the song in midi ticks.
+ */
+ this.endTick = 0;
+ }
+ }
+
 /**
 * This is the base class for synthesizer components which can be used to
 * play a {@link MidiFile} via a {@link ISynthOutput}.
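
Note on the two new types above: AudioExportOptions is a plain options class with the defaults shown, so callers only override what they need, and AudioExportChunk is the per-call result of the streaming export. A minimal sketch of filling the options (a hedged example, assuming the classes are reachable through the synth barrel export added at the end of this diff, e.g. alphaTab.synth.AudioExportOptions; the concrete values are illustrative):

    // Sketch: override only the fields that differ from the defaults above.
    const options = new alphaTab.synth.AudioExportOptions();
    options.sampleRate = 48000;       // default is 44100
    options.masterVolume = 1;         // range 0.0-3.0
    options.metronomeVolume = 0;      // keep the metronome silent
    options.useSyncPoints = false;    // keep the exact music-sheet timing
    // Per-track overrides: via AlphaTabApiBase.exportAudio the keys are track indices,
    // via AlphaSynth directly they are midi channels (see the remarks above).
    options.trackVolume.set(0, 0.5);                 // track 0 at half volume
    options.trackTranspositionPitches.set(1, -2);    // track 1 two semitones down
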
@@ -28849,6 +28936,184 @@ class AlphaSynth extends AlphaSynthBase {
 constructor(output, bufferTimeInMilliseconds) {
 super(output, new TinySoundFont(output.sampleRate), bufferTimeInMilliseconds);
 }
+ /**
+ * Creates a new audio exporter, initialized with the given data.
+ * @param options The export options to use.
+ * The track volume and transposition pitches must lists must be filled with midi channels.
+ * @param midi The midi file to use.
+ * @param syncPoints The sync points to use
+ * @param transpositionPitches The initial transposition pitches to apply.
+ * @param transpositionPitches The initial transposition pitches to apply.
+ */
+ exportAudio(options, midi, syncPoints, mainTranspositionPitches) {
+ const exporter = new AlphaSynthAudioExporter(options);
+ exporter.loadMidiFile(midi);
+ if (options.useSyncPoints) {
+ exporter.updateSyncPoints(syncPoints);
+ }
+ exporter.applyTranspositionPitches(mainTranspositionPitches);
+ for (const [channel, semitones] of options.trackTranspositionPitches) {
+ exporter.setChannelTranspositionPitch(channel, semitones);
+ }
+ for (const [channel, volume] of options.trackVolume) {
+ exporter.channelSetMixVolume(channel, volume);
+ }
+ if (options.soundFonts) {
+ for (const f of options.soundFonts) {
+ exporter.loadSoundFont(f);
+ }
+ }
+ else {
+ exporter.loadPresets(this.synthesizer.presets);
+ }
+ if (options.playbackRange) {
+ exporter.limitExport(options.playbackRange);
+ }
+ exporter.setup();
+ return exporter;
+ }
+ }
+ /**
+ * A audio exporter allowing streaming synthesis of audio samples with a fixed configuration.
+ */
+ class AlphaSynthAudioExporter {
+ constructor(options) {
+ this._generatedAudioCurrentTime = 0;
+ this._generatedAudioEndTime = 0;
+ this._synth = new TinySoundFont(options.sampleRate);
+ this._sequencer = new MidiFileSequencer(this._synth);
+ this._synth.masterVolume = Math.max(options.masterVolume, SynthConstants.MinVolume);
+ this._synth.metronomeVolume = Math.max(options.metronomeVolume, SynthConstants.MinVolume);
+ }
+ /**
+ * Loads the specified sound font.
+ * @param data The soundfont data.
+ */
+ loadSoundFont(data) {
+ const input = ByteBuffer.fromBuffer(data);
+ const soundFont = new Hydra();
+ soundFont.load(input);
+ const programs = this._sequencer.instrumentPrograms;
+ const percussionKeys = this._sequencer.percussionKeys;
+ this._synth.loadPresets(soundFont, programs, percussionKeys, true);
+ }
+ /**
+ * Loads the specified presets.
+ * @param presets The presets to use.
+ */
+ loadPresets(presets) {
+ this._synth.presets = presets;
+ }
+ /**
+ * Limits the time range for which the export is done.
+ * @param range The time range
+ */
+ limitExport(range) {
+ this._sequencer.mainPlaybackRange = range;
+ this._sequencer.mainSeek(this._sequencer.mainTickPositionToTimePosition(range.startTick));
+ }
+ /**
+ * Sets the transposition pitch of a given channel. This pitch is additionally applied beside the
+ * ones applied already via {@link applyTranspositionPitches}.
+ * @param channel The channel number
+ * @param semitones The number of semitones to apply as pitch offset.
+ */
+ setChannelTranspositionPitch(channel, semitones) {
+ this._synth.setChannelTranspositionPitch(channel, semitones);
+ }
+ /**
+ * Applies the given transposition pitches used for general pitch changes that should be applied to the song.
+ * Used for general transpositions applied to the file.
+ * @param transpositionPitches A map defining for a given list of midi channels the number of semitones that should be adjusted.
+ */
+ applyTranspositionPitches(mainTranspositionPitches) {
+ this._synth.applyTranspositionPitches(mainTranspositionPitches);
+ }
+ /**
+ * Loads the given midi file for synthesis.
+ * @param midi The midi file.
+ */
+ loadMidiFile(midi) {
+ this._sequencer.loadMidi(midi);
+ }
+ /**
+ * Updates the sync points used for time synchronization with a backing track.
+ * @param syncPoints The sync points.
+ */
+ updateSyncPoints(syncPoints) {
+ this._sequencer.mainUpdateSyncPoints(syncPoints);
+ }
+ /**
+ * Sets the current and initial volume of the given channel.
+ * @param channel The channel number.
+ * @param volume The volume of of the channel (0.0-1.0)
+ */
+ channelSetMixVolume(channel, volume) {
+ volume = Math.max(volume, SynthConstants.MinVolume);
+ this._synth.channelSetMixVolume(channel, volume);
+ }
+ setup() {
+ this._synth.setupMetronomeChannel(this._synth.metronomeVolume);
+ const syncPoints = this._sequencer.currentSyncPoints;
+ const alphaTabEndTime = this._sequencer.currentEndTime;
+ if (syncPoints.length === 0) {
+ this._generatedAudioEndTime = alphaTabEndTime;
+ }
+ else {
+ const lastSyncPoint = syncPoints[syncPoints.length - 1];
+ let endTime = lastSyncPoint.syncTime;
+ const remainingTicks = this._sequencer.currentEndTick - lastSyncPoint.synthTick;
+ if (remainingTicks > 0) {
+ endTime += MidiUtils.ticksToMillis(remainingTicks, lastSyncPoint.syncBpm);
+ }
+ this._generatedAudioEndTime = endTime;
+ }
+ }
+ render(milliseconds) {
+ if (this._sequencer.isFinished) {
+ return undefined;
+ }
+ const oneMicroBufferMillis = (SynthConstants.MicroBufferSize * 1000) / this._synth.outSampleRate;
+ const microBufferCount = Math.ceil(milliseconds / oneMicroBufferMillis);
+ let samples = new Float32Array(SynthConstants.MicroBufferSize * microBufferCount * SynthConstants.AudioChannels);
+ const syncPoints = this._sequencer.currentSyncPoints;
+ let bufferPos = 0;
+ let subBufferTime = this._generatedAudioCurrentTime;
+ let alphaTabGeneratedMillis = 0;
+ for (let i = 0; i < microBufferCount; i++) {
+ // if we're applying sync points, we calculate the needed tempo and set the playback speed
+ if (syncPoints.length > 0) {
+ this._sequencer.currentUpdateSyncPoints(subBufferTime);
+ this._sequencer.currentUpdateCurrentTempo(this._sequencer.currentTime);
+ const newSpeed = this._sequencer.syncPointTempo / this._sequencer.currentTempo;
+ if (this._sequencer.playbackSpeed !== newSpeed) {
+ this._sequencer.playbackSpeed = newSpeed;
+ }
+ }
+ this._sequencer.fillMidiEventQueue();
+ this._synth.synthesize(samples, bufferPos, SynthConstants.MicroBufferSize);
+ bufferPos += SynthConstants.MicroBufferSize * SynthConstants.AudioChannels;
+ subBufferTime += oneMicroBufferMillis;
+ alphaTabGeneratedMillis += oneMicroBufferMillis * this._sequencer.playbackSpeed;
+ if (this._sequencer.isFinished) {
+ break;
+ }
+ }
+ if (bufferPos < samples.length) {
+ samples = samples.subarray(0, bufferPos);
+ }
+ const chunk = new AudioExportChunk();
+ chunk.currentTime = this._generatedAudioCurrentTime;
+ chunk.endTime = this._generatedAudioEndTime;
+ chunk.currentTick = this._sequencer.currentTimePositionToTickPosition(this._sequencer.currentTime);
+ chunk.endTick = this._sequencer.currentEndTick;
+ this._generatedAudioCurrentTime += milliseconds;
+ chunk.samples = samples;
+ if (this._sequencer.isFinished) {
+ this._synth.noteOffAll(true);
+ }
+ return chunk;
+ }
 }
 
 /**
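
Note on render(milliseconds) above: it synthesizes the requested span in micro-buffers and returns an AudioExportChunk whose samples field is an interleaved Float32Array, or undefined once the sequencer is finished. A small helper sketch for turning a chunk's sample count back into a duration (a hedged example; it assumes SynthConstants.AudioChannels is 2, i.e. interleaved stereo, which this hunk does not state explicitly):

    // Sketch: milliseconds of audio contained in one AudioExportChunk.
    function chunkDurationMillis(chunk, sampleRate, channels = 2 /* assumed */) {
        const frames = chunk.samples.length / channels; // interleaved samples -> frames
        return (frames / sampleRate) * 1000;
    }
    // e.g. chunkDurationMillis(chunk, options.sampleRate) with the options used for the export.
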
@@ -33282,6 +33547,7 @@ AlphaSynthWorkerSynthOutput.preferredSampleRate = 0;
 */
 class AlphaSynthWebWorker {
 constructor(main, bufferTimeInMilliseconds) {
+ this._exporter = new Map();
 this._main = main;
 this._main.addEventListener('message', this.handleMessage.bind(this));
 this._player = new AlphaSynth(new AlphaSynthWorkerSynthOutput(), bufferTimeInMilliseconds);
@@ -33397,6 +33663,53 @@ class AlphaSynthWebWorker {
 this._player.applyTranspositionPitches(new Map(JSON.parse(data.transpositionPitches)));
 break;
 }
+ if (cmd.startsWith('alphaSynth.exporter')) {
+ this.handleExporterMessage(e);
+ }
+ }
+ handleExporterMessage(e) {
+ const data = e.data;
+ const cmd = data.cmd;
+ try {
+ switch (cmd) {
+ case 'alphaSynth.exporter.initialize':
+ const exporter = this._player.exportAudio(data.options, JsonConverter.jsObjectToMidiFile(data.midi), data.syncPoints, data.transpositionPitches);
+ this._exporter.set(data.exporterId, exporter);
+ this._main.postMessage({
+ cmd: 'alphaSynth.exporter.initialized',
+ exporterId: data.exporterId
+ });
+ break;
+ case 'alphaSynth.exporter.render':
+ if (this._exporter.has(data.exporterId)) {
+ const exporter = this._exporter.get(data.exporterId);
+ const chunk = exporter.render(data.milliseconds);
+ this._main.postMessage({
+ cmd: 'alphaSynth.exporter.rendered',
+ exporterId: data.exporterId,
+ chunk
+ });
+ }
+ else {
+ this._main.postMessage({
+ cmd: 'alphaSynth.exporter.error',
+ exporterId: data.exporterId,
+ error: new Error('Unknown exporter ID')
+ });
+ }
+ break;
+ case 'alphaSynth.exporter.destroy':
+ this._exporter.delete(data.exporterId);
+ break;
+ }
+ }
+ catch (e) {
+ this._main.postMessage({
+ cmd: 'alphaSynth.exporter.error',
+ exporterId: data.exporterId,
+ error: e
+ });
+ }
 }
 onPositionChanged(e) {
 this._main.postMessage({
@@ -41703,8 +42016,7 @@ class AlphaTabApiBase {
 if (this._isDestroyed) {
 return;
 }
- if (this.hasCursor &&
- this.settings.player.enableUserInteraction) {
+ if (this.hasCursor && this.settings.player.enableUserInteraction) {
 this._selectionStart = new SelectionInfo(beat);
 this._selectionEnd = null;
 }
@@ -41744,8 +42056,7 @@ class AlphaTabApiBase {
 if (this._isDestroyed) {
 return;
 }
- if (this.hasCursor &&
- this.settings.player.enableUserInteraction) {
+ if (this.hasCursor && this.settings.player.enableUserInteraction) {
 if (this._selectionEnd) {
 const startTick = this._tickCache?.getBeatStart(this._selectionStart.beat) ??
 this._selectionStart.beat.absolutePlaybackStart;
@@ -41875,9 +42186,7 @@ class AlphaTabApiBase {
 }
 });
 this._renderer.postRenderFinished.on(() => {
- if (!this._selectionStart ||
- !this.hasCursor ||
- !this.settings.player.enableUserInteraction) {
+ if (!this._selectionStart || !this.hasCursor || !this.settings.player.enableUserInteraction) {
 return;
 }
 this.cursorSelectRange(this._selectionStart, this._selectionEnd);
@@ -42567,6 +42876,57 @@ class AlphaTabApiBase {
 async getOutputDevice() {
 return await this._player.output.getOutputDevice();
 }
+ /**
+ * Starts the audio export for the currently loaded song.
+ * @remarks
+ * This will not export or use any backing track media but will always use the synthesizer to generate the output.
+ * This method works with any PlayerMode active but changing the mode during export can lead to unexpected side effects.
+ * @param options The export options.
+ * @returns An exporter instance to export the audio in a streaming fashion.
+ */
+ async exportAudio(options) {
+ if (!this.score) {
+ throw new AlphaTabError(AlphaTabErrorType.General, 'No song loaded');
+ }
+ let exporter;
+ switch (this._actualPlayerMode) {
+ case PlayerMode.EnabledSynthesizer:
+ exporter = this.uiFacade.createWorkerAudioExporter(this._player.instance);
+ break;
+ default:
+ exporter = this.uiFacade.createWorkerAudioExporter(null);
+ break;
+ }
+ const score = this.score;
+ const midiFile = new MidiFile();
+ const handler = new AlphaSynthMidiFileHandler(midiFile);
+ const generator = new MidiFileGenerator(score, this.settings, handler);
+ generator.applyTranspositionPitches = false;
+ generator.generate();
+ const optionsWithChannels = new AudioExportOptions();
+ optionsWithChannels.soundFonts = options.soundFonts;
+ optionsWithChannels.sampleRate = options.sampleRate;
+ optionsWithChannels.useSyncPoints = options.useSyncPoints;
+ optionsWithChannels.masterVolume = options.masterVolume;
+ optionsWithChannels.metronomeVolume = options.metronomeVolume;
+ optionsWithChannels.playbackRange = options.playbackRange;
+ for (const [trackIndex, volume] of options.trackVolume) {
+ if (trackIndex < this.score.tracks.length) {
+ const track = this.score.tracks[trackIndex];
+ optionsWithChannels.trackVolume.set(track.playbackInfo.primaryChannel, volume);
+ optionsWithChannels.trackVolume.set(track.playbackInfo.secondaryChannel, volume);
+ }
+ }
+ for (const [trackIndex, semitones] of options.trackTranspositionPitches) {
+ if (trackIndex < this.score.tracks.length) {
+ const track = this.score.tracks[trackIndex];
+ optionsWithChannels.trackTranspositionPitches.set(track.playbackInfo.primaryChannel, semitones);
+ optionsWithChannels.trackTranspositionPitches.set(track.playbackInfo.secondaryChannel, semitones);
+ }
+ }
+ await exporter.initialize(optionsWithChannels, midiFile, generator.syncPoints, generator.transpositionPitches);
+ return exporter;
+ }
 }
 
 /**
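
Together with the AudioExportOptions shown earlier in this diff, the new AlphaTabApiBase.exportAudio is driven as a simple pull loop: request a number of milliseconds per call and collect the returned chunks until render() resolves to undefined. A minimal consumption sketch (hedged; the api variable, the 500 ms chunk size and the plain in-memory collection are illustrative and not part of the diff):

    // Sketch: stream the synthesized song into memory chunk by chunk.
    async function exportSongSamples(api, options) {
        const exporter = await api.exportAudio(options);
        const chunks = [];
        try {
            while (true) {
                const chunk = await exporter.render(500); // request roughly 500ms of audio
                if (!chunk) {
                    break; // sequencer finished
                }
                chunks.push(chunk.samples); // interleaved Float32Array at options.sampleRate
            }
        }
        finally {
            exporter.destroy(); // releases the worker-side exporter (and the worker, if one was created just for this export)
        }
        return chunks;
    }
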
@@ -43400,6 +43760,9 @@ class AlphaSynthWebWorkerApi {
 get logLevel() {
 return Logger.logLevel;
 }
+ get worker() {
+ return this._synth;
+ }
 set logLevel(value) {
 Logger.logLevel = value;
 this._synth.postMessage({
@@ -44252,6 +44615,87 @@ class AudioElementBackingTrackSynthOutput {
 }
 }
 
+ /**
+ * @target web
+ */
+ class AlphaSynthAudioExporterWorkerApi {
+ constructor(synthWorker, ownsWorker) {
+ this._promise = null;
+ this._exporterId = AlphaSynthAudioExporterWorkerApi._nextExporterId++;
+ this._worker = synthWorker;
+ this._ownsWorker = ownsWorker;
+ }
+ async initialize(options, midi, syncPoints, transpositionPitches) {
+ const onmessage = this.handleWorkerMessage.bind(this);
+ this._worker.worker.addEventListener('message', onmessage, false);
+ this._unsubscribe = () => {
+ this._worker.worker.removeEventListener('message', onmessage, false);
+ };
+ this._promise = Promise.withResolvers();
+ this._worker.worker.postMessage({
+ cmd: 'alphaSynth.exporter.initialize',
+ exporterId: this._exporterId,
+ options: Environment.prepareForPostMessage(options),
+ midi: JsonConverter.midiFileToJsObject(Environment.prepareForPostMessage(midi)),
+ syncPoints: Environment.prepareForPostMessage(syncPoints),
+ transpositionPitches: Environment.prepareForPostMessage(transpositionPitches)
+ });
+ await this._promise.promise;
+ }
+ handleWorkerMessage(e) {
+ const data = e.data;
+ // for us?
+ if (data.exporterId !== this._exporterId) {
+ return;
+ }
+ const cmd = data.cmd;
+ switch (cmd) {
+ case 'alphaSynth.exporter.initialized':
+ this._promise?.resolve(null);
+ this._promise = null;
+ break;
+ case 'alphaSynth.exporter.error':
+ this._promise?.reject(data.error);
+ this._promise = null;
+ break;
+ case 'alphaSynth.exporter.rendered':
+ this._promise?.resolve(data.chunk);
+ this._promise = null;
+ break;
+ case 'alphaSynth.destroyed':
+ this._promise?.reject(new AlphaTabError(AlphaTabErrorType.General, 'Worker was destroyed'));
+ this._promise = null;
+ break;
+ }
+ }
+ async render(milliseconds) {
+ if (this._promise) {
+ throw new AlphaTabError(AlphaTabErrorType.General, 'There is already an ongoing operation, wait for initialize to complete before requesting render');
+ }
+ this._promise = Promise.withResolvers();
+ this._worker.worker.postMessage({
+ cmd: 'alphaSynth.exporter.render',
+ exporterId: this._exporterId,
+ milliseconds: milliseconds
+ });
+ return (await this._promise.promise);
+ }
+ destroy() {
+ this._worker.worker.postMessage({
+ cmd: 'alphaSynth.exporter.destroy',
+ exporterId: this._exporterId
+ });
+ this._unsubscribe();
+ if (this._ownsWorker) {
+ this._worker.destroy();
+ }
+ }
+ [Symbol.dispose]() {
+ this.destroy();
+ }
+ }
+ AlphaSynthAudioExporterWorkerApi._nextExporterId = 1;
+
 /**
 * @target web
 */
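
Because AlphaSynthAudioExporterWorkerApi implements [Symbol.dispose] (which calls destroy()), runtimes or toolchains with explicit resource management can scope the exporter with a using declaration instead of the manual try/finally from the earlier sketch. A hedged sketch, assuming such support is available:

    // Sketch: automatic cleanup via explicit resource management.
    async function exportWithUsing(api, options) {
        using exporter = await api.exportAudio(options); // exporter.destroy() runs when this scope exits
        const chunks = [];
        let chunk;
        while ((chunk = await exporter.render(1000)) !== undefined) {
            chunks.push(chunk);
        }
        return chunks;
    }
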
@@ -44818,6 +45262,14 @@ class BrowserUiFacade {
 }
 return player;
 }
+ createWorkerAudioExporter(synth) {
+ const needNewWorker = synth === null || !(synth instanceof AlphaSynthWebWorkerApi);
+ if (needNewWorker) {
+ // nowadays we require browsers with workers
+ synth = this.createWorkerPlayer();
+ }
+ return new AlphaSynthAudioExporterWorkerApi(synth, needNewWorker);
+ }
 beginInvoke(action) {
 window.requestAnimationFrame(() => {
 action();
@@ -61275,9 +61727,9 @@ class VersionInfo {
 print(`build date: ${VersionInfo.date}`);
 }
 }
- VersionInfo.version = '1.6.0-alpha.
- VersionInfo.date = '2025-06-
- VersionInfo.commit = '
+ VersionInfo.version = '1.6.0-alpha.1448';
+ VersionInfo.date = '2025-06-14T22:07:33.833Z';
+ VersionInfo.commit = 'bfeddfaced057b74c2fa71fa58aa407467dd7460';
 
 /**
 * A factory for custom layout engines.
@@ -61769,17 +62221,27 @@ class Environment {
 * @target web
 */
 static detectWebPlatform() {
-
-
-
-
-
-
-
-
+ // There might be polyfills or platforms like Electron which have a global process object defined even in the browser.
+ // We need to differenciate between those platforms and a real nodejs
+ // the webPlatform is currently only relevant on the main process side and not within workers/worklets
+ // so it is OK if we wrongly detect node.js inside them.
+ const isBrowserLike =
+ // browser UI thread
+ typeof Environment.globalThis.Window !== 'undefined' &&
+ Environment.globalThis instanceof Environment.globalThis.Window;
+ if (!isBrowserLike) {
+ try {
+ // Credit of the node.js detection goes to
+ // https://github.com/iliakan/detect-node
+ // MIT License
+ // Copyright (c) 2017 Ilya Kantor
+ // tslint:disable-next-line: strict-type-predicates
+ if (Object.prototype.toString.call(typeof process !== 'undefined' ? process : 0) === '[object process]') {
+ return WebPlatform.NodeJs;
+ }
+ }
+ catch (e) {
 }
- }
- catch (e) {
 }
 try {
 // @ts-ignore
@@ -61842,7 +62304,7 @@ class Environment {
 }
 // Solidjs unwrap: the symbol required to access the raw object is unfortunately hidden and we cannot unwrap it without importing
 // import { unwrap } from "solid-js/store"
- // alternative for users is to replace this method during runtime.
+ // alternative for users is to replace this method during runtime.
 return object;
 }
 }
@@ -61937,7 +62399,7 @@ class CoreSettings {
 /**
 * Builds the default SMuFL font sources for the usage with alphaTab in cases
 * where no custom {@link smuflFontSources} are provided.
- * @param fontDirectory The {@link fontDirectory} configured.
+ * @param fontDirectory The {@link CoreSettings.fontDirectory} configured.
 * @target web
 */
 static buildDefaultSmuflFontSources(fontDirectory) {
@@ -65698,6 +66160,8 @@ const _barrel = /*#__PURE__*/Object.freeze(/*#__PURE__*/Object.defineProperty({
 AlphaSynthScriptProcessorOutput,
 AlphaSynthWebAudioOutputBase,
 AlphaSynthWebWorkerApi,
+ AudioExportChunk,
+ AudioExportOptions,
 BackingTrackSyncPoint,
 CircularSampleBuffer,
 MidiEventsPlayedEventArgs,