@coderline/alphatab 1.6.0-alpha.1418 → 1.6.0-alpha.1420
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/alphaTab.core.min.mjs +2 -2
- package/dist/alphaTab.core.mjs +217 -133
- package/dist/alphaTab.d.ts +42 -11
- package/dist/alphaTab.js +217 -133
- package/dist/alphaTab.min.js +2 -2
- package/dist/alphaTab.min.mjs +1 -1
- package/dist/alphaTab.mjs +1 -1
- package/dist/alphaTab.vite.js +1 -1
- package/dist/alphaTab.vite.mjs +1 -1
- package/dist/alphaTab.webpack.js +1 -1
- package/dist/alphaTab.webpack.mjs +1 -1
- package/dist/alphaTab.worker.min.mjs +1 -1
- package/dist/alphaTab.worker.mjs +1 -1
- package/dist/alphaTab.worklet.min.mjs +1 -1
- package/dist/alphaTab.worklet.mjs +1 -1
- package/package.json +1 -1
package/dist/alphaTab.core.mjs
CHANGED
@@ -1,5 +1,5 @@
 /*!
- * alphaTab v1.6.0-alpha.
+ * alphaTab v1.6.0-alpha.1420 (develop, build 1420)
  *
  * Copyright © 2025, Daniel Kuschny and Contributors, All rights reserved.
  *
@@ -14312,13 +14312,6 @@ class XmlDocument extends XmlNode {
  * @json_strict
  */
 class BackingTrack {
-    constructor() {
-        /**
-         * The number of milliseconds the audio should be shifted to align with the song.
-         * (e.g. negative values allow skipping potential silent parts at the start of the file and directly start with the first note).
-         */
-        this.padding = 0;
-    }
 }
 
 /**
@@ -14351,6 +14344,7 @@ class GpifParser {
     constructor() {
         this._hasAnacrusis = false;
         this._skipApplyLyrics = false;
+        this._backingTrackPadding = 0;
         this._doubleBars = new Set();
         this._keySignatures = new Map();
     }
@@ -14583,7 +14577,7 @@ class GpifParser {
                     assetId = c.innerText;
                     break;
                 case 'FramePadding':
-
+                    this._backingTrackPadding = GpifParser.parseIntSafe(c.innerText, 0) / GpifParser.SampleRate * 1000;
                     break;
             }
         }
@@ -16797,6 +16791,7 @@ class GpifParser {
                     masterBar.tempoAutomations.push(automation);
                     break;
                 case AutomationType.SyncPoint:
+                    automation.syncPointValue.millisecondOffset -= this._backingTrackPadding;
                     masterBar.addSyncPoint(automation);
                     break;
             }
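Note: the GpifParser hunks above work together. Guitar Pro stores the backing-track FramePadding as a sample count, so the parser converts it to milliseconds (samples / GpifParser.SampleRate * 1000) and then subtracts that padding from each parsed sync point's millisecondOffset, instead of keeping a padding field on BackingTrack. A minimal sketch of the same arithmetic with assumed numbers (the 44100 Hz rate stands in for GpifParser.SampleRate, whose actual value is not shown in this diff):

    // illustrative only – values are assumptions, not taken from the package
    const sampleRate = 44100;                                  // assumed GpifParser.SampleRate
    const framePaddingSamples = 22050;                         // example <FramePadding> value
    const paddingMs = framePaddingSamples / sampleRate * 1000; // 500 ms
    // each parsed sync point is then shifted:
    // automation.syncPointValue.millisecondOffset -= paddingMs;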
@@ -22285,17 +22280,6 @@ class SynthEvent {
     }
 }
 
-/**
- * Rerpresents a point to sync the alphaTab time axis with an external backing track.
- */
-class BackingTrackSyncPoint {
-    constructor(tick, data) {
-        this.tick = 0;
-        this.tick = tick;
-        this.data = data;
-    }
-}
-
 class MidiFileSequencerTempoChange {
     constructor(bpm, ticks, time) {
         this.bpm = bpm;
@@ -22303,10 +22287,12 @@ class MidiFileSequencerTempoChange {
         this.time = time;
     }
 }
-class BackingTrackSyncPointWithTime
-    constructor(tick,
-
-    this.
+class BackingTrackSyncPointWithTime {
+    constructor(tick, time, modifiedTempo, millisecondOffset) {
+        this.alphaTabTick = tick;
+        this.alphaTabTime = time;
+        this.modifiedTempo = modifiedTempo;
+        this.millisecondOffset = millisecondOffset;
     }
 }
 class MidiSequencerState {
@@ -22412,7 +22398,7 @@ class MidiFileSequencer {
         this._mainState.currentTempo = this._mainState.tempoChanges[0].bpm;
         this._mainState.modifiedTempo =
             this._mainState.syncPoints.length > 0
-                ? this._mainState.syncPoints[0].
+                ? this._mainState.syncPoints[0].modifiedTempo
                 : this._mainState.currentTempo;
         if (this.isPlayingMain) {
             const metronomeVolume = this._synthesizer.metronomeVolume;
@@ -22584,25 +22570,52 @@ class MidiFileSequencer {
     mainUpdateSyncPoints(syncPoints) {
         const state = this._mainState;
         syncPoints.sort((a, b) => a.tick - b.tick); // just in case
-        state.syncPoints =
+        state.syncPoints = [];
         if (syncPoints.length >= 0) {
             let bpm = 120;
             let absTick = 0;
             let absTime = 0.0;
-            let previousTick = 0;
             let tempoChangeIndex = 0;
             for (let i = 0; i < syncPoints.length; i++) {
                 const p = syncPoints[i];
-
-
-
-
-                previousTick
+                let deltaTick = 0;
+                // remember state from previous sync point (or start). to handle linear interpolation
+                let previousModifiedTempo;
+                let previousMillisecondOffset;
+                let previousTick;
+                if (i === 0) {
+                    previousModifiedTempo = bpm;
+                    previousMillisecondOffset = 0;
+                    previousTick = 0;
+                }
+                else {
+                    const previousSyncPoint = syncPoints[i - 1];
+                    previousModifiedTempo = previousSyncPoint.data.modifiedTempo;
+                    previousMillisecondOffset = previousSyncPoint.data.millisecondOffset;
+                    previousTick = previousSyncPoint.tick;
+                }
+                // process time until sync point
+                // here it gets a bit tricky. if we have tempo changes on the synthesizer time axis (inbetween two sync points)
+                // we have to calculate a interpolated sync point on the alphaTab time axis.
+                // otherwise the linear interpolation later in the lookup will fail.
+                // goal is to have always a linear increase between two points, no matter if the time axis is sliced by tempo changes or sync points
                 while (tempoChangeIndex < state.tempoChanges.length &&
-                    state.tempoChanges[tempoChangeIndex].ticks <=
+                    state.tempoChanges[tempoChangeIndex].ticks <= p.tick) {
+                    deltaTick = state.tempoChanges[tempoChangeIndex].ticks - absTick;
+                    if (deltaTick > 0) {
+                        absTick += deltaTick;
+                        absTime += deltaTick * (60000.0 / (bpm * state.division));
+                        const millisPerTick = (p.data.millisecondOffset - previousMillisecondOffset) / (p.tick - previousTick);
+                        const interpolatedMillisecondOffset = (absTick - previousTick) * millisPerTick + previousMillisecondOffset;
+                        state.syncPoints.push(new BackingTrackSyncPointWithTime(absTick, absTime, previousModifiedTempo, interpolatedMillisecondOffset));
+                    }
                     bpm = state.tempoChanges[tempoChangeIndex].bpm;
                     tempoChangeIndex++;
                 }
+                deltaTick = p.tick - absTick;
+                absTick += deltaTick;
+                absTime += deltaTick * (60000.0 / (bpm * state.division));
+                state.syncPoints.push(new BackingTrackSyncPointWithTime(p.tick, absTime, p.data.modifiedTempo, p.data.millisecondOffset));
             }
         }
         state.syncPointIndex = 0;
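Note: the comments in this hunk state the invariant the sequencer relies on: between any two stored sync points the mapping from alphaTab time to backing-track time must grow linearly, so whenever a tempo change falls between two user-defined sync points an interpolated point is inserted on the alphaTab time axis. A short sketch of that interpolation with assumed numbers (purely illustrative, not taken from the package):

    // illustrative only
    const prev = { tick: 0, millisecondOffset: 0 };       // previous sync point
    const next = { tick: 1920, millisecondOffset: 4000 }; // next sync point
    const tempoChangeTick = 960;                          // tempo change between the two
    const millisPerTick = (next.millisecondOffset - prev.millisecondOffset) / (next.tick - prev.tick);
    const interpolated = (tempoChangeTick - prev.tick) * millisPerTick + prev.millisecondOffset; // 2000 ms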
@@ -22616,7 +22629,7 @@ class MidiFileSequencer {
         this.updateCurrentTempo(state, timePosition);
         const lastTempoChange = state.tempoChanges[state.tempoChangeIndex];
         const timeDiff = timePosition - lastTempoChange.time;
-        const ticks = (
+        const ticks = (timeDiff / (60000.0 / (lastTempoChange.bpm * state.division))) | 0;
         // we add 1 for possible rounding errors.(floating point issuses)
         return lastTempoChange.ticks + ticks + 1;
     }
@@ -22636,16 +22649,16 @@ class MidiFileSequencer {
         const syncPoints = state.syncPoints;
         if (syncPoints.length > 0) {
             let syncPointIndex = Math.min(state.syncPointIndex, syncPoints.length - 1);
-            if (timePosition < syncPoints[syncPointIndex].
+            if (timePosition < syncPoints[syncPointIndex].millisecondOffset) {
                 syncPointIndex = 0;
             }
             while (syncPointIndex + 1 < syncPoints.length &&
-                syncPoints[syncPointIndex + 1].
+                syncPoints[syncPointIndex + 1].millisecondOffset <= timePosition) {
                 syncPointIndex++;
             }
             if (syncPointIndex !== state.syncPointIndex) {
                 state.syncPointIndex = syncPointIndex;
-                state.modifiedTempo = syncPoints[syncPointIndex].
+                state.modifiedTempo = syncPoints[syncPointIndex].modifiedTempo;
             }
         }
         else {
@@ -22661,18 +22674,18 @@ class MidiFileSequencer {
         this.updateCurrentTempo(this._mainState, timePosition);
         const syncPointIndex = Math.min(mainState.syncPointIndex, syncPoints.length - 1);
         const currentSyncPoint = syncPoints[syncPointIndex];
-        const timeDiff = timePosition - currentSyncPoint.
+        const timeDiff = timePosition - currentSyncPoint.millisecondOffset;
         let alphaTabTimeDiff;
         if (syncPointIndex + 1 < syncPoints.length) {
             const nextSyncPoint = syncPoints[syncPointIndex + 1];
-            const relativeTimeDiff = timeDiff / (nextSyncPoint.
-            alphaTabTimeDiff = (nextSyncPoint.
+            const relativeTimeDiff = timeDiff / (nextSyncPoint.millisecondOffset - currentSyncPoint.millisecondOffset);
+            alphaTabTimeDiff = (nextSyncPoint.alphaTabTime - currentSyncPoint.alphaTabTime) * relativeTimeDiff;
         }
         else {
-            const relativeTimeDiff = timeDiff / (backingTrackLength - currentSyncPoint.
-            alphaTabTimeDiff = (mainState.endTime - currentSyncPoint.
+            const relativeTimeDiff = timeDiff / (backingTrackLength - currentSyncPoint.millisecondOffset);
+            alphaTabTimeDiff = (mainState.endTime - currentSyncPoint.alphaTabTime) * relativeTimeDiff;
         }
-        return (currentSyncPoint.
+        return (currentSyncPoint.alphaTabTime + alphaTabTimeDiff) / this.playbackSpeed;
     }
     mainTimePositionToBackingTrack(timePosition, backingTrackLength) {
         const mainState = this._mainState;
@@ -22682,25 +22695,27 @@ class MidiFileSequencer {
         }
         timePosition *= this.playbackSpeed;
         let syncPointIndex = Math.min(mainState.syncPointIndex, syncPoints.length - 1);
-        if (timePosition < syncPoints[syncPointIndex].
+        if (timePosition < syncPoints[syncPointIndex].alphaTabTime) {
            syncPointIndex = 0;
         }
-        while (syncPointIndex + 1 < syncPoints.length && syncPoints[syncPointIndex + 1].
+        while (syncPointIndex + 1 < syncPoints.length && syncPoints[syncPointIndex + 1].alphaTabTime <= timePosition) {
            syncPointIndex++;
         }
+        // NOTE: this logic heavily relies on the interpolation done in mainUpdateSyncPoints
+        // we ensure that we have a linear increase between two points
         const currentSyncPoint = syncPoints[syncPointIndex];
-        const alphaTabTimeDiff = timePosition - currentSyncPoint.
+        const alphaTabTimeDiff = timePosition - currentSyncPoint.alphaTabTime;
         let backingTrackPos;
         if (syncPointIndex + 1 < syncPoints.length) {
             const nextSyncPoint = syncPoints[syncPointIndex + 1];
-            const relativeAlphaTabTimeDiff = alphaTabTimeDiff / (nextSyncPoint.
-            const backingTrackDiff = nextSyncPoint.
-            backingTrackPos = currentSyncPoint.
+            const relativeAlphaTabTimeDiff = alphaTabTimeDiff / (nextSyncPoint.alphaTabTime - currentSyncPoint.alphaTabTime);
+            const backingTrackDiff = nextSyncPoint.millisecondOffset - currentSyncPoint.millisecondOffset;
+            backingTrackPos = currentSyncPoint.millisecondOffset + backingTrackDiff * relativeAlphaTabTimeDiff;
         }
         else {
-            const relativeAlphaTabTimeDiff = alphaTabTimeDiff / (mainState.endTime - currentSyncPoint.
-            const frameDiff = backingTrackLength - currentSyncPoint.
-            backingTrackPos = currentSyncPoint.
+            const relativeAlphaTabTimeDiff = alphaTabTimeDiff / (mainState.endTime - currentSyncPoint.alphaTabTime);
+            const frameDiff = backingTrackLength - currentSyncPoint.millisecondOffset;
+            backingTrackPos = currentSyncPoint.millisecondOffset + frameDiff * relativeAlphaTabTimeDiff;
         }
         return backingTrackPos;
     }
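Note: the two conversion methods above are inverses of the same piecewise-linear mapping. Within the segment between sync points i and i+1 (writing atTime for alphaTabTime and offset for millisecondOffset; names chosen here for brevity, not from the package):

    backingTrackPos = offset[i] + (alphaTabPos - atTime[i]) / (atTime[i+1] - atTime[i]) * (offset[i+1] - offset[i])
    alphaTabPos     = atTime[i] + (backingTrackPos - offset[i]) / (offset[i+1] - offset[i]) * (atTime[i+1] - atTime[i])

The last segment uses the backing-track length and mainState.endTime as its upper bounds, and playbackSpeed is applied once per direction (multiplied when going to the backing track, divided when coming back).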
@@ -28582,7 +28597,9 @@ class AlphaSynthBase {
     hasSamplesForPercussion(key) {
         return this.synthesizer.hasSamplesForPercussion(key);
     }
-    loadBackingTrack(_score
+    loadBackingTrack(_score) {
+    }
+    updateSyncPoints(_syncPoints) {
     }
 }
 /**
@@ -32479,15 +32496,9 @@ class BackingTrackSerializer {
             return null;
         }
         const o = new Map();
-        o.set("padding", obj.padding);
         return o;
     }
     static setProperty(obj, property, v) {
-        switch (property) {
-            case "padding":
-                obj.padding = v;
-                return true;
-        }
         return false;
     }
 }
@@ -34950,7 +34961,13 @@ class MasterBarTickLookup {
          * Gets the list of tempo changes within the tick lookup.
          */
         this.tempoChanges = [];
+        /**
+         * The first beat in the bar.
+         */
         this.firstBeat = null;
+        /**
+         * The last beat in the bar.
+         */
         this.lastBeat = null;
         /**
          * Gets or sets the {@link MasterBarTickLookup} of the next masterbar in the {@link Score}
@@ -35364,14 +35381,13 @@ class MidiTickLookup {
     constructor() {
         this._currentMasterBar = null;
         /**
-         *
+         * A dictionary of all master bars played. The index is the index equals to {@link MasterBar.index}.
          * This lookup only contains the first time a MasterBar is played. For a whole sequence of the song refer to {@link MasterBars}.
          * @internal
         */
         this.masterBarLookup = new Map();
         /**
-         *
-         * @internal
+         * A list of all {@link MasterBarTickLookup} sorted by time.
         */
         this.masterBars = [];
         /**
@@ -35745,6 +35761,17 @@ class MidiTickLookup {
     }
 }
 
+/**
+ * Rerpresents a point to sync the alphaTab time axis with an external backing track.
+ */
+class BackingTrackSyncPoint {
+    constructor(tick, data) {
+        this.tick = 0;
+        this.tick = tick;
+        this.data = data;
+    }
+}
+
 class MidiNoteDuration {
     constructor() {
         this.noteOnly = 0;
@@ -35814,62 +35841,22 @@ class MidiFileGenerator {
             this.generateTrack(track);
         }
         Logger.debug('Midi', 'Begin midi generation');
-
-
-
-
-
-
-
-
-            const currentTick = controller.currentTick;
-            controller.processCurrent();
-            if (controller.shouldPlay) {
-                let occurence = barOccurence.has(index) ? barOccurence.get(index) : -1;
-                occurence++;
-                barOccurence.set(index, occurence);
-                this.generateMasterBar(bar, previousMasterBar, currentTick, currentTempo, occurence);
-                if (bar.tempoAutomations.length > 0) {
-                    currentTempo = bar.tempoAutomations[0].value;
-                }
-                for (const track of this._score.tracks) {
-                    for (const staff of track.staves) {
-                        if (index < staff.bars.length) {
-                            this.generateBar(staff.bars[index], currentTick, currentTempo);
-                        }
+        this.syncPoints = [];
+        MidiFileGenerator.playThroughSong(this._score, this.syncPoints, (bar, previousMasterBar, currentTick, currentTempo, occurence) => {
+            this.generateMasterBar(bar, previousMasterBar, currentTick, currentTempo, occurence);
+        }, (index, currentTick, currentTempo) => {
+            for (const track of this._score.tracks) {
+                for (const staff of track.staves) {
+                    if (index < staff.bars.length) {
+                        this.generateBar(staff.bars[index], currentTick, currentTempo);
                     }
                 }
-                if (bar.tempoAutomations.length > 0) {
-                    currentTempo = bar.tempoAutomations[bar.tempoAutomations.length - 1].value;
-                }
             }
-
-
-
-        // here we interpolate the sync point which marks the end of the sync.
-        // Sync points define new tempos at certain positions.
-        // looking from the last sync point to the end we do not assume the end where the audio ends,
-        // but where it ends according to the BPM and the remaining ticks.
-        if (this.syncPoints.length > 0) {
-            const lastSyncPoint = this.syncPoints[this.syncPoints.length - 1];
-            const endTick = controller.currentTick;
-            const remainingTicks = endTick - lastSyncPoint.tick;
-            if (remainingTicks > 0) {
-                const syncPointData = new SyncPointData();
-                // last occurence of the last bar
-                syncPointData.barOccurence = barOccurence.get(this._score.masterBars.length - 1);
-                // same tempo as last point
-                syncPointData.modifiedTempo = lastSyncPoint.data.modifiedTempo;
-                // interpolated end from last syncPoint
-                syncPointData.millisecondOffset =
-                    lastSyncPoint.data.millisecondOffset +
-                        MidiUtils.ticksToMillis(remainingTicks, syncPointData.modifiedTempo);
-                this.syncPoints.push(new BackingTrackSyncPoint(endTick, syncPointData));
+        }, endTick => {
+            for (const track of this._score.tracks) {
+                this._handler.finishTrack(track.index, endTick);
             }
-        }
-        for (const track of this._score.tracks) {
-            this._handler.finishTrack(track.index, controller.currentTick);
-        }
+        });
         Logger.debug('Midi', 'Midi generation done');
     }
     generateTrack(track) {
@@ -35914,6 +35901,81 @@ class MidiFileGenerator {
         this._handler.addControlChange(track.index, 0, channel, ControllerType.DataEntryCoarse, MidiFileGenerator.PitchBendRangeInSemitones);
         this.addProgramChange(track, 0, channel, playbackInfo.program);
     }
+    /**
+     * Generates the sync points for the given score without re-generating the midi itself.
+     * @remarks
+     * Use this method if a re-generation of the sync points after modification is required.
+     * It correctly handles repeats and places sync points accoridng to their absolute midi tick when they
+     * need to be considered for synchronization.
+     * @param score The song for which to regenerate the sync points.
+     * @returns The generated sync points for usage in the backing track playback.
+     */
+    static generateSyncPoints(score) {
+        const syncPoints = [];
+        MidiFileGenerator.playThroughSong(score, syncPoints, (_masterBar, _previousMasterBar, _currentTick, _currentTempo, _barOccurence) => {
+        }, (_barIndex, _currentTick, _currentTempo) => {
+        }, _endTick => {
+        });
+        return syncPoints;
+    }
+    static playThroughSong(score, syncPoints, generateMasterBar, generateTracks, finish) {
+        const controller = new MidiPlaybackController(score);
+        let currentTempo = score.tempo;
+        let previousMasterBar = null;
+        // store the previous played bar for repeats
+        const barOccurence = new Map();
+        while (!controller.finished) {
+            const index = controller.index;
+            const bar = score.masterBars[index];
+            const currentTick = controller.currentTick;
+            controller.processCurrent();
+            if (controller.shouldPlay) {
+                let occurence = barOccurence.has(index) ? barOccurence.get(index) : -1;
+                occurence++;
+                barOccurence.set(index, occurence);
+                generateMasterBar(bar, previousMasterBar, currentTick, currentTempo, occurence);
+                const barSyncPoints = bar.syncPoints;
+                if (barSyncPoints) {
+                    for (const syncPoint of barSyncPoints) {
+                        if (syncPoint.syncPointValue.barOccurence === occurence) {
+                            const tick = currentTick + bar.calculateDuration() * syncPoint.ratioPosition;
+                            syncPoints.push(new BackingTrackSyncPoint(tick, syncPoint.syncPointValue));
+                        }
+                    }
+                }
+                if (bar.tempoAutomations.length > 0) {
+                    currentTempo = bar.tempoAutomations[0].value;
+                }
+                generateTracks(index, currentTick, currentTempo);
+                if (bar.tempoAutomations.length > 0) {
+                    currentTempo = bar.tempoAutomations[bar.tempoAutomations.length - 1].value;
+                }
+            }
+            controller.moveNext();
+            previousMasterBar = bar;
+        }
+        // here we interpolate the sync point which marks the end of the sync.
+        // Sync points define new tempos at certain positions.
+        // looking from the last sync point to the end we do not assume the end where the audio ends,
+        // but where it ends according to the BPM and the remaining ticks.
+        if (syncPoints.length > 0) {
+            const lastSyncPoint = syncPoints[syncPoints.length - 1];
+            const remainingTicks = controller.currentTick - lastSyncPoint.tick;
+            if (remainingTicks > 0) {
+                const syncPointData = new SyncPointData();
+                // last occurence of the last bar
+                syncPointData.barOccurence = barOccurence.get(score.masterBars.length - 1);
+                // same tempo as last point
+                syncPointData.modifiedTempo = lastSyncPoint.data.modifiedTempo;
+                // interpolated end from last syncPoint
+                syncPointData.millisecondOffset =
+                    lastSyncPoint.data.millisecondOffset +
+                        MidiUtils.ticksToMillis(remainingTicks, syncPointData.modifiedTempo);
+                syncPoints.push(new BackingTrackSyncPoint(controller.currentTick, syncPointData));
+            }
+        }
+        finish(controller.currentTick);
+    }
     static toChannelShort(data) {
         const value = Math.max(-32768, Math.min(32767, data * 8 - 1));
         return Math.max(value, -1) + 1;
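Note: the trailing block of playThroughSong synthesizes one extra sync point at the very end of the song; its offset is the last real sync point's offset plus the remaining ticks converted at the tempo carried over from that point. A worked example with assumed numbers (960 ticks per quarter note is an assumption for illustration, as is the helper-free arithmetic that stands in for MidiUtils.ticksToMillis):

    // illustrative only
    const remainingTicks = 3840;                // 4 quarter notes after the last sync point
    const modifiedTempo = 120;                  // bpm carried over from the last sync point
    const msPerQuarter = 60000 / modifiedTempo; // 500 ms
    const endOffsetMs = lastOffsetMs + (remainingTicks / 960) * msPerQuarter; // lastOffsetMs + 2000 ms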
@@ -38250,21 +38312,23 @@ class BackingTrackPlayer extends AlphaSynthBase {
             this._backingTrackOutput.seekTo(this.sequencer.mainTimePositionToBackingTrack(timePosition, this._backingTrackOutput.backingTrackDuration));
         }
     }
-    loadBackingTrack(score
+    loadBackingTrack(score) {
         const backingTrackInfo = score.backingTrack;
         if (backingTrackInfo) {
             this._backingTrackOutput.loadBackingTrack(backingTrackInfo);
-            this.sequencer.mainUpdateSyncPoints(syncPoints);
             this.timePosition = 0;
         }
     }
+    updateSyncPoints(syncPoints) {
+        this.sequencer.mainUpdateSyncPoints(syncPoints);
+        this.tickPosition = this.tickPosition;
+    }
 }
 
 class ExternalMediaSynthOutput {
     constructor() {
         // fake rate
         this.sampleRate = 44100;
-        this._padding = 0;
         this._seekPosition = 0;
         this.ready = new EventEmitter();
         this.samplesPlayed = new EventEmitterOfT();
@@ -38307,20 +38371,19 @@ class ExternalMediaSynthOutput {
     seekTo(time) {
         const handler = this.handler;
         if (handler) {
-            handler.seekTo(time
+            handler.seekTo(time);
         }
         else {
-            this._seekPosition = time
+            this._seekPosition = time;
         }
     }
-    loadBackingTrack(
-        this._padding = backingTrack.padding;
+    loadBackingTrack(_backingTrack) {
     }
     open(_bufferTimeInMilliseconds) {
         this.ready.trigger();
     }
     updatePosition(currentTime) {
-        this.timeUpdate.trigger(currentTime
+        this.timeUpdate.trigger(currentTime);
     }
     play() {
         this.handler?.play();
@@ -38585,9 +38648,14 @@ class AlphaSynthWrapper {
             this._instance.loadMidiFile(midi);
         }
     }
-    loadBackingTrack(score
+    loadBackingTrack(score) {
         if (this._instance) {
-            this._instance.loadBackingTrack(score
+            this._instance.loadBackingTrack(score);
+        }
+    }
+    updateSyncPoints(syncPoints) {
+        if (this._instance) {
+            this._instance.updateSyncPoints(syncPoints);
         }
     }
     applyTranspositionPitches(transpositionPitches) {
@@ -39916,7 +39984,7 @@ class AlphaTabApiBase {
     }
     appendRenderResult(result, isLast) {
         // resizing the canvas and wrapper elements at the end is enough
-        // it avoids flickering on resizes and re-renders.
+        // it avoids flickering on resizes and re-renders.
         // the individual partials are anyhow sized correctly
         if (isLast) {
             this.canvasElement.width = result.totalWidth;
@@ -40678,9 +40746,23 @@ class AlphaTabApiBase {
         this.onMidiLoad(midiFile);
         const player = this._player;
         player.loadMidiFile(midiFile);
-        player.loadBackingTrack(score
+        player.loadBackingTrack(score);
+        player.updateSyncPoints(generator.syncPoints);
         player.applyTranspositionPitches(generator.transpositionPitches);
     }
+    /**
+     * Triggers an update of the sync points for the current score after modification within the data model
+     * @category Methods - Player
+     * @since 1.6.0
+     */
+    updateSyncPoints() {
+        if (!this.score) {
+            return;
+        }
+        const score = this.score;
+        const player = this._player;
+        player.updateSyncPoints(MidiFileGenerator.generateSyncPoints(score));
+    }
     /**
      * Changes the volume of the given tracks.
      * @param tracks The tracks for which the volume should be changed.
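Note: AlphaTabApiBase.updateSyncPoints() is the new public entry point for the regeneration path added above; it feeds MidiFileGenerator.generateSyncPoints(score) into the player without rebuilding the midi file. A hedged usage sketch (the element selector and the way sync points are edited are illustrative; only api.updateSyncPoints() comes from this diff):

    // illustrative only
    const api = new alphaTab.AlphaTabApi(document.querySelector('#alphaTab'), settings);
    // ...modify the sync points in api.score's data model here...
    api.updateSyncPoints(); // regenerates the sync points and pushes them to the active player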
@@ -43348,6 +43430,8 @@ class AlphaSynthWebWorkerApi {
     }
     loadBackingTrack(_score) {
     }
+    updateSyncPoints(_syncPoints) {
+    }
 }
 
 /**
@@ -43722,7 +43806,6 @@ class AudioElementBackingTrackSynthOutput {
     constructor() {
         // fake rate
        this.sampleRate = 44100;
-        this._padding = 0;
        this._updateInterval = 0;
        this.ready = new EventEmitter();
        this.samplesPlayed = new EventEmitterOfT();
@@ -43746,13 +43829,12 @@ class AudioElementBackingTrackSynthOutput {
         this.audioElement.volume = value;
     }
     seekTo(time) {
-        this.audioElement.currentTime = time / 1000
+        this.audioElement.currentTime = time / 1000;
     }
     loadBackingTrack(backingTrack) {
         if (this.audioElement?.src) {
             URL.revokeObjectURL(this.audioElement.src);
         }
-        this._padding = backingTrack.padding / 1000;
         const blob = new Blob([backingTrack.rawAudioFile]);
         // https://html.spec.whatwg.org/multipage/media.html#loading-the-media-resource
         // Step 8. resets the playbackRate, we need to remember and restore it.
@@ -43771,7 +43853,7 @@ class AudioElementBackingTrackSynthOutput {
         this.ready.trigger();
     }
     updatePosition() {
-        const timePos =
+        const timePos = this.audioElement.currentTime * 1000;
         this.timeUpdate.trigger(timePos);
     }
     play() {
@@ -43990,6 +44072,7 @@ class BrowserUiFacade {
         webFont.usages--;
         if (webFont.usages <= 0) {
             webFont.element.remove();
+            BrowserUiFacade._registeredWebFonts.delete(webFont.hash);
         }
     }
     createCanvasElement() {
@@ -44146,6 +44229,7 @@ class BrowserUiFacade {
         checker.checkForFontAvailability();
         settings.display.resources.smuflFont = new Font(familyName, Environment.MusicFontSize, FontStyle.Plain, FontWeight.Regular);
         const webFont = {
+            hash,
             element: styleElement,
             fontSuffix,
             usages: 1,
@@ -60857,9 +60941,9 @@ class VersionInfo {
         print(`build date: ${VersionInfo.date}`);
     }
 }
-VersionInfo.version = '1.6.0-alpha.
-VersionInfo.date = '2025-05-
-VersionInfo.commit = '
+VersionInfo.version = '1.6.0-alpha.1420';
+VersionInfo.date = '2025-05-22T03:03:04.057Z';
+VersionInfo.commit = '230bdd455a1906e6f334b63bad3b8cf773f890e6';
 
 /**
  * A factory for custom layout engines.
|