@coderline/alphatab 1.6.0-alpha.1401 → 1.6.0-alpha.1405

This diff shows the changes between publicly released versions of this package as they appear in their respective public registries. It is provided for informational purposes only.
@@ -1,5 +1,5 @@
1
1
  /*!
2
- * alphaTab v1.6.0-alpha.1401 (develop, build 1401)
2
+ * alphaTab v1.6.0-alpha.1405 (develop, build 1405)
3
3
  *
4
4
  * Copyright © 2025, Daniel Kuschny and Contributors, All rights reserved.
5
5
  *
@@ -49,7 +49,127 @@
49
49
  * @license
50
50
  */
51
51
 
52
- if(typeof Symbol.dispose==='undefined'){Symbol.dispose = Symbol('Symbol.dispose')}
52
+ /**
53
+ * A very basic polyfill of the ResizeObserver which triggers
54
+ * the callback on window resize for all registered targets.
55
+ * @target web
56
+ */
57
+ class ResizeObserverPolyfill {
58
+ constructor(callback) {
59
+ this._targets = new Set();
60
+ this._callback = callback;
61
+ window.addEventListener('resize', this.onWindowResize.bind(this), false);
62
+ }
63
+ observe(target) {
64
+ this._targets.add(target);
65
+ }
66
+ unobserve(target) {
67
+ this._targets.delete(target);
68
+ }
69
+ disconnect() {
70
+ this._targets.clear();
71
+ }
72
+ onWindowResize() {
73
+ const entries = [];
74
+ for (const t of this._targets) {
75
+ entries.push({
76
+ target: t,
77
+ // not used by alphaTab
78
+ contentRect: undefined,
79
+ borderBoxSize: undefined,
80
+ contentBoxSize: [],
81
+ devicePixelContentBoxSize: []
82
+ });
83
+ }
84
+ this._callback(entries, this);
85
+ }
86
+ }
87
+
88
+ /**
89
+ * A polyfill of the IntersectionObserver
90
+ * @target web
91
+ */
92
+ class IntersectionObserverPolyfill {
93
+ constructor(callback) {
94
+ this._elements = [];
95
+ let timer = null;
96
+ const oldCheck = this.check.bind(this);
97
+ this.check = () => {
98
+ if (!timer) {
99
+ timer = setTimeout(() => {
100
+ oldCheck();
101
+ timer = null;
102
+ }, 100);
103
+ }
104
+ };
105
+ this._callback = callback;
106
+ window.addEventListener('resize', this.check, true);
107
+ document.addEventListener('scroll', this.check, true);
108
+ }
109
+ observe(target) {
110
+ if (this._elements.indexOf(target) >= 0) {
111
+ return;
112
+ }
113
+ this._elements.push(target);
114
+ this.check();
115
+ }
116
+ unobserve(target) {
117
+ this._elements = this._elements.filter(item => {
118
+ return item !== target;
119
+ });
120
+ }
121
+ check() {
122
+ const entries = [];
123
+ for (const element of this._elements) {
124
+ const rect = element.getBoundingClientRect();
125
+ const isVisible = rect.top + rect.height >= 0 &&
126
+ rect.top <= window.innerHeight &&
127
+ rect.left + rect.width >= 0 &&
128
+ rect.left <= window.innerWidth;
129
+ if (isVisible) {
130
+ entries.push({
131
+ target: element,
132
+ isIntersecting: true
133
+ });
134
+ }
135
+ }
136
+ if (entries.length) {
137
+ this._callback(entries, this);
138
+ }
139
+ }
140
+ }
141
+
142
+ /*@target web*/
143
+ (() => {
144
+ if (typeof Symbol.dispose === 'undefined') {
145
+ Symbol.dispose = Symbol('Symbol.dispose');
146
+ }
147
+ if (typeof window !== 'undefined') {
148
+ // The ResizeObserver API was missing on Safari for a long time (only available since 2020 with iOS Safari 13.7 and Desktop 13.1)
149
+ // so we better add a polyfill for it
150
+ if (!('ResizeObserver' in globalThis)) {
151
+ globalThis.ResizeObserver = ResizeObserverPolyfill;
152
+ }
153
+ // The IntersectionObserver API does not exist on older iOS versions
154
+ // so we better add a polyfill for it
155
+ if (!('IntersectionObserver' in globalThis)) {
156
+ globalThis.IntersectionObserver = IntersectionObserverPolyfill;
157
+ }
158
+ if (!('replaceChildren' in Element.prototype)) {
159
+ Element.prototype.replaceChildren = function (...nodes) {
160
+ this.innerHTML = '';
161
+ this.append(...nodes);
162
+ };
163
+ Document.prototype.replaceChildren = Element.prototype.replaceChildren;
164
+ DocumentFragment.prototype.replaceChildren = Element.prototype.replaceChildren;
165
+ }
166
+ }
167
+ if (!('replaceAll' in String.prototype)) {
168
+ String.prototype.replaceAll = function (str, newStr) {
169
+ return this.replace(new RegExp(str, 'g'), newStr);
170
+ };
171
+ }
172
+ })();
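For context, a minimal sketch of how the polyfilled ResizeObserver above behaves when the native API is missing; the observed element is illustrative, and note that the polyfill only fires on window resize and leaves the rect fields of each entry unset (alphaTab does not read them):

const observer = new ResizeObserver(entries => {
    for (const entry of entries) {
        // with the polyfill, contentRect/borderBoxSize are undefined,
        // so only entry.target can be relied upon
        console.log('resized:', entry.target);
    }
});
observer.observe(document.body); // illustrative target
// later: observer.unobserve(element) or observer.disconnect()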
53
173
 
54
174
  /**
55
175
  * Lists all layout modes that are supported.
@@ -1273,7 +1393,37 @@ var AutomationType;
1273
1393
  * Balance change.
1274
1394
  */
1275
1395
  AutomationType[AutomationType["Balance"] = 3] = "Balance";
1396
+ /**
1397
+ * A sync point for synchronizing the internal time axis with an external audio track.
1398
+ */
1399
+ AutomationType[AutomationType["SyncPoint"] = 4] = "SyncPoint";
1276
1400
  })(AutomationType || (AutomationType = {}));
1401
+ /**
1402
+ * Represents the data of a sync point for synchronizing the internal time axis with
1403
+ * an external audio file.
1404
+ * @cloneable
1405
+ * @json
1406
+ * @json_strict
1407
+ */
1408
+ class SyncPointData {
1409
+ constructor() {
1410
+ /**
1411
+ * Indicates for which repeat occurrence this sync point is valid (e.g. 0 on the first time played, 1 on the second time played)
1412
+ */
1413
+ this.barOccurence = 0;
1414
+ /**
1415
+ * The modified tempo at which the cursor should move (aka. the tempo played within the external audio track).
1416
+ * This information is used together with the {@link originalTempo} to calculate how much faster/slower the
1417
+ * cursor playback is performed to align with the audio track.
1418
+ */
1419
+ this.modifiedTempo = 0;
1420
+ /**
1421
+ * The audio offset marking the position within the audio track in milliseconds.
1422
+ * This information is used to regularly sync (or on seeking) to match a given external audio time axis with the internal time axis.
1423
+ */
1424
+ this.millisecondOffset = 0;
1425
+ }
1426
+ }
1277
1427
  /**
1278
1428
  * Automations are used to change the behaviour of a song.
1279
1429
  * @cloneable
@@ -2572,6 +2722,16 @@ class MasterBar {
2572
2722
  }
2573
2723
  return null;
2574
2724
  }
2725
+ /**
2726
+ * Adds the given sync point to the list of sync points for this bar.
2727
+ * @param syncPoint The sync point to add.
2728
+ */
2729
+ addSyncPoint(syncPoint) {
2730
+ if (!this.syncPoints) {
2731
+ this.syncPoints = [];
2732
+ }
2733
+ this.syncPoints.push(syncPoint);
2734
+ }
2575
2735
  }
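To illustrate the new sync point API introduced above (SyncPointData, AutomationType.SyncPoint and MasterBar.addSyncPoint), a minimal sketch; it assumes a loaded score is in scope and all concrete values are purely illustrative:

const masterBar = score.masterBars[0];          // assumes `score` was loaded elsewhere

const syncPointValue = new SyncPointData();
syncPointValue.barOccurence = 0;                // valid for the first time this bar is played
syncPointValue.modifiedTempo = 118;             // tempo actually heard in the external audio
syncPointValue.millisecondOffset = 1250;        // position within the audio file in milliseconds

const automation = new Automation();
automation.type = AutomationType.SyncPoint;
automation.ratioPosition = 0;
automation.syncPointValue = syncPointValue;

masterBar.addSyncPoint(automation);             // lazily creates masterBar.syncPoints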
2576
2736
  MasterBar.MaxAlternateEndings = 8;
2577
2737
 
@@ -5666,6 +5826,21 @@ class NoteCloner {
5666
5826
  }
5667
5827
  }
5668
5828
 
5829
+ // <auto-generated>
5830
+ // This code was auto-generated.
5831
+ // Changes to this file may cause incorrect behavior and will be lost if
5832
+ // the code is regenerated.
5833
+ // </auto-generated>
5834
+ class SyncPointDataCloner {
5835
+ static clone(original) {
5836
+ const clone = new SyncPointData();
5837
+ clone.barOccurence = original.barOccurence;
5838
+ clone.modifiedTempo = original.modifiedTempo;
5839
+ clone.millisecondOffset = original.millisecondOffset;
5840
+ return clone;
5841
+ }
5842
+ }
5843
+
5669
5844
  // <auto-generated>
5670
5845
  // This code was auto-generated.
5671
5846
  // Changes to this file may cause incorrect behavior and will be lost if
@@ -5677,6 +5852,7 @@ class AutomationCloner {
5677
5852
  clone.isLinear = original.isLinear;
5678
5853
  clone.type = original.type;
5679
5854
  clone.value = original.value;
5855
+ clone.syncPointValue = original.syncPointValue ? SyncPointDataCloner.clone(original.syncPointValue) : undefined;
5680
5856
  clone.ratioPosition = original.ratioPosition;
5681
5857
  clone.text = original.text;
5682
5858
  return clone;
@@ -14122,6 +14298,21 @@ class XmlDocument extends XmlNode {
14122
14298
  }
14123
14299
  }
14124
14300
 
14301
+ /**
14302
+ * Holds information about the backing track which can be played instead of synthesized audio.
14303
+ * @json
14304
+ * @json_strict
14305
+ */
14306
+ class BackingTrack {
14307
+ constructor() {
14308
+ /**
14309
+ * The number of milliseconds the audio should be shifted to align with the song.
14310
+ * (e.g. negative values allow skipping potential silent parts at the start of the file and starting directly with the first note).
14311
+ */
14312
+ this.padding = 0;
14313
+ }
14314
+ }
14315
+
14125
14316
  /**
14126
14317
  * This structure represents a duration within a gpif
14127
14318
  */
@@ -14214,6 +14405,9 @@ class GpifParser {
14214
14405
  case 'MasterTrack':
14215
14406
  this.parseMasterTrackNode(n);
14216
14407
  break;
14408
+ case 'BackingTrack':
14409
+ this.parseBackingTrackNode(n);
14410
+ break;
14217
14411
  case 'Tracks':
14218
14412
  this.parseTracksNode(n);
14219
14413
  break;
@@ -14235,6 +14429,9 @@ class GpifParser {
14235
14429
  case 'Rhythms':
14236
14430
  this.parseRhythms(n);
14237
14431
  break;
14432
+ case 'Assets':
14433
+ this.parseAssets(n);
14434
+ break;
14238
14435
  }
14239
14436
  }
14240
14437
  }
@@ -14242,6 +14439,37 @@ class GpifParser {
14242
14439
  throw new UnsupportedFormatError('Root node of XML was not GPIF');
14243
14440
  }
14244
14441
  }
14442
+ parseAssets(element) {
14443
+ for (const c of element.childElements()) {
14444
+ switch (c.localName) {
14445
+ case 'Asset':
14446
+ if (c.getAttribute('id') === this._backingTrackAssetId) {
14447
+ this.parseBackingTrackAsset(c);
14448
+ }
14449
+ break;
14450
+ }
14451
+ }
14452
+ }
14453
+ parseBackingTrackAsset(element) {
14454
+ let embeddedFilePath = '';
14455
+ for (const c of element.childElements()) {
14456
+ switch (c.localName) {
14457
+ case 'EmbeddedFilePath':
14458
+ embeddedFilePath = c.innerText;
14459
+ break;
14460
+ }
14461
+ }
14462
+ const loadAsset = this.loadAsset;
14463
+ if (loadAsset) {
14464
+ const assetData = loadAsset(embeddedFilePath);
14465
+ if (assetData) {
14466
+ this.score.backingTrack.rawAudioFile = assetData;
14467
+ }
14468
+ else {
14469
+ this.score.backingTrack = undefined;
14470
+ }
14471
+ }
14472
+ }
14245
14473
  //
14246
14474
  // <Score>...</Score>
14247
14475
  //
@@ -14322,7 +14550,41 @@ class GpifParser {
14322
14550
  if (!text) {
14323
14551
  return [];
14324
14552
  }
14325
- return text.split(separator).map(t => t.trim()).filter(t => t.length > 0);
14553
+ return text
14554
+ .split(separator)
14555
+ .map(t => t.trim())
14556
+ .filter(t => t.length > 0);
14557
+ }
14558
+ //
14559
+ // <BackingTrack>...</BackingTrack>
14560
+ //
14561
+ parseBackingTrackNode(node) {
14562
+ const backingTrack = new BackingTrack();
14563
+ let enabled = false;
14564
+ let source = '';
14565
+ let assetId = '';
14566
+ for (const c of node.childElements()) {
14567
+ switch (c.localName) {
14568
+ case 'Enabled':
14569
+ enabled = c.innerText === 'true';
14570
+ break;
14571
+ case 'Source':
14572
+ source = c.innerText;
14573
+ break;
14574
+ case 'AssetId':
14575
+ assetId = c.innerText;
14576
+ break;
14577
+ case 'FramePadding':
14578
+ backingTrack.padding = GpifParser.parseIntSafe(c.innerText, 0) / GpifParser.SampleRate * 1000;
14579
+ break;
14580
+ }
14581
+ }
14582
+ // only local (contained) backing tracks are supported
14583
+ // remote / YouTube links seem to be coming in future releases according to the gpif tags.
14584
+ if (enabled && source === 'Local') {
14585
+ this.score.backingTrack = backingTrack;
14586
+ this._backingTrackAssetId = assetId; // when the Asset tag is parsed this ID is used to load the raw data
14587
+ }
14326
14588
  }
14327
14589
  //
14328
14590
  // <MasterTrack>...</MasterTrack>
@@ -14360,6 +14622,7 @@ class GpifParser {
14360
14622
  let textValue = null;
14361
14623
  let reference = 0;
14362
14624
  let text = null;
14625
+ let syncPointValue = undefined;
14363
14626
  for (const c of node.childElements()) {
14364
14627
  switch (c.localName) {
14365
14628
  case 'Type':
@@ -14378,6 +14641,28 @@ class GpifParser {
14378
14641
  if (c.firstElement && c.firstElement.nodeType === XmlNodeType.CDATA) {
14379
14642
  textValue = c.innerText;
14380
14643
  }
14644
+ else if (c.firstElement &&
14645
+ c.firstElement.nodeType === XmlNodeType.Element &&
14646
+ type === 'SyncPoint') {
14647
+ syncPointValue = new SyncPointData();
14648
+ for (const vc of c.childElements()) {
14649
+ switch (vc.localName) {
14650
+ case 'BarIndex':
14651
+ barIndex = GpifParser.parseIntSafe(vc.innerText, 0);
14652
+ break;
14653
+ case 'BarOccurrence':
14654
+ syncPointValue.barOccurence = GpifParser.parseIntSafe(vc.innerText, 0);
14655
+ break;
14656
+ case 'ModifiedTempo':
14657
+ syncPointValue.modifiedTempo = GpifParser.parseFloatSafe(vc.innerText, 0);
14658
+ break;
14659
+ case 'FrameOffset':
14660
+ const frameOffset = GpifParser.parseFloatSafe(vc.innerText, 0);
14661
+ syncPointValue.millisecondOffset = (frameOffset / GpifParser.SampleRate) * 1000;
14662
+ break;
14663
+ }
14664
+ }
14665
+ }
14381
14666
  else {
14382
14667
  const parts = GpifParser.splitSafe(c.innerText);
14383
14668
  // Issue 391: Some GPX files might have
@@ -14405,6 +14690,13 @@ class GpifParser {
14405
14690
  case 'Tempo':
14406
14691
  automation = Automation.buildTempoAutomation(isLinear, ratioPosition, numberValue, reference);
14407
14692
  break;
14693
+ case 'SyncPoint':
14694
+ automation = new Automation();
14695
+ automation.type = AutomationType.SyncPoint;
14696
+ automation.isLinear = isLinear;
14697
+ automation.ratioPosition = ratioPosition;
14698
+ automation.syncPointValue = syncPointValue;
14699
+ break;
14408
14700
  case 'Sound':
14409
14701
  if (textValue && sounds && sounds.has(textValue)) {
14410
14702
  automation = Automation.buildInstrumentAutomation(isLinear, ratioPosition, sounds.get(textValue).program);
@@ -16486,14 +16778,19 @@ class GpifParser {
16486
16778
  const masterBar = this.score.masterBars[barNumber];
16487
16779
  for (let i = 0, j = automations.length; i < j; i++) {
16488
16780
  const automation = automations[i];
16489
- if (automation.type === AutomationType.Tempo) {
16490
- if (barNumber === 0) {
16491
- this.score.tempo = automation.value | 0;
16492
- if (automation.text) {
16493
- this.score.tempoLabel = automation.text;
16781
+ switch (automation.type) {
16782
+ case AutomationType.Tempo:
16783
+ if (barNumber === 0) {
16784
+ this.score.tempo = automation.value | 0;
16785
+ if (automation.text) {
16786
+ this.score.tempoLabel = automation.text;
16787
+ }
16494
16788
  }
16495
- }
16496
- masterBar.tempoAutomations.push(automation);
16789
+ masterBar.tempoAutomations.push(automation);
16790
+ break;
16791
+ case AutomationType.SyncPoint:
16792
+ masterBar.addSyncPoint(automation);
16793
+ break;
16497
16794
  }
16498
16795
  }
16499
16796
  }
@@ -16510,6 +16807,10 @@ GpifParser.BendPointPositionFactor = BendPoint.MaxPosition / 100.0;
16510
16807
  * Internal Range: 1 per quarter note
16511
16808
  */
16512
16809
  GpifParser.BendPointValueFactor = 1 / 25.0;
16810
+ // tests have shown that Guitar Pro seems to always work with 44100Hz for the frame offsets,
16811
+ // it is NOT using the sample rate of the input file.
16812
+ // Downsampling a 44100Hz ogg to 8000Hz and using it as the audio track resulted in the same frame offsets when placing sync points.
16813
+ GpifParser.SampleRate = 44100;
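As a minimal worked sketch of the frame-to-millisecond conversion used for FramePadding and FrameOffset above (the frame value is illustrative):

// GPIF stores backing-track offsets as sample frames at a fixed 44100Hz,
// regardless of the sample rate of the embedded audio file.
const frameOffset = 66150;                                              // value read from <FrameOffset>
const millisecondOffset = (frameOffset / GpifParser.SampleRate) * 1000; // 66150 / 44100 * 1000 = 1500 ms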
16513
16814
 
16514
16815
  // PartConfiguration File Format Notes.
16515
16816
  // Based off Guitar Pro 8
@@ -17376,7 +17677,9 @@ class Gp7To8Importer extends ScoreImporter {
17376
17677
  let binaryStylesheetData = null;
17377
17678
  let partConfigurationData = null;
17378
17679
  let layoutConfigurationData = null;
17680
+ const entryLookup = new Map();
17379
17681
  for (const entry of entries) {
17682
+ entryLookup.set(entry.fullName, entry);
17380
17683
  switch (entry.fileName) {
17381
17684
  case 'score.gpif':
17382
17685
  xml = IOHelper.toString(entry.data, this.settings.importer.encoding);
@@ -17399,6 +17702,12 @@ class Gp7To8Importer extends ScoreImporter {
17399
17702
  // the score information as XML we need to parse.
17400
17703
  Logger.debug(this.name, 'Start Parsing score.gpif');
17401
17704
  const gpifParser = new GpifParser();
17705
+ gpifParser.loadAsset = (fileName) => {
17706
+ if (entryLookup.has(fileName)) {
17707
+ return entryLookup.get(fileName).data;
17708
+ }
17709
+ return undefined;
17710
+ };
17402
17711
  gpifParser.parseXml(xml, this.settings);
17403
17712
  Logger.debug(this.name, 'score.gpif parsed');
17404
17713
  const score = gpifParser.score;
@@ -21864,8 +22173,24 @@ class ProgramChangeEvent extends MidiEvent {
21864
22173
  * Represents a change of the tempo in the song.
21865
22174
  */
21866
22175
  class TempoChangeEvent extends MidiEvent {
22176
+ /**
22177
+ * The tempo in microseconds per quarter note (aka USQ), a time format typically used for MIDI.
22178
+ */
22179
+ get microSecondsPerQuarterNote() {
22180
+ return 60000000 / this.beatsPerMinute;
22181
+ }
22182
+ /**
22183
+ * The tempo in microseconds per quarter note (aka USQ), a time format typically used for MIDI.
22184
+ */
22185
+ set microSecondsPerQuarterNote(value) {
22186
+ this.beatsPerMinute = 60000000 / value;
22187
+ }
21867
22188
  constructor(tick, microSecondsPerQuarterNote) {
21868
22189
  super(0, tick, MidiEventType.TempoChange);
22190
+ /**
22191
+ * The tempo in beats per minute
22192
+ */
22193
+ this.beatsPerMinute = 0;
21869
22194
  this.microSecondsPerQuarterNote = microSecondsPerQuarterNote;
21870
22195
  }
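A short sketch of the BPM conversion the new beatsPerMinute field provides; TempoChangeEvent is the bundle-internal class shown above and the values are illustrative:

const tempoChange = new TempoChangeEvent(0, 500000); // tick 0, 500000 microseconds per quarter note
console.log(tempoChange.beatsPerMinute);             // 60000000 / 500000 = 120 BPM
tempoChange.beatsPerMinute = 90;
console.log(tempoChange.microSecondsPerQuarterNote); // 60000000 / 90, roughly 666667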
21871
22196
  writeTo(s) {
@@ -21948,6 +22273,17 @@ class SynthEvent {
21948
22273
  }
21949
22274
  }
21950
22275
 
22276
+ /**
22277
+ * Represents a point to sync the alphaTab time axis with an external backing track.
22278
+ */
22279
+ class BackingTrackSyncPoint {
22280
+ constructor(tick, data) {
22281
+ this.tick = 0;
22282
+ this.tick = tick;
22283
+ this.data = data;
22284
+ }
22285
+ }
22286
+
21951
22287
  class MidiFileSequencerTempoChange {
21952
22288
  constructor(bpm, ticks, time) {
21953
22289
  this.bpm = bpm;
@@ -21955,9 +22291,17 @@ class MidiFileSequencerTempoChange {
21955
22291
  this.time = time;
21956
22292
  }
21957
22293
  }
22294
+ class BackingTrackSyncPointWithTime extends BackingTrackSyncPoint {
22295
+ constructor(tick, data, time) {
22296
+ super(tick, data);
22297
+ this.time = time;
22298
+ }
22299
+ }
21958
22300
  class MidiSequencerState {
21959
22301
  constructor() {
21960
22302
  this.tempoChanges = [];
22303
+ this.tempoChangeIndex = 0;
22304
+ this.syncPoints = [];
21961
22305
  this.firstProgramEventPerChannel = new Map();
21962
22306
  this.firstTimeSignatureNumerator = 0;
21963
22307
  this.firstTimeSignatureDenominator = 0;
@@ -21965,11 +22309,15 @@ class MidiSequencerState {
21965
22309
  this.division = MidiUtils.QuarterTime;
21966
22310
  this.eventIndex = 0;
21967
22311
  this.currentTime = 0;
22312
+ this.currentTick = 0;
22313
+ this.syncPointIndex = 0;
21968
22314
  this.playbackRange = null;
21969
22315
  this.playbackRangeStartTime = 0;
21970
22316
  this.playbackRangeEndTime = 0;
21971
22317
  this.endTick = 0;
21972
22318
  this.endTime = 0;
22319
+ this.currentTempo = 0;
22320
+ this.modifiedTempo = 0;
21973
22321
  }
21974
22322
  }
21975
22323
  /**
@@ -22022,6 +22370,12 @@ class MidiFileSequencer {
22022
22370
  get currentEndTime() {
22023
22371
  return this._currentState.endTime / this.playbackSpeed;
22024
22372
  }
22373
+ get currentTempo() {
22374
+ return this._currentState.currentTempo;
22375
+ }
22376
+ get modifiedTempo() {
22377
+ return this._currentState.modifiedTempo * this.playbackSpeed;
22378
+ }
22025
22379
  mainSeek(timePosition) {
22026
22380
  // map to speed=1
22027
22381
  timePosition *= this.playbackSpeed;
@@ -22041,6 +22395,8 @@ class MidiFileSequencer {
22041
22395
  // we have to restart the midi to make sure we get the right state: instruments, volume, pan, etc
22042
22396
  this._mainState.currentTime = 0;
22043
22397
  this._mainState.eventIndex = 0;
22398
+ this._mainState.syncPointIndex = 0;
22399
+ this._mainState.tempoChangeIndex = 0;
22044
22400
  if (this.isPlayingMain) {
22045
22401
  const metronomeVolume = this._synthesizer.metronomeVolume;
22046
22402
  this._synthesizer.noteOffAll(true);
@@ -22115,7 +22471,7 @@ class MidiFileSequencer {
22115
22471
  }
22116
22472
  if (mEvent.type === MidiEventType.TempoChange) {
22117
22473
  const meta = mEvent;
22118
- bpm = 60000000 / meta.microSecondsPerQuarterNote;
22474
+ bpm = meta.beatsPerMinute;
22119
22475
  state.tempoChanges.push(new MidiFileSequencerTempoChange(bpm, absTick, absTime));
22120
22476
  metronomeLengthInMillis = metronomeLengthInTicks * (60000.0 / (bpm * midiFile.division));
22121
22477
  }
@@ -22149,6 +22505,8 @@ class MidiFileSequencer {
22149
22505
  }
22150
22506
  }
22151
22507
  }
22508
+ state.currentTempo = state.tempoChanges.length > 0 ? state.tempoChanges[0].bpm : bpm;
22509
+ state.modifiedTempo = state.currentTempo;
22152
22510
  state.synthData.sort((a, b) => {
22153
22511
  if (a.time > b.time) {
22154
22512
  return 1;
@@ -22165,6 +22523,35 @@ class MidiFileSequencer {
22165
22523
  fillMidiEventQueue() {
22166
22524
  return this.fillMidiEventQueueLimited(-1);
22167
22525
  }
22526
+ fillMidiEventQueueToEndTime(endTime) {
22527
+ while (this._mainState.currentTime < endTime) {
22528
+ if (this.fillMidiEventQueueLimited(endTime - this._mainState.currentTime)) {
22529
+ this._synthesizer.synthesizeSilent(SynthConstants.MicroBufferSize);
22530
+ }
22531
+ }
22532
+ let anyEventsDispatched = false;
22533
+ this._currentState.currentTime = endTime;
22534
+ while (this._currentState.eventIndex < this._currentState.synthData.length &&
22535
+ this._currentState.synthData[this._currentState.eventIndex].time < this._currentState.currentTime) {
22536
+ const synthEvent = this._currentState.synthData[this._currentState.eventIndex];
22537
+ this._synthesizer.dispatchEvent(synthEvent);
22538
+ while (this._currentState.syncPointIndex < this._currentState.syncPoints.length &&
22539
+ this._currentState.syncPoints[this._currentState.syncPointIndex].tick < synthEvent.event.tick) {
22540
+ this._currentState.modifiedTempo =
22541
+ this._currentState.syncPoints[this._currentState.syncPointIndex].data.modifiedTempo;
22542
+ this._currentState.syncPointIndex++;
22543
+ }
22544
+ while (this._currentState.tempoChangeIndex < this._currentState.tempoChanges.length &&
22545
+ this._currentState.tempoChanges[this._currentState.tempoChangeIndex].time <= synthEvent.time) {
22546
+ this._currentState.currentTempo =
22547
+ this._currentState.tempoChanges[this._currentState.tempoChangeIndex].bpm;
22548
+ this._currentState.tempoChangeIndex++;
22549
+ }
22550
+ this._currentState.eventIndex++;
22551
+ anyEventsDispatched = true;
22552
+ }
22553
+ return anyEventsDispatched;
22554
+ }
22168
22555
  fillMidiEventQueueLimited(maxMilliseconds) {
22169
22556
  let millisecondsPerBuffer = (SynthConstants.MicroBufferSize / this._synthesizer.outSampleRate) * 1000 * this.playbackSpeed;
22170
22557
  let endTime = this.internalEndTime;
@@ -22192,9 +22579,87 @@ class MidiFileSequencer {
22192
22579
  mainTimePositionToTickPosition(timePosition) {
22193
22580
  return this.timePositionToTickPositionWithSpeed(this._mainState, timePosition, this.playbackSpeed);
22194
22581
  }
22582
+ mainUpdateSyncPoints(syncPoints) {
22583
+ const state = this._mainState;
22584
+ syncPoints.sort((a, b) => a.tick - b.tick); // just in case
22585
+ state.syncPoints = new Array(syncPoints.length);
22586
+ if (syncPoints.length >= 0) {
22587
+ let bpm = 120;
22588
+ let absTick = 0;
22589
+ let absTime = 0.0;
22590
+ let previousTick = 0;
22591
+ let tempoChangeIndex = 0;
22592
+ for (let i = 0; i < syncPoints.length; i++) {
22593
+ const p = syncPoints[i];
22594
+ const deltaTick = p.tick - previousTick;
22595
+ absTick += deltaTick;
22596
+ absTime += deltaTick * (60000.0 / (bpm * state.division));
22597
+ state.syncPoints[i] = new BackingTrackSyncPointWithTime(p.tick, p.data, absTime);
22598
+ previousTick = p.tick;
22599
+ while (tempoChangeIndex < state.tempoChanges.length &&
22600
+ state.tempoChanges[tempoChangeIndex].ticks <= absTick) {
22601
+ bpm = state.tempoChanges[tempoChangeIndex].bpm;
22602
+ tempoChangeIndex++;
22603
+ }
22604
+ }
22605
+ }
22606
+ state.syncPointIndex = 0;
22607
+ }
22195
22608
  currentTimePositionToTickPosition(timePosition) {
22196
22609
  return this.timePositionToTickPositionWithSpeed(this._currentState, timePosition, this.playbackSpeed);
22197
22610
  }
22611
+ mainTimePositionFromBackingTrack(timePosition, backingTrackLength) {
22612
+ const mainState = this._mainState;
22613
+ const syncPoints = mainState.syncPoints;
22614
+ if (timePosition < 0 || syncPoints.length === 0) {
22615
+ return timePosition;
22616
+ }
22617
+ let syncPointIndex = timePosition >= syncPoints[mainState.syncPointIndex].data.millisecondOffset ? mainState.syncPointIndex : 0;
22618
+ while (syncPointIndex + 1 < syncPoints.length &&
22619
+ syncPoints[syncPointIndex + 1].data.millisecondOffset <= timePosition) {
22620
+ syncPointIndex++;
22621
+ }
22622
+ const currentSyncPoint = syncPoints[syncPointIndex];
22623
+ const timeDiff = timePosition - currentSyncPoint.data.millisecondOffset;
22624
+ let alphaTabTimeDiff;
22625
+ if (syncPointIndex + 1 < syncPoints.length) {
22626
+ const nextSyncPoint = syncPoints[syncPointIndex + 1];
22627
+ const relativeTimeDiff = timeDiff / (nextSyncPoint.data.millisecondOffset - currentSyncPoint.data.millisecondOffset);
22628
+ alphaTabTimeDiff = (nextSyncPoint.time - currentSyncPoint.time) * relativeTimeDiff;
22629
+ }
22630
+ else {
22631
+ const relativeTimeDiff = timeDiff / (backingTrackLength - currentSyncPoint.data.millisecondOffset);
22632
+ alphaTabTimeDiff = (mainState.endTime - currentSyncPoint.time) * relativeTimeDiff;
22633
+ }
22634
+ return (currentSyncPoint.time + alphaTabTimeDiff) / this.playbackSpeed;
22635
+ }
22636
+ mainTimePositionToBackingTrack(timePosition, backingTrackLength) {
22637
+ const mainState = this._mainState;
22638
+ const syncPoints = mainState.syncPoints;
22639
+ if (timePosition < 0 || syncPoints.length === 0) {
22640
+ return timePosition;
22641
+ }
22642
+ timePosition *= this.playbackSpeed;
22643
+ let syncPointIndex = timePosition >= syncPoints[mainState.syncPointIndex].time ? mainState.syncPointIndex : 0;
22644
+ while (syncPointIndex + 1 < syncPoints.length && syncPoints[syncPointIndex + 1].time <= timePosition) {
22645
+ syncPointIndex++;
22646
+ }
22647
+ const currentSyncPoint = syncPoints[syncPointIndex];
22648
+ const alphaTabTimeDiff = timePosition - currentSyncPoint.time;
22649
+ let backingTrackPos;
22650
+ if (syncPointIndex + 1 < syncPoints.length) {
22651
+ const nextSyncPoint = syncPoints[syncPointIndex + 1];
22652
+ const relativeAlphaTabTimeDiff = alphaTabTimeDiff / (nextSyncPoint.time - currentSyncPoint.time);
22653
+ const backingTrackDiff = nextSyncPoint.data.millisecondOffset - currentSyncPoint.data.millisecondOffset;
22654
+ backingTrackPos = currentSyncPoint.data.millisecondOffset + backingTrackDiff * relativeAlphaTabTimeDiff;
22655
+ }
22656
+ else {
22657
+ const relativeAlphaTabTimeDiff = alphaTabTimeDiff / (mainState.endTime - currentSyncPoint.time);
22658
+ const frameDiff = backingTrackLength - currentSyncPoint.data.millisecondOffset;
22659
+ backingTrackPos = currentSyncPoint.data.millisecondOffset + frameDiff * relativeAlphaTabTimeDiff;
22660
+ }
22661
+ return backingTrackPos;
22662
+ }
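The two mapping functions above interpolate linearly between neighboring sync points. A small worked sketch of the mainTimePositionFromBackingTrack calculation, with two purely illustrative sync points:

const a = { time: 0, millisecondOffset: 200 };       // sync point A: internal 0 ms maps to audio 200 ms
const b = { time: 10000, millisecondOffset: 10200 }; // sync point B: internal 10000 ms maps to audio 10200 ms
const backingTrackPos = 5200;                        // current position within the audio file

const relative = (backingTrackPos - a.millisecondOffset) / (b.millisecondOffset - a.millisecondOffset); // 0.5
const internalTime = a.time + relative * (b.time - a.time); // 5000 ms (still divided by playbackSpeed afterwards)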
22198
22663
  tickPositionToTimePositionWithSpeed(state, tickPosition, playbackSpeed) {
22199
22664
  let timePosition = 0.0;
22200
22665
  let bpm = 120.0;
@@ -22304,6 +22769,8 @@ class MidiFileSequencer {
22304
22769
  });
22305
22770
  state.endTime = metronomeTime;
22306
22771
  state.endTick = metronomeTick;
22772
+ state.currentTempo = bpm;
22773
+ state.modifiedTempo = bpm;
22307
22774
  this._countInState = state;
22308
22775
  }
22309
22776
  }
@@ -22349,12 +22816,22 @@ class PositionChangedEventArgs {
22349
22816
  * @param endTick The end tick.
22350
22817
  * @param isSeek Whether the time was seeked.
22351
22818
  */
22352
- constructor(currentTime, endTime, currentTick, endTick, isSeek) {
22819
+ constructor(currentTime, endTime, currentTick, endTick, isSeek, originalTempo, modifiedTempo) {
22820
+ /**
22821
+ * The original tempo at which alphaTab would internally be playing right now.
22822
+ */
22823
+ this.originalTempo = 0;
22824
+ /**
22825
+ * The modified tempo at which the actual playback is happening (e.g. due to playback speed or external audio synchronization)
22826
+ */
22827
+ this.modifiedTempo = 0;
22353
22828
  this.currentTime = currentTime;
22354
22829
  this.endTime = endTime;
22355
22830
  this.currentTick = currentTick;
22356
22831
  this.endTick = endTick;
22357
22832
  this.isSeek = isSeek;
22833
+ this.originalTempo = originalTempo;
22834
+ this.modifiedTempo = modifiedTempo;
22358
22835
  }
22359
22836
  }
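A minimal usage sketch of the new tempo fields on PositionChangedEventArgs; it assumes the existing playerPositionChanged event on an already constructed AlphaTabApi instance (`api`), which is not part of this diff:

api.playerPositionChanged.on(e => {
    // how much faster or slower the audible playback runs compared to the notated tempo
    const ratio = e.modifiedTempo / e.originalTempo;
    console.log(`at ${e.currentTime | 0} ms of ${e.endTime | 0} ms, tempo ratio ${ratio.toFixed(2)}`);
});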
22360
22837
 
@@ -26090,38 +26567,51 @@ class Voice {
26090
26567
  */
26091
26568
  Voice.RenderEffectSampleBlock = SynthConstants.MicroBufferSize;
26092
26569
 
26570
+ class QueueItem {
26571
+ constructor(value) {
26572
+ this.value = value;
26573
+ }
26574
+ }
26093
26575
  class Queue {
26094
- constructor() {
26095
- this._items = [];
26096
- this._position = 0;
26097
- this.isEmpty = true;
26576
+ get isEmpty() {
26577
+ return this._head === undefined;
26098
26578
  }
26099
26579
  clear() {
26100
- this._items = [];
26101
- this._position = 0;
26102
- this.isEmpty = true;
26580
+ this._head = undefined;
26581
+ this._tail = undefined;
26103
26582
  }
26104
26583
  enqueue(item) {
26105
- this.isEmpty = false;
26106
- this._items.push(item);
26584
+ const queueItem = new QueueItem(item);
26585
+ if (this._tail) {
26586
+ // not empty -> add after tail
26587
+ this._tail.next = queueItem;
26588
+ this._tail = queueItem;
26589
+ }
26590
+ else {
26591
+ // empty -> new item takes head and tail
26592
+ this._head = queueItem;
26593
+ this._tail = queueItem;
26594
+ }
26107
26595
  }
26108
26596
  peek() {
26109
- return this._items[this._position];
26597
+ const head = this._head;
26598
+ if (!head) {
26599
+ return undefined;
26600
+ }
26601
+ return head.value;
26110
26602
  }
26111
26603
  dequeue() {
26112
- const item = this._items[this._position];
26113
- this._position++;
26114
- if (this._position >= this._items.length / 2) {
26115
- this._items = this._items.slice(this._position);
26116
- this._position = 0;
26604
+ const head = this._head;
26605
+ if (!head) {
26606
+ return undefined;
26117
26607
  }
26118
- this.isEmpty = this._items.length === 0;
26119
- return item;
26120
- }
26121
- toArray() {
26122
- const items = this._items.slice(this._position);
26123
- items.reverse();
26124
- return items;
26608
+ const newHead = head.next;
26609
+ this._head = newHead;
26610
+ // last item removed?
26611
+ if (!newHead) {
26612
+ this._tail = undefined;
26613
+ }
26614
+ return head.value;
26125
26615
  }
26126
26616
  }
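The Queue rewrite above replaces the array-plus-read-position buffer with a singly linked list, so enqueue and dequeue stay O(1) without the periodic slice-based compaction. A minimal usage sketch (the class is bundle-internal, values are illustrative):

const queue = new Queue();
queue.enqueue('a');
queue.enqueue('b');
console.log(queue.peek());    // 'a' (head stays in place)
console.log(queue.dequeue()); // 'a'
console.log(queue.isEmpty);   // false, 'b' is still queued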
26127
26617
 
@@ -26452,7 +26942,7 @@ class TinySoundFont {
26452
26942
  break;
26453
26943
  case MidiEventType.TempoChange:
26454
26944
  const tempoChange = e;
26455
- this.currentTempo = 60000000 / tempoChange.microSecondsPerQuarterNote;
26945
+ this.currentTempo = tempoChange.beatsPerMinute;
26456
26946
  break;
26457
26947
  case MidiEventType.PitchBend:
26458
26948
  const pitchBend = e;
@@ -27604,15 +28094,15 @@ class PlaybackRangeChangedEventArgs {
27604
28094
  }
27605
28095
 
27606
28096
  /**
27607
- * This is the main synthesizer component which can be used to
28097
+ * This is the base class for synthesizer components which can be used to
27608
28098
  * play a {@link MidiFile} via a {@link ISynthOutput}.
27609
28099
  */
27610
- class AlphaSynth {
28100
+ class AlphaSynthBase {
27611
28101
  get output() {
27612
28102
  return this._output;
27613
28103
  }
27614
28104
  get isReadyForPlayback() {
27615
- return this.isReady && this._isSoundFontLoaded && this._isMidiLoaded;
28105
+ return this.isReady && this.isSoundFontLoaded && this._isMidiLoaded;
27616
28106
  }
27617
28107
  get logLevel() {
27618
28108
  return Logger.logLevel;
@@ -27621,11 +28111,14 @@ class AlphaSynth {
27621
28111
  Logger.logLevel = value;
27622
28112
  }
27623
28113
  get masterVolume() {
27624
- return this._synthesizer.masterVolume;
28114
+ return this.synthesizer.masterVolume;
27625
28115
  }
27626
28116
  set masterVolume(value) {
27627
28117
  value = Math.max(value, SynthConstants.MinVolume);
27628
- this._synthesizer.masterVolume = value;
28118
+ this.updateMasterVolume(value);
28119
+ }
28120
+ updateMasterVolume(value) {
28121
+ this.synthesizer.masterVolume = value;
27629
28122
  }
27630
28123
  get metronomeVolume() {
27631
28124
  return this._metronomeVolume;
@@ -27633,7 +28126,7 @@ class AlphaSynth {
27633
28126
  set metronomeVolume(value) {
27634
28127
  value = Math.max(value, SynthConstants.MinVolume);
27635
28128
  this._metronomeVolume = value;
27636
- this._synthesizer.metronomeVolume = value;
28129
+ this.synthesizer.metronomeVolume = value;
27637
28130
  }
27638
28131
  get countInVolume() {
27639
28132
  return this._countInVolume;
@@ -27649,19 +28142,22 @@ class AlphaSynth {
27649
28142
  this._midiEventsPlayedFilter = new Set(value);
27650
28143
  }
27651
28144
  get playbackSpeed() {
27652
- return this._sequencer.playbackSpeed;
28145
+ return this.sequencer.playbackSpeed;
27653
28146
  }
27654
28147
  set playbackSpeed(value) {
27655
28148
  value = ModelUtils.clamp(value, SynthConstants.MinPlaybackSpeed, SynthConstants.MaxPlaybackSpeed);
27656
- const oldSpeed = this._sequencer.playbackSpeed;
27657
- this._sequencer.playbackSpeed = value;
28149
+ this.updatePlaybackSpeed(value);
28150
+ }
28151
+ updatePlaybackSpeed(value) {
28152
+ const oldSpeed = this.sequencer.playbackSpeed;
28153
+ this.sequencer.playbackSpeed = value;
27658
28154
  this.timePosition = this.timePosition * (oldSpeed / value);
27659
28155
  }
27660
28156
  get tickPosition() {
27661
28157
  return this._tickPosition;
27662
28158
  }
27663
28159
  set tickPosition(value) {
27664
- this.timePosition = this._sequencer.mainTickPositionToTimePosition(value);
28160
+ this.timePosition = this.sequencer.mainTickPositionToTimePosition(value);
27665
28161
  }
27666
28162
  get timePosition() {
27667
28163
  return this._timePosition;
@@ -27669,30 +28165,30 @@ class AlphaSynth {
27669
28165
  set timePosition(value) {
27670
28166
  Logger.debug('AlphaSynth', `Seeking to position ${value}ms (main)`);
27671
28167
  // tell the sequencer to jump to the given position
27672
- this._sequencer.mainSeek(value);
28168
+ this.sequencer.mainSeek(value);
27673
28169
  // update the internal position
27674
28170
  this.updateTimePosition(value, true);
27675
28171
  // tell the output to reset the already synthesized buffers and request data again
27676
- if (this._sequencer.isPlayingMain) {
28172
+ if (this.sequencer.isPlayingMain) {
27677
28173
  this._notPlayedSamples = 0;
27678
28174
  this.output.resetSamples();
27679
28175
  }
27680
28176
  }
27681
28177
  get playbackRange() {
27682
- return this._sequencer.mainPlaybackRange;
28178
+ return this.sequencer.mainPlaybackRange;
27683
28179
  }
27684
28180
  set playbackRange(value) {
27685
- this._sequencer.mainPlaybackRange = value;
28181
+ this.sequencer.mainPlaybackRange = value;
27686
28182
  if (value) {
27687
28183
  this.tickPosition = value.startTick;
27688
28184
  }
27689
28185
  this.playbackRangeChanged.trigger(new PlaybackRangeChangedEventArgs(value));
27690
28186
  }
27691
28187
  get isLooping() {
27692
- return this._sequencer.isLooping;
28188
+ return this.sequencer.isLooping;
27693
28189
  }
27694
28190
  set isLooping(value) {
27695
- this._sequencer.isLooping = value;
28191
+ this.sequencer.isLooping = value;
27696
28192
  }
27697
28193
  destroy() {
27698
28194
  Logger.debug('AlphaSynth', 'Destroying player');
@@ -27700,11 +28196,11 @@ class AlphaSynth {
27700
28196
  this.output.destroy();
27701
28197
  }
27702
28198
  /**
27703
- * Initializes a new instance of the {@link AlphaSynth} class.
28199
+ * Initializes a new instance of the {@link AlphaSynthBase} class.
27704
28200
  * @param output The output to use for playing the generated samples.
27705
28201
  */
27706
- constructor(output, bufferTimeInMilliseconds) {
27707
- this._isSoundFontLoaded = false;
28202
+ constructor(output, synthesizer, bufferTimeInMilliseconds) {
28203
+ this.isSoundFontLoaded = false;
27708
28204
  this._isMidiLoaded = false;
27709
28205
  this._tickPosition = 0;
27710
28206
  this._timePosition = 0;
@@ -27733,8 +28229,8 @@ class AlphaSynth {
27733
28229
  Logger.debug('AlphaSynth', 'Creating output');
27734
28230
  this._output = output;
27735
28231
  Logger.debug('AlphaSynth', 'Creating synthesizer');
27736
- this._synthesizer = new TinySoundFont(this.output.sampleRate);
27737
- this._sequencer = new MidiFileSequencer(this._synthesizer);
28232
+ this.synthesizer = synthesizer;
28233
+ this.sequencer = new MidiFileSequencer(this.synthesizer);
27738
28234
  Logger.debug('AlphaSynth', 'Opening output');
27739
28235
  this.output.ready.on(() => {
27740
28236
  this.isReady = true;
@@ -27742,42 +28238,45 @@ class AlphaSynth {
27742
28238
  this.checkReadyForPlayback();
27743
28239
  });
27744
28240
  this.output.sampleRequest.on(() => {
27745
- if (this.state === PlayerState.Playing &&
27746
- (!this._sequencer.isFinished || this._synthesizer.activeVoiceCount > 0)) {
27747
- let samples = new Float32Array(SynthConstants.MicroBufferSize * SynthConstants.MicroBufferCount * SynthConstants.AudioChannels);
27748
- let bufferPos = 0;
27749
- for (let i = 0; i < SynthConstants.MicroBufferCount; i++) {
27750
- // synthesize buffer
27751
- this._sequencer.fillMidiEventQueue();
27752
- const synthesizedEvents = this._synthesizer.synthesize(samples, bufferPos, SynthConstants.MicroBufferSize);
27753
- bufferPos += SynthConstants.MicroBufferSize * SynthConstants.AudioChannels;
27754
- // push all processed events into the queue
27755
- // for informing users about played events
27756
- for (const e of synthesizedEvents) {
27757
- if (this._midiEventsPlayedFilter.has(e.event.type)) {
27758
- this._playedEventsQueue.enqueue(e);
27759
- }
27760
- }
27761
- // tell sequencer to check whether its work is done
27762
- if (this._sequencer.isFinished) {
27763
- break;
28241
+ this.onSampleRequest();
28242
+ });
28243
+ this.output.samplesPlayed.on(this.onSamplesPlayed.bind(this));
28244
+ this.output.open(bufferTimeInMilliseconds);
28245
+ }
28246
+ onSampleRequest() {
28247
+ if (this.state === PlayerState.Playing &&
28248
+ (!this.sequencer.isFinished || this.synthesizer.activeVoiceCount > 0)) {
28249
+ let samples = new Float32Array(SynthConstants.MicroBufferSize * SynthConstants.MicroBufferCount * SynthConstants.AudioChannels);
28250
+ let bufferPos = 0;
28251
+ for (let i = 0; i < SynthConstants.MicroBufferCount; i++) {
28252
+ // synthesize buffer
28253
+ this.sequencer.fillMidiEventQueue();
28254
+ const synthesizedEvents = this.synthesizer.synthesize(samples, bufferPos, SynthConstants.MicroBufferSize);
28255
+ bufferPos += SynthConstants.MicroBufferSize * SynthConstants.AudioChannels;
28256
+ // push all processed events into the queue
28257
+ // for informing users about played events
28258
+ for (const e of synthesizedEvents) {
28259
+ if (this._midiEventsPlayedFilter.has(e.event.type)) {
28260
+ this._playedEventsQueue.enqueue(e);
27764
28261
  }
27765
28262
  }
27766
- // send it to output
27767
- if (bufferPos < samples.length) {
27768
- samples = samples.subarray(0, bufferPos);
28263
+ // tell sequencer to check whether its work is done
28264
+ if (this.sequencer.isFinished) {
28265
+ break;
27769
28266
  }
27770
- this._notPlayedSamples += samples.length;
27771
- this.output.addSamples(samples);
27772
28267
  }
27773
- else {
27774
- // Tell output that there is no data left for it.
27775
- const samples = new Float32Array(0);
27776
- this.output.addSamples(samples);
28268
+ // send it to output
28269
+ if (bufferPos < samples.length) {
28270
+ samples = samples.subarray(0, bufferPos);
27777
28271
  }
27778
- });
27779
- this.output.samplesPlayed.on(this.onSamplesPlayed.bind(this));
27780
- this.output.open(bufferTimeInMilliseconds);
28272
+ this._notPlayedSamples += samples.length;
28273
+ this.output.addSamples(samples);
28274
+ }
28275
+ else {
28276
+ // Tell output that there is no data left for it.
28277
+ const samples = new Float32Array(0);
28278
+ this.output.addSamples(samples);
28279
+ }
27781
28280
  }
27782
28281
  play() {
27783
28282
  if (this.state !== PlayerState.Paused || !this._isMidiLoaded) {
@@ -27787,20 +28286,20 @@ class AlphaSynth {
27787
28286
  this.playInternal();
27788
28287
  if (this._countInVolume > 0) {
27789
28288
  Logger.debug('AlphaSynth', 'Starting countin');
27790
- this._sequencer.startCountIn();
27791
- this._synthesizer.setupMetronomeChannel(this._countInVolume);
28289
+ this.sequencer.startCountIn();
28290
+ this.synthesizer.setupMetronomeChannel(this._countInVolume);
27792
28291
  this.updateTimePosition(0, true);
27793
28292
  }
27794
28293
  this.output.play();
27795
28294
  return true;
27796
28295
  }
27797
28296
  playInternal() {
27798
- if (this._sequencer.isPlayingOneTimeMidi) {
28297
+ if (this.sequencer.isPlayingOneTimeMidi) {
27799
28298
  Logger.debug('AlphaSynth', 'Cancelling one time midi');
27800
28299
  this.stopOneTimeMidi();
27801
28300
  }
27802
28301
  Logger.debug('AlphaSynth', 'Starting playback');
27803
- this._synthesizer.setupMetronomeChannel(this.metronomeVolume);
28302
+ this.synthesizer.setupMetronomeChannel(this.metronomeVolume);
27804
28303
  this._synthStopping = false;
27805
28304
  this.state = PlayerState.Playing;
27806
28305
  this.stateChanged.trigger(new PlayerStateChangedEventArgs(this.state, false));
@@ -27813,7 +28312,7 @@ class AlphaSynth {
27813
28312
  this.state = PlayerState.Paused;
27814
28313
  this.stateChanged.trigger(new PlayerStateChangedEventArgs(this.state, false));
27815
28314
  this.output.pause();
27816
- this._synthesizer.noteOffAll(false);
28315
+ this.synthesizer.noteOffAll(false);
27817
28316
  }
27818
28317
  playPause() {
27819
28318
  if (this.state !== PlayerState.Paused || !this._isMidiLoaded) {
@@ -27831,21 +28330,21 @@ class AlphaSynth {
27831
28330
  this.state = PlayerState.Paused;
27832
28331
  this.output.pause();
27833
28332
  this._notPlayedSamples = 0;
27834
- this._sequencer.stop();
27835
- this._synthesizer.noteOffAll(true);
27836
- this.tickPosition = this._sequencer.mainPlaybackRange ? this._sequencer.mainPlaybackRange.startTick : 0;
28333
+ this.sequencer.stop();
28334
+ this.synthesizer.noteOffAll(true);
28335
+ this.tickPosition = this.sequencer.mainPlaybackRange ? this.sequencer.mainPlaybackRange.startTick : 0;
27837
28336
  this.stateChanged.trigger(new PlayerStateChangedEventArgs(this.state, true));
27838
28337
  }
27839
28338
  playOneTimeMidiFile(midi) {
27840
- if (this._sequencer.isPlayingOneTimeMidi) {
28339
+ if (this.sequencer.isPlayingOneTimeMidi) {
27841
28340
  this.stopOneTimeMidi();
27842
28341
  }
27843
28342
  else {
27844
28343
  // pause current playback.
27845
28344
  this.pause();
27846
28345
  }
27847
- this._sequencer.loadOneTimeMidi(midi);
27848
- this._synthesizer.noteOffAll(true);
28346
+ this.sequencer.loadOneTimeMidi(midi);
28347
+ this.synthesizer.noteOffAll(true);
27849
28348
  // update the internal position
27850
28349
  this.updateTimePosition(0, true);
27851
28350
  // tell the output to reset the already synthesized buffers and request data again
@@ -27855,9 +28354,9 @@ class AlphaSynth {
27855
28354
  }
27856
28355
  resetSoundFonts() {
27857
28356
  this.stop();
27858
- this._synthesizer.resetPresets();
28357
+ this.synthesizer.resetPresets();
27859
28358
  this._loadedSoundFonts = [];
27860
- this._isSoundFontLoaded = false;
28359
+ this.isSoundFontLoaded = false;
27861
28360
  this.soundFontLoaded.trigger();
27862
28361
  }
27863
28362
  loadSoundFont(data, append) {
@@ -27871,7 +28370,7 @@ class AlphaSynth {
27871
28370
  this._loadedSoundFonts = [];
27872
28371
  }
27873
28372
  this._loadedSoundFonts.push(soundFont);
27874
- this._isSoundFontLoaded = true;
28373
+ this.isSoundFontLoaded = true;
27875
28374
  this.soundFontLoaded.trigger();
27876
28375
  Logger.debug('AlphaSynth', 'soundFont successfully loaded');
27877
28376
  this.checkReadyForPlayback();
@@ -27883,12 +28382,12 @@ class AlphaSynth {
27883
28382
  }
27884
28383
  checkReadyForPlayback() {
27885
28384
  if (this.isReadyForPlayback) {
27886
- this._synthesizer.setupMetronomeChannel(this.metronomeVolume);
27887
- const programs = this._sequencer.instrumentPrograms;
27888
- const percussionKeys = this._sequencer.percussionKeys;
28385
+ this.synthesizer.setupMetronomeChannel(this.metronomeVolume);
28386
+ const programs = this.sequencer.instrumentPrograms;
28387
+ const percussionKeys = this.sequencer.percussionKeys;
27889
28388
  let append = false;
27890
28389
  for (const soundFont of this._loadedSoundFonts) {
27891
- this._synthesizer.loadPresets(soundFont, programs, percussionKeys, append);
28390
+ this.synthesizer.loadPresets(soundFont, programs, percussionKeys, append);
27892
28391
  append = true;
27893
28392
  }
27894
28393
  this.readyForPlayback.trigger();
@@ -27902,9 +28401,9 @@ class AlphaSynth {
27902
28401
  this.stop();
27903
28402
  try {
27904
28403
  Logger.debug('AlphaSynth', 'Loading midi from model');
27905
- this._sequencer.loadMidi(midi);
28404
+ this.sequencer.loadMidi(midi);
27906
28405
  this._isMidiLoaded = true;
27907
- this.midiLoaded.trigger(new PositionChangedEventArgs(0, this._sequencer.currentEndTime, 0, this._sequencer.currentEndTick, false));
28406
+ this.midiLoaded.trigger(new PositionChangedEventArgs(0, this.sequencer.currentEndTime, 0, this.sequencer.currentEndTick, false, this.sequencer.currentTempo, this.sequencer.modifiedTempo));
27908
28407
  Logger.debug('AlphaSynth', 'Midi successfully loaded');
27909
28408
  this.checkReadyForPlayback();
27910
28409
  this.tickPosition = 0;
@@ -27915,29 +28414,29 @@ class AlphaSynth {
27915
28414
  }
27916
28415
  }
27917
28416
  applyTranspositionPitches(transpositionPitches) {
27918
- this._synthesizer.applyTranspositionPitches(transpositionPitches);
28417
+ this.synthesizer.applyTranspositionPitches(transpositionPitches);
27919
28418
  }
27920
28419
  setChannelTranspositionPitch(channel, semitones) {
27921
- this._synthesizer.setChannelTranspositionPitch(channel, semitones);
28420
+ this.synthesizer.setChannelTranspositionPitch(channel, semitones);
27922
28421
  }
27923
28422
  setChannelMute(channel, mute) {
27924
- this._synthesizer.channelSetMute(channel, mute);
28423
+ this.synthesizer.channelSetMute(channel, mute);
27925
28424
  }
27926
28425
  resetChannelStates() {
27927
- this._synthesizer.resetChannelStates();
28426
+ this.synthesizer.resetChannelStates();
27928
28427
  }
27929
28428
  setChannelSolo(channel, solo) {
27930
- this._synthesizer.channelSetSolo(channel, solo);
28429
+ this.synthesizer.channelSetSolo(channel, solo);
27931
28430
  }
27932
28431
  setChannelVolume(channel, volume) {
27933
28432
  volume = Math.max(volume, SynthConstants.MinVolume);
27934
- this._synthesizer.channelSetMixVolume(channel, volume);
28433
+ this.synthesizer.channelSetMixVolume(channel, volume);
27935
28434
  }
27936
28435
  onSamplesPlayed(sampleCount) {
27937
28436
  if (sampleCount === 0) {
27938
28437
  return;
27939
28438
  }
27940
- const playedMillis = (sampleCount / this._synthesizer.outSampleRate) * 1000;
28439
+ const playedMillis = (sampleCount / this.synthesizer.outSampleRate) * 1000;
27941
28440
  this._notPlayedSamples -= sampleCount * SynthConstants.AudioChannels;
27942
28441
  this.updateTimePosition(this._timePosition + playedMillis, false);
27943
28442
  this.checkForFinish();
@@ -27945,25 +28444,25 @@ class AlphaSynth {
27945
28444
  checkForFinish() {
27946
28445
  let startTick = 0;
27947
28446
  let endTick = 0;
27948
- if (this.playbackRange && this._sequencer.isPlayingMain) {
28447
+ if (this.playbackRange && this.sequencer.isPlayingMain) {
27949
28448
  startTick = this.playbackRange.startTick;
27950
28449
  endTick = this.playbackRange.endTick;
27951
28450
  }
27952
28451
  else {
27953
- endTick = this._sequencer.currentEndTick;
28452
+ endTick = this.sequencer.currentEndTick;
27954
28453
  }
27955
28454
  if (this._tickPosition >= endTick) {
27956
28455
  // fully done with playback of remaining samples?
27957
28456
  if (this._notPlayedSamples <= 0) {
27958
28457
  this._notPlayedSamples = 0;
27959
- if (this._sequencer.isPlayingCountIn) {
28458
+ if (this.sequencer.isPlayingCountIn) {
27960
28459
  Logger.debug('AlphaSynth', 'Finished playback (count-in)');
27961
- this._sequencer.resetCountIn();
27962
- this.timePosition = this._sequencer.currentTime;
28460
+ this.sequencer.resetCountIn();
28461
+ this.timePosition = this.sequencer.currentTime;
27963
28462
  this.playInternal();
27964
28463
  this.output.resetSamples();
27965
28464
  }
27966
- else if (this._sequencer.isPlayingOneTimeMidi) {
28465
+ else if (this.sequencer.isPlayingOneTimeMidi) {
27967
28466
  Logger.debug('AlphaSynth', 'Finished playback (one time)');
27968
28467
  this.output.resetSamples();
27969
28468
  this.state = PlayerState.Paused;
@@ -27975,11 +28474,11 @@ class AlphaSynth {
27975
28474
  this.tickPosition = startTick;
27976
28475
  this._synthStopping = false;
27977
28476
  }
27978
- else if (this._synthesizer.activeVoiceCount > 0) {
28477
+ else if (this.synthesizer.activeVoiceCount > 0) {
27979
28478
  // smooth stop
27980
28479
  if (!this._synthStopping) {
27981
28480
  Logger.debug('AlphaSynth', 'Signaling synth to stop all voices (all samples played)');
27982
- this._synthesizer.noteOffAll(true);
28481
+ this.synthesizer.noteOffAll(true);
27983
28482
  this._synthStopping = true;
27984
28483
  }
27985
28484
  }
@@ -27995,7 +28494,7 @@ class AlphaSynth {
27995
28494
  // to eventually bring the voices down to 0 and stop playing
27996
28495
  if (!this._synthStopping) {
27997
28496
  Logger.debug('AlphaSynth', 'Signaling synth to stop all voices (not all samples played)');
27998
- this._synthesizer.noteOffAll(true);
28497
+ this.synthesizer.noteOffAll(true);
27999
28498
  this._synthStopping = true;
28000
28499
  }
28001
28500
  }
@@ -28003,44 +28502,41 @@ class AlphaSynth {
28003
28502
  }
28004
28503
  stopOneTimeMidi() {
28005
28504
  this.output.pause();
28006
- this._synthesizer.noteOffAll(true);
28007
- this._sequencer.resetOneTimeMidi();
28008
- this.timePosition = this._sequencer.currentTime;
28505
+ this.synthesizer.noteOffAll(true);
28506
+ this.sequencer.resetOneTimeMidi();
28507
+ this.timePosition = this.sequencer.currentTime;
28009
28508
  }
28010
28509
  updateTimePosition(timePosition, isSeek) {
28011
28510
  // update the real positions
28012
28511
  let currentTime = timePosition;
28013
28512
  this._timePosition = currentTime;
28014
- let currentTick = this._sequencer.currentTimePositionToTickPosition(currentTime);
28513
+ let currentTick = this.sequencer.currentTimePositionToTickPosition(currentTime);
28015
28514
  this._tickPosition = currentTick;
28016
- const endTime = this._sequencer.currentEndTime;
28017
- const endTick = this._sequencer.currentEndTick;
28515
+ const endTime = this.sequencer.currentEndTime;
28516
+ const endTick = this.sequencer.currentEndTick;
28018
28517
  // on fade outs we can have some milliseconds longer, ensure we don't report this
28019
28518
  if (currentTime > endTime) {
28020
28519
  currentTime = endTime;
28021
28520
  currentTick = endTick;
28022
28521
  }
28023
- const mode = this._sequencer.isPlayingMain
28024
- ? 'main'
28025
- : this._sequencer.isPlayingCountIn
28026
- ? 'count-in'
28027
- : 'one-time';
28028
- Logger.debug('AlphaSynth', `Position changed: (time: ${currentTime}/${endTime}, tick: ${currentTick}/${endTick}, Active Voices: ${this._synthesizer.activeVoiceCount} (${mode})`);
28029
- if (this._sequencer.isPlayingMain) {
28030
- this.positionChanged.trigger(new PositionChangedEventArgs(currentTime, endTime, currentTick, endTick, isSeek));
28522
+ const mode = this.sequencer.isPlayingMain ? 'main' : this.sequencer.isPlayingCountIn ? 'count-in' : 'one-time';
28523
+ Logger.debug('AlphaSynth', `Position changed: (time: ${currentTime}/${endTime}, tick: ${currentTick}/${endTick}, Active Voices: ${this.synthesizer.activeVoiceCount} (${mode}), Tempo original: ${this.sequencer.currentTempo}, Tempo modified: ${this.sequencer.modifiedTempo})`);
28524
+ if (this.sequencer.isPlayingMain) {
28525
+ this.positionChanged.trigger(new PositionChangedEventArgs(currentTime, endTime, currentTick, endTick, isSeek, this.sequencer.currentTempo, this.sequencer.modifiedTempo));
28031
28526
  }
28032
28527
  // build events which were actually played
28033
28528
  if (isSeek) {
28034
28529
  this._playedEventsQueue.clear();
28035
28530
  }
28036
28531
  else {
28037
- const playedEvents = new Queue();
28532
+ const playedEvents = [];
28038
28533
  while (!this._playedEventsQueue.isEmpty && this._playedEventsQueue.peek().time < currentTime) {
28039
28534
  const synthEvent = this._playedEventsQueue.dequeue();
28040
- playedEvents.enqueue(synthEvent.event);
28535
+ playedEvents.push(synthEvent.event);
28041
28536
  }
28042
- if (!playedEvents.isEmpty) {
28043
- this.midiEventsPlayed.trigger(new MidiEventsPlayedEventArgs(playedEvents.toArray()));
28537
+ if (playedEvents.length > 0) {
28538
+ playedEvents.reverse();
28539
+ this.midiEventsPlayed.trigger(new MidiEventsPlayedEventArgs(playedEvents));
28044
28540
  }
28045
28541
  }
28046
28542
  }
@@ -28048,13 +28544,28 @@ class AlphaSynth {
28048
28544
  * @internal
28049
28545
  */
28050
28546
  hasSamplesForProgram(program) {
28051
- return this._synthesizer.hasSamplesForProgram(program);
28547
+ return this.synthesizer.hasSamplesForProgram(program);
28052
28548
  }
28053
28549
  /**
28054
28550
  * @internal
28055
28551
  */
28056
28552
  hasSamplesForPercussion(key) {
28057
- return this._synthesizer.hasSamplesForPercussion(key);
28553
+ return this.synthesizer.hasSamplesForPercussion(key);
28554
+ }
28555
+ loadBackingTrack(_score, _syncPoints) {
28556
+ }
28557
+ }
28558
+ /**
28559
+ * This is the main synthesizer component which can be used to
28560
+ * play a {@link MidiFile} via a {@link ISynthOutput}.
28561
+ */
28562
+ class AlphaSynth extends AlphaSynthBase {
28563
+ /**
28564
+ * Initializes a new instance of the {@link AlphaSynth} class.
28565
+ * @param output The output to use for playing the generated samples.
28566
+ */
28567
+ constructor(output, bufferTimeInMilliseconds) {
28568
+ super(output, new TinySoundFont(output.sampleRate), bufferTimeInMilliseconds);
28058
28569
  }
28059
28570
  }
28060
28571
 
@@ -29301,6 +29812,35 @@ var PlayerOutputMode;
29301
29812
  */
29302
29813
  PlayerOutputMode[PlayerOutputMode["WebAudioScriptProcessor"] = 1] = "WebAudioScriptProcessor";
29303
29814
  })(PlayerOutputMode || (PlayerOutputMode = {}));
29815
+ /**
29816
+ * Lists the different modes in which the internal alphaTab player (and the related cursor behavior) operates.
29817
+ */
29818
+ var PlayerMode;
29819
+ (function (PlayerMode) {
29820
+ /**
29821
+ * The player functionality is fully disabled.
29822
+ */
29823
+ PlayerMode[PlayerMode["Disabled"] = 0] = "Disabled";
29824
+ /**
29825
+ * The player functionality is enabled.
29826
+ * If the loaded file provides a backing track, it is used for playback.
29827
+ * If no backing track is provided, the midi synthesizer is used.
29828
+ */
29829
+ PlayerMode[PlayerMode["EnabledAutomatic"] = 1] = "EnabledAutomatic";
29830
+ /**
29831
+ * The player functionality is enabled and the synthesizer is used (even if a backing track is embedded in the file).
29832
+ */
29833
+ PlayerMode[PlayerMode["EnabledSynthesizer"] = 2] = "EnabledSynthesizer";
29834
+ /**
29835
+ * The player functionality is enabled. If the input data model has no backing track configured, the player might not work as expected (as playback completes instantly).
29836
+ */
29837
+ PlayerMode[PlayerMode["EnabledBackingTrack"] = 3] = "EnabledBackingTrack";
29838
+ /**
29839
+ * The player functionality is enabled and an external audio/video source is used as time axis.
29840
+ * The related player APIs need to be used to update the current position of the external audio source within alphaTab.
29841
+ */
29842
+ PlayerMode[PlayerMode["EnabledExternalMedia"] = 4] = "EnabledExternalMedia";
29843
+ })(PlayerMode || (PlayerMode = {}));
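A minimal sketch of selecting one of the new player modes through the settings; it assumes the usual web setup with AlphaTabApi, and the export path of PlayerMode, the element id and the sound font URL are illustrative assumptions:

const settings = {
    player: {
        // EnabledAutomatic prefers an embedded backing track and falls back to the synthesizer
        playerMode: alphaTab.PlayerMode.EnabledAutomatic,
        // a sound font is only required when the synthesizer ends up being used
        soundFont: 'https://cdn.example.com/sonivox.sf2',
        enableCursor: true
    }
};
const api = new alphaTab.AlphaTabApi(document.getElementById('alphaTab'), settings);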
29304
29844
  /**
29305
29845
  * The player settings control how the audio playback and UI is behaving.
29306
29846
  * @json
@@ -29347,6 +29887,7 @@ class PlayerSettings {
29347
29887
  * @since 0.9.6
29348
29888
  * @defaultValue `false`
29349
29889
  * @category Player
29890
+ * @deprecated Use {@link playerMode} instead.
29350
29891
  * @remarks
29351
29892
  * This setting configures whether the player feature is enabled or not. Depending on the platform enabling the player needs some additional actions of the developer.
29352
29893
  * For the JavaScript version the [player.soundFont](/docs/reference/settings/player/soundfont) property must be set to the URL of the sound font that should be used or it must be loaded manually via API.
@@ -29355,6 +29896,37 @@ class PlayerSettings {
29355
29896
  * AlphaTab does not ship a default UI for the player. The API must be hooked up to some UI controls to allow the user to interact with the player.
29356
29897
  */
29357
29898
  this.enablePlayer = false;
29899
+ /**
29900
+ * Whether the player should be enabled and which mode it should use.
29901
+ * @since 1.6.0
29902
+ * @defaultValue `PlayerMode.Disabled`
29903
+ * @category Player
29904
+ * @remarks
29905
+ * This setting configures whether the player feature is enabled or not. Depending on the platform, enabling the player requires some additional actions by the developer.
29906
+ *
29907
+ * **Synthesizer**
29908
+ *
29909
+ * If the synthesizer is used (via {@link PlayerMode.EnabledAutomatic} or {@link PlayerMode.EnabledSynthesizer}) a sound font is needed so that the midi synthesizer can produce the audio samples.
29910
+ *
29911
+ * For the JavaScript version the [player.soundFont](/docs/reference/settings/player/soundfont) property must be set to the URL of the sound font that should be used or it must be loaded manually via API.
29912
+ * For .net the sound font must be loaded manually via the API.
29913
+ *
29914
+ * **Backing Track**
29915
+ *
29916
+ * For a backing track embedded in the input file no additional data needs to be loaded (everything is filled from the input file).
29917
+ * Otherwise the `score.backingTrack` needs to be filled before loading and the related sync points need to be configured.
29918
+ *
29919
+ * **External Media**
29920
+ *
29921
+ * For synchronizing alphaTab with external media no data needs to be loaded into alphaTab. The configured sync points on the MasterBars are used
29922
+ * as the reference to synchronize the external media with the internal time axis. Then the related APIs on the AlphaTabApi object need to be used
29923
+ * to update the playback state and external audio position during playback.
29924
+ *
29925
+ * **User Interface**
29926
+ *
29927
+ * AlphaTab does not ship a default UI for the player. The API must be hooked up to some UI controls to allow the user to interact with the player.
29928
+ */
29929
+ this.playerMode = PlayerMode.Disabled;
29358
29930
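A minimal configuration sketch for the two most common modes, assuming browser usage via the exported AlphaTabApi; the element selectors and the sound font URL are placeholders:

    // Synthesizer playback: a sound font is required.
    const api = new alphaTab.AlphaTabApi(document.querySelector('#alphaTab'), {
        player: {
            playerMode: alphaTab.PlayerMode.EnabledSynthesizer,
            soundFont: '/path/to/soundfont.sf2' // placeholder
        }
    });

    // Backing track playback: the audio embedded in the file (score.backingTrack) is used,
    // no sound font is needed.
    const backingTrackApi = new alphaTab.AlphaTabApi(document.querySelector('#alphaTabBackingTrack'), {
        player: { playerMode: alphaTab.PlayerMode.EnabledBackingTrack }
    });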
  /**
29359
29931
  * Whether playback cursors should be displayed.
29360
29932
  * @since 0.9.6
@@ -30060,6 +30632,7 @@ class PlayerSettingsSerializer {
30060
30632
  /*@target web*/
30061
30633
  o.set("outputmode", obj.outputMode);
30062
30634
  o.set("enableplayer", obj.enablePlayer);
30635
+ o.set("playermode", obj.playerMode);
30063
30636
  o.set("enablecursor", obj.enableCursor);
30064
30637
  o.set("enableanimatedbeatcursor", obj.enableAnimatedBeatCursor);
30065
30638
  o.set("enableelementhighlighting", obj.enableElementHighlighting);
@@ -30095,6 +30668,9 @@ class PlayerSettingsSerializer {
30095
30668
  case "enableplayer":
30096
30669
  obj.enablePlayer = v;
30097
30670
  return true;
30671
+ case "playermode":
30672
+ obj.playerMode = JsonHelper.parseEnum(v, PlayerMode);
30673
+ return true;
30098
30674
  case "enablecursor":
30099
30675
  obj.enableCursor = v;
30100
30676
  return true;
@@ -30329,6 +30905,39 @@ class SectionSerializer {
30329
30905
  }
30330
30906
  }
30331
30907
 
30908
+ class SyncPointDataSerializer {
30909
+ static fromJson(obj, m) {
30910
+ if (!m) {
30911
+ return;
30912
+ }
30913
+ JsonHelper.forEach(m, (v, k) => SyncPointDataSerializer.setProperty(obj, k, v));
30914
+ }
30915
+ static toJson(obj) {
30916
+ if (!obj) {
30917
+ return null;
30918
+ }
30919
+ const o = new Map();
30920
+ o.set("baroccurence", obj.barOccurence);
30921
+ o.set("modifiedtempo", obj.modifiedTempo);
30922
+ o.set("millisecondoffset", obj.millisecondOffset);
30923
+ return o;
30924
+ }
30925
+ static setProperty(obj, property, v) {
30926
+ switch (property) {
30927
+ case "baroccurence":
30928
+ obj.barOccurence = v;
30929
+ return true;
30930
+ case "modifiedtempo":
30931
+ obj.modifiedTempo = v;
30932
+ return true;
30933
+ case "millisecondoffset":
30934
+ obj.millisecondOffset = v;
30935
+ return true;
30936
+ }
30937
+ return false;
30938
+ }
30939
+ }
30940
+
30332
30941
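For reference, a sketch of the JSON shape these serializers exchange for a single sync point on a master bar. The key names are the lower-cased strings from the setProperty switches above; the sample values are invented:

    const masterBarJson = {
        syncpoints: [
            {
                ratioposition: 0.5,            // position within the bar (0..1)
                syncpointvalue: {
                    baroccurence: 0,           // which occurrence (repeat) of the bar
                    modifiedtempo: 118.5,      // tempo of the external audio at this point
                    millisecondoffset: 1250    // offset within the external audio, in ms
                }
            }
        ]
    };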
  class AutomationSerializer {
30333
30942
  static fromJson(obj, m) {
30334
30943
  if (!m) {
@@ -30344,6 +30953,9 @@ class AutomationSerializer {
30344
30953
  o.set("islinear", obj.isLinear);
30345
30954
  o.set("type", obj.type);
30346
30955
  o.set("value", obj.value);
30956
+ if (obj.syncPointValue) {
30957
+ o.set("syncpointvalue", SyncPointDataSerializer.toJson(obj.syncPointValue));
30958
+ }
30347
30959
  o.set("ratioposition", obj.ratioPosition);
30348
30960
  o.set("text", obj.text);
30349
30961
  return o;
@@ -30359,6 +30971,15 @@ class AutomationSerializer {
30359
30971
  case "value":
30360
30972
  obj.value = v;
30361
30973
  return true;
30974
+ case "syncpointvalue":
30975
+ if (v) {
30976
+ obj.syncPointValue = new SyncPointData();
30977
+ SyncPointDataSerializer.fromJson(obj.syncPointValue, v);
30978
+ }
30979
+ else {
30980
+ obj.syncPointValue = undefined;
30981
+ }
30982
+ return true;
30362
30983
  case "ratioposition":
30363
30984
  obj.ratioPosition = v;
30364
30985
  return true;
@@ -30424,6 +31045,9 @@ class MasterBarSerializer {
30424
31045
  o.set("section", SectionSerializer.toJson(obj.section));
30425
31046
  }
30426
31047
  o.set("tempoautomations", obj.tempoAutomations.map(i => AutomationSerializer.toJson(i)));
31048
+ if (obj.syncPoints !== undefined) {
31049
+ o.set("syncpoints", obj.syncPoints?.map(i => AutomationSerializer.toJson(i)));
31050
+ }
30427
31051
  if (obj.fermata !== null) {
30428
31052
  const m = new Map();
30429
31053
  o.set("fermata", m);
@@ -30490,6 +31114,16 @@ class MasterBarSerializer {
30490
31114
  obj.tempoAutomations.push(i);
30491
31115
  }
30492
31116
  return true;
31117
+ case "syncpoints":
31118
+ if (v) {
31119
+ obj.syncPoints = [];
31120
+ for (const o of v) {
31121
+ const i = new Automation();
31122
+ AutomationSerializer.fromJson(i, o);
31123
+ obj.addSyncPoint(i);
31124
+ }
31125
+ }
31126
+ return true;
30493
31127
  case "fermata":
30494
31128
  obj.fermata = new Map();
30495
31129
  JsonHelper.forEach(v, (v, k) => {
@@ -31781,6 +32415,31 @@ class RenderStylesheetSerializer {
31781
32415
  }
31782
32416
  }
31783
32417
 
32418
+ class BackingTrackSerializer {
32419
+ static fromJson(obj, m) {
32420
+ if (!m) {
32421
+ return;
32422
+ }
32423
+ JsonHelper.forEach(m, (v, k) => BackingTrackSerializer.setProperty(obj, k, v));
32424
+ }
32425
+ static toJson(obj) {
32426
+ if (!obj) {
32427
+ return null;
32428
+ }
32429
+ const o = new Map();
32430
+ o.set("padding", obj.padding);
32431
+ return o;
32432
+ }
32433
+ static setProperty(obj, property, v) {
32434
+ switch (property) {
32435
+ case "padding":
32436
+ obj.padding = v;
32437
+ return true;
32438
+ }
32439
+ return false;
32440
+ }
32441
+ }
32442
+
31784
32443
  class HeaderFooterStyleSerializer {
31785
32444
  static fromJson(obj, m) {
31786
32445
  if (!m) {
@@ -31892,6 +32551,9 @@ class ScoreSerializer {
31892
32551
  o.set("defaultsystemslayout", obj.defaultSystemsLayout);
31893
32552
  o.set("systemslayout", obj.systemsLayout);
31894
32553
  o.set("stylesheet", RenderStylesheetSerializer.toJson(obj.stylesheet));
32554
+ if (obj.backingTrack) {
32555
+ o.set("backingtrack", BackingTrackSerializer.toJson(obj.backingTrack));
32556
+ }
31895
32557
  if (obj.style) {
31896
32558
  o.set("style", ScoreStyleSerializer.toJson(obj.style));
31897
32559
  }
@@ -31960,6 +32622,15 @@ class ScoreSerializer {
31960
32622
  case "stylesheet":
31961
32623
  RenderStylesheetSerializer.fromJson(obj.stylesheet, v);
31962
32624
  return true;
32625
+ case "backingtrack":
32626
+ if (v) {
32627
+ obj.backingTrack = new BackingTrack();
32628
+ BackingTrackSerializer.fromJson(obj.backingTrack, v);
32629
+ }
32630
+ else {
32631
+ obj.backingTrack = undefined;
32632
+ }
32633
+ return true;
31963
32634
  case "style":
31964
32635
  if (v) {
31965
32636
  obj.style = new ScoreStyle();
@@ -32136,7 +32807,9 @@ class JsonConverter {
32136
32807
  case MidiEventType.ProgramChange:
32137
32808
  return new ProgramChangeEvent(track, tick, JsonHelper.getValue(midiEvent, 'channel'), JsonHelper.getValue(midiEvent, 'program'));
32138
32809
  case MidiEventType.TempoChange:
32139
- return new TempoChangeEvent(tick, JsonHelper.getValue(midiEvent, 'microSecondsPerQuarterNote'));
32810
+ const tempo = new TempoChangeEvent(tick, 0);
32811
+ tempo.beatsPerMinute = JsonHelper.getValue(midiEvent, 'beatsPerMinute');
32812
+ return tempo;
32140
32813
  case MidiEventType.PitchBend:
32141
32814
  return new PitchBendEvent(track, tick, JsonHelper.getValue(midiEvent, 'channel'), JsonHelper.getValue(midiEvent, 'value'));
32142
32815
  case MidiEventType.PerNotePitchBend:
@@ -32211,7 +32884,7 @@ class JsonConverter {
32211
32884
  o.set('program', midiEvent.program);
32212
32885
  break;
32213
32886
  case MidiEventType.TempoChange:
32214
- o.set('microSecondsPerQuarterNote', midiEvent.microSecondsPerQuarterNote);
32887
+ o.set('beatsPerMinute', midiEvent.beatsPerMinute);
32215
32888
  break;
32216
32889
  case MidiEventType.PitchBend:
32217
32890
  o.set('channel', midiEvent.channel);
@@ -32438,7 +33111,9 @@ class AlphaSynthWebWorker {
32438
33111
  endTime: e.endTime,
32439
33112
  currentTick: e.currentTick,
32440
33113
  endTick: e.endTick,
32441
- isSeek: e.isSeek
33114
+ isSeek: e.isSeek,
33115
+ originalTempo: e.originalTempo,
33116
+ modifiedTempo: e.modifiedTempo
32442
33117
  });
32443
33118
  }
32444
33119
  onPlayerStateChanged(e) {
@@ -32484,7 +33159,9 @@ class AlphaSynthWebWorker {
32484
33159
  endTime: e.endTime,
32485
33160
  currentTick: e.currentTick,
32486
33161
  endTick: e.endTick,
32487
- isSeek: e.isSeek
33162
+ isSeek: e.isSeek,
33163
+ originalTempo: e.originalTempo,
33164
+ modifiedTempo: e.modifiedTempo
32488
33165
  });
32489
33166
  }
32490
33167
  onMidiLoadFailed(e) {
@@ -33793,8 +34470,9 @@ class AlphaSynthMidiFileHandler {
33793
34470
  }
33794
34471
  addTempo(tick, tempo) {
33795
34472
  // bpm -> microsecond per quarter note
33796
- const tempoInUsq = (60000000 / tempo) | 0;
33797
- this._midiFile.addEvent(new TempoChangeEvent(tick, tempoInUsq));
34473
+ const tempoEvent = new TempoChangeEvent(tick, 0);
34474
+ tempoEvent.beatsPerMinute = tempo;
34475
+ this._midiFile.addEvent(tempoEvent);
33798
34476
  }
33799
34477
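TempoChangeEvent now carries beats per minute directly instead of the raw MIDI tempo value; the conversion the removed line performed is the standard one:

    const bpm = 120;
    const microSecondsPerQuarterNote = (60000000 / bpm) | 0; // 500000, what the removed code stored
    const backToBpm = 60000000 / microSecondsPerQuarterNote; // 120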
  addBend(track, tick, channel, value) {
33800
34478
  if (value >= SynthConstants.MaxPitchWheel) {
@@ -35066,6 +35744,10 @@ class MidiFileGenerator {
35066
35744
  * Gets or sets whether transposition pitches should be applied to the individual midi events or not.
35067
35745
  */
35068
35746
  this.applyTranspositionPitches = true;
35747
+ /**
35748
+ * The computed sync points for synchronizing the midi file with an external backing track.
35749
+ */
35750
+ this.syncPoints = [];
35069
35751
  /**
35070
35752
  * Gets the transposition pitches for the individual midi channels.
35071
35753
  */
@@ -35092,13 +35774,17 @@ class MidiFileGenerator {
35092
35774
  let previousMasterBar = null;
35093
35775
  let currentTempo = this._score.tempo;
35094
35776
  // store the previous played bar for repeats
35777
+ const barOccurence = new Map();
35095
35778
  while (!controller.finished) {
35096
35779
  const index = controller.index;
35097
35780
  const bar = this._score.masterBars[index];
35098
35781
  const currentTick = controller.currentTick;
35099
35782
  controller.processCurrent();
35100
35783
  if (controller.shouldPlay) {
35101
- this.generateMasterBar(bar, previousMasterBar, currentTick, currentTempo);
35784
+ let occurence = barOccurence.has(index) ? barOccurence.get(index) : -1;
35785
+ occurence++;
35786
+ barOccurence.set(index, occurence);
35787
+ this.generateMasterBar(bar, previousMasterBar, currentTick, currentTempo, occurence);
35102
35788
  if (bar.tempoAutomations.length > 0) {
35103
35789
  currentTempo = bar.tempoAutomations[0].value;
35104
35790
  }
@@ -35167,7 +35853,7 @@ class MidiFileGenerator {
35167
35853
  const value = Math.max(-32768, Math.min(32767, data * 8 - 1));
35168
35854
  return Math.max(value, -1) + 1;
35169
35855
  }
35170
- generateMasterBar(masterBar, previousMasterBar, currentTick, currentTempo) {
35856
+ generateMasterBar(masterBar, previousMasterBar, currentTick, currentTempo, barOccurence) {
35171
35857
  // time signature
35172
35858
  if (!previousMasterBar ||
35173
35859
  previousMasterBar.timeSignatureDenominator !== masterBar.timeSignatureDenominator ||
@@ -35194,6 +35880,15 @@ class MidiFileGenerator {
35194
35880
  else {
35195
35881
  masterBarLookup.tempoChanges.push(new MasterBarTickLookupTempoChange(currentTick, currentTempo));
35196
35882
  }
35883
+ const syncPoints = masterBar.syncPoints;
35884
+ if (syncPoints) {
35885
+ for (const syncPoint of syncPoints) {
35886
+ if (syncPoint.syncPointValue.barOccurence === barOccurence) {
35887
+ const tick = currentTick + masterBarDuration * syncPoint.ratioPosition;
35888
+ this.syncPoints.push(new BackingTrackSyncPoint(tick, syncPoint.syncPointValue));
35889
+ }
35890
+ }
35891
+ }
35197
35892
  masterBarLookup.masterBar = masterBar;
35198
35893
  masterBarLookup.start = currentTick;
35199
35894
  masterBarLookup.end = masterBarLookup.start + masterBarDuration;
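A worked example of the sync point tick computation added above, assuming alphaTab's usual resolution of 960 MIDI ticks per quarter note (an assumption, not stated in this diff):

    const currentTick = 15360;      // start tick of the master bar
    const masterBarDuration = 3840; // one 4/4 bar = 4 * 960 ticks
    const ratioPosition = 0.5;      // Automation.ratioPosition of the sync point
    const syncPointTick = currentTick + masterBarDuration * ratioPosition; // 17280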
@@ -37391,6 +38086,213 @@ class ActiveBeatsChangedEventArgs {
37391
38086
  }
37392
38087
  }
37393
38088
 
38089
+ class BackingTrackAudioSynthesizer {
38090
+ constructor() {
38091
+ this._midiEventQueue = new Queue();
38092
+ this.masterVolume = 1;
38093
+ this.metronomeVolume = 0;
38094
+ this.outSampleRate = 44100;
38095
+ this.currentTempo = 120;
38096
+ this.timeSignatureNumerator = 4;
38097
+ this.timeSignatureDenominator = 4;
38098
+ this.activeVoiceCount = 0;
38099
+ }
38100
+ noteOffAll(_immediate) {
38101
+ }
38102
+ resetSoft() {
38103
+ }
38104
+ resetPresets() {
38105
+ }
38106
+ loadPresets(_hydra, _instrumentPrograms, _percussionKeys, _append) {
38107
+ }
38108
+ setupMetronomeChannel(_metronomeVolume) {
38109
+ }
38110
+ synthesizeSilent(_sampleCount) {
38111
+ this.fakeSynthesize();
38112
+ }
38113
+ processMidiMessage(e) {
38114
+ }
38115
+ dispatchEvent(synthEvent) {
38116
+ this._midiEventQueue.enqueue(synthEvent);
38117
+ }
38118
+ synthesize(_buffer, _bufferPos, _sampleCount) {
38119
+ return this.fakeSynthesize();
38120
+ }
38121
+ fakeSynthesize() {
38122
+ const processedEvents = [];
38123
+ while (!this._midiEventQueue.isEmpty) {
38124
+ const m = this._midiEventQueue.dequeue();
38125
+ if (m.isMetronome && this.metronomeVolume > 0) ;
38126
+ else if (m.event) {
38127
+ this.processMidiMessage(m.event);
38128
+ }
38129
+ processedEvents.push(m);
38130
+ }
38131
+ return processedEvents;
38132
+ }
38133
+ applyTranspositionPitches(transpositionPitches) {
38134
+ }
38135
+ setChannelTranspositionPitch(channel, semitones) {
38136
+ }
38137
+ channelSetMute(channel, mute) {
38138
+ }
38139
+ channelSetSolo(channel, solo) {
38140
+ }
38141
+ resetChannelStates() {
38142
+ }
38143
+ channelSetMixVolume(channel, volume) {
38144
+ }
38145
+ hasSamplesForProgram(program) {
38146
+ return true;
38147
+ }
38148
+ hasSamplesForPercussion(key) {
38149
+ return true;
38150
+ }
38151
+ }
38152
+ class BackingTrackPlayer extends AlphaSynthBase {
38153
+ constructor(backingTrackOutput, bufferTimeInMilliseconds) {
38154
+ super(backingTrackOutput, new BackingTrackAudioSynthesizer(), bufferTimeInMilliseconds);
38155
+ this.synthesizer.output = backingTrackOutput;
38156
+ this._backingTrackOutput = backingTrackOutput;
38157
+ backingTrackOutput.timeUpdate.on(timePosition => {
38158
+ const alphaTabTimePosition = this.sequencer.mainTimePositionFromBackingTrack(timePosition, backingTrackOutput.backingTrackDuration);
38159
+ this.sequencer.fillMidiEventQueueToEndTime(alphaTabTimePosition);
38160
+ this.synthesizer.fakeSynthesize();
38161
+ this.updateTimePosition(alphaTabTimePosition, false);
38162
+ this.checkForFinish();
38163
+ });
38164
+ }
38165
+ updateMasterVolume(value) {
38166
+ super.updateMasterVolume(value);
38167
+ this._backingTrackOutput.masterVolume = value;
38168
+ }
38169
+ updatePlaybackSpeed(value) {
38170
+ super.updatePlaybackSpeed(value);
38171
+ this._backingTrackOutput.playbackRate = value;
38172
+ }
38173
+ onSampleRequest() {
38174
+ }
38175
+ loadMidiFile(midi) {
38176
+ if (!this.isSoundFontLoaded) {
38177
+ this.isSoundFontLoaded = true;
38178
+ this.soundFontLoaded.trigger();
38179
+ }
38180
+ super.loadMidiFile(midi);
38181
+ }
38182
+ updateTimePosition(timePosition, isSeek) {
38183
+ super.updateTimePosition(timePosition, isSeek);
38184
+ if (isSeek) {
38185
+ this._backingTrackOutput.seekTo(this.sequencer.mainTimePositionToBackingTrack(timePosition, this._backingTrackOutput.backingTrackDuration));
38186
+ }
38187
+ }
38188
+ loadBackingTrack(score, syncPoints) {
38189
+ const backingTrackInfo = score.backingTrack;
38190
+ if (backingTrackInfo) {
38191
+ this._backingTrackOutput.loadBackingTrack(backingTrackInfo);
38192
+ this.sequencer.mainUpdateSyncPoints(syncPoints);
38193
+ this.timePosition = 0;
38194
+ }
38195
+ }
38196
+ }
38197
+
38198
+ class ExternalMediaSynthOutput {
38199
+ constructor() {
38200
+ // fake rate
38201
+ this.sampleRate = 44100;
38202
+ this._padding = 0;
38203
+ this._seekPosition = 0;
38204
+ this.ready = new EventEmitter();
38205
+ this.samplesPlayed = new EventEmitterOfT();
38206
+ this.timeUpdate = new EventEmitterOfT();
38207
+ this.sampleRequest = new EventEmitter();
38208
+ }
38209
+ get handler() {
38210
+ return this._handler;
38211
+ }
38212
+ set handler(value) {
38213
+ if (value) {
38214
+ if (this._seekPosition !== 0) {
38215
+ value.seekTo(this._seekPosition);
38216
+ this._seekPosition = 0;
38217
+ }
38218
+ }
38219
+ this._handler = value;
38220
+ }
38221
+ get backingTrackDuration() {
38222
+ return this.handler?.backingTrackDuration ?? 0;
38223
+ }
38224
+ get playbackRate() {
38225
+ return this.handler?.playbackRate ?? 1;
38226
+ }
38227
+ set playbackRate(value) {
38228
+ const handler = this.handler;
38229
+ if (handler) {
38230
+ handler.playbackRate = value;
38231
+ }
38232
+ }
38233
+ get masterVolume() {
38234
+ return this.handler?.masterVolume ?? 1;
38235
+ }
38236
+ set masterVolume(value) {
38237
+ const handler = this.handler;
38238
+ if (handler) {
38239
+ handler.masterVolume = value;
38240
+ }
38241
+ }
38242
+ seekTo(time) {
38243
+ const handler = this.handler;
38244
+ if (handler) {
38245
+ handler.seekTo(time - this._padding);
38246
+ }
38247
+ else {
38248
+ this._seekPosition = time - this._padding;
38249
+ }
38250
+ }
38251
+ loadBackingTrack(backingTrack) {
38252
+ this._padding = backingTrack.padding;
38253
+ }
38254
+ open(_bufferTimeInMilliseconds) {
38255
+ this.ready.trigger();
38256
+ }
38257
+ updatePosition(currentTime) {
38258
+ this.timeUpdate.trigger(currentTime + this._padding);
38259
+ }
38260
+ play() {
38261
+ this.handler?.play();
38262
+ }
38263
+ destroy() {
38264
+ }
38265
+ pause() {
38266
+ this.handler?.pause();
38267
+ }
38268
+ addSamples(_samples) {
38269
+ }
38270
+ resetSamples() {
38271
+ }
38272
+ activate() {
38273
+ }
38274
+ async enumerateOutputDevices() {
38275
+ const empty = [];
38276
+ return empty;
38277
+ }
38278
+ async setOutputDevice(_device) {
38279
+ }
38280
+ async getOutputDevice() {
38281
+ return null;
38282
+ }
38283
+ }
38284
+ class ExternalMediaPlayer extends BackingTrackPlayer {
38285
+ get handler() {
38286
+ return this.output.handler;
38287
+ }
38288
+ set handler(value) {
38289
+ this.output.handler = value;
38290
+ }
38291
+ constructor(bufferTimeInMilliseconds) {
38292
+ super(new ExternalMediaSynthOutput(), bufferTimeInMilliseconds);
38293
+ }
38294
+ }
38295
+
37394
38296
  class SelectionInfo {
37395
38297
  constructor(beat) {
37396
38298
  this.bounds = null;
@@ -37404,6 +38306,12 @@ class SelectionInfo {
37404
38306
  * @csharp_public
37405
38307
  */
37406
38308
  class AlphaTabApiBase {
38309
+ /**
38310
+ * The actual player mode which is currently active (e.g. allows determining whether a backing track or the synthesizer is used for playback).
38311
+ */
38312
+ get actualPlayerMode() {
38313
+ return this._actualPlayerMode;
38314
+ }
37407
38315
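Minimal usage sketch for the new property, assuming an existing AlphaTabApi instance named api:

    if (api.actualPlayerMode === alphaTab.PlayerMode.EnabledBackingTrack) {
        // the embedded backing track drives playback instead of the midi synthesizer
    }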
  /**
37408
38316
  * The score holding all information about the song being rendered
37409
38317
  * @category Properties - Core
@@ -37473,10 +38381,8 @@ class AlphaTabApiBase {
37473
38381
  this._isDestroyed = false;
37474
38382
  this._score = null;
37475
38383
  this._tracks = [];
38384
+ this._actualPlayerMode = PlayerMode.Disabled;
37476
38385
  this._tickCache = null;
37477
- /**
37478
- * Gets the alphaSynth player used for playback. This is the low-level API to the Midi synthesizer used for playback.
37479
- */
37480
38386
  /**
37481
38387
  * The alphaSynth player used for playback.
37482
38388
  * @remarks
@@ -38513,6 +39419,10 @@ class AlphaTabApiBase {
38513
39419
  this.container = uiFacade.rootContainer;
38514
39420
  uiFacade.initialize(this, settings);
38515
39421
  Logger.logLevel = this.settings.core.logLevel;
39422
+ // backwards compatibility: remove in 2.0
39423
+ if (this.settings.player.playerMode === PlayerMode.Disabled && this.settings.player.enablePlayer) {
39424
+ this.settings.player.playerMode = PlayerMode.EnabledAutomatic;
39425
+ }
38516
39426
  Environment.printEnvironmentInfo(false);
38517
39427
  this.canvasElement = uiFacade.createCanvasElement();
38518
39428
  this.container.appendChild(this.canvasElement);
@@ -38556,7 +39466,7 @@ class AlphaTabApiBase {
38556
39466
  this.appendRenderResult(null); // marks last element
38557
39467
  });
38558
39468
  this.renderer.error.on(this.onError.bind(this));
38559
- if (this.settings.player.enablePlayer) {
39469
+ if (this.settings.player.playerMode !== PlayerMode.Disabled) {
38560
39470
  this.setupPlayer();
38561
39471
  }
38562
39472
  this.setupClickHandling();
@@ -38648,10 +39558,9 @@ class AlphaTabApiBase {
38648
39558
  }
38649
39559
  this.renderer.updateSettings(this.settings);
38650
39560
  // enable/disable player if needed
38651
- if (this.settings.player.enablePlayer) {
38652
- this.setupPlayer();
38653
- if (score) {
38654
- this.player?.applyTranspositionPitches(MidiFileGenerator.buildTranspositionPitches(score, this.settings));
39561
+ if (this.settings.player.playerMode !== PlayerMode.Disabled) {
39562
+ if (this.setupPlayer() && score) {
39563
+ this.loadMidiForScore();
38655
39564
  }
38656
39565
  }
38657
39566
  else {
@@ -39583,13 +40492,51 @@ class AlphaTabApiBase {
39583
40492
  this.destroyCursors();
39584
40493
  }
39585
40494
  setupPlayer() {
40495
+ let mode = this.settings.player.playerMode;
40496
+ if (mode === PlayerMode.EnabledAutomatic) {
40497
+ const score = this.score;
40498
+ if (!score) {
40499
+ return false;
40500
+ }
40501
+ if (score?.backingTrack?.rawAudioFile) {
40502
+ mode = PlayerMode.EnabledBackingTrack;
40503
+ }
40504
+ else {
40505
+ mode = PlayerMode.EnabledSynthesizer;
40506
+ }
40507
+ }
40508
+ if (mode !== this._actualPlayerMode) {
40509
+ this.destroyPlayer();
40510
+ }
39586
40511
  this.updateCursors();
39587
- if (this.player) {
39588
- return;
40512
+ this._actualPlayerMode = mode;
40513
+ switch (mode) {
40514
+ case PlayerMode.Disabled:
40515
+ this.destroyPlayer();
40516
+ return false;
40517
+ case PlayerMode.EnabledSynthesizer:
40518
+ if (this.player) {
40519
+ return true;
40520
+ }
40521
+ // new player needed
40522
+ this.player = this.uiFacade.createWorkerPlayer();
40523
+ break;
40524
+ case PlayerMode.EnabledBackingTrack:
40525
+ if (this.player) {
40526
+ return true;
40527
+ }
40528
+ // new player needed
40529
+ this.player = this.uiFacade.createBackingTrackPlayer();
40530
+ break;
40531
+ case PlayerMode.EnabledExternalMedia:
40532
+ if (this.player) {
40533
+ return true;
40534
+ }
40535
+ this.player = new ExternalMediaPlayer(this.settings.player.bufferTimeInMilliseconds);
40536
+ break;
39589
40537
  }
39590
- this.player = this.uiFacade.createWorkerPlayer();
39591
40538
  if (!this.player) {
39592
- return;
40539
+ return false;
39593
40540
  }
39594
40541
  this.player.ready.on(() => {
39595
40542
  this.loadMidiForScore();
@@ -39618,6 +40565,7 @@ class AlphaTabApiBase {
39618
40565
  this.player.playbackRangeChanged.on(this.onPlaybackRangeChanged.bind(this));
39619
40566
  this.player.finished.on(this.onPlayerFinished.bind(this));
39620
40567
  this.setupPlayerEvents();
40568
+ return false;
39621
40569
  }
39622
40570
  loadMidiForScore() {
39623
40571
  if (!this.score) {
@@ -39639,6 +40587,7 @@ class AlphaTabApiBase {
39639
40587
  const player = this.player;
39640
40588
  if (player) {
39641
40589
  player.loadMidiFile(midiFile);
40590
+ player.loadBackingTrack(score, generator.syncPoints);
39642
40591
  player.applyTranspositionPitches(generator.transpositionPitches);
39643
40592
  }
39644
40593
  }
@@ -40055,7 +41004,7 @@ class AlphaTabApiBase {
40055
41004
  this._selectionWrapper = cursors.selectionWrapper;
40056
41005
  }
40057
41006
  if (this._currentBeat !== null) {
40058
- this.cursorUpdateBeat(this._currentBeat, false, this._previousTick > 10, true);
41007
+ this.cursorUpdateBeat(this._currentBeat, false, this._previousTick > 10, 1, true);
40059
41008
  }
40060
41009
  }
40061
41010
  else if (!this.settings.player.enableCursor && this._cursorWrapper) {
@@ -40070,13 +41019,14 @@ class AlphaTabApiBase {
40070
41019
  // we need to update our position caches if we render a tablature
40071
41020
  this.renderer.postRenderFinished.on(() => {
40072
41021
  this._currentBeat = null;
40073
- this.cursorUpdateTick(this._previousTick, false, this._previousTick > 10);
41022
+ this.cursorUpdateTick(this._previousTick, false, 1, this._previousTick > 10);
40074
41023
  });
40075
41024
  if (this.player) {
40076
41025
  this.player.positionChanged.on(e => {
40077
41026
  this._previousTick = e.currentTick;
40078
41027
  this.uiFacade.beginInvoke(() => {
40079
- this.cursorUpdateTick(e.currentTick, false, false, e.isSeek);
41028
+ const cursorSpeed = e.modifiedTempo / e.originalTempo;
41029
+ this.cursorUpdateTick(e.currentTick, false, cursorSpeed, false, e.isSeek);
40080
41030
  });
40081
41031
  });
40082
41032
  this.player.stateChanged.on(e => {
@@ -40097,14 +41047,15 @@ class AlphaTabApiBase {
40097
41047
  * @param stop
40098
41048
  * @param shouldScroll whether we should scroll to the bar (if scrolling is active)
40099
41049
  */
40100
- cursorUpdateTick(tick, stop, shouldScroll = false, forceUpdate = false) {
41050
+ cursorUpdateTick(tick, stop, cursorSpeed, shouldScroll = false, forceUpdate = false) {
41051
+ this._previousTick = tick;
40101
41052
  const cache = this._tickCache;
40102
41053
  if (cache) {
40103
41054
  const tracks = this._trackIndexLookup;
40104
41055
  if (tracks != null && tracks.size > 0) {
40105
41056
  const beat = cache.findBeat(tracks, tick, this._currentBeat);
40106
41057
  if (beat) {
40107
- this.cursorUpdateBeat(beat, stop, shouldScroll, forceUpdate || this.playerState === PlayerState.Paused);
41058
+ this.cursorUpdateBeat(beat, stop, shouldScroll, cursorSpeed, forceUpdate || this.playerState === PlayerState.Paused);
40108
41059
  }
40109
41060
  }
40110
41061
  }
@@ -40112,7 +41063,7 @@ class AlphaTabApiBase {
40112
41063
  /**
40113
41064
  * updates the cursors to highlight the specified beat
40114
41065
  */
40115
- cursorUpdateBeat(lookupResult, stop, shouldScroll, forceUpdate = false) {
41066
+ cursorUpdateBeat(lookupResult, stop, shouldScroll, cursorSpeed, forceUpdate = false) {
40116
41067
  const beat = lookupResult.beat;
40117
41068
  const nextBeat = lookupResult.nextBeat?.beat ?? null;
40118
41069
  const duration = lookupResult.duration;
@@ -40144,7 +41095,7 @@ class AlphaTabApiBase {
40144
41095
  this._previousCursorCache = cache;
40145
41096
  this._previousStateForCursor = this._playerState;
40146
41097
  this.uiFacade.beginInvoke(() => {
40147
- this.internalCursorUpdateBeat(beat, nextBeat, duration, stop, beatsToHighlight, cache, beatBoundings, shouldScroll, lookupResult.cursorMode);
41098
+ this.internalCursorUpdateBeat(beat, nextBeat, duration, stop, beatsToHighlight, cache, beatBoundings, shouldScroll, lookupResult.cursorMode, cursorSpeed);
40148
41099
  });
40149
41100
  }
40150
41101
  /**
@@ -40209,7 +41160,7 @@ class AlphaTabApiBase {
40209
41160
  }
40210
41161
  }
40211
41162
  }
40212
- internalCursorUpdateBeat(beat, nextBeat, duration, stop, beatsToHighlight, cache, beatBoundings, shouldScroll, cursorMode) {
41163
+ internalCursorUpdateBeat(beat, nextBeat, duration, stop, beatsToHighlight, cache, beatBoundings, shouldScroll, cursorMode, cursorSpeed) {
40213
41164
  const barCursor = this._barCursor;
40214
41165
  const beatCursor = this._beatCursor;
40215
41166
  const barBoundings = beatBoundings.barBounds.masterBarBounds;
@@ -40218,12 +41169,29 @@ class AlphaTabApiBase {
40218
41169
  if (barCursor) {
40219
41170
  barCursor.setBounds(barBounds.x, barBounds.y, barBounds.w, barBounds.h);
40220
41171
  }
41172
+ let nextBeatX = barBoundings.visualBounds.x + barBoundings.visualBounds.w;
41173
+ // get position of next beat on same system
41174
+ if (nextBeat && cursorMode === MidiTickLookupFindBeatResultCursorMode.ToNextBext) {
41175
+ // if we are moving within the same bar or to the next bar
41176
+ // transition to the next beat, otherwise transition to the end of the bar.
41177
+ const nextBeatBoundings = cache.findBeat(nextBeat);
41178
+ if (nextBeatBoundings &&
41179
+ nextBeatBoundings.barBounds.masterBarBounds.staffSystemBounds === barBoundings.staffSystemBounds) {
41180
+ nextBeatX = nextBeatBoundings.onNotesX;
41181
+ }
41182
+ }
41183
+ let startBeatX = beatBoundings.onNotesX;
40221
41184
  if (beatCursor) {
40222
- // move beat to start position immediately
41185
+ // relative positioning of the cursor
40223
41186
  if (this.settings.player.enableAnimatedBeatCursor) {
40224
- beatCursor.stopAnimation();
41187
+ const animationWidth = nextBeatX - beatBoundings.onNotesX;
41188
+ const relativePosition = this._previousTick - this._currentBeat.start;
41189
+ const ratioPosition = relativePosition / this._currentBeat.tickDuration;
41190
+ startBeatX = beatBoundings.onNotesX + animationWidth * ratioPosition;
41191
+ duration -= duration * ratioPosition;
41192
+ beatCursor.transitionToX(0, startBeatX);
40225
41193
  }
40226
- beatCursor.setBounds(beatBoundings.onNotesX, barBounds.y, 1, barBounds.h);
41194
+ beatCursor.setBounds(startBeatX, barBounds.y, 1, barBounds.h);
40227
41195
  }
40228
41196
  // if playing, animate the cursor to the next beat
40229
41197
  if (this.settings.player.enableElementHighlighting) {
@@ -40243,22 +41211,11 @@ class AlphaTabApiBase {
40243
41211
  shouldNotifyBeatChange = true;
40244
41212
  }
40245
41213
  if (this.settings.player.enableAnimatedBeatCursor && beatCursor) {
40246
- let nextBeatX = barBoundings.visualBounds.x + barBoundings.visualBounds.w;
40247
- // get position of next beat on same system
40248
- if (nextBeat && cursorMode === MidiTickLookupFindBeatResultCursorMode.ToNextBext) {
40249
- // if we are moving within the same bar or to the next bar
40250
- // transition to the next beat, otherwise transition to the end of the bar.
40251
- const nextBeatBoundings = cache.findBeat(nextBeat);
40252
- if (nextBeatBoundings &&
40253
- nextBeatBoundings.barBounds.masterBarBounds.staffSystemBounds === barBoundings.staffSystemBounds) {
40254
- nextBeatX = nextBeatBoundings.onNotesX;
40255
- }
40256
- }
40257
41214
  if (isPlayingUpdate) {
40258
41215
  // we need to put the transition to an own animation frame
40259
41216
  // otherwise the stop animation above is not applied.
40260
41217
  this.uiFacade.beginInvoke(() => {
40261
- beatCursor.transitionToX(duration / this.playbackSpeed, nextBeatX);
41218
+ beatCursor.transitionToX(duration / cursorSpeed, nextBeatX);
40262
41219
  });
40263
41220
  }
40264
41221
  }
@@ -40289,7 +41246,7 @@ class AlphaTabApiBase {
40289
41246
  if (this._isDestroyed) {
40290
41247
  return;
40291
41248
  }
40292
- if (this.settings.player.enablePlayer &&
41249
+ if (this.settings.player.playerMode !== PlayerMode.Disabled &&
40293
41250
  this.settings.player.enableCursor &&
40294
41251
  this.settings.player.enableUserInteraction) {
40295
41252
  this._selectionStart = new SelectionInfo(beat);
@@ -40331,7 +41288,7 @@ class AlphaTabApiBase {
40331
41288
  if (this._isDestroyed) {
40332
41289
  return;
40333
41290
  }
40334
- if (this.settings.player.enablePlayer &&
41291
+ if (this.settings.player.playerMode !== PlayerMode.Disabled &&
40335
41292
  this.settings.player.enableCursor &&
40336
41293
  this.settings.player.enableUserInteraction) {
40337
41294
  if (this._selectionEnd) {
@@ -40352,7 +41309,7 @@ class AlphaTabApiBase {
40352
41309
  // move to selection start
40353
41310
  this._currentBeat = null; // reset current beat so it is updating the cursor
40354
41311
  if (this._playerState === PlayerState.Paused) {
40355
- this.cursorUpdateTick(this._tickCache.getBeatStart(this._selectionStart.beat), false);
41312
+ this.cursorUpdateTick(this._tickCache.getBeatStart(this._selectionStart.beat), false, 1);
40356
41313
  }
40357
41314
  this.tickPosition = realMasterBarStart + this._selectionStart.beat.playbackStart;
40358
41315
  // set playback range
@@ -40464,7 +41421,7 @@ class AlphaTabApiBase {
40464
41421
  });
40465
41422
  this.renderer.postRenderFinished.on(() => {
40466
41423
  if (!this._selectionStart ||
40467
- !this.settings.player.enablePlayer ||
41424
+ this.settings.player.playerMode === PlayerMode.Disabled ||
40468
41425
  !this.settings.player.enableCursor ||
40469
41426
  !this.settings.player.enableUserInteraction) {
40470
41427
  return;
@@ -40542,6 +41499,9 @@ class AlphaTabApiBase {
40542
41499
  }
40543
41500
  this.scoreLoaded.trigger(score);
40544
41501
  this.uiFacade.triggerEvent(this.container, 'scoreLoaded', score);
41502
+ if (this.setupPlayer()) {
41503
+ this.loadMidiForScore();
41504
+ }
40545
41505
  }
40546
41506
  onResize(e) {
40547
41507
  if (this._isDestroyed) {
@@ -41282,52 +42242,14 @@ class AlphaSynthWebAudioSynthOutputDevice {
41282
42242
  }
41283
42243
  }
41284
42244
  /**
42245
+ * Shared Web Audio helpers used by the different synth outputs.
41285
42246
  * @target web
41286
42247
  */
41287
- class AlphaSynthWebAudioOutputBase {
41288
- constructor() {
41289
- this._context = null;
41290
- this._buffer = null;
41291
- this._source = null;
41292
- this.ready = new EventEmitter();
41293
- this.samplesPlayed = new EventEmitterOfT();
41294
- this.sampleRequest = new EventEmitter();
41295
- this._knownDevices = [];
41296
- }
41297
- get sampleRate() {
41298
- return this._context ? this._context.sampleRate : AlphaSynthWebAudioOutputBase.PreferredSampleRate;
42248
+ class WebAudioHelper {
42249
+ static findKnownDevice(sinkId) {
42250
+ return WebAudioHelper._knownDevices.find(d => d.deviceId === sinkId);
41299
42251
  }
41300
- activate(resumedCallback) {
41301
- if (!this._context) {
41302
- this._context = this.createAudioContext();
41303
- }
41304
- if (this._context.state === 'suspended' || this._context.state === 'interrupted') {
41305
- Logger.debug('WebAudio', 'Audio Context is suspended, trying resume');
41306
- this._context.resume().then(() => {
41307
- Logger.debug('WebAudio', `Audio Context resume success: state=${this._context?.state}, sampleRate:${this._context?.sampleRate}`);
41308
- if (resumedCallback) {
41309
- resumedCallback();
41310
- }
41311
- }, reason => {
41312
- Logger.warning('WebAudio', `Audio Context resume failed: state=${this._context?.state}, sampleRate:${this._context?.sampleRate}, reason=${reason}`);
41313
- });
41314
- }
41315
- }
41316
- patchIosSampleRate() {
41317
- const ua = navigator.userAgent;
41318
- if (ua.indexOf('iPhone') !== -1 || ua.indexOf('iPad') !== -1) {
41319
- const context = this.createAudioContext();
41320
- const buffer = context.createBuffer(1, 1, AlphaSynthWebAudioOutputBase.PreferredSampleRate);
41321
- const dummy = context.createBufferSource();
41322
- dummy.buffer = buffer;
41323
- dummy.connect(context.destination);
41324
- dummy.start(0);
41325
- dummy.disconnect(0);
41326
- // tslint:disable-next-line: no-floating-promises
41327
- context.close();
41328
- }
41329
- }
41330
- createAudioContext() {
42252
+ static createAudioContext() {
41331
42253
  if ('AudioContext' in Environment.globalThis) {
41332
42254
  return new AudioContext();
41333
42255
  }
@@ -41336,73 +42258,18 @@ class AlphaSynthWebAudioOutputBase {
41336
42258
  }
41337
42259
  throw new AlphaTabError(AlphaTabErrorType.General, 'AudioContext not found');
41338
42260
  }
41339
- open(bufferTimeInMilliseconds) {
41340
- this.patchIosSampleRate();
41341
- this._context = this.createAudioContext();
41342
- const ctx = this._context;
41343
- if (ctx.state === 'suspended') {
41344
- this.registerResumeHandler();
41345
- }
41346
- }
41347
- registerResumeHandler() {
41348
- this._resumeHandler = (() => {
41349
- this.activate(() => {
41350
- this.unregisterResumeHandler();
41351
- });
41352
- }).bind(this);
41353
- document.body.addEventListener('touchend', this._resumeHandler, false);
41354
- document.body.addEventListener('click', this._resumeHandler, false);
41355
- }
41356
- unregisterResumeHandler() {
41357
- const resumeHandler = this._resumeHandler;
41358
- if (resumeHandler) {
41359
- document.body.removeEventListener('touchend', resumeHandler, false);
41360
- document.body.removeEventListener('click', resumeHandler, false);
41361
- }
41362
- }
41363
- play() {
41364
- const ctx = this._context;
41365
- this.activate();
41366
- // create an empty buffer source (silence)
41367
- this._buffer = ctx.createBuffer(2, AlphaSynthWebAudioOutputBase.BufferSize, ctx.sampleRate);
41368
- this._source = ctx.createBufferSource();
41369
- this._source.buffer = this._buffer;
41370
- this._source.loop = true;
41371
- }
41372
- pause() {
41373
- if (this._source) {
41374
- this._source.stop(0);
41375
- this._source.disconnect();
41376
- }
41377
- this._source = null;
41378
- }
41379
- destroy() {
41380
- this.pause();
41381
- this._context?.close();
41382
- this._context = null;
41383
- this.unregisterResumeHandler();
41384
- }
41385
- onSamplesPlayed(numberOfSamples) {
41386
- this.samplesPlayed.trigger(numberOfSamples);
41387
- }
41388
- onSampleRequest() {
41389
- this.sampleRequest.trigger();
41390
- }
41391
- onReady() {
41392
- this.ready.trigger();
41393
- }
41394
- async checkSinkIdSupport() {
42261
+ static async checkSinkIdSupport() {
41395
42262
  // https://caniuse.com/mdn-api_audiocontext_sinkid
41396
- const context = this._context ?? this.createAudioContext();
42263
+ const context = WebAudioHelper.createAudioContext();
41397
42264
  if (!('setSinkId' in context)) {
41398
42265
  Logger.warning('WebAudio', 'Browser does not support changing the output device');
41399
42266
  return false;
41400
42267
  }
41401
42268
  return true;
41402
42269
  }
41403
- async enumerateOutputDevices() {
42270
+ static async enumerateOutputDevices() {
41404
42271
  try {
41405
- if (!(await this.checkSinkIdSupport())) {
42272
+ if (!(await WebAudioHelper.checkSinkIdSupport())) {
41406
42273
  return [];
41407
42274
  }
41408
42275
  // Request permissions
@@ -41443,7 +42310,7 @@ class AlphaSynthWebAudioOutputBase {
41443
42310
  if (defaultDevice) {
41444
42311
  defaultDevice.isDefault = true;
41445
42312
  }
41446
- this._knownDevices = final;
42313
+ WebAudioHelper._knownDevices = final;
41447
42314
  return final;
41448
42315
  }
41449
42316
  catch (e) {
@@ -41451,8 +42318,113 @@ class AlphaSynthWebAudioOutputBase {
41451
42318
  return [];
41452
42319
  }
41453
42320
  }
42321
+ }
42322
+ WebAudioHelper._knownDevices = [];
42323
+ /**
42324
+ * @target web
42325
+ */
42326
+ class AlphaSynthWebAudioOutputBase {
42327
+ constructor() {
42328
+ this._context = null;
42329
+ this._buffer = null;
42330
+ this._source = null;
42331
+ this.ready = new EventEmitter();
42332
+ this.samplesPlayed = new EventEmitterOfT();
42333
+ this.sampleRequest = new EventEmitter();
42334
+ }
42335
+ get sampleRate() {
42336
+ return this._context ? this._context.sampleRate : AlphaSynthWebAudioOutputBase.PreferredSampleRate;
42337
+ }
42338
+ activate(resumedCallback) {
42339
+ if (!this._context) {
42340
+ this._context = WebAudioHelper.createAudioContext();
42341
+ }
42342
+ if (this._context.state === 'suspended' || this._context.state === 'interrupted') {
42343
+ Logger.debug('WebAudio', 'Audio Context is suspended, trying resume');
42344
+ this._context.resume().then(() => {
42345
+ Logger.debug('WebAudio', `Audio Context resume success: state=${this._context?.state}, sampleRate:${this._context?.sampleRate}`);
42346
+ if (resumedCallback) {
42347
+ resumedCallback();
42348
+ }
42349
+ }, reason => {
42350
+ Logger.warning('WebAudio', `Audio Context resume failed: state=${this._context?.state}, sampleRate:${this._context?.sampleRate}, reason=${reason}`);
42351
+ });
42352
+ }
42353
+ }
42354
+ patchIosSampleRate() {
42355
+ const ua = navigator.userAgent;
42356
+ if (ua.indexOf('iPhone') !== -1 || ua.indexOf('iPad') !== -1) {
42357
+ const context = WebAudioHelper.createAudioContext();
42358
+ const buffer = context.createBuffer(1, 1, AlphaSynthWebAudioOutputBase.PreferredSampleRate);
42359
+ const dummy = context.createBufferSource();
42360
+ dummy.buffer = buffer;
42361
+ dummy.connect(context.destination);
42362
+ dummy.start(0);
42363
+ dummy.disconnect(0);
42364
+ // tslint:disable-next-line: no-floating-promises
42365
+ context.close();
42366
+ }
42367
+ }
42368
+ open(bufferTimeInMilliseconds) {
42369
+ this.patchIosSampleRate();
42370
+ this._context = WebAudioHelper.createAudioContext();
42371
+ const ctx = this._context;
42372
+ if (ctx.state === 'suspended') {
42373
+ this.registerResumeHandler();
42374
+ }
42375
+ }
42376
+ registerResumeHandler() {
42377
+ this._resumeHandler = (() => {
42378
+ this.activate(() => {
42379
+ this.unregisterResumeHandler();
42380
+ });
42381
+ }).bind(this);
42382
+ document.body.addEventListener('touchend', this._resumeHandler, false);
42383
+ document.body.addEventListener('click', this._resumeHandler, false);
42384
+ }
42385
+ unregisterResumeHandler() {
42386
+ const resumeHandler = this._resumeHandler;
42387
+ if (resumeHandler) {
42388
+ document.body.removeEventListener('touchend', resumeHandler, false);
42389
+ document.body.removeEventListener('click', resumeHandler, false);
42390
+ }
42391
+ }
42392
+ play() {
42393
+ const ctx = this._context;
42394
+ this.activate();
42395
+ // create an empty buffer source (silence)
42396
+ this._buffer = ctx.createBuffer(2, AlphaSynthWebAudioOutputBase.BufferSize, ctx.sampleRate);
42397
+ this._source = ctx.createBufferSource();
42398
+ this._source.buffer = this._buffer;
42399
+ this._source.loop = true;
42400
+ }
42401
+ pause() {
42402
+ if (this._source) {
42403
+ this._source.stop(0);
42404
+ this._source.disconnect();
42405
+ }
42406
+ this._source = null;
42407
+ }
42408
+ destroy() {
42409
+ this.pause();
42410
+ this._context?.close();
42411
+ this._context = null;
42412
+ this.unregisterResumeHandler();
42413
+ }
42414
+ onSamplesPlayed(numberOfSamples) {
42415
+ this.samplesPlayed.trigger(numberOfSamples);
42416
+ }
42417
+ onSampleRequest() {
42418
+ this.sampleRequest.trigger();
42419
+ }
42420
+ onReady() {
42421
+ this.ready.trigger();
42422
+ }
42423
+ enumerateOutputDevices() {
42424
+ return WebAudioHelper.enumerateOutputDevices();
42425
+ }
41454
42426
  async setOutputDevice(device) {
41455
- if (!(await this.checkSinkIdSupport())) {
42427
+ if (!(await WebAudioHelper.checkSinkIdSupport())) {
41456
42428
  return;
41457
42429
  }
41458
42430
  // https://developer.mozilla.org/en-US/docs/Web/API/AudioContext/setSinkId
@@ -41464,7 +42436,7 @@ class AlphaSynthWebAudioOutputBase {
41464
42436
  }
41465
42437
  }
41466
42438
  async getOutputDevice() {
41467
- if (!(await this.checkSinkIdSupport())) {
42439
+ if (!(await WebAudioHelper.checkSinkIdSupport())) {
41468
42440
  return null;
41469
42441
  }
41470
42442
  // https://developer.mozilla.org/en-US/docs/Web/API/AudioContext/sinkId
@@ -41473,7 +42445,7 @@ class AlphaSynthWebAudioOutputBase {
41473
42445
  return null;
41474
42446
  }
41475
42447
  // fast path -> cached devices list
41476
- let device = this._knownDevices.find(d => d.deviceId === sinkId);
42448
+ let device = WebAudioHelper.findKnownDevice(sinkId);
41477
42449
  if (device) {
41478
42450
  return device;
41479
42451
  }
@@ -41921,7 +42893,7 @@ class AlphaSynthWebWorkerApi {
41921
42893
  case 'alphaSynth.positionChanged':
41922
42894
  this._timePosition = data.currentTime;
41923
42895
  this._tickPosition = data.currentTick;
41924
- this.positionChanged.trigger(new PositionChangedEventArgs(data.currentTime, data.endTime, data.currentTick, data.endTick, data.isSeek));
42896
+ this.positionChanged.trigger(new PositionChangedEventArgs(data.currentTime, data.endTime, data.currentTick, data.endTick, data.isSeek, data.originalTempo, data.modifiedTempo));
41925
42897
  break;
41926
42898
  case 'alphaSynth.midiEventsPlayed':
41927
42899
  this.midiEventsPlayed.trigger(new MidiEventsPlayedEventArgs(data.events.map(JsonConverter.jsObjectToMidiEvent)));
@@ -41945,7 +42917,7 @@ class AlphaSynthWebWorkerApi {
41945
42917
  break;
41946
42918
  case 'alphaSynth.midiLoaded':
41947
42919
  this.checkReadyForPlayback();
41948
- this.midiLoaded.trigger(new PositionChangedEventArgs(data.currentTime, data.endTime, data.currentTick, data.endTick, data.isSeek));
42920
+ this.midiLoaded.trigger(new PositionChangedEventArgs(data.currentTime, data.endTime, data.currentTick, data.endTick, data.isSeek, data.originalTempo, data.modifiedTempo));
41949
42921
  break;
41950
42922
  case 'alphaSynth.midiLoadFailed':
41951
42923
  this.checkReadyForPlayback();
@@ -41995,6 +42967,8 @@ class AlphaSynthWebWorkerApi {
41995
42967
  this._outputIsReady = true;
41996
42968
  this.checkReady();
41997
42969
  }
42970
+ loadBackingTrack(_score) {
42971
+ }
41998
42972
  }
41999
42973
 
42000
42974
  /**
@@ -42362,6 +43336,123 @@ class ScalableHtmlElementContainer extends HtmlElementContainer {
42362
43336
  }
42363
43337
  }
42364
43338
 
43339
+ /**
43340
+ * @target web
43341
+ */
43342
+ class AudioElementBackingTrackSynthOutput {
43343
+ constructor() {
43344
+ // fake rate
43345
+ this.sampleRate = 44100;
43346
+ this._padding = 0;
43347
+ this._updateInterval = 0;
43348
+ this.ready = new EventEmitter();
43349
+ this.samplesPlayed = new EventEmitterOfT();
43350
+ this.timeUpdate = new EventEmitterOfT();
43351
+ this.sampleRequest = new EventEmitter();
43352
+ }
43353
+ get backingTrackDuration() {
43354
+ const duration = this.audioElement.duration ?? 0;
43355
+ return Number.isFinite(duration) ? duration * 1000 : 0;
43356
+ }
43357
+ get playbackRate() {
43358
+ return this.audioElement.playbackRate;
43359
+ }
43360
+ set playbackRate(value) {
43361
+ this.audioElement.playbackRate = value;
43362
+ }
43363
+ get masterVolume() {
43364
+ return this.audioElement.volume;
43365
+ }
43366
+ set masterVolume(value) {
43367
+ this.audioElement.volume = value;
43368
+ }
43369
+ seekTo(time) {
43370
+ this.audioElement.currentTime = time / 1000 - this._padding;
43371
+ }
43372
+ loadBackingTrack(backingTrack) {
43373
+ if (this.audioElement?.src) {
43374
+ URL.revokeObjectURL(this.audioElement.src);
43375
+ }
43376
+ this._padding = backingTrack.padding / 1000;
43377
+ const blob = new Blob([backingTrack.rawAudioFile]);
43378
+ this.audioElement.src = URL.createObjectURL(blob);
43379
+ }
43380
+ open(_bufferTimeInMilliseconds) {
43381
+ const audioElement = document.createElement('audio');
43382
+ audioElement.style.display = 'none';
43383
+ document.body.appendChild(audioElement);
43384
+ audioElement.addEventListener('timeupdate', () => {
43385
+ this.updatePosition();
43386
+ });
43387
+ this.audioElement = audioElement;
43388
+ this.ready.trigger();
43389
+ }
43390
+ updatePosition() {
43391
+ const timePos = (this.audioElement.currentTime + this._padding) * 1000;
43392
+ this.timeUpdate.trigger(timePos);
43393
+ }
43394
+ play() {
43395
+ this.audioElement.play();
43396
+ this._updateInterval = window.setInterval(() => {
43397
+ this.updatePosition();
43398
+ }, 50);
43399
+ }
43400
+ destroy() {
43401
+ const audioElement = this.audioElement;
43402
+ if (audioElement) {
43403
+ document.body.removeChild(audioElement);
43404
+ }
43405
+ }
43406
+ pause() {
43407
+ this.audioElement.pause();
43408
+ window.clearInterval(this._updateInterval);
43409
+ }
43410
+ addSamples(_samples) {
43411
+ }
43412
+ resetSamples() {
43413
+ }
43414
+ activate() {
43415
+ }
43416
+ async enumerateOutputDevices() {
43417
+ return WebAudioHelper.enumerateOutputDevices();
43418
+ }
43419
+ async setOutputDevice(device) {
43420
+ if (!(await WebAudioHelper.checkSinkIdSupport())) {
43421
+ return;
43422
+ }
43423
+ // https://developer.mozilla.org/en-US/docs/Web/API/AudioContext/setSinkId
43424
+ if (!device) {
43425
+ await this.audioElement.setSinkId('');
43426
+ }
43427
+ else {
43428
+ await this.audioElement.setSinkId(device.deviceId);
43429
+ }
43430
+ }
43431
+ async getOutputDevice() {
43432
+ if (!(await WebAudioHelper.checkSinkIdSupport())) {
43433
+ return null;
43434
+ }
43435
+ // https://developer.mozilla.org/en-US/docs/Web/API/AudioContext/sinkId
43436
+ const sinkId = this.audioElement.sinkId;
43437
+ if (typeof sinkId !== 'string' || sinkId === '' || sinkId === 'default') {
43438
+ return null;
43439
+ }
43440
+ // fast path -> cached devices list
43441
+ let device = WebAudioHelper.findKnownDevice(sinkId);
43442
+ if (device) {
43443
+ return device;
43444
+ }
43445
+ // slow path -> enumerate devices
43446
+ const allDevices = await this.enumerateOutputDevices();
43447
+ device = allDevices.find(d => d.deviceId === sinkId);
43448
+ if (device) {
43449
+ return device;
43450
+ }
43451
+ Logger.warning('WebAudio', 'Could not find output device in device list', sinkId, allDevices);
43452
+ return null;
43453
+ }
43454
+ }
43455
+
42365
43456
  /**
42366
43457
  * @target web
42367
43458
  */
@@ -42998,6 +44089,9 @@ class BrowserUiFacade {
42998
44089
  window.requestAnimationFrame(step);
42999
44090
  }
43000
44091
  }
44092
+ createBackingTrackPlayer() {
44093
+ return new BackingTrackPlayer(new AudioElementBackingTrackSynthOutput(), this._api.settings.player.bufferTimeInMilliseconds);
44094
+ }
43001
44095
  }
43002
44096
 
43003
44097
  /**
@@ -43140,7 +44234,7 @@ class AlphaTabApi extends AlphaTabApiBase {
43140
44234
  settings.core.file = null;
43141
44235
  settings.core.tracks = null;
43142
44236
  settings.player.enableCursor = false;
43143
- settings.player.enablePlayer = false;
44237
+ settings.player.playerMode = PlayerMode.Disabled;
43144
44238
  settings.player.enableElementHighlighting = false;
43145
44239
  settings.player.enableUserInteraction = false;
43146
44240
  settings.player.soundFont = null;
@@ -57101,96 +58195,6 @@ class CapellaImporter extends ScoreImporter {
57101
58195
  }
57102
58196
  }
57103
58197
 
57104
- /**
57105
- * A very basic polyfill of the ResizeObserver which triggers
57106
- * a the callback on window resize for all registered targets.
57107
- * @target web
57108
- */
57109
- class ResizeObserverPolyfill {
57110
- constructor(callback) {
57111
- this._targets = new Set();
57112
- this._callback = callback;
57113
- window.addEventListener('resize', this.onWindowResize.bind(this), false);
57114
- }
57115
- observe(target) {
57116
- this._targets.add(target);
57117
- }
57118
- unobserve(target) {
57119
- this._targets.delete(target);
57120
- }
57121
- disconnect() {
57122
- this._targets.clear();
57123
- }
57124
- onWindowResize() {
57125
- const entries = [];
57126
- for (const t of this._targets) {
57127
- entries.push({
57128
- target: t,
57129
- // not used by alphaTab
57130
- contentRect: undefined,
57131
- borderBoxSize: undefined,
57132
- contentBoxSize: [],
57133
- devicePixelContentBoxSize: []
57134
- });
57135
- }
57136
- this._callback(entries, this);
57137
- }
57138
- }
57139
-
57140
- /**
57141
- * A polyfill of the InsersectionObserver
57142
- * @target web
57143
- */
57144
- class IntersectionObserverPolyfill {
57145
- constructor(callback) {
57146
- this._elements = [];
57147
- let timer = null;
57148
- const oldCheck = this.check.bind(this);
57149
- this.check = () => {
57150
- if (!timer) {
57151
- timer = setTimeout(() => {
57152
- oldCheck();
57153
- timer = null;
57154
- }, 100);
57155
- }
57156
- };
57157
- this._callback = callback;
57158
- window.addEventListener('resize', this.check, true);
57159
- document.addEventListener('scroll', this.check, true);
57160
- }
57161
- observe(target) {
57162
- if (this._elements.indexOf(target) >= 0) {
57163
- return;
57164
- }
57165
- this._elements.push(target);
57166
- this.check();
57167
- }
57168
- unobserve(target) {
57169
- this._elements = this._elements.filter(item => {
57170
- return item !== target;
57171
- });
57172
- }
57173
- check() {
57174
- const entries = [];
57175
- for (const element of this._elements) {
57176
- const rect = element.getBoundingClientRect();
57177
- const isVisible = rect.top + rect.height >= 0 &&
57178
- rect.top <= window.innerHeight &&
57179
- rect.left + rect.width >= 0 &&
57180
- rect.left <= window.innerWidth;
57181
- if (isVisible) {
57182
- entries.push({
57183
- target: element,
57184
- isIntersecting: true
57185
- });
57186
- }
57187
- }
57188
- if (entries.length) {
57189
- this._callback(entries, this);
57190
- }
57191
- }
57192
- }
57193
-
57194
58198
  /******************************************************************************
57195
58199
  Copyright (c) Microsoft Corporation.
57196
58200
 
@@ -59306,9 +60310,9 @@ class VersionInfo {
59306
60310
  print(`build date: ${VersionInfo.date}`);
59307
60311
  }
59308
60312
  }
59309
- VersionInfo.version = '1.6.0-alpha.1401';
59310
- VersionInfo.date = '2025-05-07T12:40:48.955Z';
59311
- VersionInfo.commit = 'e58a9704e560b3344b8fe39a2b2f46a2ee3bb5b1';
60313
+ VersionInfo.version = '1.6.0-alpha.1405';
60314
+ VersionInfo.date = '2025-05-10T17:25:30.743Z';
60315
+ VersionInfo.commit = 'a9f729a65e195d4fec684444cd2c2a259dc9729b';
59312
60316
 
59313
60317
  /**
59314
60318
  * A factory for custom layout engines.
@@ -59779,29 +60783,6 @@ class Environment {
59779
60783
  if (Environment.webPlatform === WebPlatform.Browser || Environment.webPlatform === WebPlatform.BrowserModule) {
59780
60784
  Environment.registerJQueryPlugin();
59781
60785
  Environment.HighDpiFactor = window.devicePixelRatio;
59782
- // ResizeObserver API does not yet exist so long on Safari (only start 2020 with iOS Safari 13.7 and Desktop 13.1)
59783
- // so we better add a polyfill for it
59784
- if (!('ResizeObserver' in Environment.globalThis)) {
59785
- Environment.globalThis.ResizeObserver = ResizeObserverPolyfill;
59786
- }
59787
- // IntersectionObserver API does not on older iOS versions
59788
- // so we better add a polyfill for it
59789
- if (!('IntersectionObserver' in Environment.globalThis)) {
59790
- Environment.globalThis.IntersectionObserver = IntersectionObserverPolyfill;
59791
- }
59792
- if (!('replaceChildren' in Element.prototype)) {
59793
- Element.prototype.replaceChildren = function (...nodes) {
59794
- this.innerHTML = '';
59795
- this.append(...nodes);
59796
- };
59797
- Document.prototype.replaceChildren = Element.prototype.replaceChildren;
59798
- DocumentFragment.prototype.replaceChildren = Element.prototype.replaceChildren;
59799
- }
59800
- if (!('replaceAll' in String.prototype)) {
59801
- String.prototype.replaceAll = function (str, newStr) {
59802
- return this.replace(new RegExp(str, 'g'), newStr);
59803
- };
59804
- }
59805
60786
  }
59806
60787
  Environment.createWebWorker = createWebWorker;
59807
60788
  Environment.createAudioWorklet = createAudioWorklet;
@@ -63533,6 +64514,7 @@ const _barrel$3 = /*#__PURE__*/Object.freeze(/*#__PURE__*/Object.defineProperty(
63533
64514
  get AccidentalType () { return AccidentalType; },
63534
64515
  Automation,
63535
64516
  get AutomationType () { return AutomationType; },
64517
+ BackingTrack,
63536
64518
  Bar,
63537
64519
  get BarLineStyle () { return BarLineStyle; },
63538
64520
  BarStyle,
@@ -63595,6 +64577,7 @@ const _barrel$3 = /*#__PURE__*/Object.freeze(/*#__PURE__*/Object.defineProperty(
63595
64577
  Staff,
63596
64578
  SustainPedalMarker,
63597
64579
  get SustainPedalMarkerType () { return SustainPedalMarkerType; },
64580
+ SyncPointData,
63598
64581
  Track,
63599
64582
  get TrackNameMode () { return TrackNameMode; },
63600
64583
  get TrackNameOrientation () { return TrackNameOrientation; },
@@ -63659,4 +64642,4 @@ const _jsonbarrel = /*#__PURE__*/Object.freeze(/*#__PURE__*/Object.definePropert
63659
64642
  __proto__: null
63660
64643
  }, Symbol.toStringTag, { value: 'Module' }));
63661
64644
 
63662
- export { AlphaTabApi, AlphaTabApiBase, AlphaTabError, AlphaTabErrorType, ConsoleLogger, CoreSettings, DisplaySettings, Environment, FileLoadError, FingeringMode, FormatError, ImporterSettings, LayoutMode, LogLevel, Logger, NotationElement, NotationMode, NotationSettings, PlayerOutputMode, PlayerSettings, ProgressEventArgs, RenderEngineFactory, RenderingResources, ResizeEventArgs, ScrollMode, Settings, SlidePlaybackSettings, StaveProfile, SystemsLayoutMode, TabRhythmMode, VibratoPlaybackSettings, WebPlatform, _barrel$5 as exporter, _barrel$7 as importer, _barrel$6 as io, _jsonbarrel as json, VersionInfo as meta, _barrel$4 as midi, _barrel$3 as model, _barrel$1 as platform, _barrel$2 as rendering, _barrel as synth };
64645
+ export { AlphaTabApi, AlphaTabApiBase, AlphaTabError, AlphaTabErrorType, ConsoleLogger, CoreSettings, DisplaySettings, Environment, FileLoadError, FingeringMode, FormatError, ImporterSettings, LayoutMode, LogLevel, Logger, NotationElement, NotationMode, NotationSettings, PlayerMode, PlayerOutputMode, PlayerSettings, ProgressEventArgs, RenderEngineFactory, RenderingResources, ResizeEventArgs, ScrollMode, Settings, SlidePlaybackSettings, StaveProfile, SystemsLayoutMode, TabRhythmMode, VibratoPlaybackSettings, WebPlatform, _barrel$5 as exporter, _barrel$7 as importer, _barrel$6 as io, _jsonbarrel as json, VersionInfo as meta, _barrel$4 as midi, _barrel$3 as model, _barrel$1 as platform, _barrel$2 as rendering, _barrel as synth };