@coderline/alphatab 1.6.0-alpha.1401 → 1.6.0-alpha.1403

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/alphaTab.js CHANGED
@@ -1,5 +1,5 @@
1
1
  /*!
2
- * alphaTab v1.6.0-alpha.1401 (develop, build 1401)
2
+ * alphaTab v1.6.0-alpha.1403 (develop, build 1403)
3
3
  *
4
4
  * Copyright © 2025, Daniel Kuschny and Contributors, All rights reserved.
5
5
  *
@@ -55,7 +55,127 @@
55
55
  (global = typeof globalThis !== 'undefined' ? globalThis : global || self, factory(global.alphaTab = {}));
56
56
  })(this, (function (exports) { 'use strict';
57
57
 
58
- if(typeof Symbol.dispose==='undefined'){Symbol.dispose = Symbol('Symbol.dispose')}
58
+ /**
59
+ * A very basic polyfill of the ResizeObserver which triggers
60
+ * the callback on window resize for all registered targets.
61
+ * @target web
62
+ */
63
+ class ResizeObserverPolyfill {
64
+ constructor(callback) {
65
+ this._targets = new Set();
66
+ this._callback = callback;
67
+ window.addEventListener('resize', this.onWindowResize.bind(this), false);
68
+ }
69
+ observe(target) {
70
+ this._targets.add(target);
71
+ }
72
+ unobserve(target) {
73
+ this._targets.delete(target);
74
+ }
75
+ disconnect() {
76
+ this._targets.clear();
77
+ }
78
+ onWindowResize() {
79
+ const entries = [];
80
+ for (const t of this._targets) {
81
+ entries.push({
82
+ target: t,
83
+ // not used by alphaTab
84
+ contentRect: undefined,
85
+ borderBoxSize: undefined,
86
+ contentBoxSize: [],
87
+ devicePixelContentBoxSize: []
88
+ });
89
+ }
90
+ this._callback(entries, this);
91
+ }
92
+ }
93
+
94
+ /**
95
+ * A polyfill of the IntersectionObserver
96
+ * @target web
97
+ */
98
+ class IntersectionObserverPolyfill {
99
+ constructor(callback) {
100
+ this._elements = [];
101
+ let timer = null;
102
+ const oldCheck = this.check.bind(this);
103
+ this.check = () => {
104
+ if (!timer) {
105
+ timer = setTimeout(() => {
106
+ oldCheck();
107
+ timer = null;
108
+ }, 100);
109
+ }
110
+ };
111
+ this._callback = callback;
112
+ window.addEventListener('resize', this.check, true);
113
+ document.addEventListener('scroll', this.check, true);
114
+ }
115
+ observe(target) {
116
+ if (this._elements.indexOf(target) >= 0) {
117
+ return;
118
+ }
119
+ this._elements.push(target);
120
+ this.check();
121
+ }
122
+ unobserve(target) {
123
+ this._elements = this._elements.filter(item => {
124
+ return item !== target;
125
+ });
126
+ }
127
+ check() {
128
+ const entries = [];
129
+ for (const element of this._elements) {
130
+ const rect = element.getBoundingClientRect();
131
+ const isVisible = rect.top + rect.height >= 0 &&
132
+ rect.top <= window.innerHeight &&
133
+ rect.left + rect.width >= 0 &&
134
+ rect.left <= window.innerWidth;
135
+ if (isVisible) {
136
+ entries.push({
137
+ target: element,
138
+ isIntersecting: true
139
+ });
140
+ }
141
+ }
142
+ if (entries.length) {
143
+ this._callback(entries, this);
144
+ }
145
+ }
146
+ }
147
+
148
+ /*@target web*/
149
+ (() => {
150
+ if (typeof Symbol.dispose === 'undefined') {
151
+ Symbol.dispose = Symbol('Symbol.dispose');
152
+ }
153
+ if (typeof window !== 'undefined') {
154
+ // ResizeObserver API did not exist for a long time on Safari (only added in 2020 with iOS Safari 13.7 and Desktop 13.1)
155
+ // so we better add a polyfill for it
156
+ if (!('ResizeObserver' in globalThis)) {
157
+ globalThis.ResizeObserver = ResizeObserverPolyfill;
158
+ }
159
+ // IntersectionObserver API does not exist on older iOS versions
160
+ // so we better add a polyfill for it
161
+ if (!('IntersectionObserver' in globalThis)) {
162
+ globalThis.IntersectionObserver = IntersectionObserverPolyfill;
163
+ }
164
+ if (!('replaceChildren' in Element.prototype)) {
165
+ Element.prototype.replaceChildren = function (...nodes) {
166
+ this.innerHTML = '';
167
+ this.append(...nodes);
168
+ };
169
+ Document.prototype.replaceChildren = Element.prototype.replaceChildren;
170
+ DocumentFragment.prototype.replaceChildren = Element.prototype.replaceChildren;
171
+ }
172
+ }
173
+ if (!('replaceAll' in String.prototype)) {
174
+ String.prototype.replaceAll = function (str, newStr) {
175
+ return this.replace(new RegExp(str, 'g'), newStr);
176
+ };
177
+ }
178
+ })();
59
179
 
60
180
  /**
61
181
  * Lists all layout modes that are supported.
@@ -1279,7 +1399,37 @@
1279
1399
  * Balance change.
1280
1400
  */
1281
1401
  AutomationType[AutomationType["Balance"] = 3] = "Balance";
1402
+ /**
1403
+ * A sync point for synchronizing the internal time axis with an external audio track.
1404
+ */
1405
+ AutomationType[AutomationType["SyncPoint"] = 4] = "SyncPoint";
1282
1406
  })(AutomationType || (AutomationType = {}));
1407
+ /**
1408
+ * Represents the data of a sync point for synchronizing the internal time axis with
1409
+ * an external audio file.
1410
+ * @cloneable
1411
+ * @json
1412
+ * @json_strict
1413
+ */
1414
+ class SyncPointData {
1415
+ constructor() {
1416
+ /**
1417
+ * Indicates for which repeat occurrence this sync point is valid (e.g. 0 on the first time played, 1 on the second time played)
1418
+ */
1419
+ this.barOccurence = 0;
1420
+ /**
1421
+ * The modified tempo at which the cursor should move (aka. the tempo played within the external audio track).
1422
+ * This information is used together with the {@link originalTempo} to calculate how much faster/slower the
1423
+ * cursor playback is performed to align with the audio track.
1424
+ */
1425
+ this.modifiedTempo = 0;
1426
+ /**
1427
+ * The audio offset marking the position within the audio track in milliseconds.
1428
+ * This information is used to regularly sync (or on seeking) to match a given external audio time axis with the internal time axis.
1429
+ */
1430
+ this.millisecondOffset = 0;
1431
+ }
1432
+ }
1283
1433
  /**
1284
1434
  * Automations are used to change the behaviour of a song.
1285
1435
  * @cloneable
@@ -2578,6 +2728,16 @@
2578
2728
  }
2579
2729
  return null;
2580
2730
  }
2731
+ /**
2732
+ * Adds the given sync point to the list of sync points for this bar.
2733
+ * @param syncPoint The sync point to add.
2734
+ */
2735
+ addSyncPoint(syncPoint) {
2736
+ if (!this.syncPoints) {
2737
+ this.syncPoints = [];
2738
+ }
2739
+ this.syncPoints.push(syncPoint);
2740
+ }
2581
2741
  }
2582
2742
  MasterBar.MaxAlternateEndings = 8;
2583
2743
 
@@ -5672,6 +5832,21 @@
5672
5832
  }
5673
5833
  }
5674
5834
 
5835
+ // <auto-generated>
5836
+ // This code was auto-generated.
5837
+ // Changes to this file may cause incorrect behavior and will be lost if
5838
+ // the code is regenerated.
5839
+ // </auto-generated>
5840
+ class SyncPointDataCloner {
5841
+ static clone(original) {
5842
+ const clone = new SyncPointData();
5843
+ clone.barOccurence = original.barOccurence;
5844
+ clone.modifiedTempo = original.modifiedTempo;
5845
+ clone.millisecondOffset = original.millisecondOffset;
5846
+ return clone;
5847
+ }
5848
+ }
5849
+
5675
5850
  // <auto-generated>
5676
5851
  // This code was auto-generated.
5677
5852
  // Changes to this file may cause incorrect behavior and will be lost if
@@ -5683,6 +5858,7 @@
5683
5858
  clone.isLinear = original.isLinear;
5684
5859
  clone.type = original.type;
5685
5860
  clone.value = original.value;
5861
+ clone.syncPointValue = original.syncPointValue ? SyncPointDataCloner.clone(original.syncPointValue) : undefined;
5686
5862
  clone.ratioPosition = original.ratioPosition;
5687
5863
  clone.text = original.text;
5688
5864
  return clone;
@@ -14128,6 +14304,21 @@
14128
14304
  }
14129
14305
  }
14130
14306
 
14307
+ /**
14308
+ * Holds information about the backing track which can be played instead of synthesized audio.
14309
+ * @json
14310
+ * @json_strict
14311
+ */
14312
+ class BackingTrack {
14313
+ constructor() {
14314
+ /**
14315
+ * The number of milliseconds the audio should be shifted to align with the song.
14316
+ * (e.g. negative values allow skipping potential silent parts at the start of the file and directly start with the first note).
14317
+ */
14318
+ this.padding = 0;
14319
+ }
14320
+ }
14321
+
14131
14322
  /**
14132
14323
  * This structure represents a duration within a gpif
14133
14324
  */
@@ -14220,6 +14411,9 @@
14220
14411
  case 'MasterTrack':
14221
14412
  this.parseMasterTrackNode(n);
14222
14413
  break;
14414
+ case 'BackingTrack':
14415
+ this.parseBackingTrackNode(n);
14416
+ break;
14223
14417
  case 'Tracks':
14224
14418
  this.parseTracksNode(n);
14225
14419
  break;
@@ -14241,6 +14435,9 @@
14241
14435
  case 'Rhythms':
14242
14436
  this.parseRhythms(n);
14243
14437
  break;
14438
+ case 'Assets':
14439
+ this.parseAssets(n);
14440
+ break;
14244
14441
  }
14245
14442
  }
14246
14443
  }
@@ -14248,6 +14445,37 @@
14248
14445
  throw new UnsupportedFormatError('Root node of XML was not GPIF');
14249
14446
  }
14250
14447
  }
14448
+ parseAssets(element) {
14449
+ for (const c of element.childElements()) {
14450
+ switch (c.localName) {
14451
+ case 'Asset':
14452
+ if (c.getAttribute('id') === this._backingTrackAssetId) {
14453
+ this.parseBackingTrackAsset(c);
14454
+ }
14455
+ break;
14456
+ }
14457
+ }
14458
+ }
14459
+ parseBackingTrackAsset(element) {
14460
+ let embeddedFilePath = '';
14461
+ for (const c of element.childElements()) {
14462
+ switch (c.localName) {
14463
+ case 'EmbeddedFilePath':
14464
+ embeddedFilePath = c.innerText;
14465
+ break;
14466
+ }
14467
+ }
14468
+ const loadAsset = this.loadAsset;
14469
+ if (loadAsset) {
14470
+ const assetData = loadAsset(embeddedFilePath);
14471
+ if (assetData) {
14472
+ this.score.backingTrack.rawAudioFile = assetData;
14473
+ }
14474
+ else {
14475
+ this.score.backingTrack = undefined;
14476
+ }
14477
+ }
14478
+ }
14251
14479
  //
14252
14480
  // <Score>...</Score>
14253
14481
  //
@@ -14328,7 +14556,41 @@
14328
14556
  if (!text) {
14329
14557
  return [];
14330
14558
  }
14331
- return text.split(separator).map(t => t.trim()).filter(t => t.length > 0);
14559
+ return text
14560
+ .split(separator)
14561
+ .map(t => t.trim())
14562
+ .filter(t => t.length > 0);
14563
+ }
14564
+ //
14565
+ // <BackingTrack>...</BackingTrack>
14566
+ //
14567
+ parseBackingTrackNode(node) {
14568
+ const backingTrack = new BackingTrack();
14569
+ let enabled = false;
14570
+ let source = '';
14571
+ let assetId = '';
14572
+ for (const c of node.childElements()) {
14573
+ switch (c.localName) {
14574
+ case 'Enabled':
14575
+ enabled = c.innerText === 'true';
14576
+ break;
14577
+ case 'Source':
14578
+ source = c.innerText;
14579
+ break;
14580
+ case 'AssetId':
14581
+ assetId = c.innerText;
14582
+ break;
14583
+ case 'FramePadding':
14584
+ backingTrack.padding = GpifParser.parseIntSafe(c.innerText, 0) / GpifParser.SampleRate * 1000;
14585
+ break;
14586
+ }
14587
+ }
14588
+ // only local (contained) backing tracks are supported,
14589
+ // remote / youtube links seem to come in future releases according to the gpif tags.
14590
+ if (enabled && source === 'Local') {
14591
+ this.score.backingTrack = backingTrack;
14592
+ this._backingTrackAssetId = assetId; // when the Asset tag is parsed this ID is used to load the raw data
14593
+ }
14332
14594
  }
14333
14595
  //
14334
14596
  // <MasterTrack>...</MasterTrack>
@@ -14366,6 +14628,7 @@
14366
14628
  let textValue = null;
14367
14629
  let reference = 0;
14368
14630
  let text = null;
14631
+ let syncPointValue = undefined;
14369
14632
  for (const c of node.childElements()) {
14370
14633
  switch (c.localName) {
14371
14634
  case 'Type':
@@ -14384,6 +14647,28 @@
14384
14647
  if (c.firstElement && c.firstElement.nodeType === XmlNodeType.CDATA) {
14385
14648
  textValue = c.innerText;
14386
14649
  }
14650
+ else if (c.firstElement &&
14651
+ c.firstElement.nodeType === XmlNodeType.Element &&
14652
+ type === 'SyncPoint') {
14653
+ syncPointValue = new SyncPointData();
14654
+ for (const vc of c.childElements()) {
14655
+ switch (vc.localName) {
14656
+ case 'BarIndex':
14657
+ barIndex = GpifParser.parseIntSafe(vc.innerText, 0);
14658
+ break;
14659
+ case 'BarOccurrence':
14660
+ syncPointValue.barOccurence = GpifParser.parseIntSafe(vc.innerText, 0);
14661
+ break;
14662
+ case 'ModifiedTempo':
14663
+ syncPointValue.modifiedTempo = GpifParser.parseFloatSafe(vc.innerText, 0);
14664
+ break;
14665
+ case 'FrameOffset':
14666
+ const frameOffset = GpifParser.parseFloatSafe(vc.innerText, 0);
14667
+ syncPointValue.millisecondOffset = (frameOffset / GpifParser.SampleRate) * 1000;
14668
+ break;
14669
+ }
14670
+ }
14671
+ }
14387
14672
  else {
14388
14673
  const parts = GpifParser.splitSafe(c.innerText);
14389
14674
  // Issue 391: Some GPX files might have
@@ -14411,6 +14696,13 @@
14411
14696
  case 'Tempo':
14412
14697
  automation = Automation.buildTempoAutomation(isLinear, ratioPosition, numberValue, reference);
14413
14698
  break;
14699
+ case 'SyncPoint':
14700
+ automation = new Automation();
14701
+ automation.type = AutomationType.SyncPoint;
14702
+ automation.isLinear = isLinear;
14703
+ automation.ratioPosition = ratioPosition;
14704
+ automation.syncPointValue = syncPointValue;
14705
+ break;
14414
14706
  case 'Sound':
14415
14707
  if (textValue && sounds && sounds.has(textValue)) {
14416
14708
  automation = Automation.buildInstrumentAutomation(isLinear, ratioPosition, sounds.get(textValue).program);
@@ -16492,14 +16784,19 @@
16492
16784
  const masterBar = this.score.masterBars[barNumber];
16493
16785
  for (let i = 0, j = automations.length; i < j; i++) {
16494
16786
  const automation = automations[i];
16495
- if (automation.type === AutomationType.Tempo) {
16496
- if (barNumber === 0) {
16497
- this.score.tempo = automation.value | 0;
16498
- if (automation.text) {
16499
- this.score.tempoLabel = automation.text;
16787
+ switch (automation.type) {
16788
+ case AutomationType.Tempo:
16789
+ if (barNumber === 0) {
16790
+ this.score.tempo = automation.value | 0;
16791
+ if (automation.text) {
16792
+ this.score.tempoLabel = automation.text;
16793
+ }
16500
16794
  }
16501
- }
16502
- masterBar.tempoAutomations.push(automation);
16795
+ masterBar.tempoAutomations.push(automation);
16796
+ break;
16797
+ case AutomationType.SyncPoint:
16798
+ masterBar.addSyncPoint(automation);
16799
+ break;
16503
16800
  }
16504
16801
  }
16505
16802
  }
@@ -16516,6 +16813,10 @@
16516
16813
  * Internal Range: 1 per quarter note
16517
16814
  */
16518
16815
  GpifParser.BendPointValueFactor = 1 / 25.0;
16816
+ // tests have shown that Guitar Pro seems to always work with 44100hz for the frame offsets,
16817
+ // they are NOT using the sample rate of the input file.
16818
+ // Downsampling a 44100hz ogg to 8000hz and using it in as audio track resulted in the same frame offset when placing sync points.
16819
+ GpifParser.SampleRate = 44100;
16519
16820
 
16520
16821
  // PartConfiguration File Format Notes.
16521
16822
  // Based off Guitar Pro 8
@@ -17382,7 +17683,9 @@
17382
17683
  let binaryStylesheetData = null;
17383
17684
  let partConfigurationData = null;
17384
17685
  let layoutConfigurationData = null;
17686
+ const entryLookup = new Map();
17385
17687
  for (const entry of entries) {
17688
+ entryLookup.set(entry.fullName, entry);
17386
17689
  switch (entry.fileName) {
17387
17690
  case 'score.gpif':
17388
17691
  xml = IOHelper.toString(entry.data, this.settings.importer.encoding);
@@ -17405,6 +17708,12 @@
17405
17708
  // the score information as XML we need to parse.
17406
17709
  Logger.debug(this.name, 'Start Parsing score.gpif');
17407
17710
  const gpifParser = new GpifParser();
17711
+ gpifParser.loadAsset = (fileName) => {
17712
+ if (entryLookup.has(fileName)) {
17713
+ return entryLookup.get(fileName).data;
17714
+ }
17715
+ return undefined;
17716
+ };
17408
17717
  gpifParser.parseXml(xml, this.settings);
17409
17718
  Logger.debug(this.name, 'score.gpif parsed');
17410
17719
  const score = gpifParser.score;
@@ -21870,8 +22179,24 @@
21870
22179
  * Represents a change of the tempo in the song.
21871
22180
  */
21872
22181
  class TempoChangeEvent extends MidiEvent {
22182
+ /**
22183
+ * The tempo in microseconds per quarter note (aka USQ). A time format typically for midi.
22184
+ */
22185
+ get microSecondsPerQuarterNote() {
22186
+ return 60000000 / this.beatsPerMinute;
22187
+ }
22188
+ /**
22189
+ * The tempo in microseconds per quarter note (aka USQ). A time format typically for midi.
22190
+ */
22191
+ set microSecondsPerQuarterNote(value) {
22192
+ this.beatsPerMinute = 60000000 / value;
22193
+ }
21873
22194
  constructor(tick, microSecondsPerQuarterNote) {
21874
22195
  super(0, tick, MidiEventType.TempoChange);
22196
+ /**
22197
+ * The tempo in beats per minute
22198
+ */
22199
+ this.beatsPerMinute = 0;
21875
22200
  this.microSecondsPerQuarterNote = microSecondsPerQuarterNote;
21876
22201
  }
21877
22202
  writeTo(s) {
@@ -21954,6 +22279,17 @@
21954
22279
  }
21955
22280
  }
21956
22281
 
22282
+ /**
22283
+ * Represents a point to sync the alphaTab time axis with an external backing track.
22284
+ */
22285
+ class BackingTrackSyncPoint {
22286
+ constructor(tick, data) {
22287
+ this.tick = 0;
22288
+ this.tick = tick;
22289
+ this.data = data;
22290
+ }
22291
+ }
22292
+
21957
22293
  class MidiFileSequencerTempoChange {
21958
22294
  constructor(bpm, ticks, time) {
21959
22295
  this.bpm = bpm;
@@ -21961,9 +22297,17 @@
21961
22297
  this.time = time;
21962
22298
  }
21963
22299
  }
22300
+ class BackingTrackSyncPointWithTime extends BackingTrackSyncPoint {
22301
+ constructor(tick, data, time) {
22302
+ super(tick, data);
22303
+ this.time = time;
22304
+ }
22305
+ }
21964
22306
  class MidiSequencerState {
21965
22307
  constructor() {
21966
22308
  this.tempoChanges = [];
22309
+ this.tempoChangeIndex = 0;
22310
+ this.syncPoints = [];
21967
22311
  this.firstProgramEventPerChannel = new Map();
21968
22312
  this.firstTimeSignatureNumerator = 0;
21969
22313
  this.firstTimeSignatureDenominator = 0;
@@ -21971,11 +22315,15 @@
21971
22315
  this.division = MidiUtils.QuarterTime;
21972
22316
  this.eventIndex = 0;
21973
22317
  this.currentTime = 0;
22318
+ this.currentTick = 0;
22319
+ this.syncPointIndex = 0;
21974
22320
  this.playbackRange = null;
21975
22321
  this.playbackRangeStartTime = 0;
21976
22322
  this.playbackRangeEndTime = 0;
21977
22323
  this.endTick = 0;
21978
22324
  this.endTime = 0;
22325
+ this.currentTempo = 0;
22326
+ this.modifiedTempo = 0;
21979
22327
  }
21980
22328
  }
21981
22329
  /**
@@ -22028,6 +22376,12 @@
22028
22376
  get currentEndTime() {
22029
22377
  return this._currentState.endTime / this.playbackSpeed;
22030
22378
  }
22379
+ get currentTempo() {
22380
+ return this._currentState.currentTempo;
22381
+ }
22382
+ get modifiedTempo() {
22383
+ return this._currentState.modifiedTempo * this.playbackSpeed;
22384
+ }
22031
22385
  mainSeek(timePosition) {
22032
22386
  // map to speed=1
22033
22387
  timePosition *= this.playbackSpeed;
@@ -22047,6 +22401,8 @@
22047
22401
  // we have to restart the midi to make sure we get the right state: instruments, volume, pan, etc
22048
22402
  this._mainState.currentTime = 0;
22049
22403
  this._mainState.eventIndex = 0;
22404
+ this._mainState.syncPointIndex = 0;
22405
+ this._mainState.tempoChangeIndex = 0;
22050
22406
  if (this.isPlayingMain) {
22051
22407
  const metronomeVolume = this._synthesizer.metronomeVolume;
22052
22408
  this._synthesizer.noteOffAll(true);
@@ -22121,7 +22477,7 @@
22121
22477
  }
22122
22478
  if (mEvent.type === MidiEventType.TempoChange) {
22123
22479
  const meta = mEvent;
22124
- bpm = 60000000 / meta.microSecondsPerQuarterNote;
22480
+ bpm = meta.beatsPerMinute;
22125
22481
  state.tempoChanges.push(new MidiFileSequencerTempoChange(bpm, absTick, absTime));
22126
22482
  metronomeLengthInMillis = metronomeLengthInTicks * (60000.0 / (bpm * midiFile.division));
22127
22483
  }
@@ -22155,6 +22511,8 @@
22155
22511
  }
22156
22512
  }
22157
22513
  }
22514
+ state.currentTempo = state.tempoChanges.length > 0 ? state.tempoChanges[0].bpm : bpm;
22515
+ state.modifiedTempo = state.currentTempo;
22158
22516
  state.synthData.sort((a, b) => {
22159
22517
  if (a.time > b.time) {
22160
22518
  return 1;
@@ -22171,6 +22529,35 @@
22171
22529
  fillMidiEventQueue() {
22172
22530
  return this.fillMidiEventQueueLimited(-1);
22173
22531
  }
22532
+ fillMidiEventQueueToEndTime(endTime) {
22533
+ while (this._mainState.currentTime < endTime) {
22534
+ if (this.fillMidiEventQueueLimited(endTime - this._mainState.currentTime)) {
22535
+ this._synthesizer.synthesizeSilent(SynthConstants.MicroBufferSize);
22536
+ }
22537
+ }
22538
+ let anyEventsDispatched = false;
22539
+ this._currentState.currentTime = endTime;
22540
+ while (this._currentState.eventIndex < this._currentState.synthData.length &&
22541
+ this._currentState.synthData[this._currentState.eventIndex].time < this._currentState.currentTime) {
22542
+ const synthEvent = this._currentState.synthData[this._currentState.eventIndex];
22543
+ this._synthesizer.dispatchEvent(synthEvent);
22544
+ while (this._currentState.syncPointIndex < this._currentState.syncPoints.length &&
22545
+ this._currentState.syncPoints[this._currentState.syncPointIndex].tick < synthEvent.event.tick) {
22546
+ this._currentState.modifiedTempo =
22547
+ this._currentState.syncPoints[this._currentState.syncPointIndex].data.modifiedTempo;
22548
+ this._currentState.syncPointIndex++;
22549
+ }
22550
+ while (this._currentState.tempoChangeIndex < this._currentState.tempoChanges.length &&
22551
+ this._currentState.tempoChanges[this._currentState.tempoChangeIndex].time <= synthEvent.time) {
22552
+ this._currentState.currentTempo =
22553
+ this._currentState.tempoChanges[this._currentState.tempoChangeIndex].bpm;
22554
+ this._currentState.tempoChangeIndex++;
22555
+ }
22556
+ this._currentState.eventIndex++;
22557
+ anyEventsDispatched = true;
22558
+ }
22559
+ return anyEventsDispatched;
22560
+ }
22174
22561
  fillMidiEventQueueLimited(maxMilliseconds) {
22175
22562
  let millisecondsPerBuffer = (SynthConstants.MicroBufferSize / this._synthesizer.outSampleRate) * 1000 * this.playbackSpeed;
22176
22563
  let endTime = this.internalEndTime;
@@ -22198,9 +22585,87 @@
22198
22585
  mainTimePositionToTickPosition(timePosition) {
22199
22586
  return this.timePositionToTickPositionWithSpeed(this._mainState, timePosition, this.playbackSpeed);
22200
22587
  }
22588
+ mainUpdateSyncPoints(syncPoints) {
22589
+ const state = this._mainState;
22590
+ syncPoints.sort((a, b) => a.tick - b.tick); // just in case
22591
+ state.syncPoints = new Array(syncPoints.length);
22592
+ if (syncPoints.length >= 0) {
22593
+ let bpm = 120;
22594
+ let absTick = 0;
22595
+ let absTime = 0.0;
22596
+ let previousTick = 0;
22597
+ let tempoChangeIndex = 0;
22598
+ for (let i = 0; i < syncPoints.length; i++) {
22599
+ const p = syncPoints[i];
22600
+ const deltaTick = p.tick - previousTick;
22601
+ absTick += deltaTick;
22602
+ absTime += deltaTick * (60000.0 / (bpm * state.division));
22603
+ state.syncPoints[i] = new BackingTrackSyncPointWithTime(p.tick, p.data, absTime);
22604
+ previousTick = p.tick;
22605
+ while (tempoChangeIndex < state.tempoChanges.length &&
22606
+ state.tempoChanges[tempoChangeIndex].ticks <= absTick) {
22607
+ bpm = state.tempoChanges[tempoChangeIndex].bpm;
22608
+ tempoChangeIndex++;
22609
+ }
22610
+ }
22611
+ }
22612
+ state.syncPointIndex = 0;
22613
+ }
22201
22614
  currentTimePositionToTickPosition(timePosition) {
22202
22615
  return this.timePositionToTickPositionWithSpeed(this._currentState, timePosition, this.playbackSpeed);
22203
22616
  }
22617
+ mainTimePositionFromBackingTrack(timePosition, backingTrackLength) {
22618
+ const mainState = this._mainState;
22619
+ const syncPoints = mainState.syncPoints;
22620
+ if (timePosition < 0 || syncPoints.length === 0) {
22621
+ return timePosition;
22622
+ }
22623
+ let syncPointIndex = timePosition >= syncPoints[mainState.syncPointIndex].data.millisecondOffset ? mainState.syncPointIndex : 0;
22624
+ while (syncPointIndex + 1 < syncPoints.length &&
22625
+ syncPoints[syncPointIndex + 1].data.millisecondOffset <= timePosition) {
22626
+ syncPointIndex++;
22627
+ }
22628
+ const currentSyncPoint = syncPoints[syncPointIndex];
22629
+ const timeDiff = timePosition - currentSyncPoint.data.millisecondOffset;
22630
+ let alphaTabTimeDiff;
22631
+ if (syncPointIndex + 1 < syncPoints.length) {
22632
+ const nextSyncPoint = syncPoints[syncPointIndex + 1];
22633
+ const relativeTimeDiff = timeDiff / (nextSyncPoint.data.millisecondOffset - currentSyncPoint.data.millisecondOffset);
22634
+ alphaTabTimeDiff = (nextSyncPoint.time - currentSyncPoint.time) * relativeTimeDiff;
22635
+ }
22636
+ else {
22637
+ const relativeTimeDiff = timeDiff / (backingTrackLength - currentSyncPoint.data.millisecondOffset);
22638
+ alphaTabTimeDiff = (mainState.endTime - currentSyncPoint.time) * relativeTimeDiff;
22639
+ }
22640
+ return (currentSyncPoint.time + alphaTabTimeDiff) / this.playbackSpeed;
22641
+ }
22642
+ mainTimePositionToBackingTrack(timePosition, backingTrackLength) {
22643
+ const mainState = this._mainState;
22644
+ const syncPoints = mainState.syncPoints;
22645
+ if (timePosition < 0 || syncPoints.length === 0) {
22646
+ return timePosition;
22647
+ }
22648
+ timePosition *= this.playbackSpeed;
22649
+ let syncPointIndex = timePosition >= syncPoints[mainState.syncPointIndex].time ? mainState.syncPointIndex : 0;
22650
+ while (syncPointIndex + 1 < syncPoints.length && syncPoints[syncPointIndex + 1].time <= timePosition) {
22651
+ syncPointIndex++;
22652
+ }
22653
+ const currentSyncPoint = syncPoints[syncPointIndex];
22654
+ const alphaTabTimeDiff = timePosition - currentSyncPoint.time;
22655
+ let backingTrackPos;
22656
+ if (syncPointIndex + 1 < syncPoints.length) {
22657
+ const nextSyncPoint = syncPoints[syncPointIndex + 1];
22658
+ const relativeAlphaTabTimeDiff = alphaTabTimeDiff / (nextSyncPoint.time - currentSyncPoint.time);
22659
+ const backingTrackDiff = nextSyncPoint.data.millisecondOffset - currentSyncPoint.data.millisecondOffset;
22660
+ backingTrackPos = currentSyncPoint.data.millisecondOffset + backingTrackDiff * relativeAlphaTabTimeDiff;
22661
+ }
22662
+ else {
22663
+ const relativeAlphaTabTimeDiff = alphaTabTimeDiff / (mainState.endTime - currentSyncPoint.time);
22664
+ const frameDiff = backingTrackLength - currentSyncPoint.data.millisecondOffset;
22665
+ backingTrackPos = currentSyncPoint.data.millisecondOffset + frameDiff * relativeAlphaTabTimeDiff;
22666
+ }
22667
+ return backingTrackPos;
22668
+ }
22204
22669
  tickPositionToTimePositionWithSpeed(state, tickPosition, playbackSpeed) {
22205
22670
  let timePosition = 0.0;
22206
22671
  let bpm = 120.0;
@@ -22310,6 +22775,8 @@
22310
22775
  });
22311
22776
  state.endTime = metronomeTime;
22312
22777
  state.endTick = metronomeTick;
22778
+ state.currentTempo = bpm;
22779
+ state.modifiedTempo = bpm;
22313
22780
  this._countInState = state;
22314
22781
  }
22315
22782
  }
@@ -22355,12 +22822,22 @@
22355
22822
  * @param endTick The end tick.
22356
22823
  * @param isSeek Whether the time was seeked.
22357
22824
  */
22358
- constructor(currentTime, endTime, currentTick, endTick, isSeek) {
22825
+ constructor(currentTime, endTime, currentTick, endTick, isSeek, originalTempo, modifiedTempo) {
22826
+ /**
22827
+ * The original tempo in which alphaTab internally would be playing right now.
22828
+ */
22829
+ this.originalTempo = 0;
22830
+ /**
22831
+ * The modified tempo in which the actual playback is happening (e.g. due to playback speed or external audio synchronization)
22832
+ */
22833
+ this.modifiedTempo = 0;
22359
22834
  this.currentTime = currentTime;
22360
22835
  this.endTime = endTime;
22361
22836
  this.currentTick = currentTick;
22362
22837
  this.endTick = endTick;
22363
22838
  this.isSeek = isSeek;
22839
+ this.originalTempo = originalTempo;
22840
+ this.modifiedTempo = modifiedTempo;
22364
22841
  }
22365
22842
  }
22366
22843
 
@@ -26458,7 +26935,7 @@
26458
26935
  break;
26459
26936
  case MidiEventType.TempoChange:
26460
26937
  const tempoChange = e;
26461
- this.currentTempo = 60000000 / tempoChange.microSecondsPerQuarterNote;
26938
+ this.currentTempo = tempoChange.beatsPerMinute;
26462
26939
  break;
26463
26940
  case MidiEventType.PitchBend:
26464
26941
  const pitchBend = e;
@@ -27610,15 +28087,15 @@
27610
28087
  }
27611
28088
 
27612
28089
  /**
27613
- * This is the main synthesizer component which can be used to
28090
+ * This is the base class for synthesizer components which can be used to
27614
28091
  * play a {@link MidiFile} via a {@link ISynthOutput}.
27615
28092
  */
27616
- class AlphaSynth {
28093
+ class AlphaSynthBase {
27617
28094
  get output() {
27618
28095
  return this._output;
27619
28096
  }
27620
28097
  get isReadyForPlayback() {
27621
- return this.isReady && this._isSoundFontLoaded && this._isMidiLoaded;
28098
+ return this.isReady && this.isSoundFontLoaded && this._isMidiLoaded;
27622
28099
  }
27623
28100
  get logLevel() {
27624
28101
  return Logger.logLevel;
@@ -27627,11 +28104,14 @@
27627
28104
  Logger.logLevel = value;
27628
28105
  }
27629
28106
  get masterVolume() {
27630
- return this._synthesizer.masterVolume;
28107
+ return this.synthesizer.masterVolume;
27631
28108
  }
27632
28109
  set masterVolume(value) {
27633
28110
  value = Math.max(value, SynthConstants.MinVolume);
27634
- this._synthesizer.masterVolume = value;
28111
+ this.updateMasterVolume(value);
28112
+ }
28113
+ updateMasterVolume(value) {
28114
+ this.synthesizer.masterVolume = value;
27635
28115
  }
27636
28116
  get metronomeVolume() {
27637
28117
  return this._metronomeVolume;
@@ -27639,7 +28119,7 @@
27639
28119
  set metronomeVolume(value) {
27640
28120
  value = Math.max(value, SynthConstants.MinVolume);
27641
28121
  this._metronomeVolume = value;
27642
- this._synthesizer.metronomeVolume = value;
28122
+ this.synthesizer.metronomeVolume = value;
27643
28123
  }
27644
28124
  get countInVolume() {
27645
28125
  return this._countInVolume;
@@ -27655,19 +28135,22 @@
27655
28135
  this._midiEventsPlayedFilter = new Set(value);
27656
28136
  }
27657
28137
  get playbackSpeed() {
27658
- return this._sequencer.playbackSpeed;
28138
+ return this.sequencer.playbackSpeed;
27659
28139
  }
27660
28140
  set playbackSpeed(value) {
27661
28141
  value = ModelUtils.clamp(value, SynthConstants.MinPlaybackSpeed, SynthConstants.MaxPlaybackSpeed);
27662
- const oldSpeed = this._sequencer.playbackSpeed;
27663
- this._sequencer.playbackSpeed = value;
28142
+ this.updatePlaybackSpeed(value);
28143
+ }
28144
+ updatePlaybackSpeed(value) {
28145
+ const oldSpeed = this.sequencer.playbackSpeed;
28146
+ this.sequencer.playbackSpeed = value;
27664
28147
  this.timePosition = this.timePosition * (oldSpeed / value);
27665
28148
  }
27666
28149
  get tickPosition() {
27667
28150
  return this._tickPosition;
27668
28151
  }
27669
28152
  set tickPosition(value) {
27670
- this.timePosition = this._sequencer.mainTickPositionToTimePosition(value);
28153
+ this.timePosition = this.sequencer.mainTickPositionToTimePosition(value);
27671
28154
  }
27672
28155
  get timePosition() {
27673
28156
  return this._timePosition;
@@ -27675,30 +28158,30 @@
27675
28158
  set timePosition(value) {
27676
28159
  Logger.debug('AlphaSynth', `Seeking to position ${value}ms (main)`);
27677
28160
  // tell the sequencer to jump to the given position
27678
- this._sequencer.mainSeek(value);
28161
+ this.sequencer.mainSeek(value);
27679
28162
  // update the internal position
27680
28163
  this.updateTimePosition(value, true);
27681
28164
  // tell the output to reset the already synthesized buffers and request data again
27682
- if (this._sequencer.isPlayingMain) {
28165
+ if (this.sequencer.isPlayingMain) {
27683
28166
  this._notPlayedSamples = 0;
27684
28167
  this.output.resetSamples();
27685
28168
  }
27686
28169
  }
27687
28170
  get playbackRange() {
27688
- return this._sequencer.mainPlaybackRange;
28171
+ return this.sequencer.mainPlaybackRange;
27689
28172
  }
27690
28173
  set playbackRange(value) {
27691
- this._sequencer.mainPlaybackRange = value;
28174
+ this.sequencer.mainPlaybackRange = value;
27692
28175
  if (value) {
27693
28176
  this.tickPosition = value.startTick;
27694
28177
  }
27695
28178
  this.playbackRangeChanged.trigger(new PlaybackRangeChangedEventArgs(value));
27696
28179
  }
27697
28180
  get isLooping() {
27698
- return this._sequencer.isLooping;
28181
+ return this.sequencer.isLooping;
27699
28182
  }
27700
28183
  set isLooping(value) {
27701
- this._sequencer.isLooping = value;
28184
+ this.sequencer.isLooping = value;
27702
28185
  }
27703
28186
  destroy() {
27704
28187
  Logger.debug('AlphaSynth', 'Destroying player');
@@ -27706,11 +28189,11 @@
27706
28189
  this.output.destroy();
27707
28190
  }
27708
28191
  /**
27709
- * Initializes a new instance of the {@link AlphaSynth} class.
28192
+ * Initializes a new instance of the {@link AlphaSynthBase} class.
27710
28193
  * @param output The output to use for playing the generated samples.
27711
28194
  */
27712
- constructor(output, bufferTimeInMilliseconds) {
27713
- this._isSoundFontLoaded = false;
28195
+ constructor(output, synthesizer, bufferTimeInMilliseconds) {
28196
+ this.isSoundFontLoaded = false;
27714
28197
  this._isMidiLoaded = false;
27715
28198
  this._tickPosition = 0;
27716
28199
  this._timePosition = 0;
@@ -27739,8 +28222,8 @@
27739
28222
  Logger.debug('AlphaSynth', 'Creating output');
27740
28223
  this._output = output;
27741
28224
  Logger.debug('AlphaSynth', 'Creating synthesizer');
27742
- this._synthesizer = new TinySoundFont(this.output.sampleRate);
27743
- this._sequencer = new MidiFileSequencer(this._synthesizer);
28225
+ this.synthesizer = synthesizer;
28226
+ this.sequencer = new MidiFileSequencer(this.synthesizer);
27744
28227
  Logger.debug('AlphaSynth', 'Opening output');
27745
28228
  this.output.ready.on(() => {
27746
28229
  this.isReady = true;
@@ -27748,42 +28231,45 @@
27748
28231
  this.checkReadyForPlayback();
27749
28232
  });
27750
28233
  this.output.sampleRequest.on(() => {
27751
- if (this.state === PlayerState.Playing &&
27752
- (!this._sequencer.isFinished || this._synthesizer.activeVoiceCount > 0)) {
27753
- let samples = new Float32Array(SynthConstants.MicroBufferSize * SynthConstants.MicroBufferCount * SynthConstants.AudioChannels);
27754
- let bufferPos = 0;
27755
- for (let i = 0; i < SynthConstants.MicroBufferCount; i++) {
27756
- // synthesize buffer
27757
- this._sequencer.fillMidiEventQueue();
27758
- const synthesizedEvents = this._synthesizer.synthesize(samples, bufferPos, SynthConstants.MicroBufferSize);
27759
- bufferPos += SynthConstants.MicroBufferSize * SynthConstants.AudioChannels;
27760
- // push all processed events into the queue
27761
- // for informing users about played events
27762
- for (const e of synthesizedEvents) {
27763
- if (this._midiEventsPlayedFilter.has(e.event.type)) {
27764
- this._playedEventsQueue.enqueue(e);
27765
- }
27766
- }
27767
- // tell sequencer to check whether its work is done
27768
- if (this._sequencer.isFinished) {
27769
- break;
28234
+ this.onSampleRequest();
28235
+ });
28236
+ this.output.samplesPlayed.on(this.onSamplesPlayed.bind(this));
28237
+ this.output.open(bufferTimeInMilliseconds);
28238
+ }
28239
+ onSampleRequest() {
28240
+ if (this.state === PlayerState.Playing &&
28241
+ (!this.sequencer.isFinished || this.synthesizer.activeVoiceCount > 0)) {
28242
+ let samples = new Float32Array(SynthConstants.MicroBufferSize * SynthConstants.MicroBufferCount * SynthConstants.AudioChannels);
28243
+ let bufferPos = 0;
28244
+ for (let i = 0; i < SynthConstants.MicroBufferCount; i++) {
28245
+ // synthesize buffer
28246
+ this.sequencer.fillMidiEventQueue();
28247
+ const synthesizedEvents = this.synthesizer.synthesize(samples, bufferPos, SynthConstants.MicroBufferSize);
28248
+ bufferPos += SynthConstants.MicroBufferSize * SynthConstants.AudioChannels;
28249
+ // push all processed events into the queue
28250
+ // for informing users about played events
28251
+ for (const e of synthesizedEvents) {
28252
+ if (this._midiEventsPlayedFilter.has(e.event.type)) {
28253
+ this._playedEventsQueue.enqueue(e);
27770
28254
  }
27771
28255
  }
27772
- // send it to output
27773
- if (bufferPos < samples.length) {
27774
- samples = samples.subarray(0, bufferPos);
28256
+ // tell sequencer to check whether its work is done
28257
+ if (this.sequencer.isFinished) {
28258
+ break;
27775
28259
  }
27776
- this._notPlayedSamples += samples.length;
27777
- this.output.addSamples(samples);
27778
28260
  }
27779
- else {
27780
- // Tell output that there is no data left for it.
27781
- const samples = new Float32Array(0);
27782
- this.output.addSamples(samples);
28261
+ // send it to output
28262
+ if (bufferPos < samples.length) {
28263
+ samples = samples.subarray(0, bufferPos);
27783
28264
  }
27784
- });
27785
- this.output.samplesPlayed.on(this.onSamplesPlayed.bind(this));
27786
- this.output.open(bufferTimeInMilliseconds);
28265
+ this._notPlayedSamples += samples.length;
28266
+ this.output.addSamples(samples);
28267
+ }
28268
+ else {
28269
+ // Tell output that there is no data left for it.
28270
+ const samples = new Float32Array(0);
28271
+ this.output.addSamples(samples);
28272
+ }
27787
28273
  }
27788
28274
  play() {
27789
28275
  if (this.state !== PlayerState.Paused || !this._isMidiLoaded) {
@@ -27793,20 +28279,20 @@
27793
28279
  this.playInternal();
27794
28280
  if (this._countInVolume > 0) {
27795
28281
  Logger.debug('AlphaSynth', 'Starting countin');
27796
- this._sequencer.startCountIn();
27797
- this._synthesizer.setupMetronomeChannel(this._countInVolume);
28282
+ this.sequencer.startCountIn();
28283
+ this.synthesizer.setupMetronomeChannel(this._countInVolume);
27798
28284
  this.updateTimePosition(0, true);
27799
28285
  }
27800
28286
  this.output.play();
27801
28287
  return true;
27802
28288
  }
27803
28289
  playInternal() {
27804
- if (this._sequencer.isPlayingOneTimeMidi) {
28290
+ if (this.sequencer.isPlayingOneTimeMidi) {
27805
28291
  Logger.debug('AlphaSynth', 'Cancelling one time midi');
27806
28292
  this.stopOneTimeMidi();
27807
28293
  }
27808
28294
  Logger.debug('AlphaSynth', 'Starting playback');
27809
- this._synthesizer.setupMetronomeChannel(this.metronomeVolume);
28295
+ this.synthesizer.setupMetronomeChannel(this.metronomeVolume);
27810
28296
  this._synthStopping = false;
27811
28297
  this.state = PlayerState.Playing;
27812
28298
  this.stateChanged.trigger(new PlayerStateChangedEventArgs(this.state, false));
@@ -27819,7 +28305,7 @@
27819
28305
  this.state = PlayerState.Paused;
27820
28306
  this.stateChanged.trigger(new PlayerStateChangedEventArgs(this.state, false));
27821
28307
  this.output.pause();
27822
- this._synthesizer.noteOffAll(false);
28308
+ this.synthesizer.noteOffAll(false);
27823
28309
  }
27824
28310
  playPause() {
27825
28311
  if (this.state !== PlayerState.Paused || !this._isMidiLoaded) {
@@ -27837,21 +28323,21 @@
27837
28323
  this.state = PlayerState.Paused;
27838
28324
  this.output.pause();
27839
28325
  this._notPlayedSamples = 0;
27840
- this._sequencer.stop();
27841
- this._synthesizer.noteOffAll(true);
27842
- this.tickPosition = this._sequencer.mainPlaybackRange ? this._sequencer.mainPlaybackRange.startTick : 0;
28326
+ this.sequencer.stop();
28327
+ this.synthesizer.noteOffAll(true);
28328
+ this.tickPosition = this.sequencer.mainPlaybackRange ? this.sequencer.mainPlaybackRange.startTick : 0;
27843
28329
  this.stateChanged.trigger(new PlayerStateChangedEventArgs(this.state, true));
27844
28330
  }
27845
28331
  playOneTimeMidiFile(midi) {
27846
- if (this._sequencer.isPlayingOneTimeMidi) {
28332
+ if (this.sequencer.isPlayingOneTimeMidi) {
27847
28333
  this.stopOneTimeMidi();
27848
28334
  }
27849
28335
  else {
27850
28336
  // pause current playback.
27851
28337
  this.pause();
27852
28338
  }
27853
- this._sequencer.loadOneTimeMidi(midi);
27854
- this._synthesizer.noteOffAll(true);
28339
+ this.sequencer.loadOneTimeMidi(midi);
28340
+ this.synthesizer.noteOffAll(true);
27855
28341
  // update the internal position
27856
28342
  this.updateTimePosition(0, true);
27857
28343
  // tell the output to reset the already synthesized buffers and request data again
@@ -27861,9 +28347,9 @@
27861
28347
  }
27862
28348
  resetSoundFonts() {
27863
28349
  this.stop();
27864
- this._synthesizer.resetPresets();
28350
+ this.synthesizer.resetPresets();
27865
28351
  this._loadedSoundFonts = [];
27866
- this._isSoundFontLoaded = false;
28352
+ this.isSoundFontLoaded = false;
27867
28353
  this.soundFontLoaded.trigger();
27868
28354
  }
27869
28355
  loadSoundFont(data, append) {
@@ -27877,7 +28363,7 @@
27877
28363
  this._loadedSoundFonts = [];
27878
28364
  }
27879
28365
  this._loadedSoundFonts.push(soundFont);
27880
- this._isSoundFontLoaded = true;
28366
+ this.isSoundFontLoaded = true;
27881
28367
  this.soundFontLoaded.trigger();
27882
28368
  Logger.debug('AlphaSynth', 'soundFont successfully loaded');
27883
28369
  this.checkReadyForPlayback();
@@ -27889,12 +28375,12 @@
27889
28375
  }
27890
28376
  checkReadyForPlayback() {
27891
28377
  if (this.isReadyForPlayback) {
27892
- this._synthesizer.setupMetronomeChannel(this.metronomeVolume);
27893
- const programs = this._sequencer.instrumentPrograms;
27894
- const percussionKeys = this._sequencer.percussionKeys;
28378
+ this.synthesizer.setupMetronomeChannel(this.metronomeVolume);
28379
+ const programs = this.sequencer.instrumentPrograms;
28380
+ const percussionKeys = this.sequencer.percussionKeys;
27895
28381
  let append = false;
27896
28382
  for (const soundFont of this._loadedSoundFonts) {
27897
- this._synthesizer.loadPresets(soundFont, programs, percussionKeys, append);
28383
+ this.synthesizer.loadPresets(soundFont, programs, percussionKeys, append);
27898
28384
  append = true;
27899
28385
  }
27900
28386
  this.readyForPlayback.trigger();
@@ -27908,9 +28394,9 @@
27908
28394
  this.stop();
27909
28395
  try {
27910
28396
  Logger.debug('AlphaSynth', 'Loading midi from model');
27911
- this._sequencer.loadMidi(midi);
28397
+ this.sequencer.loadMidi(midi);
27912
28398
  this._isMidiLoaded = true;
27913
- this.midiLoaded.trigger(new PositionChangedEventArgs(0, this._sequencer.currentEndTime, 0, this._sequencer.currentEndTick, false));
28399
+ this.midiLoaded.trigger(new PositionChangedEventArgs(0, this.sequencer.currentEndTime, 0, this.sequencer.currentEndTick, false, this.sequencer.currentTempo, this.sequencer.modifiedTempo));
27914
28400
  Logger.debug('AlphaSynth', 'Midi successfully loaded');
27915
28401
  this.checkReadyForPlayback();
27916
28402
  this.tickPosition = 0;
@@ -27921,29 +28407,29 @@
27921
28407
  }
27922
28408
  }
27923
28409
  applyTranspositionPitches(transpositionPitches) {
27924
- this._synthesizer.applyTranspositionPitches(transpositionPitches);
28410
+ this.synthesizer.applyTranspositionPitches(transpositionPitches);
27925
28411
  }
27926
28412
  setChannelTranspositionPitch(channel, semitones) {
27927
- this._synthesizer.setChannelTranspositionPitch(channel, semitones);
28413
+ this.synthesizer.setChannelTranspositionPitch(channel, semitones);
27928
28414
  }
27929
28415
  setChannelMute(channel, mute) {
27930
- this._synthesizer.channelSetMute(channel, mute);
28416
+ this.synthesizer.channelSetMute(channel, mute);
27931
28417
  }
27932
28418
  resetChannelStates() {
27933
- this._synthesizer.resetChannelStates();
28419
+ this.synthesizer.resetChannelStates();
27934
28420
  }
27935
28421
  setChannelSolo(channel, solo) {
27936
- this._synthesizer.channelSetSolo(channel, solo);
28422
+ this.synthesizer.channelSetSolo(channel, solo);
27937
28423
  }
27938
28424
  setChannelVolume(channel, volume) {
27939
28425
  volume = Math.max(volume, SynthConstants.MinVolume);
27940
- this._synthesizer.channelSetMixVolume(channel, volume);
28426
+ this.synthesizer.channelSetMixVolume(channel, volume);
27941
28427
  }
27942
28428
  onSamplesPlayed(sampleCount) {
27943
28429
  if (sampleCount === 0) {
27944
28430
  return;
27945
28431
  }
27946
- const playedMillis = (sampleCount / this._synthesizer.outSampleRate) * 1000;
28432
+ const playedMillis = (sampleCount / this.synthesizer.outSampleRate) * 1000;
27947
28433
  this._notPlayedSamples -= sampleCount * SynthConstants.AudioChannels;
27948
28434
  this.updateTimePosition(this._timePosition + playedMillis, false);
27949
28435
  this.checkForFinish();
@@ -27951,25 +28437,25 @@
27951
28437
  checkForFinish() {
27952
28438
  let startTick = 0;
27953
28439
  let endTick = 0;
27954
- if (this.playbackRange && this._sequencer.isPlayingMain) {
28440
+ if (this.playbackRange && this.sequencer.isPlayingMain) {
27955
28441
  startTick = this.playbackRange.startTick;
27956
28442
  endTick = this.playbackRange.endTick;
27957
28443
  }
27958
28444
  else {
27959
- endTick = this._sequencer.currentEndTick;
28445
+ endTick = this.sequencer.currentEndTick;
27960
28446
  }
27961
28447
  if (this._tickPosition >= endTick) {
27962
28448
  // fully done with playback of remaining samples?
27963
28449
  if (this._notPlayedSamples <= 0) {
27964
28450
  this._notPlayedSamples = 0;
27965
- if (this._sequencer.isPlayingCountIn) {
28451
+ if (this.sequencer.isPlayingCountIn) {
27966
28452
  Logger.debug('AlphaSynth', 'Finished playback (count-in)');
27967
- this._sequencer.resetCountIn();
27968
- this.timePosition = this._sequencer.currentTime;
28453
+ this.sequencer.resetCountIn();
28454
+ this.timePosition = this.sequencer.currentTime;
27969
28455
  this.playInternal();
27970
28456
  this.output.resetSamples();
27971
28457
  }
27972
- else if (this._sequencer.isPlayingOneTimeMidi) {
28458
+ else if (this.sequencer.isPlayingOneTimeMidi) {
27973
28459
  Logger.debug('AlphaSynth', 'Finished playback (one time)');
27974
28460
  this.output.resetSamples();
27975
28461
  this.state = PlayerState.Paused;
@@ -27981,11 +28467,11 @@
27981
28467
  this.tickPosition = startTick;
27982
28468
  this._synthStopping = false;
27983
28469
  }
27984
- else if (this._synthesizer.activeVoiceCount > 0) {
28470
+ else if (this.synthesizer.activeVoiceCount > 0) {
27985
28471
  // smooth stop
27986
28472
  if (!this._synthStopping) {
27987
28473
  Logger.debug('AlphaSynth', 'Signaling synth to stop all voices (all samples played)');
27988
- this._synthesizer.noteOffAll(true);
28474
+ this.synthesizer.noteOffAll(true);
27989
28475
  this._synthStopping = true;
27990
28476
  }
27991
28477
  }
@@ -28001,7 +28487,7 @@
28001
28487
  // to eventually bring the voices down to 0 and stop playing
28002
28488
  if (!this._synthStopping) {
28003
28489
  Logger.debug('AlphaSynth', 'Signaling synth to stop all voices (not all samples played)');
28004
- this._synthesizer.noteOffAll(true);
28490
+ this.synthesizer.noteOffAll(true);
28005
28491
  this._synthStopping = true;
28006
28492
  }
28007
28493
  }
@@ -28009,31 +28495,27 @@
28009
28495
  }
28010
28496
  stopOneTimeMidi() {
28011
28497
  this.output.pause();
28012
- this._synthesizer.noteOffAll(true);
28013
- this._sequencer.resetOneTimeMidi();
28014
- this.timePosition = this._sequencer.currentTime;
28498
+ this.synthesizer.noteOffAll(true);
28499
+ this.sequencer.resetOneTimeMidi();
28500
+ this.timePosition = this.sequencer.currentTime;
28015
28501
  }
28016
28502
  updateTimePosition(timePosition, isSeek) {
28017
28503
  // update the real positions
28018
28504
  let currentTime = timePosition;
28019
28505
  this._timePosition = currentTime;
28020
- let currentTick = this._sequencer.currentTimePositionToTickPosition(currentTime);
28506
+ let currentTick = this.sequencer.currentTimePositionToTickPosition(currentTime);
28021
28507
  this._tickPosition = currentTick;
28022
- const endTime = this._sequencer.currentEndTime;
28023
- const endTick = this._sequencer.currentEndTick;
28508
+ const endTime = this.sequencer.currentEndTime;
28509
+ const endTick = this.sequencer.currentEndTick;
28024
28510
  // on fade outs we can have some milliseconds longer, ensure we don't report this
28025
28511
  if (currentTime > endTime) {
28026
28512
  currentTime = endTime;
28027
28513
  currentTick = endTick;
28028
28514
  }
28029
- const mode = this._sequencer.isPlayingMain
28030
- ? 'main'
28031
- : this._sequencer.isPlayingCountIn
28032
- ? 'count-in'
28033
- : 'one-time';
28034
- Logger.debug('AlphaSynth', `Position changed: (time: ${currentTime}/${endTime}, tick: ${currentTick}/${endTick}, Active Voices: ${this._synthesizer.activeVoiceCount} (${mode})`);
28035
- if (this._sequencer.isPlayingMain) {
28036
- this.positionChanged.trigger(new PositionChangedEventArgs(currentTime, endTime, currentTick, endTick, isSeek));
28515
+ const mode = this.sequencer.isPlayingMain ? 'main' : this.sequencer.isPlayingCountIn ? 'count-in' : 'one-time';
28516
+ Logger.debug('AlphaSynth', `Position changed: (time: ${currentTime}/${endTime}, tick: ${currentTick}/${endTick}, Active Voices: ${this.synthesizer.activeVoiceCount} (${mode}), Tempo original: ${this.sequencer.currentTempo}, Tempo modified: ${this.sequencer.modifiedTempo})`);
28517
+ if (this.sequencer.isPlayingMain) {
28518
+ this.positionChanged.trigger(new PositionChangedEventArgs(currentTime, endTime, currentTick, endTick, isSeek, this.sequencer.currentTempo, this.sequencer.modifiedTempo));
28037
28519
  }
28038
28520
  // build events which were actually played
28039
28521
  if (isSeek) {
@@ -28054,13 +28536,28 @@
28054
28536
  * @internal
28055
28537
  */
28056
28538
  hasSamplesForProgram(program) {
28057
- return this._synthesizer.hasSamplesForProgram(program);
28539
+ return this.synthesizer.hasSamplesForProgram(program);
28058
28540
  }
28059
28541
  /**
28060
28542
  * @internal
28061
28543
  */
28062
28544
  hasSamplesForPercussion(key) {
28063
- return this._synthesizer.hasSamplesForPercussion(key);
28545
+ return this.synthesizer.hasSamplesForPercussion(key);
28546
+ }
28547
+ loadBackingTrack(_score, _syncPoints) {
28548
+ }
28549
+ }
28550
+ /**
28551
+ * This is the main synthesizer component which can be used to
28552
+ * play a {@link MidiFile} via a {@link ISynthOutput}.
28553
+ */
28554
+ class AlphaSynth extends AlphaSynthBase {
28555
+ /**
28556
+ * Initializes a new instance of the {@link AlphaSynth} class.
28557
+ * @param output The output to use for playing the generated samples.
28558
+ */
28559
+ constructor(output, bufferTimeInMilliseconds) {
28560
+ super(output, new TinySoundFont(output.sampleRate), bufferTimeInMilliseconds);
28064
28561
  }
28065
28562
  }
28066
28563
 
@@ -29307,6 +29804,35 @@
29307
29804
  */
29308
29805
  PlayerOutputMode[PlayerOutputMode["WebAudioScriptProcessor"] = 1] = "WebAudioScriptProcessor";
29309
29806
  })(exports.PlayerOutputMode || (exports.PlayerOutputMode = {}));
29807
+ /**
29808
+ * Lists the different modes how the internal alphaTab player (and related cursor behavior) is working.
29809
+ */
29810
+ exports.PlayerMode = void 0;
29811
+ (function (PlayerMode) {
29812
+ /**
29813
+ * The player functionality is fully disabled.
29814
+ */
29815
+ PlayerMode[PlayerMode["Disabled"] = 0] = "Disabled";
29816
+ /**
29817
+ * The player functionality is enabled.
29818
+ * If the loaded file provides a backing track, it is used for playback.
29819
+ * If no backing track is provided, the midi synthesizer is used.
29820
+ */
29821
+ PlayerMode[PlayerMode["EnabledAutomatic"] = 1] = "EnabledAutomatic";
29822
+ /**
29823
+ * The player functionality is enabled and the synthesizer is used (even if a backing track is embedded in the file).
29824
+ */
29825
+ PlayerMode[PlayerMode["EnabledSynthesizer"] = 2] = "EnabledSynthesizer";
29826
+ /**
29827
+ * The player functionality is enabled. If the input data model has no backing track configured, the player might not work as expected (as playback completes instantly).
29828
+ */
29829
+ PlayerMode[PlayerMode["EnabledBackingTrack"] = 3] = "EnabledBackingTrack";
29830
+ /**
29831
+ * The player functionality is enabled and an external audio/video source is used as time axis.
29832
+ * The related player APIs need to be used to update the current position of the external audio source within alphaTab.
29833
+ */
29834
+ PlayerMode[PlayerMode["EnabledExternalMedia"] = 4] = "EnabledExternalMedia";
29835
+ })(exports.PlayerMode || (exports.PlayerMode = {}));
29310
29836
  /**
29311
29837
  * The player settings control how the audio playback and UI is behaving.
29312
29838
  * @json
@@ -29353,6 +29879,7 @@
29353
29879
  * @since 0.9.6
29354
29880
  * @defaultValue `false`
29355
29881
  * @category Player
29882
+ * @deprecated Use {@link playerMode} instead.
29356
29883
  * @remarks
29357
29884
  * This setting configures whether the player feature is enabled or not. Depending on the platform enabling the player needs some additional actions of the developer.
29358
29885
  * For the JavaScript version the [player.soundFont](/docs/reference/settings/player/soundfont) property must be set to the URL of the sound font that should be used or it must be loaded manually via API.
@@ -29361,6 +29888,37 @@
29361
29888
  * AlphaTab does not ship a default UI for the player. The API must be hooked up to some UI controls to allow the user to interact with the player.
29362
29889
  */
29363
29890
  this.enablePlayer = false;
29891
+ /**
29892
+ * Whether the player should be enabled and which mode it should use.
29893
+ * @since 1.6.0
29894
+ * @defaultValue `PlayerMode.Disabled`
29895
+ * @category Player
29896
+ * @remarks
29897
+ * This setting configures whether the player feature is enabled or not. Depending on the platform enabling the player needs some additional actions of the developer.
29898
+ *
29899
+ * **Synthesizer**
29900
+ *
29901
+ * If the synthesizer is used (via {@link PlayerMode.EnabledAutomatic} or {@link PlayerMode.EnabledSynthesizer}) a sound font is needed so that the midi synthesizer can produce the audio samples.
29902
+ *
29903
+ * For the JavaScript version the [player.soundFont](/docs/reference/settings/player/soundfont) property must be set to the URL of the sound font that should be used or it must be loaded manually via API.
29904
+ * For .net manually the soundfont must be loaded.
29905
+ *
29906
+ * **Backing Track**
29907
+ *
29908
+ * For a built-in backing track of the input file no additional data needs to be loaded (assuming everything is filled via the input file).
29909
+ * Otherwise the `score.backingTrack` needs to be filled before loading and the related sync points need to be configured.
29910
+ *
29911
+ * **External Media**
29912
+ *
29913
+ * For synchronizing alphaTab with an external media no data needs to be loaded into alphaTab. The configured sync points on the MasterBars are used
29914
+ * as reference to synchronize the external media with the internal time axis. Then the related APIs on the AlphaTabApi object need to be used
29915
+ * to update the playback state and exterrnal audio position during playback.
29916
+ *
29917
+ * **User Interface**
29918
+ *
29919
+ * AlphaTab does not ship a default UI for the player. The API must be hooked up to some UI controls to allow the user to interact with the player.
29920
+ */
29921
+ this.playerMode = exports.PlayerMode.Disabled;
29364
29922
  /**
29365
29923
  * Whether playback cursors should be displayed.
29366
29924
  * @since 0.9.6
@@ -30066,6 +30624,7 @@
30066
30624
  /*@target web*/
30067
30625
  o.set("outputmode", obj.outputMode);
30068
30626
  o.set("enableplayer", obj.enablePlayer);
30627
+ o.set("playermode", obj.playerMode);
30069
30628
  o.set("enablecursor", obj.enableCursor);
30070
30629
  o.set("enableanimatedbeatcursor", obj.enableAnimatedBeatCursor);
30071
30630
  o.set("enableelementhighlighting", obj.enableElementHighlighting);
@@ -30101,6 +30660,9 @@
30101
30660
  case "enableplayer":
30102
30661
  obj.enablePlayer = v;
30103
30662
  return true;
30663
+ case "playermode":
30664
+ obj.playerMode = JsonHelper.parseEnum(v, exports.PlayerMode);
30665
+ return true;
30104
30666
  case "enablecursor":
30105
30667
  obj.enableCursor = v;
30106
30668
  return true;
@@ -30335,6 +30897,39 @@
30335
30897
  }
30336
30898
  }
30337
30899
 
30900
+ class SyncPointDataSerializer {
30901
+ static fromJson(obj, m) {
30902
+ if (!m) {
30903
+ return;
30904
+ }
30905
+ JsonHelper.forEach(m, (v, k) => SyncPointDataSerializer.setProperty(obj, k, v));
30906
+ }
30907
+ static toJson(obj) {
30908
+ if (!obj) {
30909
+ return null;
30910
+ }
30911
+ const o = new Map();
30912
+ o.set("baroccurence", obj.barOccurence);
30913
+ o.set("modifiedtempo", obj.modifiedTempo);
30914
+ o.set("millisecondoffset", obj.millisecondOffset);
30915
+ return o;
30916
+ }
30917
+ static setProperty(obj, property, v) {
30918
+ switch (property) {
30919
+ case "baroccurence":
30920
+ obj.barOccurence = v;
30921
+ return true;
30922
+ case "modifiedtempo":
30923
+ obj.modifiedTempo = v;
30924
+ return true;
30925
+ case "millisecondoffset":
30926
+ obj.millisecondOffset = v;
30927
+ return true;
30928
+ }
30929
+ return false;
30930
+ }
30931
+ }
30932
+
30338
30933
  class AutomationSerializer {
30339
30934
  static fromJson(obj, m) {
30340
30935
  if (!m) {
@@ -30350,6 +30945,9 @@
30350
30945
  o.set("islinear", obj.isLinear);
30351
30946
  o.set("type", obj.type);
30352
30947
  o.set("value", obj.value);
30948
+ if (obj.syncPointValue) {
30949
+ o.set("syncpointvalue", SyncPointDataSerializer.toJson(obj.syncPointValue));
30950
+ }
30353
30951
  o.set("ratioposition", obj.ratioPosition);
30354
30952
  o.set("text", obj.text);
30355
30953
  return o;
@@ -30365,6 +30963,15 @@
30365
30963
  case "value":
30366
30964
  obj.value = v;
30367
30965
  return true;
30966
+ case "syncpointvalue":
30967
+ if (v) {
30968
+ obj.syncPointValue = new SyncPointData();
30969
+ SyncPointDataSerializer.fromJson(obj.syncPointValue, v);
30970
+ }
30971
+ else {
30972
+ obj.syncPointValue = undefined;
30973
+ }
30974
+ return true;
30368
30975
  case "ratioposition":
30369
30976
  obj.ratioPosition = v;
30370
30977
  return true;
@@ -30430,6 +31037,9 @@
30430
31037
  o.set("section", SectionSerializer.toJson(obj.section));
30431
31038
  }
30432
31039
  o.set("tempoautomations", obj.tempoAutomations.map(i => AutomationSerializer.toJson(i)));
31040
+ if (obj.syncPoints !== undefined) {
31041
+ o.set("syncpoints", obj.syncPoints?.map(i => AutomationSerializer.toJson(i)));
31042
+ }
30433
31043
  if (obj.fermata !== null) {
30434
31044
  const m = new Map();
30435
31045
  o.set("fermata", m);
@@ -30496,6 +31106,16 @@
30496
31106
  obj.tempoAutomations.push(i);
30497
31107
  }
30498
31108
  return true;
31109
+ case "syncpoints":
31110
+ if (v) {
31111
+ obj.syncPoints = [];
31112
+ for (const o of v) {
31113
+ const i = new Automation();
31114
+ AutomationSerializer.fromJson(i, o);
31115
+ obj.addSyncPoint(i);
31116
+ }
31117
+ }
31118
+ return true;
30499
31119
  case "fermata":
30500
31120
  obj.fermata = new Map();
30501
31121
  JsonHelper.forEach(v, (v, k) => {
@@ -31787,6 +32407,31 @@
31787
32407
  }
31788
32408
  }
31789
32409
 
32410
+ class BackingTrackSerializer {
32411
+ static fromJson(obj, m) {
32412
+ if (!m) {
32413
+ return;
32414
+ }
32415
+ JsonHelper.forEach(m, (v, k) => BackingTrackSerializer.setProperty(obj, k, v));
32416
+ }
32417
+ static toJson(obj) {
32418
+ if (!obj) {
32419
+ return null;
32420
+ }
32421
+ const o = new Map();
32422
+ o.set("padding", obj.padding);
32423
+ return o;
32424
+ }
32425
+ static setProperty(obj, property, v) {
32426
+ switch (property) {
32427
+ case "padding":
32428
+ obj.padding = v;
32429
+ return true;
32430
+ }
32431
+ return false;
32432
+ }
32433
+ }
32434
+
31790
32435
  class HeaderFooterStyleSerializer {
31791
32436
  static fromJson(obj, m) {
31792
32437
  if (!m) {
@@ -31898,6 +32543,9 @@
31898
32543
  o.set("defaultsystemslayout", obj.defaultSystemsLayout);
31899
32544
  o.set("systemslayout", obj.systemsLayout);
31900
32545
  o.set("stylesheet", RenderStylesheetSerializer.toJson(obj.stylesheet));
32546
+ if (obj.backingTrack) {
32547
+ o.set("backingtrack", BackingTrackSerializer.toJson(obj.backingTrack));
32548
+ }
31901
32549
  if (obj.style) {
31902
32550
  o.set("style", ScoreStyleSerializer.toJson(obj.style));
31903
32551
  }
@@ -31966,6 +32614,15 @@
31966
32614
  case "stylesheet":
31967
32615
  RenderStylesheetSerializer.fromJson(obj.stylesheet, v);
31968
32616
  return true;
32617
+ case "backingtrack":
32618
+ if (v) {
32619
+ obj.backingTrack = new BackingTrack();
32620
+ BackingTrackSerializer.fromJson(obj.backingTrack, v);
32621
+ }
32622
+ else {
32623
+ obj.backingTrack = undefined;
32624
+ }
32625
+ return true;
31969
32626
  case "style":
31970
32627
  if (v) {
31971
32628
  obj.style = new ScoreStyle();
@@ -32142,7 +32799,9 @@
32142
32799
  case MidiEventType.ProgramChange:
32143
32800
  return new ProgramChangeEvent(track, tick, JsonHelper.getValue(midiEvent, 'channel'), JsonHelper.getValue(midiEvent, 'program'));
32144
32801
  case MidiEventType.TempoChange:
32145
- return new TempoChangeEvent(tick, JsonHelper.getValue(midiEvent, 'microSecondsPerQuarterNote'));
32802
+ const tempo = new TempoChangeEvent(tick, 0);
32803
+ tempo.beatsPerMinute = JsonHelper.getValue(midiEvent, 'beatsPerMinute');
32804
+ return tempo;
32146
32805
  case MidiEventType.PitchBend:
32147
32806
  return new PitchBendEvent(track, tick, JsonHelper.getValue(midiEvent, 'channel'), JsonHelper.getValue(midiEvent, 'value'));
32148
32807
  case MidiEventType.PerNotePitchBend:
@@ -32217,7 +32876,7 @@
32217
32876
  o.set('program', midiEvent.program);
32218
32877
  break;
32219
32878
  case MidiEventType.TempoChange:
32220
- o.set('microSecondsPerQuarterNote', midiEvent.microSecondsPerQuarterNote);
32879
+ o.set('beatsPerMinute', midiEvent.beatsPerMinute);
32221
32880
  break;
32222
32881
  case MidiEventType.PitchBend:
32223
32882
  o.set('channel', midiEvent.channel);
@@ -32444,7 +33103,9 @@
32444
33103
  endTime: e.endTime,
32445
33104
  currentTick: e.currentTick,
32446
33105
  endTick: e.endTick,
32447
- isSeek: e.isSeek
33106
+ isSeek: e.isSeek,
33107
+ originalTempo: e.originalTempo,
33108
+ modifiedTempo: e.modifiedTempo
32448
33109
  });
32449
33110
  }
32450
33111
  onPlayerStateChanged(e) {
@@ -32490,7 +33151,9 @@
32490
33151
  endTime: e.endTime,
32491
33152
  currentTick: e.currentTick,
32492
33153
  endTick: e.endTick,
32493
- isSeek: e.isSeek
33154
+ isSeek: e.isSeek,
33155
+ originalTempo: e.originalTempo,
33156
+ modifiedTempo: e.modifiedTempo
32494
33157
  });
32495
33158
  }
32496
33159
  onMidiLoadFailed(e) {
@@ -33799,8 +34462,9 @@
33799
34462
  }
33800
34463
  addTempo(tick, tempo) {
33801
34464
  // bpm -> microsecond per quarter note
33802
- const tempoInUsq = (60000000 / tempo) | 0;
33803
- this._midiFile.addEvent(new TempoChangeEvent(tick, tempoInUsq));
34465
+ const tempoEvent = new TempoChangeEvent(tick, 0);
34466
+ tempoEvent.beatsPerMinute = tempo;
34467
+ this._midiFile.addEvent(tempoEvent);
33804
34468
  }
33805
34469
  addBend(track, tick, channel, value) {
33806
34470
  if (value >= SynthConstants.MaxPitchWheel) {
@@ -35072,6 +35736,10 @@
35072
35736
  * Gets or sets whether transposition pitches should be applied to the individual midi events or not.
35073
35737
  */
35074
35738
  this.applyTranspositionPitches = true;
35739
+ /**
35740
+ * The computed sync points for synchronizing the midi file with an external backing track.
35741
+ */
35742
+ this.syncPoints = [];
35075
35743
  /**
35076
35744
  * Gets the transposition pitches for the individual midi channels.
35077
35745
  */
@@ -35098,13 +35766,17 @@
35098
35766
  let previousMasterBar = null;
35099
35767
  let currentTempo = this._score.tempo;
35100
35768
  // store the previous played bar for repeats
35769
+ const barOccurence = new Map();
35101
35770
  while (!controller.finished) {
35102
35771
  const index = controller.index;
35103
35772
  const bar = this._score.masterBars[index];
35104
35773
  const currentTick = controller.currentTick;
35105
35774
  controller.processCurrent();
35106
35775
  if (controller.shouldPlay) {
35107
- this.generateMasterBar(bar, previousMasterBar, currentTick, currentTempo);
35776
+ let occurence = barOccurence.has(index) ? barOccurence.get(index) : -1;
35777
+ occurence++;
35778
+ barOccurence.set(index, occurence);
35779
+ this.generateMasterBar(bar, previousMasterBar, currentTick, currentTempo, occurence);
35108
35780
  if (bar.tempoAutomations.length > 0) {
35109
35781
  currentTempo = bar.tempoAutomations[0].value;
35110
35782
  }
@@ -35173,7 +35845,7 @@
35173
35845
  const value = Math.max(-32768, Math.min(32767, data * 8 - 1));
35174
35846
  return Math.max(value, -1) + 1;
35175
35847
  }
35176
- generateMasterBar(masterBar, previousMasterBar, currentTick, currentTempo) {
35848
+ generateMasterBar(masterBar, previousMasterBar, currentTick, currentTempo, barOccurence) {
35177
35849
  // time signature
35178
35850
  if (!previousMasterBar ||
35179
35851
  previousMasterBar.timeSignatureDenominator !== masterBar.timeSignatureDenominator ||
@@ -35200,6 +35872,15 @@
35200
35872
  else {
35201
35873
  masterBarLookup.tempoChanges.push(new MasterBarTickLookupTempoChange(currentTick, currentTempo));
35202
35874
  }
35875
+ const syncPoints = masterBar.syncPoints;
35876
+ if (syncPoints) {
35877
+ for (const syncPoint of syncPoints) {
35878
+ if (syncPoint.syncPointValue.barOccurence === barOccurence) {
35879
+ const tick = currentTick + masterBarDuration * syncPoint.ratioPosition;
35880
+ this.syncPoints.push(new BackingTrackSyncPoint(tick, syncPoint.syncPointValue));
35881
+ }
35882
+ }
35883
+ }
35203
35884
  masterBarLookup.masterBar = masterBar;
35204
35885
  masterBarLookup.start = currentTick;
35205
35886
  masterBarLookup.end = masterBarLookup.start + masterBarDuration;
@@ -37397,6 +38078,213 @@
37397
38078
  }
37398
38079
  }
37399
38080
 
38081
+ class BackingTrackAudioSynthesizer {
38082
+ constructor() {
38083
+ this._midiEventQueue = new Queue();
38084
+ this.masterVolume = 1;
38085
+ this.metronomeVolume = 0;
38086
+ this.outSampleRate = 44100;
38087
+ this.currentTempo = 120;
38088
+ this.timeSignatureNumerator = 4;
38089
+ this.timeSignatureDenominator = 4;
38090
+ this.activeVoiceCount = 0;
38091
+ }
38092
+ noteOffAll(_immediate) {
38093
+ }
38094
+ resetSoft() {
38095
+ }
38096
+ resetPresets() {
38097
+ }
38098
+ loadPresets(_hydra, _instrumentPrograms, _percussionKeys, _append) {
38099
+ }
38100
+ setupMetronomeChannel(_metronomeVolume) {
38101
+ }
38102
+ synthesizeSilent(_sampleCount) {
38103
+ this.fakeSynthesize();
38104
+ }
38105
+ processMidiMessage(e) {
38106
+ }
38107
+ dispatchEvent(synthEvent) {
38108
+ this._midiEventQueue.enqueue(synthEvent);
38109
+ }
38110
+ synthesize(_buffer, _bufferPos, _sampleCount) {
38111
+ return this.fakeSynthesize();
38112
+ }
38113
+ fakeSynthesize() {
38114
+ const processedEvents = [];
38115
+ while (!this._midiEventQueue.isEmpty) {
38116
+ const m = this._midiEventQueue.dequeue();
38117
+ if (m.isMetronome && this.metronomeVolume > 0) ;
38118
+ else if (m.event) {
38119
+ this.processMidiMessage(m.event);
38120
+ }
38121
+ processedEvents.push(m);
38122
+ }
38123
+ return processedEvents;
38124
+ }
38125
+ applyTranspositionPitches(transpositionPitches) {
38126
+ }
38127
+ setChannelTranspositionPitch(channel, semitones) {
38128
+ }
38129
+ channelSetMute(channel, mute) {
38130
+ }
38131
+ channelSetSolo(channel, solo) {
38132
+ }
38133
+ resetChannelStates() {
38134
+ }
38135
+ channelSetMixVolume(channel, volume) {
38136
+ }
38137
+ hasSamplesForProgram(program) {
38138
+ return true;
38139
+ }
38140
+ hasSamplesForPercussion(key) {
38141
+ return true;
38142
+ }
38143
+ }
38144
+ class BackingTrackPlayer extends AlphaSynthBase {
38145
+ constructor(backingTrackOutput, bufferTimeInMilliseconds) {
38146
+ super(backingTrackOutput, new BackingTrackAudioSynthesizer(), bufferTimeInMilliseconds);
38147
+ this.synthesizer.output = backingTrackOutput;
38148
+ this._backingTrackOutput = backingTrackOutput;
38149
+ backingTrackOutput.timeUpdate.on(timePosition => {
38150
+ const alphaTabTimePosition = this.sequencer.mainTimePositionFromBackingTrack(timePosition, backingTrackOutput.backingTrackDuration);
38151
+ this.sequencer.fillMidiEventQueueToEndTime(alphaTabTimePosition);
38152
+ this.synthesizer.fakeSynthesize();
38153
+ this.updateTimePosition(alphaTabTimePosition, false);
38154
+ this.checkForFinish();
38155
+ });
38156
+ }
38157
+ updateMasterVolume(value) {
38158
+ super.updateMasterVolume(value);
38159
+ this._backingTrackOutput.masterVolume = value;
38160
+ }
38161
+ updatePlaybackSpeed(value) {
38162
+ super.updatePlaybackSpeed(value);
38163
+ this._backingTrackOutput.playbackRate = value;
38164
+ }
38165
+ onSampleRequest() {
38166
+ }
38167
+ loadMidiFile(midi) {
38168
+ if (!this.isSoundFontLoaded) {
38169
+ this.isSoundFontLoaded = true;
38170
+ this.soundFontLoaded.trigger();
38171
+ }
38172
+ super.loadMidiFile(midi);
38173
+ }
38174
+ updateTimePosition(timePosition, isSeek) {
38175
+ super.updateTimePosition(timePosition, isSeek);
38176
+ if (isSeek) {
38177
+ this._backingTrackOutput.seekTo(this.sequencer.mainTimePositionToBackingTrack(timePosition, this._backingTrackOutput.backingTrackDuration));
38178
+ }
38179
+ }
38180
+ loadBackingTrack(score, syncPoints) {
38181
+ const backingTrackInfo = score.backingTrack;
38182
+ if (backingTrackInfo) {
38183
+ this._backingTrackOutput.loadBackingTrack(backingTrackInfo);
38184
+ this.sequencer.mainUpdateSyncPoints(syncPoints);
38185
+ this.timePosition = 0;
38186
+ }
38187
+ }
38188
+ }
38189
+
38190
+ class ExternalMediaSynthOutput {
38191
+ constructor() {
38192
+ // fake rate
38193
+ this.sampleRate = 44100;
38194
+ this._padding = 0;
38195
+ this._seekPosition = 0;
38196
+ this.ready = new EventEmitter();
38197
+ this.samplesPlayed = new EventEmitterOfT();
38198
+ this.timeUpdate = new EventEmitterOfT();
38199
+ this.sampleRequest = new EventEmitter();
38200
+ }
38201
+ get handler() {
38202
+ return this._handler;
38203
+ }
38204
+ set handler(value) {
38205
+ if (value) {
38206
+ if (this._seekPosition !== 0) {
38207
+ value.seekTo(this._seekPosition);
38208
+ this._seekPosition = 0;
38209
+ }
38210
+ }
38211
+ this._handler = value;
38212
+ }
38213
+ get backingTrackDuration() {
38214
+ return this.handler?.backingTrackDuration ?? 0;
38215
+ }
38216
+ get playbackRate() {
38217
+ return this.handler?.playbackRate ?? 1;
38218
+ }
38219
+ set playbackRate(value) {
38220
+ const handler = this.handler;
38221
+ if (handler) {
38222
+ handler.playbackRate = value;
38223
+ }
38224
+ }
38225
+ get masterVolume() {
38226
+ return this.handler?.masterVolume ?? 1;
38227
+ }
38228
+ set masterVolume(value) {
38229
+ const handler = this.handler;
38230
+ if (handler) {
38231
+ handler.masterVolume = value;
38232
+ }
38233
+ }
38234
+ seekTo(time) {
38235
+ const handler = this.handler;
38236
+ if (handler) {
38237
+ handler.seekTo(time - this._padding);
38238
+ }
38239
+ else {
38240
+ this._seekPosition = time - this._padding;
38241
+ }
38242
+ }
38243
+ loadBackingTrack(backingTrack) {
38244
+ this._padding = backingTrack.padding;
38245
+ }
38246
+ open(_bufferTimeInMilliseconds) {
38247
+ this.ready.trigger();
38248
+ }
38249
+ updatePosition(currentTime) {
38250
+ this.timeUpdate.trigger(currentTime + this._padding);
38251
+ }
38252
+ play() {
38253
+ this.handler?.play();
38254
+ }
38255
+ destroy() {
38256
+ }
38257
+ pause() {
38258
+ this.handler?.pause();
38259
+ }
38260
+ addSamples(_samples) {
38261
+ }
38262
+ resetSamples() {
38263
+ }
38264
+ activate() {
38265
+ }
38266
+ async enumerateOutputDevices() {
38267
+ const empty = [];
38268
+ return empty;
38269
+ }
38270
+ async setOutputDevice(_device) {
38271
+ }
38272
+ async getOutputDevice() {
38273
+ return null;
38274
+ }
38275
+ }
38276
+ class ExternalMediaPlayer extends BackingTrackPlayer {
38277
+ get handler() {
38278
+ return this.output.handler;
38279
+ }
38280
+ set handler(value) {
38281
+ this.output.handler = value;
38282
+ }
38283
+ constructor(bufferTimeInMilliseconds) {
38284
+ super(new ExternalMediaSynthOutput(), bufferTimeInMilliseconds);
38285
+ }
38286
+ }
38287
+
37400
38288
  class SelectionInfo {
37401
38289
  constructor(beat) {
37402
38290
  this.bounds = null;
@@ -37410,6 +38298,12 @@
37410
38298
  * @csharp_public
37411
38299
  */
37412
38300
  class AlphaTabApiBase {
38301
+ /**
38302
+ * The actual player mode which is currently active (e.g. allows determining whether a backing track or the synthesizer is active).
38303
+ */
38304
+ get actualPlayerMode() {
38305
+ return this._actualPlayerMode;
38306
+ }
37413
38307
  /**
37414
38308
  * The score holding all information about the song being rendered
37415
38309
  * @category Properties - Core
@@ -37479,10 +38373,8 @@
37479
38373
  this._isDestroyed = false;
37480
38374
  this._score = null;
37481
38375
  this._tracks = [];
38376
+ this._actualPlayerMode = exports.PlayerMode.Disabled;
37482
38377
  this._tickCache = null;
37483
- /**
37484
- * Gets the alphaSynth player used for playback. This is the low-level API to the Midi synthesizer used for playback.
37485
- */
37486
38378
  /**
37487
38379
  * The alphaSynth player used for playback.
37488
38380
  * @remarks
@@ -38519,6 +39411,10 @@
38519
39411
  this.container = uiFacade.rootContainer;
38520
39412
  uiFacade.initialize(this, settings);
38521
39413
  Logger.logLevel = this.settings.core.logLevel;
39414
+ // backwards compatibility: remove in 2.0
39415
+ if (this.settings.player.playerMode === exports.PlayerMode.Disabled && this.settings.player.enablePlayer) {
39416
+ this.settings.player.playerMode = exports.PlayerMode.EnabledAutomatic;
39417
+ }
38522
39418
  Environment.printEnvironmentInfo(false);
38523
39419
  this.canvasElement = uiFacade.createCanvasElement();
38524
39420
  this.container.appendChild(this.canvasElement);
@@ -38562,7 +39458,7 @@
38562
39458
  this.appendRenderResult(null); // marks last element
38563
39459
  });
38564
39460
  this.renderer.error.on(this.onError.bind(this));
38565
- if (this.settings.player.enablePlayer) {
39461
+ if (this.settings.player.playerMode !== exports.PlayerMode.Disabled) {
38566
39462
  this.setupPlayer();
38567
39463
  }
38568
39464
  this.setupClickHandling();
@@ -38654,10 +39550,9 @@
38654
39550
  }
38655
39551
  this.renderer.updateSettings(this.settings);
38656
39552
  // enable/disable player if needed
38657
- if (this.settings.player.enablePlayer) {
38658
- this.setupPlayer();
38659
- if (score) {
38660
- this.player?.applyTranspositionPitches(MidiFileGenerator.buildTranspositionPitches(score, this.settings));
39553
+ if (this.settings.player.playerMode !== exports.PlayerMode.Disabled) {
39554
+ if (this.setupPlayer() && score) {
39555
+ this.loadMidiForScore();
38661
39556
  }
38662
39557
  }
38663
39558
  else {
@@ -39589,13 +40484,51 @@
39589
40484
  this.destroyCursors();
39590
40485
  }
39591
40486
  setupPlayer() {
40487
+ let mode = this.settings.player.playerMode;
40488
+ if (mode === exports.PlayerMode.EnabledAutomatic) {
40489
+ const score = this.score;
40490
+ if (!score) {
40491
+ return false;
40492
+ }
40493
+ if (score?.backingTrack?.rawAudioFile) {
40494
+ mode = exports.PlayerMode.EnabledBackingTrack;
40495
+ }
40496
+ else {
40497
+ mode = exports.PlayerMode.EnabledSynthesizer;
40498
+ }
40499
+ }
40500
+ if (mode !== this._actualPlayerMode) {
40501
+ this.destroyPlayer();
40502
+ }
39592
40503
  this.updateCursors();
39593
- if (this.player) {
39594
- return;
40504
+ this._actualPlayerMode = mode;
40505
+ switch (mode) {
40506
+ case exports.PlayerMode.Disabled:
40507
+ this.destroyPlayer();
40508
+ return false;
40509
+ case exports.PlayerMode.EnabledSynthesizer:
40510
+ if (this.player) {
40511
+ return true;
40512
+ }
40513
+ // new player needed
40514
+ this.player = this.uiFacade.createWorkerPlayer();
40515
+ break;
40516
+ case exports.PlayerMode.EnabledBackingTrack:
40517
+ if (this.player) {
40518
+ return true;
40519
+ }
40520
+ // new player needed
40521
+ this.player = this.uiFacade.createBackingTrackPlayer();
40522
+ break;
40523
+ case exports.PlayerMode.EnabledExternalMedia:
40524
+ if (this.player) {
40525
+ return true;
40526
+ }
40527
+ this.player = new ExternalMediaPlayer(this.settings.player.bufferTimeInMilliseconds);
40528
+ break;
39595
40529
  }
39596
- this.player = this.uiFacade.createWorkerPlayer();
39597
40530
  if (!this.player) {
39598
- return;
40531
+ return false;
39599
40532
  }
39600
40533
  this.player.ready.on(() => {
39601
40534
  this.loadMidiForScore();
@@ -39624,6 +40557,7 @@
39624
40557
  this.player.playbackRangeChanged.on(this.onPlaybackRangeChanged.bind(this));
39625
40558
  this.player.finished.on(this.onPlayerFinished.bind(this));
39626
40559
  this.setupPlayerEvents();
40560
+ return false;
39627
40561
  }
39628
40562
  loadMidiForScore() {
39629
40563
  if (!this.score) {
@@ -39645,6 +40579,7 @@
39645
40579
  const player = this.player;
39646
40580
  if (player) {
39647
40581
  player.loadMidiFile(midiFile);
40582
+ player.loadBackingTrack(score, generator.syncPoints);
39648
40583
  player.applyTranspositionPitches(generator.transpositionPitches);
39649
40584
  }
39650
40585
  }
@@ -40061,7 +40996,7 @@
40061
40996
  this._selectionWrapper = cursors.selectionWrapper;
40062
40997
  }
40063
40998
  if (this._currentBeat !== null) {
40064
- this.cursorUpdateBeat(this._currentBeat, false, this._previousTick > 10, true);
40999
+ this.cursorUpdateBeat(this._currentBeat, false, this._previousTick > 10, 1, true);
40065
41000
  }
40066
41001
  }
40067
41002
  else if (!this.settings.player.enableCursor && this._cursorWrapper) {
@@ -40076,13 +41011,14 @@
40076
41011
  // we need to update our position caches if we render a tablature
40077
41012
  this.renderer.postRenderFinished.on(() => {
40078
41013
  this._currentBeat = null;
40079
- this.cursorUpdateTick(this._previousTick, false, this._previousTick > 10);
41014
+ this.cursorUpdateTick(this._previousTick, false, 1, this._previousTick > 10);
40080
41015
  });
40081
41016
  if (this.player) {
40082
41017
  this.player.positionChanged.on(e => {
40083
41018
  this._previousTick = e.currentTick;
40084
41019
  this.uiFacade.beginInvoke(() => {
40085
- this.cursorUpdateTick(e.currentTick, false, false, e.isSeek);
41020
+ const cursorSpeed = e.modifiedTempo / e.originalTempo;
41021
+ this.cursorUpdateTick(e.currentTick, false, cursorSpeed, false, e.isSeek);
40086
41022
  });
40087
41023
  });
40088
41024
  this.player.stateChanged.on(e => {
@@ -40103,14 +41039,15 @@
40103
41039
  * @param stop
40104
41040
  * @param shouldScroll whether we should scroll to the bar (if scrolling is active)
40105
41041
  */
40106
- cursorUpdateTick(tick, stop, shouldScroll = false, forceUpdate = false) {
41042
+ cursorUpdateTick(tick, stop, cursorSpeed, shouldScroll = false, forceUpdate = false) {
41043
+ this._previousTick = tick;
40107
41044
  const cache = this._tickCache;
40108
41045
  if (cache) {
40109
41046
  const tracks = this._trackIndexLookup;
40110
41047
  if (tracks != null && tracks.size > 0) {
40111
41048
  const beat = cache.findBeat(tracks, tick, this._currentBeat);
40112
41049
  if (beat) {
40113
- this.cursorUpdateBeat(beat, stop, shouldScroll, forceUpdate || this.playerState === PlayerState.Paused);
41050
+ this.cursorUpdateBeat(beat, stop, shouldScroll, cursorSpeed, forceUpdate || this.playerState === PlayerState.Paused);
40114
41051
  }
40115
41052
  }
40116
41053
  }
@@ -40118,7 +41055,7 @@
40118
41055
  /**
40119
41056
  * updates the cursors to highlight the specified beat
40120
41057
  */
40121
- cursorUpdateBeat(lookupResult, stop, shouldScroll, forceUpdate = false) {
41058
+ cursorUpdateBeat(lookupResult, stop, shouldScroll, cursorSpeed, forceUpdate = false) {
40122
41059
  const beat = lookupResult.beat;
40123
41060
  const nextBeat = lookupResult.nextBeat?.beat ?? null;
40124
41061
  const duration = lookupResult.duration;
@@ -40150,7 +41087,7 @@
40150
41087
  this._previousCursorCache = cache;
40151
41088
  this._previousStateForCursor = this._playerState;
40152
41089
  this.uiFacade.beginInvoke(() => {
40153
- this.internalCursorUpdateBeat(beat, nextBeat, duration, stop, beatsToHighlight, cache, beatBoundings, shouldScroll, lookupResult.cursorMode);
41090
+ this.internalCursorUpdateBeat(beat, nextBeat, duration, stop, beatsToHighlight, cache, beatBoundings, shouldScroll, lookupResult.cursorMode, cursorSpeed);
40154
41091
  });
40155
41092
  }
40156
41093
  /**
@@ -40215,7 +41152,7 @@
40215
41152
  }
40216
41153
  }
40217
41154
  }
40218
- internalCursorUpdateBeat(beat, nextBeat, duration, stop, beatsToHighlight, cache, beatBoundings, shouldScroll, cursorMode) {
41155
+ internalCursorUpdateBeat(beat, nextBeat, duration, stop, beatsToHighlight, cache, beatBoundings, shouldScroll, cursorMode, cursorSpeed) {
40219
41156
  const barCursor = this._barCursor;
40220
41157
  const beatCursor = this._beatCursor;
40221
41158
  const barBoundings = beatBoundings.barBounds.masterBarBounds;
@@ -40224,12 +41161,29 @@
40224
41161
  if (barCursor) {
40225
41162
  barCursor.setBounds(barBounds.x, barBounds.y, barBounds.w, barBounds.h);
40226
41163
  }
41164
+ let nextBeatX = barBoundings.visualBounds.x + barBoundings.visualBounds.w;
41165
+ // get position of next beat on same system
41166
+ if (nextBeat && cursorMode === MidiTickLookupFindBeatResultCursorMode.ToNextBext) {
41167
+ // if we are moving within the same bar or to the next bar
41168
+ // transition to the next beat, otherwise transition to the end of the bar.
41169
+ const nextBeatBoundings = cache.findBeat(nextBeat);
41170
+ if (nextBeatBoundings &&
41171
+ nextBeatBoundings.barBounds.masterBarBounds.staffSystemBounds === barBoundings.staffSystemBounds) {
41172
+ nextBeatX = nextBeatBoundings.onNotesX;
41173
+ }
41174
+ }
41175
+ let startBeatX = beatBoundings.onNotesX;
40227
41176
  if (beatCursor) {
40228
- // move beat to start position immediately
41177
+ // relative positioning of the cursor
40229
41178
  if (this.settings.player.enableAnimatedBeatCursor) {
40230
- beatCursor.stopAnimation();
41179
+ const animationWidth = nextBeatX - beatBoundings.onNotesX;
41180
+ const relativePosition = this._previousTick - this._currentBeat.start;
41181
+ const ratioPosition = relativePosition / this._currentBeat.tickDuration;
41182
+ startBeatX = beatBoundings.onNotesX + animationWidth * ratioPosition;
41183
+ duration -= duration * ratioPosition;
41184
+ beatCursor.transitionToX(0, startBeatX);
40231
41185
  }
40232
- beatCursor.setBounds(beatBoundings.onNotesX, barBounds.y, 1, barBounds.h);
41186
+ beatCursor.setBounds(startBeatX, barBounds.y, 1, barBounds.h);
40233
41187
  }
40234
41188
  // if playing, animate the cursor to the next beat
40235
41189
  if (this.settings.player.enableElementHighlighting) {
@@ -40249,22 +41203,11 @@
40249
41203
  shouldNotifyBeatChange = true;
40250
41204
  }
40251
41205
  if (this.settings.player.enableAnimatedBeatCursor && beatCursor) {
40252
- let nextBeatX = barBoundings.visualBounds.x + barBoundings.visualBounds.w;
40253
- // get position of next beat on same system
40254
- if (nextBeat && cursorMode === MidiTickLookupFindBeatResultCursorMode.ToNextBext) {
40255
- // if we are moving within the same bar or to the next bar
40256
- // transition to the next beat, otherwise transition to the end of the bar.
40257
- const nextBeatBoundings = cache.findBeat(nextBeat);
40258
- if (nextBeatBoundings &&
40259
- nextBeatBoundings.barBounds.masterBarBounds.staffSystemBounds === barBoundings.staffSystemBounds) {
40260
- nextBeatX = nextBeatBoundings.onNotesX;
40261
- }
40262
- }
40263
41206
  if (isPlayingUpdate) {
40264
41207
  // we need to put the transition to an own animation frame
40265
41208
  // otherwise the stop animation above is not applied.
40266
41209
  this.uiFacade.beginInvoke(() => {
40267
- beatCursor.transitionToX(duration / this.playbackSpeed, nextBeatX);
41210
+ beatCursor.transitionToX(duration / cursorSpeed, nextBeatX);
40268
41211
  });
40269
41212
  }
40270
41213
  }
@@ -40295,7 +41238,7 @@
40295
41238
  if (this._isDestroyed) {
40296
41239
  return;
40297
41240
  }
40298
- if (this.settings.player.enablePlayer &&
41241
+ if (this.settings.player.playerMode !== exports.PlayerMode.Disabled &&
40299
41242
  this.settings.player.enableCursor &&
40300
41243
  this.settings.player.enableUserInteraction) {
40301
41244
  this._selectionStart = new SelectionInfo(beat);
@@ -40337,7 +41280,7 @@
40337
41280
  if (this._isDestroyed) {
40338
41281
  return;
40339
41282
  }
40340
- if (this.settings.player.enablePlayer &&
41283
+ if (this.settings.player.playerMode !== exports.PlayerMode.Disabled &&
40341
41284
  this.settings.player.enableCursor &&
40342
41285
  this.settings.player.enableUserInteraction) {
40343
41286
  if (this._selectionEnd) {
@@ -40358,7 +41301,7 @@
40358
41301
  // move to selection start
40359
41302
  this._currentBeat = null; // reset current beat so it is updating the cursor
40360
41303
  if (this._playerState === PlayerState.Paused) {
40361
- this.cursorUpdateTick(this._tickCache.getBeatStart(this._selectionStart.beat), false);
41304
+ this.cursorUpdateTick(this._tickCache.getBeatStart(this._selectionStart.beat), false, 1);
40362
41305
  }
40363
41306
  this.tickPosition = realMasterBarStart + this._selectionStart.beat.playbackStart;
40364
41307
  // set playback range
@@ -40470,7 +41413,7 @@
40470
41413
  });
40471
41414
  this.renderer.postRenderFinished.on(() => {
40472
41415
  if (!this._selectionStart ||
40473
- !this.settings.player.enablePlayer ||
41416
+ this.settings.player.playerMode === exports.PlayerMode.Disabled ||
40474
41417
  !this.settings.player.enableCursor ||
40475
41418
  !this.settings.player.enableUserInteraction) {
40476
41419
  return;
@@ -40548,6 +41491,9 @@
40548
41491
  }
40549
41492
  this.scoreLoaded.trigger(score);
40550
41493
  this.uiFacade.triggerEvent(this.container, 'scoreLoaded', score);
41494
+ if (this.setupPlayer()) {
41495
+ this.loadMidiForScore();
41496
+ }
40551
41497
  }
40552
41498
  onResize(e) {
40553
41499
  if (this._isDestroyed) {
@@ -41287,6 +42233,85 @@
41287
42233
  return this.device.label;
41288
42234
  }
41289
42235
  }
42236
+ /**
42237
+ * Some shared web audio stuff.
42238
+ * @target web
42239
+ */
42240
+ class WebAudioHelper {
42241
+ static findKnownDevice(sinkId) {
42242
+ return WebAudioHelper._knownDevices.find(d => d.deviceId === sinkId);
42243
+ }
42244
+ static createAudioContext() {
42245
+ if ('AudioContext' in Environment.globalThis) {
42246
+ return new AudioContext();
42247
+ }
42248
+ if ('webkitAudioContext' in Environment.globalThis) {
42249
+ return new webkitAudioContext();
42250
+ }
42251
+ throw new AlphaTabError(exports.AlphaTabErrorType.General, 'AudioContext not found');
42252
+ }
42253
+ static async checkSinkIdSupport() {
42254
+ // https://caniuse.com/mdn-api_audiocontext_sinkid
42255
+ const context = WebAudioHelper.createAudioContext();
42256
+ if (!('setSinkId' in context)) {
42257
+ Logger.warning('WebAudio', 'Browser does not support changing the output device');
42258
+ return false;
42259
+ }
42260
+ return true;
42261
+ }
42262
+ static async enumerateOutputDevices() {
42263
+ try {
42264
+ if (!(await WebAudioHelper.checkSinkIdSupport())) {
42265
+ return [];
42266
+ }
42267
+ // Request permissions
42268
+ try {
42269
+ await navigator.mediaDevices.getUserMedia({ audio: true });
42270
+ }
42271
+ catch (e) {
42272
+ // sometimes we get an error but can still enumerate, e.g. if microphone access is denied,
42273
+ // we can still load the output devices in some cases.
42274
+ Logger.warning('WebAudio', 'Output device permission rejected', e);
42275
+ }
42276
+ // load devices
42277
+ const devices = await navigator.mediaDevices.enumerateDevices();
42278
+ // default device candidates
42279
+ let defaultDeviceGroupId = '';
42280
+ let defaultDeviceId = '';
42281
+ const realDevices = new Map();
42282
+ for (const device of devices) {
42283
+ if (device.kind === 'audiooutput') {
42284
+ realDevices.set(device.groupId, new AlphaSynthWebAudioSynthOutputDevice(device));
42285
+ // chromium has the default device as deviceID: 'default'
42286
+ // the standard defines empty-string as default
42287
+ if (device.deviceId === 'default' || device.deviceId === '') {
42288
+ defaultDeviceGroupId = device.groupId;
42289
+ defaultDeviceId = device.deviceId;
42290
+ }
42291
+ }
42292
+ }
42293
+ const final = Array.from(realDevices.values());
42294
+ // flag default device
42295
+ let defaultDevice = final.find(d => d.deviceId === defaultDeviceId);
42296
+ if (!defaultDevice) {
42297
+ defaultDevice = final.find(d => d.device.groupId === defaultDeviceGroupId);
42298
+ }
42299
+ if (!defaultDevice && final.length > 0) {
42300
+ defaultDevice = final[0];
42301
+ }
42302
+ if (defaultDevice) {
42303
+ defaultDevice.isDefault = true;
42304
+ }
42305
+ WebAudioHelper._knownDevices = final;
42306
+ return final;
42307
+ }
42308
+ catch (e) {
42309
+ Logger.error('WebAudio', 'Failed to enumerate output devices', e);
42310
+ return [];
42311
+ }
42312
+ }
42313
+ }
42314
+ WebAudioHelper._knownDevices = [];
41290
42315
  /**
41291
42316
  * @target web
41292
42317
  */
@@ -41298,14 +42323,13 @@
41298
42323
  this.ready = new EventEmitter();
41299
42324
  this.samplesPlayed = new EventEmitterOfT();
41300
42325
  this.sampleRequest = new EventEmitter();
41301
- this._knownDevices = [];
41302
42326
  }
41303
42327
  get sampleRate() {
41304
42328
  return this._context ? this._context.sampleRate : AlphaSynthWebAudioOutputBase.PreferredSampleRate;
41305
42329
  }
41306
42330
  activate(resumedCallback) {
41307
42331
  if (!this._context) {
41308
- this._context = this.createAudioContext();
42332
+ this._context = WebAudioHelper.createAudioContext();
41309
42333
  }
41310
42334
  if (this._context.state === 'suspended' || this._context.state === 'interrupted') {
41311
42335
  Logger.debug('WebAudio', 'Audio Context is suspended, trying resume');
@@ -41322,7 +42346,7 @@
41322
42346
  patchIosSampleRate() {
41323
42347
  const ua = navigator.userAgent;
41324
42348
  if (ua.indexOf('iPhone') !== -1 || ua.indexOf('iPad') !== -1) {
41325
- const context = this.createAudioContext();
42349
+ const context = WebAudioHelper.createAudioContext();
41326
42350
  const buffer = context.createBuffer(1, 1, AlphaSynthWebAudioOutputBase.PreferredSampleRate);
41327
42351
  const dummy = context.createBufferSource();
41328
42352
  dummy.buffer = buffer;
@@ -41333,18 +42357,9 @@
41333
42357
  context.close();
41334
42358
  }
41335
42359
  }
41336
- createAudioContext() {
41337
- if ('AudioContext' in Environment.globalThis) {
41338
- return new AudioContext();
41339
- }
41340
- if ('webkitAudioContext' in Environment.globalThis) {
41341
- return new webkitAudioContext();
41342
- }
41343
- throw new AlphaTabError(exports.AlphaTabErrorType.General, 'AudioContext not found');
41344
- }
41345
42360
  open(bufferTimeInMilliseconds) {
41346
42361
  this.patchIosSampleRate();
41347
- this._context = this.createAudioContext();
42362
+ this._context = WebAudioHelper.createAudioContext();
41348
42363
  const ctx = this._context;
41349
42364
  if (ctx.state === 'suspended') {
41350
42365
  this.registerResumeHandler();
@@ -41397,68 +42412,11 @@
41397
42412
  onReady() {
41398
42413
  this.ready.trigger();
41399
42414
  }
41400
- async checkSinkIdSupport() {
41401
- // https://caniuse.com/mdn-api_audiocontext_sinkid
41402
- const context = this._context ?? this.createAudioContext();
41403
- if (!('setSinkId' in context)) {
41404
- Logger.warning('WebAudio', 'Browser does not support changing the output device');
41405
- return false;
41406
- }
41407
- return true;
41408
- }
41409
- async enumerateOutputDevices() {
41410
- try {
41411
- if (!(await this.checkSinkIdSupport())) {
41412
- return [];
41413
- }
41414
- // Request permissions
41415
- try {
41416
- await navigator.mediaDevices.getUserMedia({ audio: true });
41417
- }
41418
- catch (e) {
41419
- // sometimes we get an error but can still enumerate, e.g. if microphone access is denied,
41420
- // we can still load the output devices in some cases.
41421
- Logger.warning('WebAudio', 'Output device permission rejected', e);
41422
- }
41423
- // load devices
41424
- const devices = await navigator.mediaDevices.enumerateDevices();
41425
- // default device candidates
41426
- let defaultDeviceGroupId = '';
41427
- let defaultDeviceId = '';
41428
- const realDevices = new Map();
41429
- for (const device of devices) {
41430
- if (device.kind === 'audiooutput') {
41431
- realDevices.set(device.groupId, new AlphaSynthWebAudioSynthOutputDevice(device));
41432
- // chromium has the default device as deviceID: 'default'
41433
- // the standard defines empty-string as default
41434
- if (device.deviceId === 'default' || device.deviceId === '') {
41435
- defaultDeviceGroupId = device.groupId;
41436
- defaultDeviceId = device.deviceId;
41437
- }
41438
- }
41439
- }
41440
- const final = Array.from(realDevices.values());
41441
- // flag default device
41442
- let defaultDevice = final.find(d => d.deviceId === defaultDeviceId);
41443
- if (!defaultDevice) {
41444
- defaultDevice = final.find(d => d.device.groupId === defaultDeviceGroupId);
41445
- }
41446
- if (!defaultDevice && final.length > 0) {
41447
- defaultDevice = final[0];
41448
- }
41449
- if (defaultDevice) {
41450
- defaultDevice.isDefault = true;
41451
- }
41452
- this._knownDevices = final;
41453
- return final;
41454
- }
41455
- catch (e) {
41456
- Logger.error('WebAudio', 'Failed to enumerate output devices', e);
41457
- return [];
41458
- }
42415
+ enumerateOutputDevices() {
42416
+ return WebAudioHelper.enumerateOutputDevices();
41459
42417
  }
41460
42418
  async setOutputDevice(device) {
41461
- if (!(await this.checkSinkIdSupport())) {
42419
+ if (!(await WebAudioHelper.checkSinkIdSupport())) {
41462
42420
  return;
41463
42421
  }
41464
42422
  // https://developer.mozilla.org/en-US/docs/Web/API/AudioContext/setSinkId
@@ -41470,7 +42428,7 @@
41470
42428
  }
41471
42429
  }
41472
42430
  async getOutputDevice() {
41473
- if (!(await this.checkSinkIdSupport())) {
42431
+ if (!(await WebAudioHelper.checkSinkIdSupport())) {
41474
42432
  return null;
41475
42433
  }
41476
42434
  // https://developer.mozilla.org/en-US/docs/Web/API/AudioContext/sinkId
@@ -41479,7 +42437,7 @@
41479
42437
  return null;
41480
42438
  }
41481
42439
  // fast path -> cached devices list
41482
- let device = this._knownDevices.find(d => d.deviceId === sinkId);
42440
+ let device = WebAudioHelper.findKnownDevice(sinkId);
41483
42441
  if (device) {
41484
42442
  return device;
41485
42443
  }
@@ -41927,7 +42885,7 @@
41927
42885
  case 'alphaSynth.positionChanged':
41928
42886
  this._timePosition = data.currentTime;
41929
42887
  this._tickPosition = data.currentTick;
41930
- this.positionChanged.trigger(new PositionChangedEventArgs(data.currentTime, data.endTime, data.currentTick, data.endTick, data.isSeek));
42888
+ this.positionChanged.trigger(new PositionChangedEventArgs(data.currentTime, data.endTime, data.currentTick, data.endTick, data.isSeek, data.originalTempo, data.modifiedTempo));
41931
42889
  break;
41932
42890
  case 'alphaSynth.midiEventsPlayed':
41933
42891
  this.midiEventsPlayed.trigger(new MidiEventsPlayedEventArgs(data.events.map(JsonConverter.jsObjectToMidiEvent)));
@@ -41951,7 +42909,7 @@
41951
42909
  break;
41952
42910
  case 'alphaSynth.midiLoaded':
41953
42911
  this.checkReadyForPlayback();
41954
- this.midiLoaded.trigger(new PositionChangedEventArgs(data.currentTime, data.endTime, data.currentTick, data.endTick, data.isSeek));
42912
+ this.midiLoaded.trigger(new PositionChangedEventArgs(data.currentTime, data.endTime, data.currentTick, data.endTick, data.isSeek, data.originalTempo, data.modifiedTempo));
41955
42913
  break;
41956
42914
  case 'alphaSynth.midiLoadFailed':
41957
42915
  this.checkReadyForPlayback();
@@ -42001,6 +42959,8 @@
42001
42959
  this._outputIsReady = true;
42002
42960
  this.checkReady();
42003
42961
  }
42962
+ loadBackingTrack(_score) {
42963
+ }
42004
42964
  }
42005
42965
 
42006
42966
  /**
@@ -42368,6 +43328,123 @@
42368
43328
  }
42369
43329
  }
42370
43330
 
43331
+ /**
43332
+ * @target web
43333
+ */
43334
+ class AudioElementBackingTrackSynthOutput {
43335
+ constructor() {
43336
+ // fake rate
43337
+ this.sampleRate = 44100;
43338
+ this._padding = 0;
43339
+ this._updateInterval = 0;
43340
+ this.ready = new EventEmitter();
43341
+ this.samplesPlayed = new EventEmitterOfT();
43342
+ this.timeUpdate = new EventEmitterOfT();
43343
+ this.sampleRequest = new EventEmitter();
43344
+ }
43345
+ get backingTrackDuration() {
43346
+ const duration = this.audioElement.duration ?? 0;
43347
+ return Number.isFinite(duration) ? duration * 1000 : 0;
43348
+ }
43349
+ get playbackRate() {
43350
+ return this.audioElement.playbackRate;
43351
+ }
43352
+ set playbackRate(value) {
43353
+ this.audioElement.playbackRate = value;
43354
+ }
43355
+ get masterVolume() {
43356
+ return this.audioElement.volume;
43357
+ }
43358
+ set masterVolume(value) {
43359
+ this.audioElement.volume = value;
43360
+ }
43361
+ seekTo(time) {
43362
+ this.audioElement.currentTime = time / 1000 - this._padding;
43363
+ }
43364
+ loadBackingTrack(backingTrack) {
43365
+ if (this.audioElement?.src) {
43366
+ URL.revokeObjectURL(this.audioElement.src);
43367
+ }
43368
+ this._padding = backingTrack.padding / 1000;
43369
+ const blob = new Blob([backingTrack.rawAudioFile]);
43370
+ this.audioElement.src = URL.createObjectURL(blob);
43371
+ }
43372
+ open(_bufferTimeInMilliseconds) {
43373
+ const audioElement = document.createElement('audio');
43374
+ audioElement.style.display = 'none';
43375
+ document.body.appendChild(audioElement);
43376
+ audioElement.addEventListener('timeupdate', () => {
43377
+ this.updatePosition();
43378
+ });
43379
+ this.audioElement = audioElement;
43380
+ this.ready.trigger();
43381
+ }
43382
+ updatePosition() {
43383
+ const timePos = (this.audioElement.currentTime + this._padding) * 1000;
43384
+ this.timeUpdate.trigger(timePos);
43385
+ }
43386
+ play() {
43387
+ this.audioElement.play();
43388
+ this._updateInterval = window.setInterval(() => {
43389
+ this.updatePosition();
43390
+ }, 50);
43391
+ }
43392
+ destroy() {
43393
+ const audioElement = this.audioElement;
43394
+ if (audioElement) {
43395
+ document.body.removeChild(audioElement);
43396
+ }
43397
+ }
43398
+ pause() {
43399
+ this.audioElement.pause();
43400
+ window.clearInterval(this._updateInterval);
43401
+ }
43402
+ addSamples(_samples) {
43403
+ }
43404
+ resetSamples() {
43405
+ }
43406
+ activate() {
43407
+ }
43408
+ async enumerateOutputDevices() {
43409
+ return WebAudioHelper.enumerateOutputDevices();
43410
+ }
43411
+ async setOutputDevice(device) {
43412
+ if (!(await WebAudioHelper.checkSinkIdSupport())) {
43413
+ return;
43414
+ }
43415
+ // https://developer.mozilla.org/en-US/docs/Web/API/AudioContext/setSinkId
43416
+ if (!device) {
43417
+ await this.audioElement.setSinkId('');
43418
+ }
43419
+ else {
43420
+ await this.audioElement.setSinkId(device.deviceId);
43421
+ }
43422
+ }
43423
+ async getOutputDevice() {
43424
+ if (!(await WebAudioHelper.checkSinkIdSupport())) {
43425
+ return null;
43426
+ }
43427
+ // https://developer.mozilla.org/en-US/docs/Web/API/AudioContext/sinkId
43428
+ const sinkId = this.audioElement.sinkId;
43429
+ if (typeof sinkId !== 'string' || sinkId === '' || sinkId === 'default') {
43430
+ return null;
43431
+ }
43432
+ // fast path -> cached devices list
43433
+ let device = WebAudioHelper.findKnownDevice(sinkId);
43434
+ if (device) {
43435
+ return device;
43436
+ }
43437
+ // slow path -> enumerate devices
43438
+ const allDevices = await this.enumerateOutputDevices();
43439
+ device = allDevices.find(d => d.deviceId === sinkId);
43440
+ if (device) {
43441
+ return device;
43442
+ }
43443
+ Logger.warning('WebAudio', 'Could not find output device in device list', sinkId, allDevices);
43444
+ return null;
43445
+ }
43446
+ }
43447
+
42371
43448
  /**
42372
43449
  * @target web
42373
43450
  */
@@ -43004,6 +44081,9 @@
43004
44081
  window.requestAnimationFrame(step);
43005
44082
  }
43006
44083
  }
44084
+ createBackingTrackPlayer() {
44085
+ return new BackingTrackPlayer(new AudioElementBackingTrackSynthOutput(), this._api.settings.player.bufferTimeInMilliseconds);
44086
+ }
43007
44087
  }
43008
44088
 
43009
44089
  /**
@@ -43146,7 +44226,7 @@
43146
44226
  settings.core.file = null;
43147
44227
  settings.core.tracks = null;
43148
44228
  settings.player.enableCursor = false;
43149
- settings.player.enablePlayer = false;
44229
+ settings.player.playerMode = exports.PlayerMode.Disabled;
43150
44230
  settings.player.enableElementHighlighting = false;
43151
44231
  settings.player.enableUserInteraction = false;
43152
44232
  settings.player.soundFont = null;
@@ -57107,96 +58187,6 @@
57107
58187
  }
57108
58188
  }
57109
58189
 
57110
- /**
57111
- * A very basic polyfill of the ResizeObserver which triggers
57112
- * a the callback on window resize for all registered targets.
57113
- * @target web
57114
- */
57115
- class ResizeObserverPolyfill {
57116
- constructor(callback) {
57117
- this._targets = new Set();
57118
- this._callback = callback;
57119
- window.addEventListener('resize', this.onWindowResize.bind(this), false);
57120
- }
57121
- observe(target) {
57122
- this._targets.add(target);
57123
- }
57124
- unobserve(target) {
57125
- this._targets.delete(target);
57126
- }
57127
- disconnect() {
57128
- this._targets.clear();
57129
- }
57130
- onWindowResize() {
57131
- const entries = [];
57132
- for (const t of this._targets) {
57133
- entries.push({
57134
- target: t,
57135
- // not used by alphaTab
57136
- contentRect: undefined,
57137
- borderBoxSize: undefined,
57138
- contentBoxSize: [],
57139
- devicePixelContentBoxSize: []
57140
- });
57141
- }
57142
- this._callback(entries, this);
57143
- }
57144
- }
57145
-
57146
- /**
57147
- * A polyfill of the InsersectionObserver
57148
- * @target web
57149
- */
57150
- class IntersectionObserverPolyfill {
57151
- constructor(callback) {
57152
- this._elements = [];
57153
- let timer = null;
57154
- const oldCheck = this.check.bind(this);
57155
- this.check = () => {
57156
- if (!timer) {
57157
- timer = setTimeout(() => {
57158
- oldCheck();
57159
- timer = null;
57160
- }, 100);
57161
- }
57162
- };
57163
- this._callback = callback;
57164
- window.addEventListener('resize', this.check, true);
57165
- document.addEventListener('scroll', this.check, true);
57166
- }
57167
- observe(target) {
57168
- if (this._elements.indexOf(target) >= 0) {
57169
- return;
57170
- }
57171
- this._elements.push(target);
57172
- this.check();
57173
- }
57174
- unobserve(target) {
57175
- this._elements = this._elements.filter(item => {
57176
- return item !== target;
57177
- });
57178
- }
57179
- check() {
57180
- const entries = [];
57181
- for (const element of this._elements) {
57182
- const rect = element.getBoundingClientRect();
57183
- const isVisible = rect.top + rect.height >= 0 &&
57184
- rect.top <= window.innerHeight &&
57185
- rect.left + rect.width >= 0 &&
57186
- rect.left <= window.innerWidth;
57187
- if (isVisible) {
57188
- entries.push({
57189
- target: element,
57190
- isIntersecting: true
57191
- });
57192
- }
57193
- }
57194
- if (entries.length) {
57195
- this._callback(entries, this);
57196
- }
57197
- }
57198
- }
57199
-
57200
58190
  /******************************************************************************
57201
58191
  Copyright (c) Microsoft Corporation.
57202
58192
 
@@ -59312,9 +60302,9 @@
59312
60302
  print(`build date: ${VersionInfo.date}`);
59313
60303
  }
59314
60304
  }
59315
- VersionInfo.version = '1.6.0-alpha.1401';
59316
- VersionInfo.date = '2025-05-07T12:40:48.955Z';
59317
- VersionInfo.commit = 'e58a9704e560b3344b8fe39a2b2f46a2ee3bb5b1';
60305
+ VersionInfo.version = '1.6.0-alpha.1403';
60306
+ VersionInfo.date = '2025-05-09T02:06:22.101Z';
60307
+ VersionInfo.commit = '3644a11f557063573413de459c607a1f9c302a6a';
59318
60308
 
59319
60309
  /**
59320
60310
  * A factory for custom layout engines.
@@ -59785,29 +60775,6 @@
59785
60775
  if (Environment.webPlatform === exports.WebPlatform.Browser || Environment.webPlatform === exports.WebPlatform.BrowserModule) {
59786
60776
  Environment.registerJQueryPlugin();
59787
60777
  Environment.HighDpiFactor = window.devicePixelRatio;
59788
- // ResizeObserver API does not yet exist so long on Safari (only start 2020 with iOS Safari 13.7 and Desktop 13.1)
59789
- // so we better add a polyfill for it
59790
- if (!('ResizeObserver' in Environment.globalThis)) {
59791
- Environment.globalThis.ResizeObserver = ResizeObserverPolyfill;
59792
- }
59793
- // IntersectionObserver API does not on older iOS versions
59794
- // so we better add a polyfill for it
59795
- if (!('IntersectionObserver' in Environment.globalThis)) {
59796
- Environment.globalThis.IntersectionObserver = IntersectionObserverPolyfill;
59797
- }
59798
- if (!('replaceChildren' in Element.prototype)) {
59799
- Element.prototype.replaceChildren = function (...nodes) {
59800
- this.innerHTML = '';
59801
- this.append(...nodes);
59802
- };
59803
- Document.prototype.replaceChildren = Element.prototype.replaceChildren;
59804
- DocumentFragment.prototype.replaceChildren = Element.prototype.replaceChildren;
59805
- }
59806
- if (!('replaceAll' in String.prototype)) {
59807
- String.prototype.replaceAll = function (str, newStr) {
59808
- return this.replace(new RegExp(str, 'g'), newStr);
59809
- };
59810
- }
59811
60778
  }
59812
60779
  Environment.createWebWorker = createWebWorker;
59813
60780
  Environment.createAudioWorklet = createAudioWorklet;
@@ -63539,6 +64506,7 @@
63539
64506
  get AccidentalType () { return AccidentalType; },
63540
64507
  Automation,
63541
64508
  get AutomationType () { return AutomationType; },
64509
+ BackingTrack,
63542
64510
  Bar,
63543
64511
  get BarLineStyle () { return BarLineStyle; },
63544
64512
  BarStyle,
@@ -63601,6 +64569,7 @@
63601
64569
  Staff,
63602
64570
  SustainPedalMarker,
63603
64571
  get SustainPedalMarkerType () { return SustainPedalMarkerType; },
64572
+ SyncPointData,
63604
64573
  Track,
63605
64574
  get TrackNameMode () { return TrackNameMode; },
63606
64575
  get TrackNameOrientation () { return TrackNameOrientation; },