@coderline/alphatab 1.6.0-alpha.1399 → 1.6.0-alpha.1403

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/alphaTab.js CHANGED
@@ -1,5 +1,5 @@
1
1
  /*!
2
- * alphaTab v1.6.0-alpha.1399 (develop, build 1399)
2
+ * alphaTab v1.6.0-alpha.1403 (develop, build 1403)
3
3
  *
4
4
  * Copyright © 2025, Daniel Kuschny and Contributors, All rights reserved.
5
5
  *
@@ -55,7 +55,127 @@
55
55
  (global = typeof globalThis !== 'undefined' ? globalThis : global || self, factory(global.alphaTab = {}));
56
56
  })(this, (function (exports) { 'use strict';
57
57
 
58
- if(typeof Symbol.dispose==='undefined'){Symbol.dispose = Symbol('Symbol.dispose')}
58
+ /**
59
+ * A very basic polyfill of the ResizeObserver which triggers
60
+ * a the callback on window resize for all registered targets.
61
+ * @target web
62
+ */
63
+ class ResizeObserverPolyfill {
64
+ constructor(callback) {
65
+ this._targets = new Set();
66
+ this._callback = callback;
67
+ window.addEventListener('resize', this.onWindowResize.bind(this), false);
68
+ }
69
+ observe(target) {
70
+ this._targets.add(target);
71
+ }
72
+ unobserve(target) {
73
+ this._targets.delete(target);
74
+ }
75
+ disconnect() {
76
+ this._targets.clear();
77
+ }
78
+ onWindowResize() {
79
+ const entries = [];
80
+ for (const t of this._targets) {
81
+ entries.push({
82
+ target: t,
83
+ // not used by alphaTab
84
+ contentRect: undefined,
85
+ borderBoxSize: undefined,
86
+ contentBoxSize: [],
87
+ devicePixelContentBoxSize: []
88
+ });
89
+ }
90
+ this._callback(entries, this);
91
+ }
92
+ }
93
+
94
+ /**
95
+ * A polyfill of the InsersectionObserver
96
+ * @target web
97
+ */
98
+ class IntersectionObserverPolyfill {
99
+ constructor(callback) {
100
+ this._elements = [];
101
+ let timer = null;
102
+ const oldCheck = this.check.bind(this);
103
+ this.check = () => {
104
+ if (!timer) {
105
+ timer = setTimeout(() => {
106
+ oldCheck();
107
+ timer = null;
108
+ }, 100);
109
+ }
110
+ };
111
+ this._callback = callback;
112
+ window.addEventListener('resize', this.check, true);
113
+ document.addEventListener('scroll', this.check, true);
114
+ }
115
+ observe(target) {
116
+ if (this._elements.indexOf(target) >= 0) {
117
+ return;
118
+ }
119
+ this._elements.push(target);
120
+ this.check();
121
+ }
122
+ unobserve(target) {
123
+ this._elements = this._elements.filter(item => {
124
+ return item !== target;
125
+ });
126
+ }
127
+ check() {
128
+ const entries = [];
129
+ for (const element of this._elements) {
130
+ const rect = element.getBoundingClientRect();
131
+ const isVisible = rect.top + rect.height >= 0 &&
132
+ rect.top <= window.innerHeight &&
133
+ rect.left + rect.width >= 0 &&
134
+ rect.left <= window.innerWidth;
135
+ if (isVisible) {
136
+ entries.push({
137
+ target: element,
138
+ isIntersecting: true
139
+ });
140
+ }
141
+ }
142
+ if (entries.length) {
143
+ this._callback(entries, this);
144
+ }
145
+ }
146
+ }
147
+
148
+ /*@target web*/
149
+ (() => {
150
+ if (typeof Symbol.dispose === 'undefined') {
151
+ Symbol.dispose = Symbol('Symbol.dispose');
152
+ }
153
+ if (typeof window !== 'undefined') {
154
+ // ResizeObserver API does not yet exist so long on Safari (only start 2020 with iOS Safari 13.7 and Desktop 13.1)
155
+ // so we better add a polyfill for it
156
+ if (!('ResizeObserver' in globalThis)) {
157
+ globalThis.ResizeObserver = ResizeObserverPolyfill;
158
+ }
159
+ // IntersectionObserver API does not on older iOS versions
160
+ // so we better add a polyfill for it
161
+ if (!('IntersectionObserver' in globalThis)) {
162
+ globalThis.IntersectionObserver = IntersectionObserverPolyfill;
163
+ }
164
+ if (!('replaceChildren' in Element.prototype)) {
165
+ Element.prototype.replaceChildren = function (...nodes) {
166
+ this.innerHTML = '';
167
+ this.append(...nodes);
168
+ };
169
+ Document.prototype.replaceChildren = Element.prototype.replaceChildren;
170
+ DocumentFragment.prototype.replaceChildren = Element.prototype.replaceChildren;
171
+ }
172
+ }
173
+ if (!('replaceAll' in String.prototype)) {
174
+ String.prototype.replaceAll = function (str, newStr) {
175
+ return this.replace(new RegExp(str, 'g'), newStr);
176
+ };
177
+ }
178
+ })();
59
179
 
60
180
  /**
61
181
  * Lists all layout modes that are supported.
@@ -1279,7 +1399,37 @@
1279
1399
  * Balance change.
1280
1400
  */
1281
1401
  AutomationType[AutomationType["Balance"] = 3] = "Balance";
1402
+ /**
1403
+ * A sync point for synchronizing the internal time axis with an external audio track.
1404
+ */
1405
+ AutomationType[AutomationType["SyncPoint"] = 4] = "SyncPoint";
1282
1406
  })(AutomationType || (AutomationType = {}));
1407
+ /**
1408
+ * Represents the data of a sync point for synchronizing the internal time axis with
1409
+ * an external audio file.
1410
+ * @cloneable
1411
+ * @json
1412
+ * @json_strict
1413
+ */
1414
+ class SyncPointData {
1415
+ constructor() {
1416
+ /**
1417
+ * Indicates for which repeat occurence this sync point is valid (e.g. 0 on the first time played, 1 on the second time played)
1418
+ */
1419
+ this.barOccurence = 0;
1420
+ /**
1421
+ * The modified tempo at which the cursor should move (aka. the tempo played within the external audio track).
1422
+ * This information is used together with the {@link originalTempo} to calculate how much faster/slower the
1423
+ * cursor playback is performed to align with the audio track.
1424
+ */
1425
+ this.modifiedTempo = 0;
1426
+ /**
1427
+ * The uadio offset marking the position within the audio track in milliseconds.
1428
+ * This information is used to regularly sync (or on seeking) to match a given external audio time axis with the internal time axis.
1429
+ */
1430
+ this.millisecondOffset = 0;
1431
+ }
1432
+ }
1283
1433
  /**
1284
1434
  * Automations are used to change the behaviour of a song.
1285
1435
  * @cloneable
@@ -2578,6 +2728,16 @@
2578
2728
  }
2579
2729
  return null;
2580
2730
  }
2731
+ /**
2732
+ * Adds the given sync point to the list of sync points for this bar.
2733
+ * @param syncPoint The sync point to add.
2734
+ */
2735
+ addSyncPoint(syncPoint) {
2736
+ if (!this.syncPoints) {
2737
+ this.syncPoints = [];
2738
+ }
2739
+ this.syncPoints.push(syncPoint);
2740
+ }
2581
2741
  }
2582
2742
  MasterBar.MaxAlternateEndings = 8;
2583
2743
 
@@ -5672,6 +5832,21 @@
5672
5832
  }
5673
5833
  }
5674
5834
 
5835
+ // <auto-generated>
5836
+ // This code was auto-generated.
5837
+ // Changes to this file may cause incorrect behavior and will be lost if
5838
+ // the code is regenerated.
5839
+ // </auto-generated>
5840
+ class SyncPointDataCloner {
5841
+ static clone(original) {
5842
+ const clone = new SyncPointData();
5843
+ clone.barOccurence = original.barOccurence;
5844
+ clone.modifiedTempo = original.modifiedTempo;
5845
+ clone.millisecondOffset = original.millisecondOffset;
5846
+ return clone;
5847
+ }
5848
+ }
5849
+
5675
5850
  // <auto-generated>
5676
5851
  // This code was auto-generated.
5677
5852
  // Changes to this file may cause incorrect behavior and will be lost if
@@ -5683,6 +5858,7 @@
5683
5858
  clone.isLinear = original.isLinear;
5684
5859
  clone.type = original.type;
5685
5860
  clone.value = original.value;
5861
+ clone.syncPointValue = original.syncPointValue ? SyncPointDataCloner.clone(original.syncPointValue) : undefined;
5686
5862
  clone.ratioPosition = original.ratioPosition;
5687
5863
  clone.text = original.text;
5688
5864
  return clone;
@@ -14128,6 +14304,21 @@
14128
14304
  }
14129
14305
  }
14130
14306
 
14307
+ /**
14308
+ * Holds information about the backing track which can be played instead of synthesized audio.
14309
+ * @json
14310
+ * @json_strict
14311
+ */
14312
+ class BackingTrack {
14313
+ constructor() {
14314
+ /**
14315
+ * The number of milliseconds the audio should be shifted to align with the song.
14316
+ * (e.g. negative values allow skipping potential silent parts at the start of the file and directly start with the first note).
14317
+ */
14318
+ this.padding = 0;
14319
+ }
14320
+ }
14321
+
14131
14322
  /**
14132
14323
  * This structure represents a duration within a gpif
14133
14324
  */
@@ -14220,6 +14411,9 @@
14220
14411
  case 'MasterTrack':
14221
14412
  this.parseMasterTrackNode(n);
14222
14413
  break;
14414
+ case 'BackingTrack':
14415
+ this.parseBackingTrackNode(n);
14416
+ break;
14223
14417
  case 'Tracks':
14224
14418
  this.parseTracksNode(n);
14225
14419
  break;
@@ -14241,6 +14435,9 @@
14241
14435
  case 'Rhythms':
14242
14436
  this.parseRhythms(n);
14243
14437
  break;
14438
+ case 'Assets':
14439
+ this.parseAssets(n);
14440
+ break;
14244
14441
  }
14245
14442
  }
14246
14443
  }
@@ -14248,6 +14445,37 @@
14248
14445
  throw new UnsupportedFormatError('Root node of XML was not GPIF');
14249
14446
  }
14250
14447
  }
14448
+ parseAssets(element) {
14449
+ for (const c of element.childElements()) {
14450
+ switch (c.localName) {
14451
+ case 'Asset':
14452
+ if (c.getAttribute('id') === this._backingTrackAssetId) {
14453
+ this.parseBackingTrackAsset(c);
14454
+ }
14455
+ break;
14456
+ }
14457
+ }
14458
+ }
14459
+ parseBackingTrackAsset(element) {
14460
+ let embeddedFilePath = '';
14461
+ for (const c of element.childElements()) {
14462
+ switch (c.localName) {
14463
+ case 'EmbeddedFilePath':
14464
+ embeddedFilePath = c.innerText;
14465
+ break;
14466
+ }
14467
+ }
14468
+ const loadAsset = this.loadAsset;
14469
+ if (loadAsset) {
14470
+ const assetData = loadAsset(embeddedFilePath);
14471
+ if (assetData) {
14472
+ this.score.backingTrack.rawAudioFile = assetData;
14473
+ }
14474
+ else {
14475
+ this.score.backingTrack = undefined;
14476
+ }
14477
+ }
14478
+ }
14251
14479
  //
14252
14480
  // <Score>...</Score>
14253
14481
  //
@@ -14328,7 +14556,41 @@
14328
14556
  if (!text) {
14329
14557
  return [];
14330
14558
  }
14331
- return text.split(separator).map(t => t.trim()).filter(t => t.length > 0);
14559
+ return text
14560
+ .split(separator)
14561
+ .map(t => t.trim())
14562
+ .filter(t => t.length > 0);
14563
+ }
14564
+ //
14565
+ // <BackingTrack>...</BackingTrack>
14566
+ //
14567
+ parseBackingTrackNode(node) {
14568
+ const backingTrack = new BackingTrack();
14569
+ let enabled = false;
14570
+ let source = '';
14571
+ let assetId = '';
14572
+ for (const c of node.childElements()) {
14573
+ switch (c.localName) {
14574
+ case 'Enabled':
14575
+ enabled = c.innerText === 'true';
14576
+ break;
14577
+ case 'Source':
14578
+ source = c.innerText;
14579
+ break;
14580
+ case 'AssetId':
14581
+ assetId = c.innerText;
14582
+ break;
14583
+ case 'FramePadding':
14584
+ backingTrack.padding = GpifParser.parseIntSafe(c.innerText, 0) / GpifParser.SampleRate * 1000;
14585
+ break;
14586
+ }
14587
+ }
14588
+ // only local (contained backing tracks are supported)
14589
+ // remote / youtube links seem to come in future releases according to the gpif tags.
14590
+ if (enabled && source === 'Local') {
14591
+ this.score.backingTrack = backingTrack;
14592
+ this._backingTrackAssetId = assetId; // when the Asset tag is parsed this ID is used to load the raw data
14593
+ }
14332
14594
  }
14333
14595
  //
14334
14596
  // <MasterTrack>...</MasterTrack>
@@ -14366,6 +14628,7 @@
14366
14628
  let textValue = null;
14367
14629
  let reference = 0;
14368
14630
  let text = null;
14631
+ let syncPointValue = undefined;
14369
14632
  for (const c of node.childElements()) {
14370
14633
  switch (c.localName) {
14371
14634
  case 'Type':
@@ -14384,6 +14647,28 @@
14384
14647
  if (c.firstElement && c.firstElement.nodeType === XmlNodeType.CDATA) {
14385
14648
  textValue = c.innerText;
14386
14649
  }
14650
+ else if (c.firstElement &&
14651
+ c.firstElement.nodeType === XmlNodeType.Element &&
14652
+ type === 'SyncPoint') {
14653
+ syncPointValue = new SyncPointData();
14654
+ for (const vc of c.childElements()) {
14655
+ switch (vc.localName) {
14656
+ case 'BarIndex':
14657
+ barIndex = GpifParser.parseIntSafe(vc.innerText, 0);
14658
+ break;
14659
+ case 'BarOccurrence':
14660
+ syncPointValue.barOccurence = GpifParser.parseIntSafe(vc.innerText, 0);
14661
+ break;
14662
+ case 'ModifiedTempo':
14663
+ syncPointValue.modifiedTempo = GpifParser.parseFloatSafe(vc.innerText, 0);
14664
+ break;
14665
+ case 'FrameOffset':
14666
+ const frameOffset = GpifParser.parseFloatSafe(vc.innerText, 0);
14667
+ syncPointValue.millisecondOffset = (frameOffset / GpifParser.SampleRate) * 1000;
14668
+ break;
14669
+ }
14670
+ }
14671
+ }
14387
14672
  else {
14388
14673
  const parts = GpifParser.splitSafe(c.innerText);
14389
14674
  // Issue 391: Some GPX files might have
@@ -14411,6 +14696,13 @@
14411
14696
  case 'Tempo':
14412
14697
  automation = Automation.buildTempoAutomation(isLinear, ratioPosition, numberValue, reference);
14413
14698
  break;
14699
+ case 'SyncPoint':
14700
+ automation = new Automation();
14701
+ automation.type = AutomationType.SyncPoint;
14702
+ automation.isLinear = isLinear;
14703
+ automation.ratioPosition = ratioPosition;
14704
+ automation.syncPointValue = syncPointValue;
14705
+ break;
14414
14706
  case 'Sound':
14415
14707
  if (textValue && sounds && sounds.has(textValue)) {
14416
14708
  automation = Automation.buildInstrumentAutomation(isLinear, ratioPosition, sounds.get(textValue).program);
@@ -16492,14 +16784,19 @@
16492
16784
  const masterBar = this.score.masterBars[barNumber];
16493
16785
  for (let i = 0, j = automations.length; i < j; i++) {
16494
16786
  const automation = automations[i];
16495
- if (automation.type === AutomationType.Tempo) {
16496
- if (barNumber === 0) {
16497
- this.score.tempo = automation.value | 0;
16498
- if (automation.text) {
16499
- this.score.tempoLabel = automation.text;
16787
+ switch (automation.type) {
16788
+ case AutomationType.Tempo:
16789
+ if (barNumber === 0) {
16790
+ this.score.tempo = automation.value | 0;
16791
+ if (automation.text) {
16792
+ this.score.tempoLabel = automation.text;
16793
+ }
16500
16794
  }
16501
- }
16502
- masterBar.tempoAutomations.push(automation);
16795
+ masterBar.tempoAutomations.push(automation);
16796
+ break;
16797
+ case AutomationType.SyncPoint:
16798
+ masterBar.addSyncPoint(automation);
16799
+ break;
16503
16800
  }
16504
16801
  }
16505
16802
  }
@@ -16516,6 +16813,10 @@
16516
16813
  * Internal Range: 1 per quarter note
16517
16814
  */
16518
16815
  GpifParser.BendPointValueFactor = 1 / 25.0;
16816
+ // test have shown that Guitar Pro seem to always work with 44100hz for the frame offsets,
16817
+ // they are NOT using the sample rate of the input file.
16818
+ // Downsampling a 44100hz ogg to 8000hz and using it in as audio track resulted in the same frame offset when placing sync points.
16819
+ GpifParser.SampleRate = 44100;
16519
16820
 
16520
16821
  // PartConfiguration File Format Notes.
16521
16822
  // Based off Guitar Pro 8
@@ -17382,7 +17683,9 @@
17382
17683
  let binaryStylesheetData = null;
17383
17684
  let partConfigurationData = null;
17384
17685
  let layoutConfigurationData = null;
17686
+ const entryLookup = new Map();
17385
17687
  for (const entry of entries) {
17688
+ entryLookup.set(entry.fullName, entry);
17386
17689
  switch (entry.fileName) {
17387
17690
  case 'score.gpif':
17388
17691
  xml = IOHelper.toString(entry.data, this.settings.importer.encoding);
@@ -17405,6 +17708,12 @@
17405
17708
  // the score information as XML we need to parse.
17406
17709
  Logger.debug(this.name, 'Start Parsing score.gpif');
17407
17710
  const gpifParser = new GpifParser();
17711
+ gpifParser.loadAsset = (fileName) => {
17712
+ if (entryLookup.has(fileName)) {
17713
+ return entryLookup.get(fileName).data;
17714
+ }
17715
+ return undefined;
17716
+ };
17408
17717
  gpifParser.parseXml(xml, this.settings);
17409
17718
  Logger.debug(this.name, 'score.gpif parsed');
17410
17719
  const score = gpifParser.score;
@@ -21870,8 +22179,24 @@
21870
22179
  * Represents a change of the tempo in the song.
21871
22180
  */
21872
22181
  class TempoChangeEvent extends MidiEvent {
22182
+ /**
22183
+ * The tempo in microseconds per quarter note (aka USQ). A time format typically for midi.
22184
+ */
22185
+ get microSecondsPerQuarterNote() {
22186
+ return 60000000 / this.beatsPerMinute;
22187
+ }
22188
+ /**
22189
+ * The tempo in microseconds per quarter note (aka USQ). A time format typically for midi.
22190
+ */
22191
+ set microSecondsPerQuarterNote(value) {
22192
+ this.beatsPerMinute = 60000000 / value;
22193
+ }
21873
22194
  constructor(tick, microSecondsPerQuarterNote) {
21874
22195
  super(0, tick, MidiEventType.TempoChange);
22196
+ /**
22197
+ * The tempo in beats per minute
22198
+ */
22199
+ this.beatsPerMinute = 0;
21875
22200
  this.microSecondsPerQuarterNote = microSecondsPerQuarterNote;
21876
22201
  }
21877
22202
  writeTo(s) {
@@ -21954,6 +22279,17 @@
21954
22279
  }
21955
22280
  }
21956
22281
 
22282
+ /**
22283
+ * Rerpresents a point to sync the alphaTab time axis with an external backing track.
22284
+ */
22285
+ class BackingTrackSyncPoint {
22286
+ constructor(tick, data) {
22287
+ this.tick = 0;
22288
+ this.tick = tick;
22289
+ this.data = data;
22290
+ }
22291
+ }
22292
+
21957
22293
  class MidiFileSequencerTempoChange {
21958
22294
  constructor(bpm, ticks, time) {
21959
22295
  this.bpm = bpm;
@@ -21961,9 +22297,17 @@
21961
22297
  this.time = time;
21962
22298
  }
21963
22299
  }
22300
+ class BackingTrackSyncPointWithTime extends BackingTrackSyncPoint {
22301
+ constructor(tick, data, time) {
22302
+ super(tick, data);
22303
+ this.time = time;
22304
+ }
22305
+ }
21964
22306
  class MidiSequencerState {
21965
22307
  constructor() {
21966
22308
  this.tempoChanges = [];
22309
+ this.tempoChangeIndex = 0;
22310
+ this.syncPoints = [];
21967
22311
  this.firstProgramEventPerChannel = new Map();
21968
22312
  this.firstTimeSignatureNumerator = 0;
21969
22313
  this.firstTimeSignatureDenominator = 0;
@@ -21971,11 +22315,15 @@
21971
22315
  this.division = MidiUtils.QuarterTime;
21972
22316
  this.eventIndex = 0;
21973
22317
  this.currentTime = 0;
22318
+ this.currentTick = 0;
22319
+ this.syncPointIndex = 0;
21974
22320
  this.playbackRange = null;
21975
22321
  this.playbackRangeStartTime = 0;
21976
22322
  this.playbackRangeEndTime = 0;
21977
22323
  this.endTick = 0;
21978
22324
  this.endTime = 0;
22325
+ this.currentTempo = 0;
22326
+ this.modifiedTempo = 0;
21979
22327
  }
21980
22328
  }
21981
22329
  /**
@@ -22028,6 +22376,12 @@
22028
22376
  get currentEndTime() {
22029
22377
  return this._currentState.endTime / this.playbackSpeed;
22030
22378
  }
22379
+ get currentTempo() {
22380
+ return this._currentState.currentTempo;
22381
+ }
22382
+ get modifiedTempo() {
22383
+ return this._currentState.modifiedTempo * this.playbackSpeed;
22384
+ }
22031
22385
  mainSeek(timePosition) {
22032
22386
  // map to speed=1
22033
22387
  timePosition *= this.playbackSpeed;
@@ -22047,6 +22401,8 @@
22047
22401
  // we have to restart the midi to make sure we get the right state: instruments, volume, pan, etc
22048
22402
  this._mainState.currentTime = 0;
22049
22403
  this._mainState.eventIndex = 0;
22404
+ this._mainState.syncPointIndex = 0;
22405
+ this._mainState.tempoChangeIndex = 0;
22050
22406
  if (this.isPlayingMain) {
22051
22407
  const metronomeVolume = this._synthesizer.metronomeVolume;
22052
22408
  this._synthesizer.noteOffAll(true);
@@ -22121,7 +22477,7 @@
22121
22477
  }
22122
22478
  if (mEvent.type === MidiEventType.TempoChange) {
22123
22479
  const meta = mEvent;
22124
- bpm = 60000000 / meta.microSecondsPerQuarterNote;
22480
+ bpm = meta.beatsPerMinute;
22125
22481
  state.tempoChanges.push(new MidiFileSequencerTempoChange(bpm, absTick, absTime));
22126
22482
  metronomeLengthInMillis = metronomeLengthInTicks * (60000.0 / (bpm * midiFile.division));
22127
22483
  }
@@ -22155,6 +22511,8 @@
22155
22511
  }
22156
22512
  }
22157
22513
  }
22514
+ state.currentTempo = state.tempoChanges.length > 0 ? state.tempoChanges[0].bpm : bpm;
22515
+ state.modifiedTempo = state.currentTempo;
22158
22516
  state.synthData.sort((a, b) => {
22159
22517
  if (a.time > b.time) {
22160
22518
  return 1;
@@ -22171,6 +22529,35 @@
22171
22529
  fillMidiEventQueue() {
22172
22530
  return this.fillMidiEventQueueLimited(-1);
22173
22531
  }
22532
+ fillMidiEventQueueToEndTime(endTime) {
22533
+ while (this._mainState.currentTime < endTime) {
22534
+ if (this.fillMidiEventQueueLimited(endTime - this._mainState.currentTime)) {
22535
+ this._synthesizer.synthesizeSilent(SynthConstants.MicroBufferSize);
22536
+ }
22537
+ }
22538
+ let anyEventsDispatched = false;
22539
+ this._currentState.currentTime = endTime;
22540
+ while (this._currentState.eventIndex < this._currentState.synthData.length &&
22541
+ this._currentState.synthData[this._currentState.eventIndex].time < this._currentState.currentTime) {
22542
+ const synthEvent = this._currentState.synthData[this._currentState.eventIndex];
22543
+ this._synthesizer.dispatchEvent(synthEvent);
22544
+ while (this._currentState.syncPointIndex < this._currentState.syncPoints.length &&
22545
+ this._currentState.syncPoints[this._currentState.syncPointIndex].tick < synthEvent.event.tick) {
22546
+ this._currentState.modifiedTempo =
22547
+ this._currentState.syncPoints[this._currentState.syncPointIndex].data.modifiedTempo;
22548
+ this._currentState.syncPointIndex++;
22549
+ }
22550
+ while (this._currentState.tempoChangeIndex < this._currentState.tempoChanges.length &&
22551
+ this._currentState.tempoChanges[this._currentState.tempoChangeIndex].time <= synthEvent.time) {
22552
+ this._currentState.currentTempo =
22553
+ this._currentState.tempoChanges[this._currentState.tempoChangeIndex].bpm;
22554
+ this._currentState.tempoChangeIndex++;
22555
+ }
22556
+ this._currentState.eventIndex++;
22557
+ anyEventsDispatched = true;
22558
+ }
22559
+ return anyEventsDispatched;
22560
+ }
22174
22561
  fillMidiEventQueueLimited(maxMilliseconds) {
22175
22562
  let millisecondsPerBuffer = (SynthConstants.MicroBufferSize / this._synthesizer.outSampleRate) * 1000 * this.playbackSpeed;
22176
22563
  let endTime = this.internalEndTime;
@@ -22198,9 +22585,87 @@
22198
22585
  mainTimePositionToTickPosition(timePosition) {
22199
22586
  return this.timePositionToTickPositionWithSpeed(this._mainState, timePosition, this.playbackSpeed);
22200
22587
  }
22588
+ mainUpdateSyncPoints(syncPoints) {
22589
+ const state = this._mainState;
22590
+ syncPoints.sort((a, b) => a.tick - b.tick); // just in case
22591
+ state.syncPoints = new Array(syncPoints.length);
22592
+ if (syncPoints.length >= 0) {
22593
+ let bpm = 120;
22594
+ let absTick = 0;
22595
+ let absTime = 0.0;
22596
+ let previousTick = 0;
22597
+ let tempoChangeIndex = 0;
22598
+ for (let i = 0; i < syncPoints.length; i++) {
22599
+ const p = syncPoints[i];
22600
+ const deltaTick = p.tick - previousTick;
22601
+ absTick += deltaTick;
22602
+ absTime += deltaTick * (60000.0 / (bpm * state.division));
22603
+ state.syncPoints[i] = new BackingTrackSyncPointWithTime(p.tick, p.data, absTime);
22604
+ previousTick = p.tick;
22605
+ while (tempoChangeIndex < state.tempoChanges.length &&
22606
+ state.tempoChanges[tempoChangeIndex].ticks <= absTick) {
22607
+ bpm = state.tempoChanges[tempoChangeIndex].bpm;
22608
+ tempoChangeIndex++;
22609
+ }
22610
+ }
22611
+ }
22612
+ state.syncPointIndex = 0;
22613
+ }
22201
22614
  currentTimePositionToTickPosition(timePosition) {
22202
22615
  return this.timePositionToTickPositionWithSpeed(this._currentState, timePosition, this.playbackSpeed);
22203
22616
  }
22617
+ mainTimePositionFromBackingTrack(timePosition, backingTrackLength) {
22618
+ const mainState = this._mainState;
22619
+ const syncPoints = mainState.syncPoints;
22620
+ if (timePosition < 0 || syncPoints.length === 0) {
22621
+ return timePosition;
22622
+ }
22623
+ let syncPointIndex = timePosition >= syncPoints[mainState.syncPointIndex].data.millisecondOffset ? mainState.syncPointIndex : 0;
22624
+ while (syncPointIndex + 1 < syncPoints.length &&
22625
+ syncPoints[syncPointIndex + 1].data.millisecondOffset <= timePosition) {
22626
+ syncPointIndex++;
22627
+ }
22628
+ const currentSyncPoint = syncPoints[syncPointIndex];
22629
+ const timeDiff = timePosition - currentSyncPoint.data.millisecondOffset;
22630
+ let alphaTabTimeDiff;
22631
+ if (syncPointIndex + 1 < syncPoints.length) {
22632
+ const nextSyncPoint = syncPoints[syncPointIndex + 1];
22633
+ const relativeTimeDiff = timeDiff / (nextSyncPoint.data.millisecondOffset - currentSyncPoint.data.millisecondOffset);
22634
+ alphaTabTimeDiff = (nextSyncPoint.time - currentSyncPoint.time) * relativeTimeDiff;
22635
+ }
22636
+ else {
22637
+ const relativeTimeDiff = timeDiff / (backingTrackLength - currentSyncPoint.data.millisecondOffset);
22638
+ alphaTabTimeDiff = (mainState.endTime - currentSyncPoint.time) * relativeTimeDiff;
22639
+ }
22640
+ return (currentSyncPoint.time + alphaTabTimeDiff) / this.playbackSpeed;
22641
+ }
22642
+ mainTimePositionToBackingTrack(timePosition, backingTrackLength) {
22643
+ const mainState = this._mainState;
22644
+ const syncPoints = mainState.syncPoints;
22645
+ if (timePosition < 0 || syncPoints.length === 0) {
22646
+ return timePosition;
22647
+ }
22648
+ timePosition *= this.playbackSpeed;
22649
+ let syncPointIndex = timePosition >= syncPoints[mainState.syncPointIndex].time ? mainState.syncPointIndex : 0;
22650
+ while (syncPointIndex + 1 < syncPoints.length && syncPoints[syncPointIndex + 1].time <= timePosition) {
22651
+ syncPointIndex++;
22652
+ }
22653
+ const currentSyncPoint = syncPoints[syncPointIndex];
22654
+ const alphaTabTimeDiff = timePosition - currentSyncPoint.time;
22655
+ let backingTrackPos;
22656
+ if (syncPointIndex + 1 < syncPoints.length) {
22657
+ const nextSyncPoint = syncPoints[syncPointIndex + 1];
22658
+ const relativeAlphaTabTimeDiff = alphaTabTimeDiff / (nextSyncPoint.time - currentSyncPoint.time);
22659
+ const backingTrackDiff = nextSyncPoint.data.millisecondOffset - currentSyncPoint.data.millisecondOffset;
22660
+ backingTrackPos = currentSyncPoint.data.millisecondOffset + backingTrackDiff * relativeAlphaTabTimeDiff;
22661
+ }
22662
+ else {
22663
+ const relativeAlphaTabTimeDiff = alphaTabTimeDiff / (mainState.endTime - currentSyncPoint.time);
22664
+ const frameDiff = backingTrackLength - currentSyncPoint.data.millisecondOffset;
22665
+ backingTrackPos = currentSyncPoint.data.millisecondOffset + frameDiff * relativeAlphaTabTimeDiff;
22666
+ }
22667
+ return backingTrackPos;
22668
+ }
22204
22669
  tickPositionToTimePositionWithSpeed(state, tickPosition, playbackSpeed) {
22205
22670
  let timePosition = 0.0;
22206
22671
  let bpm = 120.0;
@@ -22310,6 +22775,8 @@
22310
22775
  });
22311
22776
  state.endTime = metronomeTime;
22312
22777
  state.endTick = metronomeTick;
22778
+ state.currentTempo = bpm;
22779
+ state.modifiedTempo = bpm;
22313
22780
  this._countInState = state;
22314
22781
  }
22315
22782
  }
@@ -22355,12 +22822,22 @@
22355
22822
  * @param endTick The end tick.
22356
22823
  * @param isSeek Whether the time was seeked.
22357
22824
  */
22358
- constructor(currentTime, endTime, currentTick, endTick, isSeek) {
22825
+ constructor(currentTime, endTime, currentTick, endTick, isSeek, originalTempo, modifiedTempo) {
22826
+ /**
22827
+ * The original tempo in which alphaTab internally would be playing right now.
22828
+ */
22829
+ this.originalTempo = 0;
22830
+ /**
22831
+ * The modified tempo in which the actual playback is happening (e.g. due to playback speed or external audio synchronization)
22832
+ */
22833
+ this.modifiedTempo = 0;
22359
22834
  this.currentTime = currentTime;
22360
22835
  this.endTime = endTime;
22361
22836
  this.currentTick = currentTick;
22362
22837
  this.endTick = endTick;
22363
22838
  this.isSeek = isSeek;
22839
+ this.originalTempo = originalTempo;
22840
+ this.modifiedTempo = modifiedTempo;
22364
22841
  }
22365
22842
  }
22366
22843
 
@@ -26458,7 +26935,7 @@
26458
26935
  break;
26459
26936
  case MidiEventType.TempoChange:
26460
26937
  const tempoChange = e;
26461
- this.currentTempo = 60000000 / tempoChange.microSecondsPerQuarterNote;
26938
+ this.currentTempo = tempoChange.beatsPerMinute;
26462
26939
  break;
26463
26940
  case MidiEventType.PitchBend:
26464
26941
  const pitchBend = e;
@@ -27610,15 +28087,15 @@
27610
28087
  }
27611
28088
 
27612
28089
  /**
27613
- * This is the main synthesizer component which can be used to
28090
+ * This is the base class for synthesizer components which can be used to
27614
28091
  * play a {@link MidiFile} via a {@link ISynthOutput}.
27615
28092
  */
27616
- class AlphaSynth {
28093
+ class AlphaSynthBase {
27617
28094
  get output() {
27618
28095
  return this._output;
27619
28096
  }
27620
28097
  get isReadyForPlayback() {
27621
- return this.isReady && this._isSoundFontLoaded && this._isMidiLoaded;
28098
+ return this.isReady && this.isSoundFontLoaded && this._isMidiLoaded;
27622
28099
  }
27623
28100
  get logLevel() {
27624
28101
  return Logger.logLevel;
@@ -27627,11 +28104,14 @@
27627
28104
  Logger.logLevel = value;
27628
28105
  }
27629
28106
  get masterVolume() {
27630
- return this._synthesizer.masterVolume;
28107
+ return this.synthesizer.masterVolume;
27631
28108
  }
27632
28109
  set masterVolume(value) {
27633
28110
  value = Math.max(value, SynthConstants.MinVolume);
27634
- this._synthesizer.masterVolume = value;
28111
+ this.updateMasterVolume(value);
28112
+ }
28113
+ updateMasterVolume(value) {
28114
+ this.synthesizer.masterVolume = value;
27635
28115
  }
27636
28116
  get metronomeVolume() {
27637
28117
  return this._metronomeVolume;
@@ -27639,7 +28119,7 @@
27639
28119
  set metronomeVolume(value) {
27640
28120
  value = Math.max(value, SynthConstants.MinVolume);
27641
28121
  this._metronomeVolume = value;
27642
- this._synthesizer.metronomeVolume = value;
28122
+ this.synthesizer.metronomeVolume = value;
27643
28123
  }
27644
28124
  get countInVolume() {
27645
28125
  return this._countInVolume;
@@ -27655,19 +28135,22 @@
27655
28135
  this._midiEventsPlayedFilter = new Set(value);
27656
28136
  }
27657
28137
  get playbackSpeed() {
27658
- return this._sequencer.playbackSpeed;
28138
+ return this.sequencer.playbackSpeed;
27659
28139
  }
27660
28140
  set playbackSpeed(value) {
27661
28141
  value = ModelUtils.clamp(value, SynthConstants.MinPlaybackSpeed, SynthConstants.MaxPlaybackSpeed);
27662
- const oldSpeed = this._sequencer.playbackSpeed;
27663
- this._sequencer.playbackSpeed = value;
28142
+ this.updatePlaybackSpeed(value);
28143
+ }
28144
+ updatePlaybackSpeed(value) {
28145
+ const oldSpeed = this.sequencer.playbackSpeed;
28146
+ this.sequencer.playbackSpeed = value;
27664
28147
  this.timePosition = this.timePosition * (oldSpeed / value);
27665
28148
  }
27666
28149
  get tickPosition() {
27667
28150
  return this._tickPosition;
27668
28151
  }
27669
28152
  set tickPosition(value) {
27670
- this.timePosition = this._sequencer.mainTickPositionToTimePosition(value);
28153
+ this.timePosition = this.sequencer.mainTickPositionToTimePosition(value);
27671
28154
  }
27672
28155
  get timePosition() {
27673
28156
  return this._timePosition;
@@ -27675,30 +28158,30 @@
27675
28158
  set timePosition(value) {
27676
28159
  Logger.debug('AlphaSynth', `Seeking to position ${value}ms (main)`);
27677
28160
  // tell the sequencer to jump to the given position
27678
- this._sequencer.mainSeek(value);
28161
+ this.sequencer.mainSeek(value);
27679
28162
  // update the internal position
27680
28163
  this.updateTimePosition(value, true);
27681
28164
  // tell the output to reset the already synthesized buffers and request data again
27682
- if (this._sequencer.isPlayingMain) {
28165
+ if (this.sequencer.isPlayingMain) {
27683
28166
  this._notPlayedSamples = 0;
27684
28167
  this.output.resetSamples();
27685
28168
  }
27686
28169
  }
27687
28170
  get playbackRange() {
27688
- return this._sequencer.mainPlaybackRange;
28171
+ return this.sequencer.mainPlaybackRange;
27689
28172
  }
27690
28173
  set playbackRange(value) {
27691
- this._sequencer.mainPlaybackRange = value;
28174
+ this.sequencer.mainPlaybackRange = value;
27692
28175
  if (value) {
27693
28176
  this.tickPosition = value.startTick;
27694
28177
  }
27695
28178
  this.playbackRangeChanged.trigger(new PlaybackRangeChangedEventArgs(value));
27696
28179
  }
27697
28180
  get isLooping() {
27698
- return this._sequencer.isLooping;
28181
+ return this.sequencer.isLooping;
27699
28182
  }
27700
28183
  set isLooping(value) {
27701
- this._sequencer.isLooping = value;
28184
+ this.sequencer.isLooping = value;
27702
28185
  }
27703
28186
  destroy() {
27704
28187
  Logger.debug('AlphaSynth', 'Destroying player');
@@ -27706,11 +28189,11 @@
27706
28189
  this.output.destroy();
27707
28190
  }
27708
28191
  /**
27709
- * Initializes a new instance of the {@link AlphaSynth} class.
28192
+ * Initializes a new instance of the {@link AlphaSynthBase} class.
27710
28193
  * @param output The output to use for playing the generated samples.
27711
28194
  */
27712
- constructor(output, bufferTimeInMilliseconds) {
27713
- this._isSoundFontLoaded = false;
28195
+ constructor(output, synthesizer, bufferTimeInMilliseconds) {
28196
+ this.isSoundFontLoaded = false;
27714
28197
  this._isMidiLoaded = false;
27715
28198
  this._tickPosition = 0;
27716
28199
  this._timePosition = 0;
@@ -27739,8 +28222,8 @@
27739
28222
  Logger.debug('AlphaSynth', 'Creating output');
27740
28223
  this._output = output;
27741
28224
  Logger.debug('AlphaSynth', 'Creating synthesizer');
27742
- this._synthesizer = new TinySoundFont(this.output.sampleRate);
27743
- this._sequencer = new MidiFileSequencer(this._synthesizer);
28225
+ this.synthesizer = synthesizer;
28226
+ this.sequencer = new MidiFileSequencer(this.synthesizer);
27744
28227
  Logger.debug('AlphaSynth', 'Opening output');
27745
28228
  this.output.ready.on(() => {
27746
28229
  this.isReady = true;
@@ -27748,42 +28231,45 @@
27748
28231
  this.checkReadyForPlayback();
27749
28232
  });
27750
28233
  this.output.sampleRequest.on(() => {
27751
- if (this.state === PlayerState.Playing &&
27752
- (!this._sequencer.isFinished || this._synthesizer.activeVoiceCount > 0)) {
27753
- let samples = new Float32Array(SynthConstants.MicroBufferSize * SynthConstants.MicroBufferCount * SynthConstants.AudioChannels);
27754
- let bufferPos = 0;
27755
- for (let i = 0; i < SynthConstants.MicroBufferCount; i++) {
27756
- // synthesize buffer
27757
- this._sequencer.fillMidiEventQueue();
27758
- const synthesizedEvents = this._synthesizer.synthesize(samples, bufferPos, SynthConstants.MicroBufferSize);
27759
- bufferPos += SynthConstants.MicroBufferSize * SynthConstants.AudioChannels;
27760
- // push all processed events into the queue
27761
- // for informing users about played events
27762
- for (const e of synthesizedEvents) {
27763
- if (this._midiEventsPlayedFilter.has(e.event.type)) {
27764
- this._playedEventsQueue.enqueue(e);
27765
- }
27766
- }
27767
- // tell sequencer to check whether its work is done
27768
- if (this._sequencer.isFinished) {
27769
- break;
28234
+ this.onSampleRequest();
28235
+ });
28236
+ this.output.samplesPlayed.on(this.onSamplesPlayed.bind(this));
28237
+ this.output.open(bufferTimeInMilliseconds);
28238
+ }
28239
+ onSampleRequest() {
28240
+ if (this.state === PlayerState.Playing &&
28241
+ (!this.sequencer.isFinished || this.synthesizer.activeVoiceCount > 0)) {
28242
+ let samples = new Float32Array(SynthConstants.MicroBufferSize * SynthConstants.MicroBufferCount * SynthConstants.AudioChannels);
28243
+ let bufferPos = 0;
28244
+ for (let i = 0; i < SynthConstants.MicroBufferCount; i++) {
28245
+ // synthesize buffer
28246
+ this.sequencer.fillMidiEventQueue();
28247
+ const synthesizedEvents = this.synthesizer.synthesize(samples, bufferPos, SynthConstants.MicroBufferSize);
28248
+ bufferPos += SynthConstants.MicroBufferSize * SynthConstants.AudioChannels;
28249
+ // push all processed events into the queue
28250
+ // for informing users about played events
28251
+ for (const e of synthesizedEvents) {
28252
+ if (this._midiEventsPlayedFilter.has(e.event.type)) {
28253
+ this._playedEventsQueue.enqueue(e);
27770
28254
  }
27771
28255
  }
27772
- // send it to output
27773
- if (bufferPos < samples.length) {
27774
- samples = samples.subarray(0, bufferPos);
28256
+ // tell sequencer to check whether its work is done
28257
+ if (this.sequencer.isFinished) {
28258
+ break;
27775
28259
  }
27776
- this._notPlayedSamples += samples.length;
27777
- this.output.addSamples(samples);
27778
28260
  }
27779
- else {
27780
- // Tell output that there is no data left for it.
27781
- const samples = new Float32Array(0);
27782
- this.output.addSamples(samples);
28261
+ // send it to output
28262
+ if (bufferPos < samples.length) {
28263
+ samples = samples.subarray(0, bufferPos);
27783
28264
  }
27784
- });
27785
- this.output.samplesPlayed.on(this.onSamplesPlayed.bind(this));
27786
- this.output.open(bufferTimeInMilliseconds);
28265
+ this._notPlayedSamples += samples.length;
28266
+ this.output.addSamples(samples);
28267
+ }
28268
+ else {
28269
+ // Tell output that there is no data left for it.
28270
+ const samples = new Float32Array(0);
28271
+ this.output.addSamples(samples);
28272
+ }
27787
28273
  }
27788
28274
  play() {
27789
28275
  if (this.state !== PlayerState.Paused || !this._isMidiLoaded) {
@@ -27793,20 +28279,20 @@
27793
28279
  this.playInternal();
27794
28280
  if (this._countInVolume > 0) {
27795
28281
  Logger.debug('AlphaSynth', 'Starting countin');
27796
- this._sequencer.startCountIn();
27797
- this._synthesizer.setupMetronomeChannel(this._countInVolume);
28282
+ this.sequencer.startCountIn();
28283
+ this.synthesizer.setupMetronomeChannel(this._countInVolume);
27798
28284
  this.updateTimePosition(0, true);
27799
28285
  }
27800
28286
  this.output.play();
27801
28287
  return true;
27802
28288
  }
27803
28289
  playInternal() {
27804
- if (this._sequencer.isPlayingOneTimeMidi) {
28290
+ if (this.sequencer.isPlayingOneTimeMidi) {
27805
28291
  Logger.debug('AlphaSynth', 'Cancelling one time midi');
27806
28292
  this.stopOneTimeMidi();
27807
28293
  }
27808
28294
  Logger.debug('AlphaSynth', 'Starting playback');
27809
- this._synthesizer.setupMetronomeChannel(this.metronomeVolume);
28295
+ this.synthesizer.setupMetronomeChannel(this.metronomeVolume);
27810
28296
  this._synthStopping = false;
27811
28297
  this.state = PlayerState.Playing;
27812
28298
  this.stateChanged.trigger(new PlayerStateChangedEventArgs(this.state, false));
@@ -27819,7 +28305,7 @@
27819
28305
  this.state = PlayerState.Paused;
27820
28306
  this.stateChanged.trigger(new PlayerStateChangedEventArgs(this.state, false));
27821
28307
  this.output.pause();
27822
- this._synthesizer.noteOffAll(false);
28308
+ this.synthesizer.noteOffAll(false);
27823
28309
  }
27824
28310
  playPause() {
27825
28311
  if (this.state !== PlayerState.Paused || !this._isMidiLoaded) {
@@ -27837,21 +28323,21 @@
27837
28323
  this.state = PlayerState.Paused;
27838
28324
  this.output.pause();
27839
28325
  this._notPlayedSamples = 0;
27840
- this._sequencer.stop();
27841
- this._synthesizer.noteOffAll(true);
27842
- this.tickPosition = this._sequencer.mainPlaybackRange ? this._sequencer.mainPlaybackRange.startTick : 0;
28326
+ this.sequencer.stop();
28327
+ this.synthesizer.noteOffAll(true);
28328
+ this.tickPosition = this.sequencer.mainPlaybackRange ? this.sequencer.mainPlaybackRange.startTick : 0;
27843
28329
  this.stateChanged.trigger(new PlayerStateChangedEventArgs(this.state, true));
27844
28330
  }
27845
28331
  playOneTimeMidiFile(midi) {
27846
- if (this._sequencer.isPlayingOneTimeMidi) {
28332
+ if (this.sequencer.isPlayingOneTimeMidi) {
27847
28333
  this.stopOneTimeMidi();
27848
28334
  }
27849
28335
  else {
27850
28336
  // pause current playback.
27851
28337
  this.pause();
27852
28338
  }
27853
- this._sequencer.loadOneTimeMidi(midi);
27854
- this._synthesizer.noteOffAll(true);
28339
+ this.sequencer.loadOneTimeMidi(midi);
28340
+ this.synthesizer.noteOffAll(true);
27855
28341
  // update the internal position
27856
28342
  this.updateTimePosition(0, true);
27857
28343
  // tell the output to reset the already synthesized buffers and request data again
@@ -27861,9 +28347,9 @@
27861
28347
  }
27862
28348
  resetSoundFonts() {
27863
28349
  this.stop();
27864
- this._synthesizer.resetPresets();
28350
+ this.synthesizer.resetPresets();
27865
28351
  this._loadedSoundFonts = [];
27866
- this._isSoundFontLoaded = false;
28352
+ this.isSoundFontLoaded = false;
27867
28353
  this.soundFontLoaded.trigger();
27868
28354
  }
27869
28355
  loadSoundFont(data, append) {
@@ -27877,7 +28363,7 @@
27877
28363
  this._loadedSoundFonts = [];
27878
28364
  }
27879
28365
  this._loadedSoundFonts.push(soundFont);
27880
- this._isSoundFontLoaded = true;
28366
+ this.isSoundFontLoaded = true;
27881
28367
  this.soundFontLoaded.trigger();
27882
28368
  Logger.debug('AlphaSynth', 'soundFont successfully loaded');
27883
28369
  this.checkReadyForPlayback();
@@ -27889,12 +28375,12 @@
27889
28375
  }
27890
28376
  checkReadyForPlayback() {
27891
28377
  if (this.isReadyForPlayback) {
27892
- this._synthesizer.setupMetronomeChannel(this.metronomeVolume);
27893
- const programs = this._sequencer.instrumentPrograms;
27894
- const percussionKeys = this._sequencer.percussionKeys;
28378
+ this.synthesizer.setupMetronomeChannel(this.metronomeVolume);
28379
+ const programs = this.sequencer.instrumentPrograms;
28380
+ const percussionKeys = this.sequencer.percussionKeys;
27895
28381
  let append = false;
27896
28382
  for (const soundFont of this._loadedSoundFonts) {
27897
- this._synthesizer.loadPresets(soundFont, programs, percussionKeys, append);
28383
+ this.synthesizer.loadPresets(soundFont, programs, percussionKeys, append);
27898
28384
  append = true;
27899
28385
  }
27900
28386
  this.readyForPlayback.trigger();
@@ -27908,9 +28394,9 @@
27908
28394
  this.stop();
27909
28395
  try {
27910
28396
  Logger.debug('AlphaSynth', 'Loading midi from model');
27911
- this._sequencer.loadMidi(midi);
28397
+ this.sequencer.loadMidi(midi);
27912
28398
  this._isMidiLoaded = true;
27913
- this.midiLoaded.trigger(new PositionChangedEventArgs(0, this._sequencer.currentEndTime, 0, this._sequencer.currentEndTick, false));
28399
+ this.midiLoaded.trigger(new PositionChangedEventArgs(0, this.sequencer.currentEndTime, 0, this.sequencer.currentEndTick, false, this.sequencer.currentTempo, this.sequencer.modifiedTempo));
27914
28400
  Logger.debug('AlphaSynth', 'Midi successfully loaded');
27915
28401
  this.checkReadyForPlayback();
27916
28402
  this.tickPosition = 0;
@@ -27921,29 +28407,29 @@
27921
28407
  }
27922
28408
  }
27923
28409
  applyTranspositionPitches(transpositionPitches) {
27924
- this._synthesizer.applyTranspositionPitches(transpositionPitches);
28410
+ this.synthesizer.applyTranspositionPitches(transpositionPitches);
27925
28411
  }
27926
28412
  setChannelTranspositionPitch(channel, semitones) {
27927
- this._synthesizer.setChannelTranspositionPitch(channel, semitones);
28413
+ this.synthesizer.setChannelTranspositionPitch(channel, semitones);
27928
28414
  }
27929
28415
  setChannelMute(channel, mute) {
27930
- this._synthesizer.channelSetMute(channel, mute);
28416
+ this.synthesizer.channelSetMute(channel, mute);
27931
28417
  }
27932
28418
  resetChannelStates() {
27933
- this._synthesizer.resetChannelStates();
28419
+ this.synthesizer.resetChannelStates();
27934
28420
  }
27935
28421
  setChannelSolo(channel, solo) {
27936
- this._synthesizer.channelSetSolo(channel, solo);
28422
+ this.synthesizer.channelSetSolo(channel, solo);
27937
28423
  }
27938
28424
  setChannelVolume(channel, volume) {
27939
28425
  volume = Math.max(volume, SynthConstants.MinVolume);
27940
- this._synthesizer.channelSetMixVolume(channel, volume);
28426
+ this.synthesizer.channelSetMixVolume(channel, volume);
27941
28427
  }
27942
28428
  onSamplesPlayed(sampleCount) {
27943
28429
  if (sampleCount === 0) {
27944
28430
  return;
27945
28431
  }
27946
- const playedMillis = (sampleCount / this._synthesizer.outSampleRate) * 1000;
28432
+ const playedMillis = (sampleCount / this.synthesizer.outSampleRate) * 1000;
27947
28433
  this._notPlayedSamples -= sampleCount * SynthConstants.AudioChannels;
27948
28434
  this.updateTimePosition(this._timePosition + playedMillis, false);
27949
28435
  this.checkForFinish();
@@ -27951,76 +28437,85 @@
27951
28437
  checkForFinish() {
27952
28438
  let startTick = 0;
27953
28439
  let endTick = 0;
27954
- if (this.playbackRange && this._sequencer.isPlayingMain) {
28440
+ if (this.playbackRange && this.sequencer.isPlayingMain) {
27955
28441
  startTick = this.playbackRange.startTick;
27956
28442
  endTick = this.playbackRange.endTick;
27957
28443
  }
27958
28444
  else {
27959
- endTick = this._sequencer.currentEndTick;
27960
- }
27961
- if (this._tickPosition >= endTick && this._notPlayedSamples <= 0) {
27962
- this._notPlayedSamples = 0;
27963
- if (this._sequencer.isPlayingCountIn) {
27964
- Logger.debug('AlphaSynth', 'Finished playback (count-in)');
27965
- this._sequencer.resetCountIn();
27966
- this.timePosition = this._sequencer.currentTime;
27967
- this.playInternal();
27968
- this.output.resetSamples();
27969
- }
27970
- else if (this._sequencer.isPlayingOneTimeMidi) {
27971
- Logger.debug('AlphaSynth', 'Finished playback (one time)');
27972
- this.output.resetSamples();
27973
- this.state = PlayerState.Paused;
27974
- this.stopOneTimeMidi();
27975
- }
27976
- else if (this.isLooping) {
27977
- Logger.debug('AlphaSynth', 'Finished playback (main looping)');
27978
- this.finished.trigger();
27979
- this.tickPosition = startTick;
27980
- this._synthStopping = false;
28445
+ endTick = this.sequencer.currentEndTick;
28446
+ }
28447
+ if (this._tickPosition >= endTick) {
28448
+ // fully done with playback of remaining samples?
28449
+ if (this._notPlayedSamples <= 0) {
28450
+ this._notPlayedSamples = 0;
28451
+ if (this.sequencer.isPlayingCountIn) {
28452
+ Logger.debug('AlphaSynth', 'Finished playback (count-in)');
28453
+ this.sequencer.resetCountIn();
28454
+ this.timePosition = this.sequencer.currentTime;
28455
+ this.playInternal();
28456
+ this.output.resetSamples();
28457
+ }
28458
+ else if (this.sequencer.isPlayingOneTimeMidi) {
28459
+ Logger.debug('AlphaSynth', 'Finished playback (one time)');
28460
+ this.output.resetSamples();
28461
+ this.state = PlayerState.Paused;
28462
+ this.stopOneTimeMidi();
28463
+ }
28464
+ else if (this.isLooping) {
28465
+ Logger.debug('AlphaSynth', 'Finished playback (main looping)');
28466
+ this.finished.trigger();
28467
+ this.tickPosition = startTick;
28468
+ this._synthStopping = false;
28469
+ }
28470
+ else if (this.synthesizer.activeVoiceCount > 0) {
28471
+ // smooth stop
28472
+ if (!this._synthStopping) {
28473
+ Logger.debug('AlphaSynth', 'Signaling synth to stop all voices (all samples played)');
28474
+ this.synthesizer.noteOffAll(true);
28475
+ this._synthStopping = true;
28476
+ }
28477
+ }
28478
+ else {
28479
+ this._synthStopping = false;
28480
+ Logger.debug('AlphaSynth', 'Finished playback (main)');
28481
+ this.finished.trigger();
28482
+ this.stop();
28483
+ }
27981
28484
  }
27982
- else if (this._synthesizer.activeVoiceCount > 0) {
27983
- // smooth stop
28485
+ else {
28486
+ // the output still has to play some samples, signal the synth to stop
28487
+ // to eventually bring the voices down to 0 and stop playing
27984
28488
  if (!this._synthStopping) {
27985
- this._synthesizer.noteOffAll(true);
28489
+ Logger.debug('AlphaSynth', 'Signaling synth to stop all voices (not all samples played)');
28490
+ this.synthesizer.noteOffAll(true);
27986
28491
  this._synthStopping = true;
27987
28492
  }
27988
28493
  }
27989
- else {
27990
- this._synthStopping = false;
27991
- Logger.debug('AlphaSynth', 'Finished playback (main)');
27992
- this.finished.trigger();
27993
- this.stop();
27994
- }
27995
28494
  }
27996
28495
  }
27997
28496
  stopOneTimeMidi() {
27998
28497
  this.output.pause();
27999
- this._synthesizer.noteOffAll(true);
28000
- this._sequencer.resetOneTimeMidi();
28001
- this.timePosition = this._sequencer.currentTime;
28498
+ this.synthesizer.noteOffAll(true);
28499
+ this.sequencer.resetOneTimeMidi();
28500
+ this.timePosition = this.sequencer.currentTime;
28002
28501
  }
28003
28502
  updateTimePosition(timePosition, isSeek) {
28004
28503
  // update the real positions
28005
28504
  let currentTime = timePosition;
28006
28505
  this._timePosition = currentTime;
28007
- let currentTick = this._sequencer.currentTimePositionToTickPosition(currentTime);
28506
+ let currentTick = this.sequencer.currentTimePositionToTickPosition(currentTime);
28008
28507
  this._tickPosition = currentTick;
28009
- const endTime = this._sequencer.currentEndTime;
28010
- const endTick = this._sequencer.currentEndTick;
28508
+ const endTime = this.sequencer.currentEndTime;
28509
+ const endTick = this.sequencer.currentEndTick;
28011
28510
  // on fade outs we can have some milliseconds longer, ensure we don't report this
28012
28511
  if (currentTime > endTime) {
28013
28512
  currentTime = endTime;
28014
28513
  currentTick = endTick;
28015
28514
  }
28016
- const mode = this._sequencer.isPlayingMain
28017
- ? 'main'
28018
- : this._sequencer.isPlayingCountIn
28019
- ? 'count-in'
28020
- : 'one-time';
28021
- Logger.debug('AlphaSynth', `Position changed: (time: ${currentTime}/${endTime}, tick: ${currentTick}/${endTick}, Active Voices: ${this._synthesizer.activeVoiceCount} (${mode})`);
28022
- if (this._sequencer.isPlayingMain) {
28023
- this.positionChanged.trigger(new PositionChangedEventArgs(currentTime, endTime, currentTick, endTick, isSeek));
28515
+ const mode = this.sequencer.isPlayingMain ? 'main' : this.sequencer.isPlayingCountIn ? 'count-in' : 'one-time';
28516
+ Logger.debug('AlphaSynth', `Position changed: (time: ${currentTime}/${endTime}, tick: ${currentTick}/${endTick}, Active Voices: ${this.synthesizer.activeVoiceCount} (${mode}), Tempo original: ${this.sequencer.currentTempo}, Tempo modified: ${this.sequencer.modifiedTempo})`);
28517
+ if (this.sequencer.isPlayingMain) {
28518
+ this.positionChanged.trigger(new PositionChangedEventArgs(currentTime, endTime, currentTick, endTick, isSeek, this.sequencer.currentTempo, this.sequencer.modifiedTempo));
28024
28519
  }
28025
28520
  // build events which were actually played
28026
28521
  if (isSeek) {
@@ -28041,13 +28536,28 @@
28041
28536
  * @internal
28042
28537
  */
28043
28538
  hasSamplesForProgram(program) {
28044
- return this._synthesizer.hasSamplesForProgram(program);
28539
+ return this.synthesizer.hasSamplesForProgram(program);
28045
28540
  }
28046
28541
  /**
28047
28542
  * @internal
28048
28543
  */
28049
28544
  hasSamplesForPercussion(key) {
28050
- return this._synthesizer.hasSamplesForPercussion(key);
28545
+ return this.synthesizer.hasSamplesForPercussion(key);
28546
+ }
28547
+ loadBackingTrack(_score, _syncPoints) {
28548
+ }
28549
+ }
28550
+ /**
28551
+ * This is the main synthesizer component which can be used to
28552
+ * play a {@link MidiFile} via a {@link ISynthOutput}.
28553
+ */
28554
+ class AlphaSynth extends AlphaSynthBase {
28555
+ /**
28556
+ * Initializes a new instance of the {@link AlphaSynth} class.
28557
+ * @param output The output to use for playing the generated samples.
28558
+ */
28559
+ constructor(output, bufferTimeInMilliseconds) {
28560
+ super(output, new TinySoundFont(output.sampleRate), bufferTimeInMilliseconds);
28051
28561
  }
28052
28562
  }
28053
28563
 
@@ -29294,6 +29804,35 @@
29294
29804
  */
29295
29805
  PlayerOutputMode[PlayerOutputMode["WebAudioScriptProcessor"] = 1] = "WebAudioScriptProcessor";
29296
29806
  })(exports.PlayerOutputMode || (exports.PlayerOutputMode = {}));
29807
+ /**
29808
+ * Lists the different modes how the internal alphaTab player (and related cursor behavior) is working.
29809
+ */
29810
+ exports.PlayerMode = void 0;
29811
+ (function (PlayerMode) {
29812
+ /**
29813
+ * The player functionality is fully disabled.
29814
+ */
29815
+ PlayerMode[PlayerMode["Disabled"] = 0] = "Disabled";
29816
+ /**
29817
+ * The player functionality is enabled.
29818
+ * If the loaded file provides a backing track, it is used for playback.
29819
+ * If no backing track is provided, the midi synthesizer is used.
29820
+ */
29821
+ PlayerMode[PlayerMode["EnabledAutomatic"] = 1] = "EnabledAutomatic";
29822
+ /**
29823
+ * The player functionality is enabled and the synthesizer is used (even if a backing track is embedded in the file).
29824
+ */
29825
+ PlayerMode[PlayerMode["EnabledSynthesizer"] = 2] = "EnabledSynthesizer";
29826
+ /**
29827
+ * The player functionality is enabled. If the input data model has no backing track configured, the player might not work as expected (as playback completes instantly).
29828
+ */
29829
+ PlayerMode[PlayerMode["EnabledBackingTrack"] = 3] = "EnabledBackingTrack";
29830
+ /**
29831
+ * The player functionality is enabled and an external audio/video source is used as time axis.
29832
+ * The related player APIs need to be used to update the current position of the external audio source within alphaTab.
29833
+ */
29834
+ PlayerMode[PlayerMode["EnabledExternalMedia"] = 4] = "EnabledExternalMedia";
29835
+ })(exports.PlayerMode || (exports.PlayerMode = {}));
29297
29836
  /**
29298
29837
  * The player settings control how the audio playback and UI is behaving.
29299
29838
  * @json
@@ -29340,6 +29879,7 @@
29340
29879
  * @since 0.9.6
29341
29880
  * @defaultValue `false`
29342
29881
  * @category Player
29882
+ * @deprecated Use {@link playerMode} instead.
29343
29883
  * @remarks
29344
29884
  * This setting configures whether the player feature is enabled or not. Depending on the platform enabling the player needs some additional actions of the developer.
29345
29885
  * For the JavaScript version the [player.soundFont](/docs/reference/settings/player/soundfont) property must be set to the URL of the sound font that should be used or it must be loaded manually via API.
@@ -29348,6 +29888,37 @@
29348
29888
  * AlphaTab does not ship a default UI for the player. The API must be hooked up to some UI controls to allow the user to interact with the player.
29349
29889
  */
29350
29890
  this.enablePlayer = false;
29891
+ /**
29892
+ * Whether the player should be enabled and which mode it should use.
29893
+ * @since 1.6.0
29894
+ * @defaultValue `PlayerMode.Disabled`
29895
+ * @category Player
29896
+ * @remarks
29897
+ * This setting configures whether the player feature is enabled or not. Depending on the platform enabling the player needs some additional actions of the developer.
29898
+ *
29899
+ * **Synthesizer**
29900
+ *
29901
+ * If the synthesizer is used (via {@link PlayerMode.EnabledAutomatic} or {@link PlayerMode.EnabledSynthesizer}) a sound font is needed so that the midi synthesizer can produce the audio samples.
29902
+ *
29903
+ * For the JavaScript version the [player.soundFont](/docs/reference/settings/player/soundfont) property must be set to the URL of the sound font that should be used or it must be loaded manually via API.
29904
+ * For .net manually the soundfont must be loaded.
29905
+ *
29906
+ * **Backing Track**
29907
+ *
29908
+ * For a built-in backing track of the input file no additional data needs to be loaded (assuming everything is filled via the input file).
29909
+ * Otherwise the `score.backingTrack` needs to be filled before loading and the related sync points need to be configured.
29910
+ *
29911
+ * **External Media**
29912
+ *
29913
+ * For synchronizing alphaTab with an external media no data needs to be loaded into alphaTab. The configured sync points on the MasterBars are used
29914
+ * as reference to synchronize the external media with the internal time axis. Then the related APIs on the AlphaTabApi object need to be used
29915
+ * to update the playback state and exterrnal audio position during playback.
29916
+ *
29917
+ * **User Interface**
29918
+ *
29919
+ * AlphaTab does not ship a default UI for the player. The API must be hooked up to some UI controls to allow the user to interact with the player.
29920
+ */
29921
+ this.playerMode = exports.PlayerMode.Disabled;
29351
29922
  /**
29352
29923
  * Whether playback cursors should be displayed.
29353
29924
  * @since 0.9.6
@@ -30053,6 +30624,7 @@
30053
30624
  /*@target web*/
30054
30625
  o.set("outputmode", obj.outputMode);
30055
30626
  o.set("enableplayer", obj.enablePlayer);
30627
+ o.set("playermode", obj.playerMode);
30056
30628
  o.set("enablecursor", obj.enableCursor);
30057
30629
  o.set("enableanimatedbeatcursor", obj.enableAnimatedBeatCursor);
30058
30630
  o.set("enableelementhighlighting", obj.enableElementHighlighting);
@@ -30088,6 +30660,9 @@
30088
30660
  case "enableplayer":
30089
30661
  obj.enablePlayer = v;
30090
30662
  return true;
30663
+ case "playermode":
30664
+ obj.playerMode = JsonHelper.parseEnum(v, exports.PlayerMode);
30665
+ return true;
30091
30666
  case "enablecursor":
30092
30667
  obj.enableCursor = v;
30093
30668
  return true;
@@ -30322,6 +30897,39 @@
30322
30897
  }
30323
30898
  }
30324
30899
 
30900
+ class SyncPointDataSerializer {
30901
+ static fromJson(obj, m) {
30902
+ if (!m) {
30903
+ return;
30904
+ }
30905
+ JsonHelper.forEach(m, (v, k) => SyncPointDataSerializer.setProperty(obj, k, v));
30906
+ }
30907
+ static toJson(obj) {
30908
+ if (!obj) {
30909
+ return null;
30910
+ }
30911
+ const o = new Map();
30912
+ o.set("baroccurence", obj.barOccurence);
30913
+ o.set("modifiedtempo", obj.modifiedTempo);
30914
+ o.set("millisecondoffset", obj.millisecondOffset);
30915
+ return o;
30916
+ }
30917
+ static setProperty(obj, property, v) {
30918
+ switch (property) {
30919
+ case "baroccurence":
30920
+ obj.barOccurence = v;
30921
+ return true;
30922
+ case "modifiedtempo":
30923
+ obj.modifiedTempo = v;
30924
+ return true;
30925
+ case "millisecondoffset":
30926
+ obj.millisecondOffset = v;
30927
+ return true;
30928
+ }
30929
+ return false;
30930
+ }
30931
+ }
30932
+
30325
30933
  class AutomationSerializer {
30326
30934
  static fromJson(obj, m) {
30327
30935
  if (!m) {
@@ -30337,6 +30945,9 @@
30337
30945
  o.set("islinear", obj.isLinear);
30338
30946
  o.set("type", obj.type);
30339
30947
  o.set("value", obj.value);
30948
+ if (obj.syncPointValue) {
30949
+ o.set("syncpointvalue", SyncPointDataSerializer.toJson(obj.syncPointValue));
30950
+ }
30340
30951
  o.set("ratioposition", obj.ratioPosition);
30341
30952
  o.set("text", obj.text);
30342
30953
  return o;
@@ -30352,6 +30963,15 @@
30352
30963
  case "value":
30353
30964
  obj.value = v;
30354
30965
  return true;
30966
+ case "syncpointvalue":
30967
+ if (v) {
30968
+ obj.syncPointValue = new SyncPointData();
30969
+ SyncPointDataSerializer.fromJson(obj.syncPointValue, v);
30970
+ }
30971
+ else {
30972
+ obj.syncPointValue = undefined;
30973
+ }
30974
+ return true;
30355
30975
  case "ratioposition":
30356
30976
  obj.ratioPosition = v;
30357
30977
  return true;
@@ -30417,6 +31037,9 @@
30417
31037
  o.set("section", SectionSerializer.toJson(obj.section));
30418
31038
  }
30419
31039
  o.set("tempoautomations", obj.tempoAutomations.map(i => AutomationSerializer.toJson(i)));
31040
+ if (obj.syncPoints !== undefined) {
31041
+ o.set("syncpoints", obj.syncPoints?.map(i => AutomationSerializer.toJson(i)));
31042
+ }
30420
31043
  if (obj.fermata !== null) {
30421
31044
  const m = new Map();
30422
31045
  o.set("fermata", m);
@@ -30483,6 +31106,16 @@
30483
31106
  obj.tempoAutomations.push(i);
30484
31107
  }
30485
31108
  return true;
31109
+ case "syncpoints":
31110
+ if (v) {
31111
+ obj.syncPoints = [];
31112
+ for (const o of v) {
31113
+ const i = new Automation();
31114
+ AutomationSerializer.fromJson(i, o);
31115
+ obj.addSyncPoint(i);
31116
+ }
31117
+ }
31118
+ return true;
30486
31119
  case "fermata":
30487
31120
  obj.fermata = new Map();
30488
31121
  JsonHelper.forEach(v, (v, k) => {
@@ -31774,6 +32407,31 @@
31774
32407
  }
31775
32408
  }
31776
32409
 
32410
+ class BackingTrackSerializer {
32411
+ static fromJson(obj, m) {
32412
+ if (!m) {
32413
+ return;
32414
+ }
32415
+ JsonHelper.forEach(m, (v, k) => BackingTrackSerializer.setProperty(obj, k, v));
32416
+ }
32417
+ static toJson(obj) {
32418
+ if (!obj) {
32419
+ return null;
32420
+ }
32421
+ const o = new Map();
32422
+ o.set("padding", obj.padding);
32423
+ return o;
32424
+ }
32425
+ static setProperty(obj, property, v) {
32426
+ switch (property) {
32427
+ case "padding":
32428
+ obj.padding = v;
32429
+ return true;
32430
+ }
32431
+ return false;
32432
+ }
32433
+ }
32434
+
31777
32435
  class HeaderFooterStyleSerializer {
31778
32436
  static fromJson(obj, m) {
31779
32437
  if (!m) {
@@ -31885,6 +32543,9 @@
31885
32543
  o.set("defaultsystemslayout", obj.defaultSystemsLayout);
31886
32544
  o.set("systemslayout", obj.systemsLayout);
31887
32545
  o.set("stylesheet", RenderStylesheetSerializer.toJson(obj.stylesheet));
32546
+ if (obj.backingTrack) {
32547
+ o.set("backingtrack", BackingTrackSerializer.toJson(obj.backingTrack));
32548
+ }
31888
32549
  if (obj.style) {
31889
32550
  o.set("style", ScoreStyleSerializer.toJson(obj.style));
31890
32551
  }
@@ -31953,6 +32614,15 @@
31953
32614
  case "stylesheet":
31954
32615
  RenderStylesheetSerializer.fromJson(obj.stylesheet, v);
31955
32616
  return true;
32617
+ case "backingtrack":
32618
+ if (v) {
32619
+ obj.backingTrack = new BackingTrack();
32620
+ BackingTrackSerializer.fromJson(obj.backingTrack, v);
32621
+ }
32622
+ else {
32623
+ obj.backingTrack = undefined;
32624
+ }
32625
+ return true;
31956
32626
  case "style":
31957
32627
  if (v) {
31958
32628
  obj.style = new ScoreStyle();
@@ -32129,7 +32799,9 @@
32129
32799
  case MidiEventType.ProgramChange:
32130
32800
  return new ProgramChangeEvent(track, tick, JsonHelper.getValue(midiEvent, 'channel'), JsonHelper.getValue(midiEvent, 'program'));
32131
32801
  case MidiEventType.TempoChange:
32132
- return new TempoChangeEvent(tick, JsonHelper.getValue(midiEvent, 'microSecondsPerQuarterNote'));
32802
+ const tempo = new TempoChangeEvent(tick, 0);
32803
+ tempo.beatsPerMinute = JsonHelper.getValue(midiEvent, 'beatsPerMinute');
32804
+ return tempo;
32133
32805
  case MidiEventType.PitchBend:
32134
32806
  return new PitchBendEvent(track, tick, JsonHelper.getValue(midiEvent, 'channel'), JsonHelper.getValue(midiEvent, 'value'));
32135
32807
  case MidiEventType.PerNotePitchBend:
@@ -32204,7 +32876,7 @@
32204
32876
  o.set('program', midiEvent.program);
32205
32877
  break;
32206
32878
  case MidiEventType.TempoChange:
32207
- o.set('microSecondsPerQuarterNote', midiEvent.microSecondsPerQuarterNote);
32879
+ o.set('beatsPerMinute', midiEvent.beatsPerMinute);
32208
32880
  break;
32209
32881
  case MidiEventType.PitchBend:
32210
32882
  o.set('channel', midiEvent.channel);
@@ -32431,7 +33103,9 @@
32431
33103
  endTime: e.endTime,
32432
33104
  currentTick: e.currentTick,
32433
33105
  endTick: e.endTick,
32434
- isSeek: e.isSeek
33106
+ isSeek: e.isSeek,
33107
+ originalTempo: e.originalTempo,
33108
+ modifiedTempo: e.modifiedTempo
32435
33109
  });
32436
33110
  }
32437
33111
  onPlayerStateChanged(e) {
@@ -32477,7 +33151,9 @@
32477
33151
  endTime: e.endTime,
32478
33152
  currentTick: e.currentTick,
32479
33153
  endTick: e.endTick,
32480
- isSeek: e.isSeek
33154
+ isSeek: e.isSeek,
33155
+ originalTempo: e.originalTempo,
33156
+ modifiedTempo: e.modifiedTempo
32481
33157
  });
32482
33158
  }
32483
33159
  onMidiLoadFailed(e) {
@@ -33786,8 +34462,9 @@
33786
34462
  }
33787
34463
  addTempo(tick, tempo) {
33788
34464
  // bpm -> microsecond per quarter note
33789
- const tempoInUsq = (60000000 / tempo) | 0;
33790
- this._midiFile.addEvent(new TempoChangeEvent(tick, tempoInUsq));
34465
+ const tempoEvent = new TempoChangeEvent(tick, 0);
34466
+ tempoEvent.beatsPerMinute = tempo;
34467
+ this._midiFile.addEvent(tempoEvent);
33791
34468
  }
33792
34469
  addBend(track, tick, channel, value) {
33793
34470
  if (value >= SynthConstants.MaxPitchWheel) {
@@ -35059,6 +35736,10 @@
35059
35736
  * Gets or sets whether transposition pitches should be applied to the individual midi events or not.
35060
35737
  */
35061
35738
  this.applyTranspositionPitches = true;
35739
+ /**
35740
+ * The computed sync points for synchronizing the midi file with an external backing track.
35741
+ */
35742
+ this.syncPoints = [];
35062
35743
  /**
35063
35744
  * Gets the transposition pitches for the individual midi channels.
35064
35745
  */
@@ -35085,13 +35766,17 @@
35085
35766
  let previousMasterBar = null;
35086
35767
  let currentTempo = this._score.tempo;
35087
35768
  // store the previous played bar for repeats
35769
+ const barOccurence = new Map();
35088
35770
  while (!controller.finished) {
35089
35771
  const index = controller.index;
35090
35772
  const bar = this._score.masterBars[index];
35091
35773
  const currentTick = controller.currentTick;
35092
35774
  controller.processCurrent();
35093
35775
  if (controller.shouldPlay) {
35094
- this.generateMasterBar(bar, previousMasterBar, currentTick, currentTempo);
35776
+ let occurence = barOccurence.has(index) ? barOccurence.get(index) : -1;
35777
+ occurence++;
35778
+ barOccurence.set(index, occurence);
35779
+ this.generateMasterBar(bar, previousMasterBar, currentTick, currentTempo, occurence);
35095
35780
  if (bar.tempoAutomations.length > 0) {
35096
35781
  currentTempo = bar.tempoAutomations[0].value;
35097
35782
  }
@@ -35160,7 +35845,7 @@
35160
35845
  const value = Math.max(-32768, Math.min(32767, data * 8 - 1));
35161
35846
  return Math.max(value, -1) + 1;
35162
35847
  }
35163
- generateMasterBar(masterBar, previousMasterBar, currentTick, currentTempo) {
35848
+ generateMasterBar(masterBar, previousMasterBar, currentTick, currentTempo, barOccurence) {
35164
35849
  // time signature
35165
35850
  if (!previousMasterBar ||
35166
35851
  previousMasterBar.timeSignatureDenominator !== masterBar.timeSignatureDenominator ||
@@ -35187,6 +35872,15 @@
35187
35872
  else {
35188
35873
  masterBarLookup.tempoChanges.push(new MasterBarTickLookupTempoChange(currentTick, currentTempo));
35189
35874
  }
35875
+ const syncPoints = masterBar.syncPoints;
35876
+ if (syncPoints) {
35877
+ for (const syncPoint of syncPoints) {
35878
+ if (syncPoint.syncPointValue.barOccurence === barOccurence) {
35879
+ const tick = currentTick + masterBarDuration * syncPoint.ratioPosition;
35880
+ this.syncPoints.push(new BackingTrackSyncPoint(tick, syncPoint.syncPointValue));
35881
+ }
35882
+ }
35883
+ }
35190
35884
  masterBarLookup.masterBar = masterBar;
35191
35885
  masterBarLookup.start = currentTick;
35192
35886
  masterBarLookup.end = masterBarLookup.start + masterBarDuration;
@@ -37384,6 +38078,213 @@
37384
38078
  }
37385
38079
  }
37386
38080
 
38081
+ class BackingTrackAudioSynthesizer {
38082
+ constructor() {
38083
+ this._midiEventQueue = new Queue();
38084
+ this.masterVolume = 1;
38085
+ this.metronomeVolume = 0;
38086
+ this.outSampleRate = 44100;
38087
+ this.currentTempo = 120;
38088
+ this.timeSignatureNumerator = 4;
38089
+ this.timeSignatureDenominator = 4;
38090
+ this.activeVoiceCount = 0;
38091
+ }
38092
+ noteOffAll(_immediate) {
38093
+ }
38094
+ resetSoft() {
38095
+ }
38096
+ resetPresets() {
38097
+ }
38098
+ loadPresets(_hydra, _instrumentPrograms, _percussionKeys, _append) {
38099
+ }
38100
+ setupMetronomeChannel(_metronomeVolume) {
38101
+ }
38102
+ synthesizeSilent(_sampleCount) {
38103
+ this.fakeSynthesize();
38104
+ }
38105
+ processMidiMessage(e) {
38106
+ }
38107
+ dispatchEvent(synthEvent) {
38108
+ this._midiEventQueue.enqueue(synthEvent);
38109
+ }
38110
+ synthesize(_buffer, _bufferPos, _sampleCount) {
38111
+ return this.fakeSynthesize();
38112
+ }
38113
+ fakeSynthesize() {
38114
+ const processedEvents = [];
38115
+ while (!this._midiEventQueue.isEmpty) {
38116
+ const m = this._midiEventQueue.dequeue();
38117
+ if (m.isMetronome && this.metronomeVolume > 0) ;
38118
+ else if (m.event) {
38119
+ this.processMidiMessage(m.event);
38120
+ }
38121
+ processedEvents.push(m);
38122
+ }
38123
+ return processedEvents;
38124
+ }
38125
+ applyTranspositionPitches(transpositionPitches) {
38126
+ }
38127
+ setChannelTranspositionPitch(channel, semitones) {
38128
+ }
38129
+ channelSetMute(channel, mute) {
38130
+ }
38131
+ channelSetSolo(channel, solo) {
38132
+ }
38133
+ resetChannelStates() {
38134
+ }
38135
+ channelSetMixVolume(channel, volume) {
38136
+ }
38137
+ hasSamplesForProgram(program) {
38138
+ return true;
38139
+ }
38140
+ hasSamplesForPercussion(key) {
38141
+ return true;
38142
+ }
38143
+ }
38144
+ class BackingTrackPlayer extends AlphaSynthBase {
38145
+ constructor(backingTrackOutput, bufferTimeInMilliseconds) {
38146
+ super(backingTrackOutput, new BackingTrackAudioSynthesizer(), bufferTimeInMilliseconds);
38147
+ this.synthesizer.output = backingTrackOutput;
38148
+ this._backingTrackOutput = backingTrackOutput;
38149
+ backingTrackOutput.timeUpdate.on(timePosition => {
38150
+ const alphaTabTimePosition = this.sequencer.mainTimePositionFromBackingTrack(timePosition, backingTrackOutput.backingTrackDuration);
38151
+ this.sequencer.fillMidiEventQueueToEndTime(alphaTabTimePosition);
38152
+ this.synthesizer.fakeSynthesize();
38153
+ this.updateTimePosition(alphaTabTimePosition, false);
38154
+ this.checkForFinish();
38155
+ });
38156
+ }
38157
+ updateMasterVolume(value) {
38158
+ super.updateMasterVolume(value);
38159
+ this._backingTrackOutput.masterVolume = value;
38160
+ }
38161
+ updatePlaybackSpeed(value) {
38162
+ super.updatePlaybackSpeed(value);
38163
+ this._backingTrackOutput.playbackRate = value;
38164
+ }
38165
+ onSampleRequest() {
38166
+ }
38167
+ loadMidiFile(midi) {
38168
+ if (!this.isSoundFontLoaded) {
38169
+ this.isSoundFontLoaded = true;
38170
+ this.soundFontLoaded.trigger();
38171
+ }
38172
+ super.loadMidiFile(midi);
38173
+ }
38174
+ updateTimePosition(timePosition, isSeek) {
38175
+ super.updateTimePosition(timePosition, isSeek);
38176
+ if (isSeek) {
38177
+ this._backingTrackOutput.seekTo(this.sequencer.mainTimePositionToBackingTrack(timePosition, this._backingTrackOutput.backingTrackDuration));
38178
+ }
38179
+ }
38180
+ loadBackingTrack(score, syncPoints) {
38181
+ const backingTrackInfo = score.backingTrack;
38182
+ if (backingTrackInfo) {
38183
+ this._backingTrackOutput.loadBackingTrack(backingTrackInfo);
38184
+ this.sequencer.mainUpdateSyncPoints(syncPoints);
38185
+ this.timePosition = 0;
38186
+ }
38187
+ }
38188
+ }
38189
+
38190
+ class ExternalMediaSynthOutput {
38191
+ constructor() {
38192
+ // fake rate
38193
+ this.sampleRate = 44100;
38194
+ this._padding = 0;
38195
+ this._seekPosition = 0;
38196
+ this.ready = new EventEmitter();
38197
+ this.samplesPlayed = new EventEmitterOfT();
38198
+ this.timeUpdate = new EventEmitterOfT();
38199
+ this.sampleRequest = new EventEmitter();
38200
+ }
38201
+ get handler() {
38202
+ return this._handler;
38203
+ }
38204
+ set handler(value) {
38205
+ if (value) {
38206
+ if (this._seekPosition !== 0) {
38207
+ value.seekTo(this._seekPosition);
38208
+ this._seekPosition = 0;
38209
+ }
38210
+ }
38211
+ this._handler = value;
38212
+ }
38213
+ get backingTrackDuration() {
38214
+ return this.handler?.backingTrackDuration ?? 0;
38215
+ }
38216
+ get playbackRate() {
38217
+ return this.handler?.playbackRate ?? 1;
38218
+ }
38219
+ set playbackRate(value) {
38220
+ const handler = this.handler;
38221
+ if (handler) {
38222
+ handler.playbackRate = value;
38223
+ }
38224
+ }
38225
+ get masterVolume() {
38226
+ return this.handler?.masterVolume ?? 1;
38227
+ }
38228
+ set masterVolume(value) {
38229
+ const handler = this.handler;
38230
+ if (handler) {
38231
+ handler.masterVolume = value;
38232
+ }
38233
+ }
38234
+ seekTo(time) {
38235
+ const handler = this.handler;
38236
+ if (handler) {
38237
+ handler.seekTo(time - this._padding);
38238
+ }
38239
+ else {
38240
+ this._seekPosition = time - this._padding;
38241
+ }
38242
+ }
38243
+ loadBackingTrack(backingTrack) {
38244
+ this._padding = backingTrack.padding;
38245
+ }
38246
+ open(_bufferTimeInMilliseconds) {
38247
+ this.ready.trigger();
38248
+ }
38249
+ updatePosition(currentTime) {
38250
+ this.timeUpdate.trigger(currentTime + this._padding);
38251
+ }
38252
+ play() {
38253
+ this.handler?.play();
38254
+ }
38255
+ destroy() {
38256
+ }
38257
+ pause() {
38258
+ this.handler?.pause();
38259
+ }
38260
+ addSamples(_samples) {
38261
+ }
38262
+ resetSamples() {
38263
+ }
38264
+ activate() {
38265
+ }
38266
+ async enumerateOutputDevices() {
38267
+ const empty = [];
38268
+ return empty;
38269
+ }
38270
+ async setOutputDevice(_device) {
38271
+ }
38272
+ async getOutputDevice() {
38273
+ return null;
38274
+ }
38275
+ }
38276
+ class ExternalMediaPlayer extends BackingTrackPlayer {
38277
+ get handler() {
38278
+ return this.output.handler;
38279
+ }
38280
+ set handler(value) {
38281
+ this.output.handler = value;
38282
+ }
38283
+ constructor(bufferTimeInMilliseconds) {
38284
+ super(new ExternalMediaSynthOutput(), bufferTimeInMilliseconds);
38285
+ }
38286
+ }
38287
+
37387
38288
  class SelectionInfo {
37388
38289
  constructor(beat) {
37389
38290
  this.bounds = null;
@@ -37397,6 +38298,12 @@
37397
38298
  * @csharp_public
37398
38299
  */
37399
38300
  class AlphaTabApiBase {
38301
+ /**
38302
+ * The actual player mode which is currently active (e.g. allows determining whether a backing track or the synthesizer is active).
38303
+ */
38304
+ get actualPlayerMode() {
38305
+ return this._actualPlayerMode;
38306
+ }
37400
38307
  /**
37401
38308
  * The score holding all information about the song being rendered
37402
38309
  * @category Properties - Core
@@ -37466,10 +38373,8 @@
37466
38373
  this._isDestroyed = false;
37467
38374
  this._score = null;
37468
38375
  this._tracks = [];
38376
+ this._actualPlayerMode = exports.PlayerMode.Disabled;
37469
38377
  this._tickCache = null;
37470
- /**
37471
- * Gets the alphaSynth player used for playback. This is the low-level API to the Midi synthesizer used for playback.
37472
- */
37473
38378
  /**
37474
38379
  * The alphaSynth player used for playback.
37475
38380
  * @remarks
@@ -38506,6 +39411,10 @@
38506
39411
  this.container = uiFacade.rootContainer;
38507
39412
  uiFacade.initialize(this, settings);
38508
39413
  Logger.logLevel = this.settings.core.logLevel;
39414
+ // backwards compatibility: remove in 2.0
39415
+ if (this.settings.player.playerMode === exports.PlayerMode.Disabled && this.settings.player.enablePlayer) {
39416
+ this.settings.player.playerMode = exports.PlayerMode.EnabledAutomatic;
39417
+ }
38509
39418
  Environment.printEnvironmentInfo(false);
38510
39419
  this.canvasElement = uiFacade.createCanvasElement();
38511
39420
  this.container.appendChild(this.canvasElement);
@@ -38549,7 +39458,7 @@
38549
39458
  this.appendRenderResult(null); // marks last element
38550
39459
  });
38551
39460
  this.renderer.error.on(this.onError.bind(this));
38552
- if (this.settings.player.enablePlayer) {
39461
+ if (this.settings.player.playerMode !== exports.PlayerMode.Disabled) {
38553
39462
  this.setupPlayer();
38554
39463
  }
38555
39464
  this.setupClickHandling();
@@ -38641,10 +39550,9 @@
38641
39550
  }
38642
39551
  this.renderer.updateSettings(this.settings);
38643
39552
  // enable/disable player if needed
38644
- if (this.settings.player.enablePlayer) {
38645
- this.setupPlayer();
38646
- if (score) {
38647
- this.player?.applyTranspositionPitches(MidiFileGenerator.buildTranspositionPitches(score, this.settings));
39553
+ if (this.settings.player.playerMode !== exports.PlayerMode.Disabled) {
39554
+ if (this.setupPlayer() && score) {
39555
+ this.loadMidiForScore();
38648
39556
  }
38649
39557
  }
38650
39558
  else {
@@ -39576,13 +40484,51 @@
39576
40484
  this.destroyCursors();
39577
40485
  }
39578
40486
  setupPlayer() {
40487
+ let mode = this.settings.player.playerMode;
40488
+ if (mode === exports.PlayerMode.EnabledAutomatic) {
40489
+ const score = this.score;
40490
+ if (!score) {
40491
+ return false;
40492
+ }
40493
+ if (score?.backingTrack?.rawAudioFile) {
40494
+ mode = exports.PlayerMode.EnabledBackingTrack;
40495
+ }
40496
+ else {
40497
+ mode = exports.PlayerMode.EnabledSynthesizer;
40498
+ }
40499
+ }
40500
+ if (mode !== this._actualPlayerMode) {
40501
+ this.destroyPlayer();
40502
+ }
39579
40503
  this.updateCursors();
39580
- if (this.player) {
39581
- return;
40504
+ this._actualPlayerMode = mode;
40505
+ switch (mode) {
40506
+ case exports.PlayerMode.Disabled:
40507
+ this.destroyPlayer();
40508
+ return false;
40509
+ case exports.PlayerMode.EnabledSynthesizer:
40510
+ if (this.player) {
40511
+ return true;
40512
+ }
40513
+ // new player needed
40514
+ this.player = this.uiFacade.createWorkerPlayer();
40515
+ break;
40516
+ case exports.PlayerMode.EnabledBackingTrack:
40517
+ if (this.player) {
40518
+ return true;
40519
+ }
40520
+ // new player needed
40521
+ this.player = this.uiFacade.createBackingTrackPlayer();
40522
+ break;
40523
+ case exports.PlayerMode.EnabledExternalMedia:
40524
+ if (this.player) {
40525
+ return true;
40526
+ }
40527
+ this.player = new ExternalMediaPlayer(this.settings.player.bufferTimeInMilliseconds);
40528
+ break;
39582
40529
  }
39583
- this.player = this.uiFacade.createWorkerPlayer();
39584
40530
  if (!this.player) {
39585
- return;
40531
+ return false;
39586
40532
  }
39587
40533
  this.player.ready.on(() => {
39588
40534
  this.loadMidiForScore();
@@ -39611,6 +40557,7 @@
39611
40557
  this.player.playbackRangeChanged.on(this.onPlaybackRangeChanged.bind(this));
39612
40558
  this.player.finished.on(this.onPlayerFinished.bind(this));
39613
40559
  this.setupPlayerEvents();
40560
+ return false;
39614
40561
  }
39615
40562
  loadMidiForScore() {
39616
40563
  if (!this.score) {
@@ -39632,6 +40579,7 @@
39632
40579
  const player = this.player;
39633
40580
  if (player) {
39634
40581
  player.loadMidiFile(midiFile);
40582
+ player.loadBackingTrack(score, generator.syncPoints);
39635
40583
  player.applyTranspositionPitches(generator.transpositionPitches);
39636
40584
  }
39637
40585
  }
@@ -40048,7 +40996,7 @@
40048
40996
  this._selectionWrapper = cursors.selectionWrapper;
40049
40997
  }
40050
40998
  if (this._currentBeat !== null) {
40051
- this.cursorUpdateBeat(this._currentBeat, false, this._previousTick > 10, true);
40999
+ this.cursorUpdateBeat(this._currentBeat, false, this._previousTick > 10, 1, true);
40052
41000
  }
40053
41001
  }
40054
41002
  else if (!this.settings.player.enableCursor && this._cursorWrapper) {
@@ -40063,13 +41011,14 @@
40063
41011
  // we need to update our position caches if we render a tablature
40064
41012
  this.renderer.postRenderFinished.on(() => {
40065
41013
  this._currentBeat = null;
40066
- this.cursorUpdateTick(this._previousTick, false, this._previousTick > 10);
41014
+ this.cursorUpdateTick(this._previousTick, false, 1, this._previousTick > 10);
40067
41015
  });
40068
41016
  if (this.player) {
40069
41017
  this.player.positionChanged.on(e => {
40070
41018
  this._previousTick = e.currentTick;
40071
41019
  this.uiFacade.beginInvoke(() => {
40072
- this.cursorUpdateTick(e.currentTick, false, false, e.isSeek);
41020
+ const cursorSpeed = e.modifiedTempo / e.originalTempo;
41021
+ this.cursorUpdateTick(e.currentTick, false, cursorSpeed, false, e.isSeek);
40073
41022
  });
40074
41023
  });
40075
41024
  this.player.stateChanged.on(e => {
@@ -40090,14 +41039,15 @@
40090
41039
  * @param stop
40091
41040
  * @param shouldScroll whether we should scroll to the bar (if scrolling is active)
40092
41041
  */
40093
- cursorUpdateTick(tick, stop, shouldScroll = false, forceUpdate = false) {
41042
+ cursorUpdateTick(tick, stop, cursorSpeed, shouldScroll = false, forceUpdate = false) {
41043
+ this._previousTick = tick;
40094
41044
  const cache = this._tickCache;
40095
41045
  if (cache) {
40096
41046
  const tracks = this._trackIndexLookup;
40097
41047
  if (tracks != null && tracks.size > 0) {
40098
41048
  const beat = cache.findBeat(tracks, tick, this._currentBeat);
40099
41049
  if (beat) {
40100
- this.cursorUpdateBeat(beat, stop, shouldScroll, forceUpdate || this.playerState === PlayerState.Paused);
41050
+ this.cursorUpdateBeat(beat, stop, shouldScroll, cursorSpeed, forceUpdate || this.playerState === PlayerState.Paused);
40101
41051
  }
40102
41052
  }
40103
41053
  }
@@ -40105,7 +41055,7 @@
40105
41055
  /**
40106
41056
  * updates the cursors to highlight the specified beat
40107
41057
  */
40108
- cursorUpdateBeat(lookupResult, stop, shouldScroll, forceUpdate = false) {
41058
+ cursorUpdateBeat(lookupResult, stop, shouldScroll, cursorSpeed, forceUpdate = false) {
40109
41059
  const beat = lookupResult.beat;
40110
41060
  const nextBeat = lookupResult.nextBeat?.beat ?? null;
40111
41061
  const duration = lookupResult.duration;
@@ -40137,7 +41087,7 @@
40137
41087
  this._previousCursorCache = cache;
40138
41088
  this._previousStateForCursor = this._playerState;
40139
41089
  this.uiFacade.beginInvoke(() => {
40140
- this.internalCursorUpdateBeat(beat, nextBeat, duration, stop, beatsToHighlight, cache, beatBoundings, shouldScroll, lookupResult.cursorMode);
41090
+ this.internalCursorUpdateBeat(beat, nextBeat, duration, stop, beatsToHighlight, cache, beatBoundings, shouldScroll, lookupResult.cursorMode, cursorSpeed);
40141
41091
  });
40142
41092
  }
40143
41093
  /**
@@ -40202,7 +41152,7 @@
40202
41152
  }
40203
41153
  }
40204
41154
  }
40205
- internalCursorUpdateBeat(beat, nextBeat, duration, stop, beatsToHighlight, cache, beatBoundings, shouldScroll, cursorMode) {
41155
+ internalCursorUpdateBeat(beat, nextBeat, duration, stop, beatsToHighlight, cache, beatBoundings, shouldScroll, cursorMode, cursorSpeed) {
40206
41156
  const barCursor = this._barCursor;
40207
41157
  const beatCursor = this._beatCursor;
40208
41158
  const barBoundings = beatBoundings.barBounds.masterBarBounds;
@@ -40211,12 +41161,29 @@
40211
41161
  if (barCursor) {
40212
41162
  barCursor.setBounds(barBounds.x, barBounds.y, barBounds.w, barBounds.h);
40213
41163
  }
41164
+ let nextBeatX = barBoundings.visualBounds.x + barBoundings.visualBounds.w;
41165
+ // get position of next beat on same system
41166
+ if (nextBeat && cursorMode === MidiTickLookupFindBeatResultCursorMode.ToNextBext) {
41167
+ // if we are moving within the same bar or to the next bar
41168
+ // transition to the next beat, otherwise transition to the end of the bar.
41169
+ const nextBeatBoundings = cache.findBeat(nextBeat);
41170
+ if (nextBeatBoundings &&
41171
+ nextBeatBoundings.barBounds.masterBarBounds.staffSystemBounds === barBoundings.staffSystemBounds) {
41172
+ nextBeatX = nextBeatBoundings.onNotesX;
41173
+ }
41174
+ }
41175
+ let startBeatX = beatBoundings.onNotesX;
40214
41176
  if (beatCursor) {
40215
- // move beat to start position immediately
41177
+ // relative positioning of the cursor
40216
41178
  if (this.settings.player.enableAnimatedBeatCursor) {
40217
- beatCursor.stopAnimation();
41179
+ const animationWidth = nextBeatX - beatBoundings.onNotesX;
41180
+ const relativePosition = this._previousTick - this._currentBeat.start;
41181
+ const ratioPosition = relativePosition / this._currentBeat.tickDuration;
41182
+ startBeatX = beatBoundings.onNotesX + animationWidth * ratioPosition;
41183
+ duration -= duration * ratioPosition;
41184
+ beatCursor.transitionToX(0, startBeatX);
40218
41185
  }
40219
- beatCursor.setBounds(beatBoundings.onNotesX, barBounds.y, 1, barBounds.h);
41186
+ beatCursor.setBounds(startBeatX, barBounds.y, 1, barBounds.h);
40220
41187
  }
40221
41188
  // if playing, animate the cursor to the next beat
40222
41189
  if (this.settings.player.enableElementHighlighting) {
@@ -40236,22 +41203,11 @@
40236
41203
  shouldNotifyBeatChange = true;
40237
41204
  }
40238
41205
  if (this.settings.player.enableAnimatedBeatCursor && beatCursor) {
40239
- let nextBeatX = barBoundings.visualBounds.x + barBoundings.visualBounds.w;
40240
- // get position of next beat on same system
40241
- if (nextBeat && cursorMode === MidiTickLookupFindBeatResultCursorMode.ToNextBext) {
40242
- // if we are moving within the same bar or to the next bar
40243
- // transition to the next beat, otherwise transition to the end of the bar.
40244
- const nextBeatBoundings = cache.findBeat(nextBeat);
40245
- if (nextBeatBoundings &&
40246
- nextBeatBoundings.barBounds.masterBarBounds.staffSystemBounds === barBoundings.staffSystemBounds) {
40247
- nextBeatX = nextBeatBoundings.onNotesX;
40248
- }
40249
- }
40250
41206
  if (isPlayingUpdate) {
40251
41207
  // we need to put the transition to an own animation frame
40252
41208
  // otherwise the stop animation above is not applied.
40253
41209
  this.uiFacade.beginInvoke(() => {
40254
- beatCursor.transitionToX(duration / this.playbackSpeed, nextBeatX);
41210
+ beatCursor.transitionToX(duration / cursorSpeed, nextBeatX);
40255
41211
  });
40256
41212
  }
40257
41213
  }
@@ -40282,7 +41238,7 @@
40282
41238
  if (this._isDestroyed) {
40283
41239
  return;
40284
41240
  }
40285
- if (this.settings.player.enablePlayer &&
41241
+ if (this.settings.player.playerMode !== exports.PlayerMode.Disabled &&
40286
41242
  this.settings.player.enableCursor &&
40287
41243
  this.settings.player.enableUserInteraction) {
40288
41244
  this._selectionStart = new SelectionInfo(beat);
@@ -40324,7 +41280,7 @@
40324
41280
  if (this._isDestroyed) {
40325
41281
  return;
40326
41282
  }
40327
- if (this.settings.player.enablePlayer &&
41283
+ if (this.settings.player.playerMode !== exports.PlayerMode.Disabled &&
40328
41284
  this.settings.player.enableCursor &&
40329
41285
  this.settings.player.enableUserInteraction) {
40330
41286
  if (this._selectionEnd) {
@@ -40345,7 +41301,7 @@
40345
41301
  // move to selection start
40346
41302
  this._currentBeat = null; // reset current beat so it is updating the cursor
40347
41303
  if (this._playerState === PlayerState.Paused) {
40348
- this.cursorUpdateTick(this._tickCache.getBeatStart(this._selectionStart.beat), false);
41304
+ this.cursorUpdateTick(this._tickCache.getBeatStart(this._selectionStart.beat), false, 1);
40349
41305
  }
40350
41306
  this.tickPosition = realMasterBarStart + this._selectionStart.beat.playbackStart;
40351
41307
  // set playback range
@@ -40457,7 +41413,7 @@
40457
41413
  });
40458
41414
  this.renderer.postRenderFinished.on(() => {
40459
41415
  if (!this._selectionStart ||
40460
- !this.settings.player.enablePlayer ||
41416
+ this.settings.player.playerMode === exports.PlayerMode.Disabled ||
40461
41417
  !this.settings.player.enableCursor ||
40462
41418
  !this.settings.player.enableUserInteraction) {
40463
41419
  return;
@@ -40535,6 +41491,9 @@
40535
41491
  }
40536
41492
  this.scoreLoaded.trigger(score);
40537
41493
  this.uiFacade.triggerEvent(this.container, 'scoreLoaded', score);
41494
+ if (this.setupPlayer()) {
41495
+ this.loadMidiForScore();
41496
+ }
40538
41497
  }
40539
41498
  onResize(e) {
40540
41499
  if (this._isDestroyed) {
@@ -41275,52 +42234,14 @@
41275
42234
  }
41276
42235
  }
41277
42236
  /**
42237
+ * Some shared web audio stuff.
41278
42238
  * @target web
41279
42239
  */
41280
- class AlphaSynthWebAudioOutputBase {
41281
- constructor() {
41282
- this._context = null;
41283
- this._buffer = null;
41284
- this._source = null;
41285
- this.ready = new EventEmitter();
41286
- this.samplesPlayed = new EventEmitterOfT();
41287
- this.sampleRequest = new EventEmitter();
41288
- this._knownDevices = [];
41289
- }
41290
- get sampleRate() {
41291
- return this._context ? this._context.sampleRate : AlphaSynthWebAudioOutputBase.PreferredSampleRate;
41292
- }
41293
- activate(resumedCallback) {
41294
- if (!this._context) {
41295
- this._context = this.createAudioContext();
41296
- }
41297
- if (this._context.state === 'suspended' || this._context.state === 'interrupted') {
41298
- Logger.debug('WebAudio', 'Audio Context is suspended, trying resume');
41299
- this._context.resume().then(() => {
41300
- Logger.debug('WebAudio', `Audio Context resume success: state=${this._context?.state}, sampleRate:${this._context?.sampleRate}`);
41301
- if (resumedCallback) {
41302
- resumedCallback();
41303
- }
41304
- }, reason => {
41305
- Logger.warning('WebAudio', `Audio Context resume failed: state=${this._context?.state}, sampleRate:${this._context?.sampleRate}, reason=${reason}`);
41306
- });
41307
- }
42240
+ class WebAudioHelper {
42241
+ static findKnownDevice(sinkId) {
42242
+ return WebAudioHelper._knownDevices.find(d => d.deviceId === sinkId);
41308
42243
  }
41309
- patchIosSampleRate() {
41310
- const ua = navigator.userAgent;
41311
- if (ua.indexOf('iPhone') !== -1 || ua.indexOf('iPad') !== -1) {
41312
- const context = this.createAudioContext();
41313
- const buffer = context.createBuffer(1, 1, AlphaSynthWebAudioOutputBase.PreferredSampleRate);
41314
- const dummy = context.createBufferSource();
41315
- dummy.buffer = buffer;
41316
- dummy.connect(context.destination);
41317
- dummy.start(0);
41318
- dummy.disconnect(0);
41319
- // tslint:disable-next-line: no-floating-promises
41320
- context.close();
41321
- }
41322
- }
41323
- createAudioContext() {
42244
+ static createAudioContext() {
41324
42245
  if ('AudioContext' in Environment.globalThis) {
41325
42246
  return new AudioContext();
41326
42247
  }
@@ -41329,73 +42250,18 @@
41329
42250
  }
41330
42251
  throw new AlphaTabError(exports.AlphaTabErrorType.General, 'AudioContext not found');
41331
42252
  }
41332
- open(bufferTimeInMilliseconds) {
41333
- this.patchIosSampleRate();
41334
- this._context = this.createAudioContext();
41335
- const ctx = this._context;
41336
- if (ctx.state === 'suspended') {
41337
- this.registerResumeHandler();
41338
- }
41339
- }
41340
- registerResumeHandler() {
41341
- this._resumeHandler = (() => {
41342
- this.activate(() => {
41343
- this.unregisterResumeHandler();
41344
- });
41345
- }).bind(this);
41346
- document.body.addEventListener('touchend', this._resumeHandler, false);
41347
- document.body.addEventListener('click', this._resumeHandler, false);
41348
- }
41349
- unregisterResumeHandler() {
41350
- const resumeHandler = this._resumeHandler;
41351
- if (resumeHandler) {
41352
- document.body.removeEventListener('touchend', resumeHandler, false);
41353
- document.body.removeEventListener('click', resumeHandler, false);
41354
- }
41355
- }
41356
- play() {
41357
- const ctx = this._context;
41358
- this.activate();
41359
- // create an empty buffer source (silence)
41360
- this._buffer = ctx.createBuffer(2, AlphaSynthWebAudioOutputBase.BufferSize, ctx.sampleRate);
41361
- this._source = ctx.createBufferSource();
41362
- this._source.buffer = this._buffer;
41363
- this._source.loop = true;
41364
- }
41365
- pause() {
41366
- if (this._source) {
41367
- this._source.stop(0);
41368
- this._source.disconnect();
41369
- }
41370
- this._source = null;
41371
- }
41372
- destroy() {
41373
- this.pause();
41374
- this._context?.close();
41375
- this._context = null;
41376
- this.unregisterResumeHandler();
41377
- }
41378
- onSamplesPlayed(numberOfSamples) {
41379
- this.samplesPlayed.trigger(numberOfSamples);
41380
- }
41381
- onSampleRequest() {
41382
- this.sampleRequest.trigger();
41383
- }
41384
- onReady() {
41385
- this.ready.trigger();
41386
- }
41387
- async checkSinkIdSupport() {
42253
+ static async checkSinkIdSupport() {
41388
42254
  // https://caniuse.com/mdn-api_audiocontext_sinkid
41389
- const context = this._context ?? this.createAudioContext();
42255
+ const context = WebAudioHelper.createAudioContext();
41390
42256
  if (!('setSinkId' in context)) {
41391
42257
  Logger.warning('WebAudio', 'Browser does not support changing the output device');
41392
42258
  return false;
41393
42259
  }
41394
42260
  return true;
41395
42261
  }
41396
- async enumerateOutputDevices() {
42262
+ static async enumerateOutputDevices() {
41397
42263
  try {
41398
- if (!(await this.checkSinkIdSupport())) {
42264
+ if (!(await WebAudioHelper.checkSinkIdSupport())) {
41399
42265
  return [];
41400
42266
  }
41401
42267
  // Request permissions
@@ -41436,7 +42302,7 @@
41436
42302
  if (defaultDevice) {
41437
42303
  defaultDevice.isDefault = true;
41438
42304
  }
41439
- this._knownDevices = final;
42305
+ WebAudioHelper._knownDevices = final;
41440
42306
  return final;
41441
42307
  }
41442
42308
  catch (e) {
@@ -41444,8 +42310,113 @@
41444
42310
  return [];
41445
42311
  }
41446
42312
  }
42313
+ }
42314
+ WebAudioHelper._knownDevices = [];
42315
+ /**
42316
+ * @target web
42317
+ */
42318
+ class AlphaSynthWebAudioOutputBase {
42319
+ constructor() {
42320
+ this._context = null;
42321
+ this._buffer = null;
42322
+ this._source = null;
42323
+ this.ready = new EventEmitter();
42324
+ this.samplesPlayed = new EventEmitterOfT();
42325
+ this.sampleRequest = new EventEmitter();
42326
+ }
42327
+ get sampleRate() {
42328
+ return this._context ? this._context.sampleRate : AlphaSynthWebAudioOutputBase.PreferredSampleRate;
42329
+ }
42330
+ activate(resumedCallback) {
42331
+ if (!this._context) {
42332
+ this._context = WebAudioHelper.createAudioContext();
42333
+ }
42334
+ if (this._context.state === 'suspended' || this._context.state === 'interrupted') {
42335
+ Logger.debug('WebAudio', 'Audio Context is suspended, trying resume');
42336
+ this._context.resume().then(() => {
42337
+ Logger.debug('WebAudio', `Audio Context resume success: state=${this._context?.state}, sampleRate:${this._context?.sampleRate}`);
42338
+ if (resumedCallback) {
42339
+ resumedCallback();
42340
+ }
42341
+ }, reason => {
42342
+ Logger.warning('WebAudio', `Audio Context resume failed: state=${this._context?.state}, sampleRate:${this._context?.sampleRate}, reason=${reason}`);
42343
+ });
42344
+ }
42345
+ }
42346
+ patchIosSampleRate() {
42347
+ const ua = navigator.userAgent;
42348
+ if (ua.indexOf('iPhone') !== -1 || ua.indexOf('iPad') !== -1) {
42349
+ const context = WebAudioHelper.createAudioContext();
42350
+ const buffer = context.createBuffer(1, 1, AlphaSynthWebAudioOutputBase.PreferredSampleRate);
42351
+ const dummy = context.createBufferSource();
42352
+ dummy.buffer = buffer;
42353
+ dummy.connect(context.destination);
42354
+ dummy.start(0);
42355
+ dummy.disconnect(0);
42356
+ // tslint:disable-next-line: no-floating-promises
42357
+ context.close();
42358
+ }
42359
+ }
42360
+ open(bufferTimeInMilliseconds) {
42361
+ this.patchIosSampleRate();
42362
+ this._context = WebAudioHelper.createAudioContext();
42363
+ const ctx = this._context;
42364
+ if (ctx.state === 'suspended') {
42365
+ this.registerResumeHandler();
42366
+ }
42367
+ }
42368
+ registerResumeHandler() {
42369
+ this._resumeHandler = (() => {
42370
+ this.activate(() => {
42371
+ this.unregisterResumeHandler();
42372
+ });
42373
+ }).bind(this);
42374
+ document.body.addEventListener('touchend', this._resumeHandler, false);
42375
+ document.body.addEventListener('click', this._resumeHandler, false);
42376
+ }
42377
+ unregisterResumeHandler() {
42378
+ const resumeHandler = this._resumeHandler;
42379
+ if (resumeHandler) {
42380
+ document.body.removeEventListener('touchend', resumeHandler, false);
42381
+ document.body.removeEventListener('click', resumeHandler, false);
42382
+ }
42383
+ }
42384
+ play() {
42385
+ const ctx = this._context;
42386
+ this.activate();
42387
+ // create an empty buffer source (silence)
42388
+ this._buffer = ctx.createBuffer(2, AlphaSynthWebAudioOutputBase.BufferSize, ctx.sampleRate);
42389
+ this._source = ctx.createBufferSource();
42390
+ this._source.buffer = this._buffer;
42391
+ this._source.loop = true;
42392
+ }
42393
+ pause() {
42394
+ if (this._source) {
42395
+ this._source.stop(0);
42396
+ this._source.disconnect();
42397
+ }
42398
+ this._source = null;
42399
+ }
42400
+ destroy() {
42401
+ this.pause();
42402
+ this._context?.close();
42403
+ this._context = null;
42404
+ this.unregisterResumeHandler();
42405
+ }
42406
+ onSamplesPlayed(numberOfSamples) {
42407
+ this.samplesPlayed.trigger(numberOfSamples);
42408
+ }
42409
+ onSampleRequest() {
42410
+ this.sampleRequest.trigger();
42411
+ }
42412
+ onReady() {
42413
+ this.ready.trigger();
42414
+ }
42415
+ enumerateOutputDevices() {
42416
+ return WebAudioHelper.enumerateOutputDevices();
42417
+ }
41447
42418
  async setOutputDevice(device) {
41448
- if (!(await this.checkSinkIdSupport())) {
42419
+ if (!(await WebAudioHelper.checkSinkIdSupport())) {
41449
42420
  return;
41450
42421
  }
41451
42422
  // https://developer.mozilla.org/en-US/docs/Web/API/AudioContext/setSinkId
@@ -41457,7 +42428,7 @@
41457
42428
  }
41458
42429
  }
41459
42430
  async getOutputDevice() {
41460
- if (!(await this.checkSinkIdSupport())) {
42431
+ if (!(await WebAudioHelper.checkSinkIdSupport())) {
41461
42432
  return null;
41462
42433
  }
41463
42434
  // https://developer.mozilla.org/en-US/docs/Web/API/AudioContext/sinkId
@@ -41466,7 +42437,7 @@
41466
42437
  return null;
41467
42438
  }
41468
42439
  // fast path -> cached devices list
41469
- let device = this._knownDevices.find(d => d.deviceId === sinkId);
42440
+ let device = WebAudioHelper.findKnownDevice(sinkId);
41470
42441
  if (device) {
41471
42442
  return device;
41472
42443
  }
@@ -41914,7 +42885,7 @@
41914
42885
  case 'alphaSynth.positionChanged':
41915
42886
  this._timePosition = data.currentTime;
41916
42887
  this._tickPosition = data.currentTick;
41917
- this.positionChanged.trigger(new PositionChangedEventArgs(data.currentTime, data.endTime, data.currentTick, data.endTick, data.isSeek));
42888
+ this.positionChanged.trigger(new PositionChangedEventArgs(data.currentTime, data.endTime, data.currentTick, data.endTick, data.isSeek, data.originalTempo, data.modifiedTempo));
41918
42889
  break;
41919
42890
  case 'alphaSynth.midiEventsPlayed':
41920
42891
  this.midiEventsPlayed.trigger(new MidiEventsPlayedEventArgs(data.events.map(JsonConverter.jsObjectToMidiEvent)));
@@ -41938,7 +42909,7 @@
41938
42909
  break;
41939
42910
  case 'alphaSynth.midiLoaded':
41940
42911
  this.checkReadyForPlayback();
41941
- this.midiLoaded.trigger(new PositionChangedEventArgs(data.currentTime, data.endTime, data.currentTick, data.endTick, data.isSeek));
42912
+ this.midiLoaded.trigger(new PositionChangedEventArgs(data.currentTime, data.endTime, data.currentTick, data.endTick, data.isSeek, data.originalTempo, data.modifiedTempo));
41942
42913
  break;
41943
42914
  case 'alphaSynth.midiLoadFailed':
41944
42915
  this.checkReadyForPlayback();
@@ -41988,6 +42959,8 @@
41988
42959
  this._outputIsReady = true;
41989
42960
  this.checkReady();
41990
42961
  }
42962
+ loadBackingTrack(_score) {
42963
+ }
41991
42964
  }
41992
42965
 
41993
42966
  /**
@@ -42355,6 +43328,123 @@
42355
43328
  }
42356
43329
  }
42357
43330
 
43331
/**
 * A synth output implementation which plays a pre-recorded backing track
 * through a hidden HTML <audio> element instead of synthesizing samples.
 *
 * Fixes over the previous version:
 * - destroy() now clears the position-update interval and revokes the blob
 *   object URL (both leaked before when destroying while playing).
 * - play() no longer leaks a second interval when called twice, and the
 *   Promise returned by HTMLMediaElement.play() is no longer silently
 *   dropped (rejections, e.g. autoplay restrictions, are logged).
 * @target web
 */
class AudioElementBackingTrackSynthOutput {
    constructor() {
        // fake rate; actual playback timing is driven by the <audio> element
        this.sampleRate = 44100;
        // leading silence of the backing track, in seconds
        this._padding = 0;
        this._updateInterval = 0;
        this.ready = new EventEmitter();
        this.samplesPlayed = new EventEmitterOfT();
        this.timeUpdate = new EventEmitterOfT();
        this.sampleRequest = new EventEmitter();
    }
    /**
     * The duration of the loaded backing track in milliseconds
     * (0 while metadata is not yet loaded or for endless streams).
     */
    get backingTrackDuration() {
        const duration = this.audioElement.duration ?? 0;
        // duration is NaN before metadata loads and Infinity for streams
        return Number.isFinite(duration) ? duration * 1000 : 0;
    }
    get playbackRate() {
        return this.audioElement.playbackRate;
    }
    set playbackRate(value) {
        this.audioElement.playbackRate = value;
    }
    get masterVolume() {
        return this.audioElement.volume;
    }
    set masterVolume(value) {
        this.audioElement.volume = value;
    }
    /**
     * Seeks the audio element to the given absolute position.
     * @param time the position in milliseconds (including the padding offset).
     */
    seekTo(time) {
        this.audioElement.currentTime = time / 1000 - this._padding;
    }
    /**
     * Loads the given backing track into the audio element,
     * releasing the blob URL of any previously loaded track.
     */
    loadBackingTrack(backingTrack) {
        if (this.audioElement?.src) {
            URL.revokeObjectURL(this.audioElement.src);
        }
        this._padding = backingTrack.padding / 1000;
        const blob = new Blob([backingTrack.rawAudioFile]);
        this.audioElement.src = URL.createObjectURL(blob);
    }
    /**
     * Creates the hidden <audio> element and signals readiness.
     * The buffer time is unused because no sample buffering happens here.
     */
    open(_bufferTimeInMilliseconds) {
        const audioElement = document.createElement('audio');
        audioElement.style.display = 'none';
        document.body.appendChild(audioElement);
        audioElement.addEventListener('timeupdate', () => {
            this.updatePosition();
        });
        this.audioElement = audioElement;
        this.ready.trigger();
    }
    // Publishes the current playback position (in ms, padding included).
    updatePosition() {
        const timePos = (this.audioElement.currentTime + this._padding) * 1000;
        this.timeUpdate.trigger(timePos);
    }
    play() {
        // HTMLMediaElement.play() returns a Promise in modern browsers;
        // report rejections (e.g. autoplay blocked) instead of dropping them.
        const playResult = this.audioElement.play();
        playResult?.catch?.(e => {
            Logger.warning('BackingTrack', 'Could not start audio playback', e);
        });
        // avoid leaking a second timer if play() is called while already playing
        window.clearInterval(this._updateInterval);
        this._updateInterval = window.setInterval(() => {
            this.updatePosition();
        }, 50);
    }
    destroy() {
        // stop position updates and release all held resources
        window.clearInterval(this._updateInterval);
        const audioElement = this.audioElement;
        if (audioElement) {
            if (audioElement.src) {
                URL.revokeObjectURL(audioElement.src);
            }
            document.body.removeChild(audioElement);
        }
    }
    pause() {
        this.audioElement.pause();
        window.clearInterval(this._updateInterval);
    }
    addSamples(_samples) {
        // not used: audio comes from the <audio> element, not from samples
    }
    resetSamples() {
        // not used: no sample buffer exists in this output
    }
    activate() {
        // not needed: playback starts via the user-gesture driven play()
    }
    async enumerateOutputDevices() {
        return WebAudioHelper.enumerateOutputDevices();
    }
    /**
     * Routes the audio element to the given output device
     * (or back to the default device when null/undefined).
     */
    async setOutputDevice(device) {
        if (!(await WebAudioHelper.checkSinkIdSupport())) {
            return;
        }
        // https://developer.mozilla.org/en-US/docs/Web/API/AudioContext/setSinkId
        if (!device) {
            await this.audioElement.setSinkId('');
        }
        else {
            await this.audioElement.setSinkId(device.deviceId);
        }
    }
    /**
     * Resolves the currently selected output device, or null when the default
     * device is in use or sinkId support is missing.
     */
    async getOutputDevice() {
        if (!(await WebAudioHelper.checkSinkIdSupport())) {
            return null;
        }
        // https://developer.mozilla.org/en-US/docs/Web/API/AudioContext/sinkId
        const sinkId = this.audioElement.sinkId;
        if (typeof sinkId !== 'string' || sinkId === '' || sinkId === 'default') {
            return null;
        }
        // fast path -> cached devices list
        let device = WebAudioHelper.findKnownDevice(sinkId);
        if (device) {
            return device;
        }
        // slow path -> enumerate devices
        const allDevices = await this.enumerateOutputDevices();
        device = allDevices.find(d => d.deviceId === sinkId);
        if (device) {
            return device;
        }
        Logger.warning('WebAudio', 'Could not find output device in device list', sinkId, allDevices);
        return null;
    }
}
43447
+
42358
43448
  /**
42359
43449
  * @target web
42360
43450
  */
@@ -42991,6 +44081,9 @@
42991
44081
  window.requestAnimationFrame(step);
42992
44082
  }
42993
44083
  }
44084
+ createBackingTrackPlayer() {
44085
+ return new BackingTrackPlayer(new AudioElementBackingTrackSynthOutput(), this._api.settings.player.bufferTimeInMilliseconds);
44086
+ }
42994
44087
  }
42995
44088
 
42996
44089
  /**
@@ -43133,7 +44226,7 @@
43133
44226
  settings.core.file = null;
43134
44227
  settings.core.tracks = null;
43135
44228
  settings.player.enableCursor = false;
43136
- settings.player.enablePlayer = false;
44229
+ settings.player.playerMode = exports.PlayerMode.Disabled;
43137
44230
  settings.player.enableElementHighlighting = false;
43138
44231
  settings.player.enableUserInteraction = false;
43139
44232
  settings.player.soundFont = null;
@@ -57094,96 +58187,6 @@
57094
58187
  }
57095
58188
  }
57096
58189
 
57097
- /**
57098
- * A very basic polyfill of the ResizeObserver which triggers
57099
- * a the callback on window resize for all registered targets.
57100
- * @target web
57101
- */
57102
- class ResizeObserverPolyfill {
57103
- constructor(callback) {
57104
- this._targets = new Set();
57105
- this._callback = callback;
57106
- window.addEventListener('resize', this.onWindowResize.bind(this), false);
57107
- }
57108
- observe(target) {
57109
- this._targets.add(target);
57110
- }
57111
- unobserve(target) {
57112
- this._targets.delete(target);
57113
- }
57114
- disconnect() {
57115
- this._targets.clear();
57116
- }
57117
- onWindowResize() {
57118
- const entries = [];
57119
- for (const t of this._targets) {
57120
- entries.push({
57121
- target: t,
57122
- // not used by alphaTab
57123
- contentRect: undefined,
57124
- borderBoxSize: undefined,
57125
- contentBoxSize: [],
57126
- devicePixelContentBoxSize: []
57127
- });
57128
- }
57129
- this._callback(entries, this);
57130
- }
57131
- }
57132
-
57133
- /**
57134
- * A polyfill of the InsersectionObserver
57135
- * @target web
57136
- */
57137
- class IntersectionObserverPolyfill {
57138
- constructor(callback) {
57139
- this._elements = [];
57140
- let timer = null;
57141
- const oldCheck = this.check.bind(this);
57142
- this.check = () => {
57143
- if (!timer) {
57144
- timer = setTimeout(() => {
57145
- oldCheck();
57146
- timer = null;
57147
- }, 100);
57148
- }
57149
- };
57150
- this._callback = callback;
57151
- window.addEventListener('resize', this.check, true);
57152
- document.addEventListener('scroll', this.check, true);
57153
- }
57154
- observe(target) {
57155
- if (this._elements.indexOf(target) >= 0) {
57156
- return;
57157
- }
57158
- this._elements.push(target);
57159
- this.check();
57160
- }
57161
- unobserve(target) {
57162
- this._elements = this._elements.filter(item => {
57163
- return item !== target;
57164
- });
57165
- }
57166
- check() {
57167
- const entries = [];
57168
- for (const element of this._elements) {
57169
- const rect = element.getBoundingClientRect();
57170
- const isVisible = rect.top + rect.height >= 0 &&
57171
- rect.top <= window.innerHeight &&
57172
- rect.left + rect.width >= 0 &&
57173
- rect.left <= window.innerWidth;
57174
- if (isVisible) {
57175
- entries.push({
57176
- target: element,
57177
- isIntersecting: true
57178
- });
57179
- }
57180
- }
57181
- if (entries.length) {
57182
- this._callback(entries, this);
57183
- }
57184
- }
57185
- }
57186
-
57187
58190
  /******************************************************************************
57188
58191
  Copyright (c) Microsoft Corporation.
57189
58192
 
@@ -59299,9 +60302,9 @@
59299
60302
  print(`build date: ${VersionInfo.date}`);
59300
60303
  }
59301
60304
  }
59302
- VersionInfo.version = '1.6.0-alpha.1399';
59303
- VersionInfo.date = '2025-05-06T02:05:53.707Z';
59304
- VersionInfo.commit = 'b35f0f77c217a628a19f8346512c00d14c9ee778';
60305
+ VersionInfo.version = '1.6.0-alpha.1403';
60306
+ VersionInfo.date = '2025-05-09T02:06:22.101Z';
60307
+ VersionInfo.commit = '3644a11f557063573413de459c607a1f9c302a6a';
59305
60308
 
59306
60309
  /**
59307
60310
  * A factory for custom layout engines.
@@ -59772,29 +60775,6 @@
59772
60775
  if (Environment.webPlatform === exports.WebPlatform.Browser || Environment.webPlatform === exports.WebPlatform.BrowserModule) {
59773
60776
  Environment.registerJQueryPlugin();
59774
60777
  Environment.HighDpiFactor = window.devicePixelRatio;
59775
- // ResizeObserver API does not yet exist so long on Safari (only start 2020 with iOS Safari 13.7 and Desktop 13.1)
59776
- // so we better add a polyfill for it
59777
- if (!('ResizeObserver' in Environment.globalThis)) {
59778
- Environment.globalThis.ResizeObserver = ResizeObserverPolyfill;
59779
- }
59780
- // IntersectionObserver API does not on older iOS versions
59781
- // so we better add a polyfill for it
59782
- if (!('IntersectionObserver' in Environment.globalThis)) {
59783
- Environment.globalThis.IntersectionObserver = IntersectionObserverPolyfill;
59784
- }
59785
- if (!('replaceChildren' in Element.prototype)) {
59786
- Element.prototype.replaceChildren = function (...nodes) {
59787
- this.innerHTML = '';
59788
- this.append(...nodes);
59789
- };
59790
- Document.prototype.replaceChildren = Element.prototype.replaceChildren;
59791
- DocumentFragment.prototype.replaceChildren = Element.prototype.replaceChildren;
59792
- }
59793
- if (!('replaceAll' in String.prototype)) {
59794
- String.prototype.replaceAll = function (str, newStr) {
59795
- return this.replace(new RegExp(str, 'g'), newStr);
59796
- };
59797
- }
59798
60778
  }
59799
60779
  Environment.createWebWorker = createWebWorker;
59800
60780
  Environment.createAudioWorklet = createAudioWorklet;
@@ -63526,6 +64506,7 @@
63526
64506
  get AccidentalType () { return AccidentalType; },
63527
64507
  Automation,
63528
64508
  get AutomationType () { return AutomationType; },
64509
+ BackingTrack,
63529
64510
  Bar,
63530
64511
  get BarLineStyle () { return BarLineStyle; },
63531
64512
  BarStyle,
@@ -63588,6 +64569,7 @@
63588
64569
  Staff,
63589
64570
  SustainPedalMarker,
63590
64571
  get SustainPedalMarkerType () { return SustainPedalMarkerType; },
64572
+ SyncPointData,
63591
64573
  Track,
63592
64574
  get TrackNameMode () { return TrackNameMode; },
63593
64575
  get TrackNameOrientation () { return TrackNameOrientation; },