@coderline/alphatab 1.6.0-alpha.1401 → 1.6.0-alpha.1405

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/alphaTab.js CHANGED
@@ -1,5 +1,5 @@
1
1
  /*!
2
- * alphaTab v1.6.0-alpha.1401 (develop, build 1401)
2
+ * alphaTab v1.6.0-alpha.1405 (develop, build 1405)
3
3
  *
4
4
  * Copyright © 2025, Daniel Kuschny and Contributors, All rights reserved.
5
5
  *
@@ -55,7 +55,127 @@
55
55
  (global = typeof globalThis !== 'undefined' ? globalThis : global || self, factory(global.alphaTab = {}));
56
56
  })(this, (function (exports) { 'use strict';
57
57
 
58
- if(typeof Symbol.dispose==='undefined'){Symbol.dispose = Symbol('Symbol.dispose')}
58
+ /**
59
+ * A very basic polyfill of the ResizeObserver which triggers
60
+ * the callback on window resize for all registered targets.
61
+ * @target web
62
+ */
63
+ class ResizeObserverPolyfill {
64
+ constructor(callback) {
65
+ this._targets = new Set();
66
+ this._callback = callback;
67
+ window.addEventListener('resize', this.onWindowResize.bind(this), false);
68
+ }
69
+ observe(target) {
70
+ this._targets.add(target);
71
+ }
72
+ unobserve(target) {
73
+ this._targets.delete(target);
74
+ }
75
+ disconnect() {
76
+ this._targets.clear();
77
+ }
78
+ onWindowResize() {
79
+ const entries = [];
80
+ for (const t of this._targets) {
81
+ entries.push({
82
+ target: t,
83
+ // not used by alphaTab
84
+ contentRect: undefined,
85
+ borderBoxSize: undefined,
86
+ contentBoxSize: [],
87
+ devicePixelContentBoxSize: []
88
+ });
89
+ }
90
+ this._callback(entries, this);
91
+ }
92
+ }
93
+
94
+ /**
95
+ * A polyfill of the IntersectionObserver
96
+ * @target web
97
+ */
98
+ class IntersectionObserverPolyfill {
99
+ constructor(callback) {
100
+ this._elements = [];
101
+ let timer = null;
102
+ const oldCheck = this.check.bind(this);
103
+ this.check = () => {
104
+ if (!timer) {
105
+ timer = setTimeout(() => {
106
+ oldCheck();
107
+ timer = null;
108
+ }, 100);
109
+ }
110
+ };
111
+ this._callback = callback;
112
+ window.addEventListener('resize', this.check, true);
113
+ document.addEventListener('scroll', this.check, true);
114
+ }
115
+ observe(target) {
116
+ if (this._elements.indexOf(target) >= 0) {
117
+ return;
118
+ }
119
+ this._elements.push(target);
120
+ this.check();
121
+ }
122
+ unobserve(target) {
123
+ this._elements = this._elements.filter(item => {
124
+ return item !== target;
125
+ });
126
+ }
127
+ check() {
128
+ const entries = [];
129
+ for (const element of this._elements) {
130
+ const rect = element.getBoundingClientRect();
131
+ const isVisible = rect.top + rect.height >= 0 &&
132
+ rect.top <= window.innerHeight &&
133
+ rect.left + rect.width >= 0 &&
134
+ rect.left <= window.innerWidth;
135
+ if (isVisible) {
136
+ entries.push({
137
+ target: element,
138
+ isIntersecting: true
139
+ });
140
+ }
141
+ }
142
+ if (entries.length) {
143
+ this._callback(entries, this);
144
+ }
145
+ }
146
+ }
147
+
148
+ /*@target web*/
149
+ (() => {
150
+ if (typeof Symbol.dispose === 'undefined') {
151
+ Symbol.dispose = Symbol('Symbol.dispose');
152
+ }
153
+ if (typeof window !== 'undefined') {
154
+ // ResizeObserver API was missing for a long time on Safari (only available since 2020, with iOS Safari 13.7 and Desktop 13.1)
155
+ // so we better add a polyfill for it
156
+ if (!('ResizeObserver' in globalThis)) {
157
+ globalThis.ResizeObserver = ResizeObserverPolyfill;
158
+ }
159
+ // IntersectionObserver API does not exist on older iOS versions
160
+ // so we better add a polyfill for it
161
+ if (!('IntersectionObserver' in globalThis)) {
162
+ globalThis.IntersectionObserver = IntersectionObserverPolyfill;
163
+ }
164
+ if (!('replaceChildren' in Element.prototype)) {
165
+ Element.prototype.replaceChildren = function (...nodes) {
166
+ this.innerHTML = '';
167
+ this.append(...nodes);
168
+ };
169
+ Document.prototype.replaceChildren = Element.prototype.replaceChildren;
170
+ DocumentFragment.prototype.replaceChildren = Element.prototype.replaceChildren;
171
+ }
172
+ }
173
+ if (!('replaceAll' in String.prototype)) {
174
+ String.prototype.replaceAll = function (str, newStr) {
175
+ return this.replace(new RegExp(str, 'g'), newStr);
176
+ };
177
+ }
178
+ })();
59
179
 
60
180
  /**
61
181
  * Lists all layout modes that are supported.
@@ -1279,7 +1399,37 @@
1279
1399
  * Balance change.
1280
1400
  */
1281
1401
  AutomationType[AutomationType["Balance"] = 3] = "Balance";
1402
+ /**
1403
+ * A sync point for synchronizing the internal time axis with an external audio track.
1404
+ */
1405
+ AutomationType[AutomationType["SyncPoint"] = 4] = "SyncPoint";
1282
1406
  })(AutomationType || (AutomationType = {}));
1407
+ /**
1408
+ * Represents the data of a sync point for synchronizing the internal time axis with
1409
+ * an external audio file.
1410
+ * @cloneable
1411
+ * @json
1412
+ * @json_strict
1413
+ */
1414
+ class SyncPointData {
1415
+ constructor() {
1416
+ /**
1417
+ * Indicates for which repeat occurrence this sync point is valid (e.g. 0 on the first time played, 1 on the second time played)
1418
+ */
1419
+ this.barOccurence = 0;
1420
+ /**
1421
+ * The modified tempo at which the cursor should move (aka. the tempo played within the external audio track).
1422
+ * This information is used together with the {@link originalTempo} to calculate how much faster/slower the
1423
+ * cursor playback is performed to align with the audio track.
1424
+ */
1425
+ this.modifiedTempo = 0;
1426
+ /**
1427
+ * The audio offset marking the position within the audio track in milliseconds.
1428
+ * This information is used to regularly sync (or on seeking) to match a given external audio time axis with the internal time axis.
1429
+ */
1430
+ this.millisecondOffset = 0;
1431
+ }
1432
+ }
1283
1433
  /**
1284
1434
  * Automations are used to change the behaviour of a song.
1285
1435
  * @cloneable
@@ -2578,6 +2728,16 @@
2578
2728
  }
2579
2729
  return null;
2580
2730
  }
2731
+ /**
2732
+ * Adds the given sync point to the list of sync points for this bar.
2733
+ * @param syncPoint The sync point to add.
2734
+ */
2735
+ addSyncPoint(syncPoint) {
2736
+ if (!this.syncPoints) {
2737
+ this.syncPoints = [];
2738
+ }
2739
+ this.syncPoints.push(syncPoint);
2740
+ }
2581
2741
  }
2582
2742
  MasterBar.MaxAlternateEndings = 8;
2583
2743
 
@@ -5672,6 +5832,21 @@
5672
5832
  }
5673
5833
  }
5674
5834
 
5835
+ // <auto-generated>
5836
+ // This code was auto-generated.
5837
+ // Changes to this file may cause incorrect behavior and will be lost if
5838
+ // the code is regenerated.
5839
+ // </auto-generated>
5840
+ class SyncPointDataCloner {
5841
+ static clone(original) {
5842
+ const clone = new SyncPointData();
5843
+ clone.barOccurence = original.barOccurence;
5844
+ clone.modifiedTempo = original.modifiedTempo;
5845
+ clone.millisecondOffset = original.millisecondOffset;
5846
+ return clone;
5847
+ }
5848
+ }
5849
+
5675
5850
  // <auto-generated>
5676
5851
  // This code was auto-generated.
5677
5852
  // Changes to this file may cause incorrect behavior and will be lost if
@@ -5683,6 +5858,7 @@
5683
5858
  clone.isLinear = original.isLinear;
5684
5859
  clone.type = original.type;
5685
5860
  clone.value = original.value;
5861
+ clone.syncPointValue = original.syncPointValue ? SyncPointDataCloner.clone(original.syncPointValue) : undefined;
5686
5862
  clone.ratioPosition = original.ratioPosition;
5687
5863
  clone.text = original.text;
5688
5864
  return clone;
@@ -14128,6 +14304,21 @@
14128
14304
  }
14129
14305
  }
14130
14306
 
14307
+ /**
14308
+ * Holds information about the backing track which can be played instead of synthesized audio.
14309
+ * @json
14310
+ * @json_strict
14311
+ */
14312
+ class BackingTrack {
14313
+ constructor() {
14314
+ /**
14315
+ * The number of milliseconds the audio should be shifted to align with the song.
14316
+ * (e.g. negative values allow skipping potential silent parts at the start of the file and directly start with the first note).
14317
+ */
14318
+ this.padding = 0;
14319
+ }
14320
+ }
14321
+
14131
14322
  /**
14132
14323
  * This structure represents a duration within a gpif
14133
14324
  */
@@ -14220,6 +14411,9 @@
14220
14411
  case 'MasterTrack':
14221
14412
  this.parseMasterTrackNode(n);
14222
14413
  break;
14414
+ case 'BackingTrack':
14415
+ this.parseBackingTrackNode(n);
14416
+ break;
14223
14417
  case 'Tracks':
14224
14418
  this.parseTracksNode(n);
14225
14419
  break;
@@ -14241,6 +14435,9 @@
14241
14435
  case 'Rhythms':
14242
14436
  this.parseRhythms(n);
14243
14437
  break;
14438
+ case 'Assets':
14439
+ this.parseAssets(n);
14440
+ break;
14244
14441
  }
14245
14442
  }
14246
14443
  }
@@ -14248,6 +14445,37 @@
14248
14445
  throw new UnsupportedFormatError('Root node of XML was not GPIF');
14249
14446
  }
14250
14447
  }
14448
+ parseAssets(element) {
14449
+ for (const c of element.childElements()) {
14450
+ switch (c.localName) {
14451
+ case 'Asset':
14452
+ if (c.getAttribute('id') === this._backingTrackAssetId) {
14453
+ this.parseBackingTrackAsset(c);
14454
+ }
14455
+ break;
14456
+ }
14457
+ }
14458
+ }
14459
+ parseBackingTrackAsset(element) {
14460
+ let embeddedFilePath = '';
14461
+ for (const c of element.childElements()) {
14462
+ switch (c.localName) {
14463
+ case 'EmbeddedFilePath':
14464
+ embeddedFilePath = c.innerText;
14465
+ break;
14466
+ }
14467
+ }
14468
+ const loadAsset = this.loadAsset;
14469
+ if (loadAsset) {
14470
+ const assetData = loadAsset(embeddedFilePath);
14471
+ if (assetData) {
14472
+ this.score.backingTrack.rawAudioFile = assetData;
14473
+ }
14474
+ else {
14475
+ this.score.backingTrack = undefined;
14476
+ }
14477
+ }
14478
+ }
14251
14479
  //
14252
14480
  // <Score>...</Score>
14253
14481
  //
@@ -14328,7 +14556,41 @@
14328
14556
  if (!text) {
14329
14557
  return [];
14330
14558
  }
14331
- return text.split(separator).map(t => t.trim()).filter(t => t.length > 0);
14559
+ return text
14560
+ .split(separator)
14561
+ .map(t => t.trim())
14562
+ .filter(t => t.length > 0);
14563
+ }
14564
+ //
14565
+ // <BackingTrack>...</BackingTrack>
14566
+ //
14567
+ parseBackingTrackNode(node) {
14568
+ const backingTrack = new BackingTrack();
14569
+ let enabled = false;
14570
+ let source = '';
14571
+ let assetId = '';
14572
+ for (const c of node.childElements()) {
14573
+ switch (c.localName) {
14574
+ case 'Enabled':
14575
+ enabled = c.innerText === 'true';
14576
+ break;
14577
+ case 'Source':
14578
+ source = c.innerText;
14579
+ break;
14580
+ case 'AssetId':
14581
+ assetId = c.innerText;
14582
+ break;
14583
+ case 'FramePadding':
14584
+ backingTrack.padding = GpifParser.parseIntSafe(c.innerText, 0) / GpifParser.SampleRate * 1000;
14585
+ break;
14586
+ }
14587
+ }
14588
+ // only local (contained) backing tracks are supported
14589
+ // remote / youtube links seem to come in future releases according to the gpif tags.
14590
+ if (enabled && source === 'Local') {
14591
+ this.score.backingTrack = backingTrack;
14592
+ this._backingTrackAssetId = assetId; // when the Asset tag is parsed this ID is used to load the raw data
14593
+ }
14332
14594
  }
14333
14595
  //
14334
14596
  // <MasterTrack>...</MasterTrack>
@@ -14366,6 +14628,7 @@
14366
14628
  let textValue = null;
14367
14629
  let reference = 0;
14368
14630
  let text = null;
14631
+ let syncPointValue = undefined;
14369
14632
  for (const c of node.childElements()) {
14370
14633
  switch (c.localName) {
14371
14634
  case 'Type':
@@ -14384,6 +14647,28 @@
14384
14647
  if (c.firstElement && c.firstElement.nodeType === XmlNodeType.CDATA) {
14385
14648
  textValue = c.innerText;
14386
14649
  }
14650
+ else if (c.firstElement &&
14651
+ c.firstElement.nodeType === XmlNodeType.Element &&
14652
+ type === 'SyncPoint') {
14653
+ syncPointValue = new SyncPointData();
14654
+ for (const vc of c.childElements()) {
14655
+ switch (vc.localName) {
14656
+ case 'BarIndex':
14657
+ barIndex = GpifParser.parseIntSafe(vc.innerText, 0);
14658
+ break;
14659
+ case 'BarOccurrence':
14660
+ syncPointValue.barOccurence = GpifParser.parseIntSafe(vc.innerText, 0);
14661
+ break;
14662
+ case 'ModifiedTempo':
14663
+ syncPointValue.modifiedTempo = GpifParser.parseFloatSafe(vc.innerText, 0);
14664
+ break;
14665
+ case 'FrameOffset':
14666
+ const frameOffset = GpifParser.parseFloatSafe(vc.innerText, 0);
14667
+ syncPointValue.millisecondOffset = (frameOffset / GpifParser.SampleRate) * 1000;
14668
+ break;
14669
+ }
14670
+ }
14671
+ }
14387
14672
  else {
14388
14673
  const parts = GpifParser.splitSafe(c.innerText);
14389
14674
  // Issue 391: Some GPX files might have
@@ -14411,6 +14696,13 @@
14411
14696
  case 'Tempo':
14412
14697
  automation = Automation.buildTempoAutomation(isLinear, ratioPosition, numberValue, reference);
14413
14698
  break;
14699
+ case 'SyncPoint':
14700
+ automation = new Automation();
14701
+ automation.type = AutomationType.SyncPoint;
14702
+ automation.isLinear = isLinear;
14703
+ automation.ratioPosition = ratioPosition;
14704
+ automation.syncPointValue = syncPointValue;
14705
+ break;
14414
14706
  case 'Sound':
14415
14707
  if (textValue && sounds && sounds.has(textValue)) {
14416
14708
  automation = Automation.buildInstrumentAutomation(isLinear, ratioPosition, sounds.get(textValue).program);
@@ -16492,14 +16784,19 @@
16492
16784
  const masterBar = this.score.masterBars[barNumber];
16493
16785
  for (let i = 0, j = automations.length; i < j; i++) {
16494
16786
  const automation = automations[i];
16495
- if (automation.type === AutomationType.Tempo) {
16496
- if (barNumber === 0) {
16497
- this.score.tempo = automation.value | 0;
16498
- if (automation.text) {
16499
- this.score.tempoLabel = automation.text;
16787
+ switch (automation.type) {
16788
+ case AutomationType.Tempo:
16789
+ if (barNumber === 0) {
16790
+ this.score.tempo = automation.value | 0;
16791
+ if (automation.text) {
16792
+ this.score.tempoLabel = automation.text;
16793
+ }
16500
16794
  }
16501
- }
16502
- masterBar.tempoAutomations.push(automation);
16795
+ masterBar.tempoAutomations.push(automation);
16796
+ break;
16797
+ case AutomationType.SyncPoint:
16798
+ masterBar.addSyncPoint(automation);
16799
+ break;
16503
16800
  }
16504
16801
  }
16505
16802
  }
@@ -16516,6 +16813,10 @@
16516
16813
  * Internal Range: 1 per quarter note
16517
16814
  */
16518
16815
  GpifParser.BendPointValueFactor = 1 / 25.0;
16816
+ // tests have shown that Guitar Pro seems to always work with 44100hz for the frame offsets,
16817
+ // they are NOT using the sample rate of the input file.
16818
+ // Downsampling a 44100hz ogg to 8000hz and using it as audio track resulted in the same frame offset when placing sync points.
16819
+ GpifParser.SampleRate = 44100;
16519
16820
 
16520
16821
  // PartConfiguration File Format Notes.
16521
16822
  // Based off Guitar Pro 8
@@ -17382,7 +17683,9 @@
17382
17683
  let binaryStylesheetData = null;
17383
17684
  let partConfigurationData = null;
17384
17685
  let layoutConfigurationData = null;
17686
+ const entryLookup = new Map();
17385
17687
  for (const entry of entries) {
17688
+ entryLookup.set(entry.fullName, entry);
17386
17689
  switch (entry.fileName) {
17387
17690
  case 'score.gpif':
17388
17691
  xml = IOHelper.toString(entry.data, this.settings.importer.encoding);
@@ -17405,6 +17708,12 @@
17405
17708
  // the score information as XML we need to parse.
17406
17709
  Logger.debug(this.name, 'Start Parsing score.gpif');
17407
17710
  const gpifParser = new GpifParser();
17711
+ gpifParser.loadAsset = (fileName) => {
17712
+ if (entryLookup.has(fileName)) {
17713
+ return entryLookup.get(fileName).data;
17714
+ }
17715
+ return undefined;
17716
+ };
17408
17717
  gpifParser.parseXml(xml, this.settings);
17409
17718
  Logger.debug(this.name, 'score.gpif parsed');
17410
17719
  const score = gpifParser.score;
@@ -21870,8 +22179,24 @@
21870
22179
  * Represents a change of the tempo in the song.
21871
22180
  */
21872
22181
  class TempoChangeEvent extends MidiEvent {
22182
+ /**
22183
+ * The tempo in microseconds per quarter note (aka USQ). A time format typically used for MIDI.
22184
+ */
22185
+ get microSecondsPerQuarterNote() {
22186
+ return 60000000 / this.beatsPerMinute;
22187
+ }
22188
+ /**
22189
+ * The tempo in microseconds per quarter note (aka USQ). A time format typically used for MIDI.
22190
+ */
22191
+ set microSecondsPerQuarterNote(value) {
22192
+ this.beatsPerMinute = 60000000 / value;
22193
+ }
21873
22194
  constructor(tick, microSecondsPerQuarterNote) {
21874
22195
  super(0, tick, MidiEventType.TempoChange);
22196
+ /**
22197
+ * The tempo in beats per minute
22198
+ */
22199
+ this.beatsPerMinute = 0;
21875
22200
  this.microSecondsPerQuarterNote = microSecondsPerQuarterNote;
21876
22201
  }
21877
22202
  writeTo(s) {
@@ -21954,6 +22279,17 @@
21954
22279
  }
21955
22280
  }
21956
22281
 
22282
+ /**
22283
+ * Rerpresents a point to sync the alphaTab time axis with an external backing track.
22284
+ */
22285
+ class BackingTrackSyncPoint {
22286
+ constructor(tick, data) {
22287
+ this.tick = 0;
22288
+ this.tick = tick;
22289
+ this.data = data;
22290
+ }
22291
+ }
22292
+
21957
22293
  class MidiFileSequencerTempoChange {
21958
22294
  constructor(bpm, ticks, time) {
21959
22295
  this.bpm = bpm;
@@ -21961,9 +22297,17 @@
21961
22297
  this.time = time;
21962
22298
  }
21963
22299
  }
22300
+ class BackingTrackSyncPointWithTime extends BackingTrackSyncPoint {
22301
+ constructor(tick, data, time) {
22302
+ super(tick, data);
22303
+ this.time = time;
22304
+ }
22305
+ }
21964
22306
  class MidiSequencerState {
21965
22307
  constructor() {
21966
22308
  this.tempoChanges = [];
22309
+ this.tempoChangeIndex = 0;
22310
+ this.syncPoints = [];
21967
22311
  this.firstProgramEventPerChannel = new Map();
21968
22312
  this.firstTimeSignatureNumerator = 0;
21969
22313
  this.firstTimeSignatureDenominator = 0;
@@ -21971,11 +22315,15 @@
21971
22315
  this.division = MidiUtils.QuarterTime;
21972
22316
  this.eventIndex = 0;
21973
22317
  this.currentTime = 0;
22318
+ this.currentTick = 0;
22319
+ this.syncPointIndex = 0;
21974
22320
  this.playbackRange = null;
21975
22321
  this.playbackRangeStartTime = 0;
21976
22322
  this.playbackRangeEndTime = 0;
21977
22323
  this.endTick = 0;
21978
22324
  this.endTime = 0;
22325
+ this.currentTempo = 0;
22326
+ this.modifiedTempo = 0;
21979
22327
  }
21980
22328
  }
21981
22329
  /**
@@ -22028,6 +22376,12 @@
22028
22376
  get currentEndTime() {
22029
22377
  return this._currentState.endTime / this.playbackSpeed;
22030
22378
  }
22379
+ get currentTempo() {
22380
+ return this._currentState.currentTempo;
22381
+ }
22382
+ get modifiedTempo() {
22383
+ return this._currentState.modifiedTempo * this.playbackSpeed;
22384
+ }
22031
22385
  mainSeek(timePosition) {
22032
22386
  // map to speed=1
22033
22387
  timePosition *= this.playbackSpeed;
@@ -22047,6 +22401,8 @@
22047
22401
  // we have to restart the midi to make sure we get the right state: instruments, volume, pan, etc
22048
22402
  this._mainState.currentTime = 0;
22049
22403
  this._mainState.eventIndex = 0;
22404
+ this._mainState.syncPointIndex = 0;
22405
+ this._mainState.tempoChangeIndex = 0;
22050
22406
  if (this.isPlayingMain) {
22051
22407
  const metronomeVolume = this._synthesizer.metronomeVolume;
22052
22408
  this._synthesizer.noteOffAll(true);
@@ -22121,7 +22477,7 @@
22121
22477
  }
22122
22478
  if (mEvent.type === MidiEventType.TempoChange) {
22123
22479
  const meta = mEvent;
22124
- bpm = 60000000 / meta.microSecondsPerQuarterNote;
22480
+ bpm = meta.beatsPerMinute;
22125
22481
  state.tempoChanges.push(new MidiFileSequencerTempoChange(bpm, absTick, absTime));
22126
22482
  metronomeLengthInMillis = metronomeLengthInTicks * (60000.0 / (bpm * midiFile.division));
22127
22483
  }
@@ -22155,6 +22511,8 @@
22155
22511
  }
22156
22512
  }
22157
22513
  }
22514
+ state.currentTempo = state.tempoChanges.length > 0 ? state.tempoChanges[0].bpm : bpm;
22515
+ state.modifiedTempo = state.currentTempo;
22158
22516
  state.synthData.sort((a, b) => {
22159
22517
  if (a.time > b.time) {
22160
22518
  return 1;
@@ -22171,6 +22529,35 @@
22171
22529
  fillMidiEventQueue() {
22172
22530
  return this.fillMidiEventQueueLimited(-1);
22173
22531
  }
22532
+ fillMidiEventQueueToEndTime(endTime) {
22533
+ while (this._mainState.currentTime < endTime) {
22534
+ if (this.fillMidiEventQueueLimited(endTime - this._mainState.currentTime)) {
22535
+ this._synthesizer.synthesizeSilent(SynthConstants.MicroBufferSize);
22536
+ }
22537
+ }
22538
+ let anyEventsDispatched = false;
22539
+ this._currentState.currentTime = endTime;
22540
+ while (this._currentState.eventIndex < this._currentState.synthData.length &&
22541
+ this._currentState.synthData[this._currentState.eventIndex].time < this._currentState.currentTime) {
22542
+ const synthEvent = this._currentState.synthData[this._currentState.eventIndex];
22543
+ this._synthesizer.dispatchEvent(synthEvent);
22544
+ while (this._currentState.syncPointIndex < this._currentState.syncPoints.length &&
22545
+ this._currentState.syncPoints[this._currentState.syncPointIndex].tick < synthEvent.event.tick) {
22546
+ this._currentState.modifiedTempo =
22547
+ this._currentState.syncPoints[this._currentState.syncPointIndex].data.modifiedTempo;
22548
+ this._currentState.syncPointIndex++;
22549
+ }
22550
+ while (this._currentState.tempoChangeIndex < this._currentState.tempoChanges.length &&
22551
+ this._currentState.tempoChanges[this._currentState.tempoChangeIndex].time <= synthEvent.time) {
22552
+ this._currentState.currentTempo =
22553
+ this._currentState.tempoChanges[this._currentState.tempoChangeIndex].bpm;
22554
+ this._currentState.tempoChangeIndex++;
22555
+ }
22556
+ this._currentState.eventIndex++;
22557
+ anyEventsDispatched = true;
22558
+ }
22559
+ return anyEventsDispatched;
22560
+ }
22174
22561
  fillMidiEventQueueLimited(maxMilliseconds) {
22175
22562
  let millisecondsPerBuffer = (SynthConstants.MicroBufferSize / this._synthesizer.outSampleRate) * 1000 * this.playbackSpeed;
22176
22563
  let endTime = this.internalEndTime;
@@ -22198,9 +22585,87 @@
22198
22585
  mainTimePositionToTickPosition(timePosition) {
22199
22586
  return this.timePositionToTickPositionWithSpeed(this._mainState, timePosition, this.playbackSpeed);
22200
22587
  }
22588
+ mainUpdateSyncPoints(syncPoints) {
22589
+ const state = this._mainState;
22590
+ syncPoints.sort((a, b) => a.tick - b.tick); // just in case
22591
+ state.syncPoints = new Array(syncPoints.length);
22592
+ if (syncPoints.length >= 0) {
22593
+ let bpm = 120;
22594
+ let absTick = 0;
22595
+ let absTime = 0.0;
22596
+ let previousTick = 0;
22597
+ let tempoChangeIndex = 0;
22598
+ for (let i = 0; i < syncPoints.length; i++) {
22599
+ const p = syncPoints[i];
22600
+ const deltaTick = p.tick - previousTick;
22601
+ absTick += deltaTick;
22602
+ absTime += deltaTick * (60000.0 / (bpm * state.division));
22603
+ state.syncPoints[i] = new BackingTrackSyncPointWithTime(p.tick, p.data, absTime);
22604
+ previousTick = p.tick;
22605
+ while (tempoChangeIndex < state.tempoChanges.length &&
22606
+ state.tempoChanges[tempoChangeIndex].ticks <= absTick) {
22607
+ bpm = state.tempoChanges[tempoChangeIndex].bpm;
22608
+ tempoChangeIndex++;
22609
+ }
22610
+ }
22611
+ }
22612
+ state.syncPointIndex = 0;
22613
+ }
22201
22614
  currentTimePositionToTickPosition(timePosition) {
22202
22615
  return this.timePositionToTickPositionWithSpeed(this._currentState, timePosition, this.playbackSpeed);
22203
22616
  }
22617
+ mainTimePositionFromBackingTrack(timePosition, backingTrackLength) {
22618
+ const mainState = this._mainState;
22619
+ const syncPoints = mainState.syncPoints;
22620
+ if (timePosition < 0 || syncPoints.length === 0) {
22621
+ return timePosition;
22622
+ }
22623
+ let syncPointIndex = timePosition >= syncPoints[mainState.syncPointIndex].data.millisecondOffset ? mainState.syncPointIndex : 0;
22624
+ while (syncPointIndex + 1 < syncPoints.length &&
22625
+ syncPoints[syncPointIndex + 1].data.millisecondOffset <= timePosition) {
22626
+ syncPointIndex++;
22627
+ }
22628
+ const currentSyncPoint = syncPoints[syncPointIndex];
22629
+ const timeDiff = timePosition - currentSyncPoint.data.millisecondOffset;
22630
+ let alphaTabTimeDiff;
22631
+ if (syncPointIndex + 1 < syncPoints.length) {
22632
+ const nextSyncPoint = syncPoints[syncPointIndex + 1];
22633
+ const relativeTimeDiff = timeDiff / (nextSyncPoint.data.millisecondOffset - currentSyncPoint.data.millisecondOffset);
22634
+ alphaTabTimeDiff = (nextSyncPoint.time - currentSyncPoint.time) * relativeTimeDiff;
22635
+ }
22636
+ else {
22637
+ const relativeTimeDiff = timeDiff / (backingTrackLength - currentSyncPoint.data.millisecondOffset);
22638
+ alphaTabTimeDiff = (mainState.endTime - currentSyncPoint.time) * relativeTimeDiff;
22639
+ }
22640
+ return (currentSyncPoint.time + alphaTabTimeDiff) / this.playbackSpeed;
22641
+ }
22642
+ mainTimePositionToBackingTrack(timePosition, backingTrackLength) {
22643
+ const mainState = this._mainState;
22644
+ const syncPoints = mainState.syncPoints;
22645
+ if (timePosition < 0 || syncPoints.length === 0) {
22646
+ return timePosition;
22647
+ }
22648
+ timePosition *= this.playbackSpeed;
22649
+ let syncPointIndex = timePosition >= syncPoints[mainState.syncPointIndex].time ? mainState.syncPointIndex : 0;
22650
+ while (syncPointIndex + 1 < syncPoints.length && syncPoints[syncPointIndex + 1].time <= timePosition) {
22651
+ syncPointIndex++;
22652
+ }
22653
+ const currentSyncPoint = syncPoints[syncPointIndex];
22654
+ const alphaTabTimeDiff = timePosition - currentSyncPoint.time;
22655
+ let backingTrackPos;
22656
+ if (syncPointIndex + 1 < syncPoints.length) {
22657
+ const nextSyncPoint = syncPoints[syncPointIndex + 1];
22658
+ const relativeAlphaTabTimeDiff = alphaTabTimeDiff / (nextSyncPoint.time - currentSyncPoint.time);
22659
+ const backingTrackDiff = nextSyncPoint.data.millisecondOffset - currentSyncPoint.data.millisecondOffset;
22660
+ backingTrackPos = currentSyncPoint.data.millisecondOffset + backingTrackDiff * relativeAlphaTabTimeDiff;
22661
+ }
22662
+ else {
22663
+ const relativeAlphaTabTimeDiff = alphaTabTimeDiff / (mainState.endTime - currentSyncPoint.time);
22664
+ const frameDiff = backingTrackLength - currentSyncPoint.data.millisecondOffset;
22665
+ backingTrackPos = currentSyncPoint.data.millisecondOffset + frameDiff * relativeAlphaTabTimeDiff;
22666
+ }
22667
+ return backingTrackPos;
22668
+ }
22204
22669
  tickPositionToTimePositionWithSpeed(state, tickPosition, playbackSpeed) {
22205
22670
  let timePosition = 0.0;
22206
22671
  let bpm = 120.0;
@@ -22310,6 +22775,8 @@
22310
22775
  });
22311
22776
  state.endTime = metronomeTime;
22312
22777
  state.endTick = metronomeTick;
22778
+ state.currentTempo = bpm;
22779
+ state.modifiedTempo = bpm;
22313
22780
  this._countInState = state;
22314
22781
  }
22315
22782
  }
@@ -22355,12 +22822,22 @@
22355
22822
  * @param endTick The end tick.
22356
22823
  * @param isSeek Whether the time was seeked.
22357
22824
  */
22358
- constructor(currentTime, endTime, currentTick, endTick, isSeek) {
22825
+ constructor(currentTime, endTime, currentTick, endTick, isSeek, originalTempo, modifiedTempo) {
22826
+ /**
22827
+ * The original tempo in which alphaTab internally would be playing right now.
22828
+ */
22829
+ this.originalTempo = 0;
22830
+ /**
22831
+ * The modified tempo in which the actual playback is happening (e.g. due to playback speed or external audio synchronization)
22832
+ */
22833
+ this.modifiedTempo = 0;
22359
22834
  this.currentTime = currentTime;
22360
22835
  this.endTime = endTime;
22361
22836
  this.currentTick = currentTick;
22362
22837
  this.endTick = endTick;
22363
22838
  this.isSeek = isSeek;
22839
+ this.originalTempo = originalTempo;
22840
+ this.modifiedTempo = modifiedTempo;
22364
22841
  }
22365
22842
  }
22366
22843
 
@@ -26096,38 +26573,51 @@
26096
26573
  */
26097
26574
  Voice.RenderEffectSampleBlock = SynthConstants.MicroBufferSize;
26098
26575
 
26576
+ class QueueItem {
26577
+ constructor(value) {
26578
+ this.value = value;
26579
+ }
26580
+ }
26099
26581
  class Queue {
26100
- constructor() {
26101
- this._items = [];
26102
- this._position = 0;
26103
- this.isEmpty = true;
26582
+ get isEmpty() {
26583
+ return this._head === undefined;
26104
26584
  }
26105
26585
  clear() {
26106
- this._items = [];
26107
- this._position = 0;
26108
- this.isEmpty = true;
26586
+ this._head = undefined;
26587
+ this._tail = undefined;
26109
26588
  }
26110
26589
  enqueue(item) {
26111
- this.isEmpty = false;
26112
- this._items.push(item);
26590
+ const queueItem = new QueueItem(item);
26591
+ if (this._tail) {
26592
+ // not empty -> add after tail
26593
+ this._tail.next = queueItem;
26594
+ this._tail = queueItem;
26595
+ }
26596
+ else {
26597
+ // empty -> new item takes head and tail
26598
+ this._head = queueItem;
26599
+ this._tail = queueItem;
26600
+ }
26113
26601
  }
26114
26602
  peek() {
26115
- return this._items[this._position];
26603
+ const head = this._head;
26604
+ if (!head) {
26605
+ return undefined;
26606
+ }
26607
+ return head.value;
26116
26608
  }
26117
26609
  dequeue() {
26118
- const item = this._items[this._position];
26119
- this._position++;
26120
- if (this._position >= this._items.length / 2) {
26121
- this._items = this._items.slice(this._position);
26122
- this._position = 0;
26610
+ const head = this._head;
26611
+ if (!head) {
26612
+ return undefined;
26123
26613
  }
26124
- this.isEmpty = this._items.length === 0;
26125
- return item;
26126
- }
26127
- toArray() {
26128
- const items = this._items.slice(this._position);
26129
- items.reverse();
26130
- return items;
26614
+ const newHead = head.next;
26615
+ this._head = newHead;
26616
+ // last item removed?
26617
+ if (!newHead) {
26618
+ this._tail = undefined;
26619
+ }
26620
+ return head.value;
26131
26621
  }
26132
26622
  }
26133
26623
 
@@ -26458,7 +26948,7 @@
26458
26948
  break;
26459
26949
  case MidiEventType.TempoChange:
26460
26950
  const tempoChange = e;
26461
- this.currentTempo = 60000000 / tempoChange.microSecondsPerQuarterNote;
26951
+ this.currentTempo = tempoChange.beatsPerMinute;
26462
26952
  break;
26463
26953
  case MidiEventType.PitchBend:
26464
26954
  const pitchBend = e;
@@ -27610,15 +28100,15 @@
27610
28100
  }
27611
28101
 
27612
28102
  /**
27613
- * This is the main synthesizer component which can be used to
28103
+ * This is the base class for synthesizer components which can be used to
27614
28104
  * play a {@link MidiFile} via a {@link ISynthOutput}.
27615
28105
  */
27616
- class AlphaSynth {
28106
+ class AlphaSynthBase {
27617
28107
  get output() {
27618
28108
  return this._output;
27619
28109
  }
27620
28110
  get isReadyForPlayback() {
27621
- return this.isReady && this._isSoundFontLoaded && this._isMidiLoaded;
28111
+ return this.isReady && this.isSoundFontLoaded && this._isMidiLoaded;
27622
28112
  }
27623
28113
  get logLevel() {
27624
28114
  return Logger.logLevel;
@@ -27627,11 +28117,14 @@
27627
28117
  Logger.logLevel = value;
27628
28118
  }
27629
28119
  get masterVolume() {
27630
- return this._synthesizer.masterVolume;
28120
+ return this.synthesizer.masterVolume;
27631
28121
  }
27632
28122
  set masterVolume(value) {
27633
28123
  value = Math.max(value, SynthConstants.MinVolume);
27634
- this._synthesizer.masterVolume = value;
28124
+ this.updateMasterVolume(value);
28125
+ }
28126
+ updateMasterVolume(value) {
28127
+ this.synthesizer.masterVolume = value;
27635
28128
  }
27636
28129
  get metronomeVolume() {
27637
28130
  return this._metronomeVolume;
@@ -27639,7 +28132,7 @@
27639
28132
  set metronomeVolume(value) {
27640
28133
  value = Math.max(value, SynthConstants.MinVolume);
27641
28134
  this._metronomeVolume = value;
27642
- this._synthesizer.metronomeVolume = value;
28135
+ this.synthesizer.metronomeVolume = value;
27643
28136
  }
27644
28137
  get countInVolume() {
27645
28138
  return this._countInVolume;
@@ -27655,19 +28148,22 @@
27655
28148
  this._midiEventsPlayedFilter = new Set(value);
27656
28149
  }
27657
28150
  get playbackSpeed() {
27658
- return this._sequencer.playbackSpeed;
28151
+ return this.sequencer.playbackSpeed;
27659
28152
  }
27660
28153
  set playbackSpeed(value) {
27661
28154
  value = ModelUtils.clamp(value, SynthConstants.MinPlaybackSpeed, SynthConstants.MaxPlaybackSpeed);
27662
- const oldSpeed = this._sequencer.playbackSpeed;
27663
- this._sequencer.playbackSpeed = value;
28155
+ this.updatePlaybackSpeed(value);
28156
+ }
28157
+ updatePlaybackSpeed(value) {
28158
+ const oldSpeed = this.sequencer.playbackSpeed;
28159
+ this.sequencer.playbackSpeed = value;
27664
28160
  this.timePosition = this.timePosition * (oldSpeed / value);
27665
28161
  }
27666
28162
  get tickPosition() {
27667
28163
  return this._tickPosition;
27668
28164
  }
27669
28165
  set tickPosition(value) {
27670
- this.timePosition = this._sequencer.mainTickPositionToTimePosition(value);
28166
+ this.timePosition = this.sequencer.mainTickPositionToTimePosition(value);
27671
28167
  }
27672
28168
  get timePosition() {
27673
28169
  return this._timePosition;
@@ -27675,30 +28171,30 @@
27675
28171
  set timePosition(value) {
27676
28172
  Logger.debug('AlphaSynth', `Seeking to position ${value}ms (main)`);
27677
28173
  // tell the sequencer to jump to the given position
27678
- this._sequencer.mainSeek(value);
28174
+ this.sequencer.mainSeek(value);
27679
28175
  // update the internal position
27680
28176
  this.updateTimePosition(value, true);
27681
28177
  // tell the output to reset the already synthesized buffers and request data again
27682
- if (this._sequencer.isPlayingMain) {
28178
+ if (this.sequencer.isPlayingMain) {
27683
28179
  this._notPlayedSamples = 0;
27684
28180
  this.output.resetSamples();
27685
28181
  }
27686
28182
  }
27687
28183
  get playbackRange() {
27688
- return this._sequencer.mainPlaybackRange;
28184
+ return this.sequencer.mainPlaybackRange;
27689
28185
  }
27690
28186
  set playbackRange(value) {
27691
- this._sequencer.mainPlaybackRange = value;
28187
+ this.sequencer.mainPlaybackRange = value;
27692
28188
  if (value) {
27693
28189
  this.tickPosition = value.startTick;
27694
28190
  }
27695
28191
  this.playbackRangeChanged.trigger(new PlaybackRangeChangedEventArgs(value));
27696
28192
  }
27697
28193
  get isLooping() {
27698
- return this._sequencer.isLooping;
28194
+ return this.sequencer.isLooping;
27699
28195
  }
27700
28196
  set isLooping(value) {
27701
- this._sequencer.isLooping = value;
28197
+ this.sequencer.isLooping = value;
27702
28198
  }
27703
28199
  destroy() {
27704
28200
  Logger.debug('AlphaSynth', 'Destroying player');
@@ -27706,11 +28202,11 @@
27706
28202
  this.output.destroy();
27707
28203
  }
27708
28204
  /**
27709
- * Initializes a new instance of the {@link AlphaSynth} class.
28205
+ * Initializes a new instance of the {@link AlphaSynthBase} class.
27710
28206
  * @param output The output to use for playing the generated samples.
27711
28207
  */
27712
- constructor(output, bufferTimeInMilliseconds) {
27713
- this._isSoundFontLoaded = false;
28208
+ constructor(output, synthesizer, bufferTimeInMilliseconds) {
28209
+ this.isSoundFontLoaded = false;
27714
28210
  this._isMidiLoaded = false;
27715
28211
  this._tickPosition = 0;
27716
28212
  this._timePosition = 0;
@@ -27739,8 +28235,8 @@
27739
28235
  Logger.debug('AlphaSynth', 'Creating output');
27740
28236
  this._output = output;
27741
28237
  Logger.debug('AlphaSynth', 'Creating synthesizer');
27742
- this._synthesizer = new TinySoundFont(this.output.sampleRate);
27743
- this._sequencer = new MidiFileSequencer(this._synthesizer);
28238
+ this.synthesizer = synthesizer;
28239
+ this.sequencer = new MidiFileSequencer(this.synthesizer);
27744
28240
  Logger.debug('AlphaSynth', 'Opening output');
27745
28241
  this.output.ready.on(() => {
27746
28242
  this.isReady = true;
@@ -27748,42 +28244,45 @@
27748
28244
  this.checkReadyForPlayback();
27749
28245
  });
27750
28246
  this.output.sampleRequest.on(() => {
27751
- if (this.state === PlayerState.Playing &&
27752
- (!this._sequencer.isFinished || this._synthesizer.activeVoiceCount > 0)) {
27753
- let samples = new Float32Array(SynthConstants.MicroBufferSize * SynthConstants.MicroBufferCount * SynthConstants.AudioChannels);
27754
- let bufferPos = 0;
27755
- for (let i = 0; i < SynthConstants.MicroBufferCount; i++) {
27756
- // synthesize buffer
27757
- this._sequencer.fillMidiEventQueue();
27758
- const synthesizedEvents = this._synthesizer.synthesize(samples, bufferPos, SynthConstants.MicroBufferSize);
27759
- bufferPos += SynthConstants.MicroBufferSize * SynthConstants.AudioChannels;
27760
- // push all processed events into the queue
27761
- // for informing users about played events
27762
- for (const e of synthesizedEvents) {
27763
- if (this._midiEventsPlayedFilter.has(e.event.type)) {
27764
- this._playedEventsQueue.enqueue(e);
27765
- }
27766
- }
27767
- // tell sequencer to check whether its work is done
27768
- if (this._sequencer.isFinished) {
27769
- break;
28247
+ this.onSampleRequest();
28248
+ });
28249
+ this.output.samplesPlayed.on(this.onSamplesPlayed.bind(this));
28250
+ this.output.open(bufferTimeInMilliseconds);
28251
+ }
28252
+ onSampleRequest() {
28253
+ if (this.state === PlayerState.Playing &&
28254
+ (!this.sequencer.isFinished || this.synthesizer.activeVoiceCount > 0)) {
28255
+ let samples = new Float32Array(SynthConstants.MicroBufferSize * SynthConstants.MicroBufferCount * SynthConstants.AudioChannels);
28256
+ let bufferPos = 0;
28257
+ for (let i = 0; i < SynthConstants.MicroBufferCount; i++) {
28258
+ // synthesize buffer
28259
+ this.sequencer.fillMidiEventQueue();
28260
+ const synthesizedEvents = this.synthesizer.synthesize(samples, bufferPos, SynthConstants.MicroBufferSize);
28261
+ bufferPos += SynthConstants.MicroBufferSize * SynthConstants.AudioChannels;
28262
+ // push all processed events into the queue
28263
+ // for informing users about played events
28264
+ for (const e of synthesizedEvents) {
28265
+ if (this._midiEventsPlayedFilter.has(e.event.type)) {
28266
+ this._playedEventsQueue.enqueue(e);
27770
28267
  }
27771
28268
  }
27772
- // send it to output
27773
- if (bufferPos < samples.length) {
27774
- samples = samples.subarray(0, bufferPos);
28269
+ // tell sequencer to check whether its work is done
28270
+ if (this.sequencer.isFinished) {
28271
+ break;
27775
28272
  }
27776
- this._notPlayedSamples += samples.length;
27777
- this.output.addSamples(samples);
27778
28273
  }
27779
- else {
27780
- // Tell output that there is no data left for it.
27781
- const samples = new Float32Array(0);
27782
- this.output.addSamples(samples);
28274
+ // send it to output
28275
+ if (bufferPos < samples.length) {
28276
+ samples = samples.subarray(0, bufferPos);
27783
28277
  }
27784
- });
27785
- this.output.samplesPlayed.on(this.onSamplesPlayed.bind(this));
27786
- this.output.open(bufferTimeInMilliseconds);
28278
+ this._notPlayedSamples += samples.length;
28279
+ this.output.addSamples(samples);
28280
+ }
28281
+ else {
28282
+ // Tell output that there is no data left for it.
28283
+ const samples = new Float32Array(0);
28284
+ this.output.addSamples(samples);
28285
+ }
27787
28286
  }
27788
28287
  play() {
27789
28288
  if (this.state !== PlayerState.Paused || !this._isMidiLoaded) {
@@ -27793,20 +28292,20 @@
27793
28292
  this.playInternal();
27794
28293
  if (this._countInVolume > 0) {
27795
28294
  Logger.debug('AlphaSynth', 'Starting countin');
27796
- this._sequencer.startCountIn();
27797
- this._synthesizer.setupMetronomeChannel(this._countInVolume);
28295
+ this.sequencer.startCountIn();
28296
+ this.synthesizer.setupMetronomeChannel(this._countInVolume);
27798
28297
  this.updateTimePosition(0, true);
27799
28298
  }
27800
28299
  this.output.play();
27801
28300
  return true;
27802
28301
  }
27803
28302
  playInternal() {
27804
- if (this._sequencer.isPlayingOneTimeMidi) {
28303
+ if (this.sequencer.isPlayingOneTimeMidi) {
27805
28304
  Logger.debug('AlphaSynth', 'Cancelling one time midi');
27806
28305
  this.stopOneTimeMidi();
27807
28306
  }
27808
28307
  Logger.debug('AlphaSynth', 'Starting playback');
27809
- this._synthesizer.setupMetronomeChannel(this.metronomeVolume);
28308
+ this.synthesizer.setupMetronomeChannel(this.metronomeVolume);
27810
28309
  this._synthStopping = false;
27811
28310
  this.state = PlayerState.Playing;
27812
28311
  this.stateChanged.trigger(new PlayerStateChangedEventArgs(this.state, false));
@@ -27819,7 +28318,7 @@
27819
28318
  this.state = PlayerState.Paused;
27820
28319
  this.stateChanged.trigger(new PlayerStateChangedEventArgs(this.state, false));
27821
28320
  this.output.pause();
27822
- this._synthesizer.noteOffAll(false);
28321
+ this.synthesizer.noteOffAll(false);
27823
28322
  }
27824
28323
  playPause() {
27825
28324
  if (this.state !== PlayerState.Paused || !this._isMidiLoaded) {
@@ -27837,21 +28336,21 @@
27837
28336
  this.state = PlayerState.Paused;
27838
28337
  this.output.pause();
27839
28338
  this._notPlayedSamples = 0;
27840
- this._sequencer.stop();
27841
- this._synthesizer.noteOffAll(true);
27842
- this.tickPosition = this._sequencer.mainPlaybackRange ? this._sequencer.mainPlaybackRange.startTick : 0;
28339
+ this.sequencer.stop();
28340
+ this.synthesizer.noteOffAll(true);
28341
+ this.tickPosition = this.sequencer.mainPlaybackRange ? this.sequencer.mainPlaybackRange.startTick : 0;
27843
28342
  this.stateChanged.trigger(new PlayerStateChangedEventArgs(this.state, true));
27844
28343
  }
27845
28344
  playOneTimeMidiFile(midi) {
27846
- if (this._sequencer.isPlayingOneTimeMidi) {
28345
+ if (this.sequencer.isPlayingOneTimeMidi) {
27847
28346
  this.stopOneTimeMidi();
27848
28347
  }
27849
28348
  else {
27850
28349
  // pause current playback.
27851
28350
  this.pause();
27852
28351
  }
27853
- this._sequencer.loadOneTimeMidi(midi);
27854
- this._synthesizer.noteOffAll(true);
28352
+ this.sequencer.loadOneTimeMidi(midi);
28353
+ this.synthesizer.noteOffAll(true);
27855
28354
  // update the internal position
27856
28355
  this.updateTimePosition(0, true);
27857
28356
  // tell the output to reset the already synthesized buffers and request data again
@@ -27861,9 +28360,9 @@
27861
28360
  }
27862
28361
  resetSoundFonts() {
27863
28362
  this.stop();
27864
- this._synthesizer.resetPresets();
28363
+ this.synthesizer.resetPresets();
27865
28364
  this._loadedSoundFonts = [];
27866
- this._isSoundFontLoaded = false;
28365
+ this.isSoundFontLoaded = false;
27867
28366
  this.soundFontLoaded.trigger();
27868
28367
  }
27869
28368
  loadSoundFont(data, append) {
@@ -27877,7 +28376,7 @@
27877
28376
  this._loadedSoundFonts = [];
27878
28377
  }
27879
28378
  this._loadedSoundFonts.push(soundFont);
27880
- this._isSoundFontLoaded = true;
28379
+ this.isSoundFontLoaded = true;
27881
28380
  this.soundFontLoaded.trigger();
27882
28381
  Logger.debug('AlphaSynth', 'soundFont successfully loaded');
27883
28382
  this.checkReadyForPlayback();
@@ -27889,12 +28388,12 @@
27889
28388
  }
27890
28389
  checkReadyForPlayback() {
27891
28390
  if (this.isReadyForPlayback) {
27892
- this._synthesizer.setupMetronomeChannel(this.metronomeVolume);
27893
- const programs = this._sequencer.instrumentPrograms;
27894
- const percussionKeys = this._sequencer.percussionKeys;
28391
+ this.synthesizer.setupMetronomeChannel(this.metronomeVolume);
28392
+ const programs = this.sequencer.instrumentPrograms;
28393
+ const percussionKeys = this.sequencer.percussionKeys;
27895
28394
  let append = false;
27896
28395
  for (const soundFont of this._loadedSoundFonts) {
27897
- this._synthesizer.loadPresets(soundFont, programs, percussionKeys, append);
28396
+ this.synthesizer.loadPresets(soundFont, programs, percussionKeys, append);
27898
28397
  append = true;
27899
28398
  }
27900
28399
  this.readyForPlayback.trigger();
@@ -27908,9 +28407,9 @@
27908
28407
  this.stop();
27909
28408
  try {
27910
28409
  Logger.debug('AlphaSynth', 'Loading midi from model');
27911
- this._sequencer.loadMidi(midi);
28410
+ this.sequencer.loadMidi(midi);
27912
28411
  this._isMidiLoaded = true;
27913
- this.midiLoaded.trigger(new PositionChangedEventArgs(0, this._sequencer.currentEndTime, 0, this._sequencer.currentEndTick, false));
28412
+ this.midiLoaded.trigger(new PositionChangedEventArgs(0, this.sequencer.currentEndTime, 0, this.sequencer.currentEndTick, false, this.sequencer.currentTempo, this.sequencer.modifiedTempo));
27914
28413
  Logger.debug('AlphaSynth', 'Midi successfully loaded');
27915
28414
  this.checkReadyForPlayback();
27916
28415
  this.tickPosition = 0;
@@ -27921,29 +28420,29 @@
27921
28420
  }
27922
28421
  }
27923
28422
  applyTranspositionPitches(transpositionPitches) {
27924
- this._synthesizer.applyTranspositionPitches(transpositionPitches);
28423
+ this.synthesizer.applyTranspositionPitches(transpositionPitches);
27925
28424
  }
27926
28425
  setChannelTranspositionPitch(channel, semitones) {
27927
- this._synthesizer.setChannelTranspositionPitch(channel, semitones);
28426
+ this.synthesizer.setChannelTranspositionPitch(channel, semitones);
27928
28427
  }
27929
28428
  setChannelMute(channel, mute) {
27930
- this._synthesizer.channelSetMute(channel, mute);
28429
+ this.synthesizer.channelSetMute(channel, mute);
27931
28430
  }
27932
28431
  resetChannelStates() {
27933
- this._synthesizer.resetChannelStates();
28432
+ this.synthesizer.resetChannelStates();
27934
28433
  }
27935
28434
  setChannelSolo(channel, solo) {
27936
- this._synthesizer.channelSetSolo(channel, solo);
28435
+ this.synthesizer.channelSetSolo(channel, solo);
27937
28436
  }
27938
28437
  setChannelVolume(channel, volume) {
27939
28438
  volume = Math.max(volume, SynthConstants.MinVolume);
27940
- this._synthesizer.channelSetMixVolume(channel, volume);
28439
+ this.synthesizer.channelSetMixVolume(channel, volume);
27941
28440
  }
27942
28441
  onSamplesPlayed(sampleCount) {
27943
28442
  if (sampleCount === 0) {
27944
28443
  return;
27945
28444
  }
27946
- const playedMillis = (sampleCount / this._synthesizer.outSampleRate) * 1000;
28445
+ const playedMillis = (sampleCount / this.synthesizer.outSampleRate) * 1000;
27947
28446
  this._notPlayedSamples -= sampleCount * SynthConstants.AudioChannels;
27948
28447
  this.updateTimePosition(this._timePosition + playedMillis, false);
27949
28448
  this.checkForFinish();
@@ -27951,25 +28450,25 @@
27951
28450
  checkForFinish() {
27952
28451
  let startTick = 0;
27953
28452
  let endTick = 0;
27954
- if (this.playbackRange && this._sequencer.isPlayingMain) {
28453
+ if (this.playbackRange && this.sequencer.isPlayingMain) {
27955
28454
  startTick = this.playbackRange.startTick;
27956
28455
  endTick = this.playbackRange.endTick;
27957
28456
  }
27958
28457
  else {
27959
- endTick = this._sequencer.currentEndTick;
28458
+ endTick = this.sequencer.currentEndTick;
27960
28459
  }
27961
28460
  if (this._tickPosition >= endTick) {
27962
28461
  // fully done with playback of remaining samples?
27963
28462
  if (this._notPlayedSamples <= 0) {
27964
28463
  this._notPlayedSamples = 0;
27965
- if (this._sequencer.isPlayingCountIn) {
28464
+ if (this.sequencer.isPlayingCountIn) {
27966
28465
  Logger.debug('AlphaSynth', 'Finished playback (count-in)');
27967
- this._sequencer.resetCountIn();
27968
- this.timePosition = this._sequencer.currentTime;
28466
+ this.sequencer.resetCountIn();
28467
+ this.timePosition = this.sequencer.currentTime;
27969
28468
  this.playInternal();
27970
28469
  this.output.resetSamples();
27971
28470
  }
27972
- else if (this._sequencer.isPlayingOneTimeMidi) {
28471
+ else if (this.sequencer.isPlayingOneTimeMidi) {
27973
28472
  Logger.debug('AlphaSynth', 'Finished playback (one time)');
27974
28473
  this.output.resetSamples();
27975
28474
  this.state = PlayerState.Paused;
@@ -27981,11 +28480,11 @@
27981
28480
  this.tickPosition = startTick;
27982
28481
  this._synthStopping = false;
27983
28482
  }
27984
- else if (this._synthesizer.activeVoiceCount > 0) {
28483
+ else if (this.synthesizer.activeVoiceCount > 0) {
27985
28484
  // smooth stop
27986
28485
  if (!this._synthStopping) {
27987
28486
  Logger.debug('AlphaSynth', 'Signaling synth to stop all voices (all samples played)');
27988
- this._synthesizer.noteOffAll(true);
28487
+ this.synthesizer.noteOffAll(true);
27989
28488
  this._synthStopping = true;
27990
28489
  }
27991
28490
  }
@@ -28001,7 +28500,7 @@
28001
28500
  // to eventually bring the voices down to 0 and stop playing
28002
28501
  if (!this._synthStopping) {
28003
28502
  Logger.debug('AlphaSynth', 'Signaling synth to stop all voices (not all samples played)');
28004
- this._synthesizer.noteOffAll(true);
28503
+ this.synthesizer.noteOffAll(true);
28005
28504
  this._synthStopping = true;
28006
28505
  }
28007
28506
  }
@@ -28009,44 +28508,41 @@
28009
28508
  }
28010
28509
  stopOneTimeMidi() {
28011
28510
  this.output.pause();
28012
- this._synthesizer.noteOffAll(true);
28013
- this._sequencer.resetOneTimeMidi();
28014
- this.timePosition = this._sequencer.currentTime;
28511
+ this.synthesizer.noteOffAll(true);
28512
+ this.sequencer.resetOneTimeMidi();
28513
+ this.timePosition = this.sequencer.currentTime;
28015
28514
  }
28016
28515
  updateTimePosition(timePosition, isSeek) {
28017
28516
  // update the real positions
28018
28517
  let currentTime = timePosition;
28019
28518
  this._timePosition = currentTime;
28020
- let currentTick = this._sequencer.currentTimePositionToTickPosition(currentTime);
28519
+ let currentTick = this.sequencer.currentTimePositionToTickPosition(currentTime);
28021
28520
  this._tickPosition = currentTick;
28022
- const endTime = this._sequencer.currentEndTime;
28023
- const endTick = this._sequencer.currentEndTick;
28521
+ const endTime = this.sequencer.currentEndTime;
28522
+ const endTick = this.sequencer.currentEndTick;
28024
28523
  // on fade outs we can have some milliseconds longer, ensure we don't report this
28025
28524
  if (currentTime > endTime) {
28026
28525
  currentTime = endTime;
28027
28526
  currentTick = endTick;
28028
28527
  }
28029
- const mode = this._sequencer.isPlayingMain
28030
- ? 'main'
28031
- : this._sequencer.isPlayingCountIn
28032
- ? 'count-in'
28033
- : 'one-time';
28034
- Logger.debug('AlphaSynth', `Position changed: (time: ${currentTime}/${endTime}, tick: ${currentTick}/${endTick}, Active Voices: ${this._synthesizer.activeVoiceCount} (${mode})`);
28035
- if (this._sequencer.isPlayingMain) {
28036
- this.positionChanged.trigger(new PositionChangedEventArgs(currentTime, endTime, currentTick, endTick, isSeek));
28528
+ const mode = this.sequencer.isPlayingMain ? 'main' : this.sequencer.isPlayingCountIn ? 'count-in' : 'one-time';
28529
+ Logger.debug('AlphaSynth', `Position changed: (time: ${currentTime}/${endTime}, tick: ${currentTick}/${endTick}, Active Voices: ${this.synthesizer.activeVoiceCount} (${mode}), Tempo original: ${this.sequencer.currentTempo}, Tempo modified: ${this.sequencer.modifiedTempo})`);
28530
+ if (this.sequencer.isPlayingMain) {
28531
+ this.positionChanged.trigger(new PositionChangedEventArgs(currentTime, endTime, currentTick, endTick, isSeek, this.sequencer.currentTempo, this.sequencer.modifiedTempo));
28037
28532
  }
28038
28533
  // build events which were actually played
28039
28534
  if (isSeek) {
28040
28535
  this._playedEventsQueue.clear();
28041
28536
  }
28042
28537
  else {
28043
- const playedEvents = new Queue();
28538
+ const playedEvents = [];
28044
28539
  while (!this._playedEventsQueue.isEmpty && this._playedEventsQueue.peek().time < currentTime) {
28045
28540
  const synthEvent = this._playedEventsQueue.dequeue();
28046
- playedEvents.enqueue(synthEvent.event);
28541
+ playedEvents.push(synthEvent.event);
28047
28542
  }
28048
- if (!playedEvents.isEmpty) {
28049
- this.midiEventsPlayed.trigger(new MidiEventsPlayedEventArgs(playedEvents.toArray()));
28543
+ if (playedEvents.length > 0) {
28544
+ playedEvents.reverse();
28545
+ this.midiEventsPlayed.trigger(new MidiEventsPlayedEventArgs(playedEvents));
28050
28546
  }
28051
28547
  }
28052
28548
  }
@@ -28054,13 +28550,28 @@
28054
28550
  * @internal
28055
28551
  */
28056
28552
  hasSamplesForProgram(program) {
28057
- return this._synthesizer.hasSamplesForProgram(program);
28553
+ return this.synthesizer.hasSamplesForProgram(program);
28058
28554
  }
28059
28555
  /**
28060
28556
  * @internal
28061
28557
  */
28062
28558
  hasSamplesForPercussion(key) {
28063
- return this._synthesizer.hasSamplesForPercussion(key);
28559
+ return this.synthesizer.hasSamplesForPercussion(key);
28560
+ }
28561
+ loadBackingTrack(_score, _syncPoints) {
28562
+ }
28563
+ }
28564
+ /**
28565
+ * This is the main synthesizer component which can be used to
28566
+ * play a {@link MidiFile} via a {@link ISynthOutput}.
28567
+ */
28568
+ class AlphaSynth extends AlphaSynthBase {
28569
+ /**
28570
+ * Initializes a new instance of the {@link AlphaSynth} class.
28571
+ * @param output The output to use for playing the generated samples.
28572
+ */
28573
+ constructor(output, bufferTimeInMilliseconds) {
28574
+ super(output, new TinySoundFont(output.sampleRate), bufferTimeInMilliseconds);
28064
28575
  }
28065
28576
  }
28066
28577
 
@@ -29307,6 +29818,35 @@
29307
29818
  */
29308
29819
  PlayerOutputMode[PlayerOutputMode["WebAudioScriptProcessor"] = 1] = "WebAudioScriptProcessor";
29309
29820
  })(exports.PlayerOutputMode || (exports.PlayerOutputMode = {}));
29821
+ /**
29822
+ * Lists the different modes how the internal alphaTab player (and related cursor behavior) is working.
29823
+ */
29824
+ exports.PlayerMode = void 0;
29825
+ (function (PlayerMode) {
29826
+ /**
29827
+ * The player functionality is fully disabled.
29828
+ */
29829
+ PlayerMode[PlayerMode["Disabled"] = 0] = "Disabled";
29830
+ /**
29831
+ * The player functionality is enabled.
29832
+ * If the loaded file provides a backing track, it is used for playback.
29833
+ * If no backing track is provided, the midi synthesizer is used.
29834
+ */
29835
+ PlayerMode[PlayerMode["EnabledAutomatic"] = 1] = "EnabledAutomatic";
29836
+ /**
29837
+ * The player functionality is enabled and the synthesizer is used (even if a backing track is embedded in the file).
29838
+ */
29839
+ PlayerMode[PlayerMode["EnabledSynthesizer"] = 2] = "EnabledSynthesizer";
29840
+ /**
29841
+ * The player functionality is enabled. If the input data model has no backing track configured, the player might not work as expected (as playback completes instantly).
29842
+ */
29843
+ PlayerMode[PlayerMode["EnabledBackingTrack"] = 3] = "EnabledBackingTrack";
29844
+ /**
29845
+ * The player functionality is enabled and an external audio/video source is used as time axis.
29846
+ * The related player APIs need to be used to update the current position of the external audio source within alphaTab.
29847
+ */
29848
+ PlayerMode[PlayerMode["EnabledExternalMedia"] = 4] = "EnabledExternalMedia";
29849
+ })(exports.PlayerMode || (exports.PlayerMode = {}));
29310
29850
  /**
29311
29851
  * The player settings control how the audio playback and UI is behaving.
29312
29852
  * @json
@@ -29353,6 +29893,7 @@
29353
29893
  * @since 0.9.6
29354
29894
  * @defaultValue `false`
29355
29895
  * @category Player
29896
+ * @deprecated Use {@link playerMode} instead.
29356
29897
  * @remarks
29357
29898
  * This setting configures whether the player feature is enabled or not. Depending on the platform enabling the player needs some additional actions of the developer.
29358
29899
  * For the JavaScript version the [player.soundFont](/docs/reference/settings/player/soundfont) property must be set to the URL of the sound font that should be used or it must be loaded manually via API.
@@ -29361,6 +29902,37 @@
29361
29902
  * AlphaTab does not ship a default UI for the player. The API must be hooked up to some UI controls to allow the user to interact with the player.
29362
29903
  */
29363
29904
  this.enablePlayer = false;
29905
+ /**
29906
+ * Whether the player should be enabled and which mode it should use.
29907
+ * @since 1.6.0
29908
+ * @defaultValue `PlayerMode.Disabled`
29909
+ * @category Player
29910
+ * @remarks
29911
+ * This setting configures whether the player feature is enabled or not. Depending on the platform enabling the player needs some additional actions of the developer.
29912
+ *
29913
+ * **Synthesizer**
29914
+ *
29915
+ * If the synthesizer is used (via {@link PlayerMode.EnabledAutomatic} or {@link PlayerMode.EnabledSynthesizer}) a sound font is needed so that the midi synthesizer can produce the audio samples.
29916
+ *
29917
+ * For the JavaScript version the [player.soundFont](/docs/reference/settings/player/soundfont) property must be set to the URL of the sound font that should be used or it must be loaded manually via API.
29918
+ * For .net manually the soundfont must be loaded.
29919
+ *
29920
+ * **Backing Track**
29921
+ *
29922
+ * For a built-in backing track of the input file no additional data needs to be loaded (assuming everything is filled via the input file).
29923
+ * Otherwise the `score.backingTrack` needs to be filled before loading and the related sync points need to be configured.
29924
+ *
29925
+ * **External Media**
29926
+ *
29927
+ * For synchronizing alphaTab with an external media no data needs to be loaded into alphaTab. The configured sync points on the MasterBars are used
29928
+ * as reference to synchronize the external media with the internal time axis. Then the related APIs on the AlphaTabApi object need to be used
29929
+ * to update the playback state and exterrnal audio position during playback.
29930
+ *
29931
+ * **User Interface**
29932
+ *
29933
+ * AlphaTab does not ship a default UI for the player. The API must be hooked up to some UI controls to allow the user to interact with the player.
29934
+ */
29935
+ this.playerMode = exports.PlayerMode.Disabled;
29364
29936
  /**
29365
29937
  * Whether playback cursors should be displayed.
29366
29938
  * @since 0.9.6
@@ -30066,6 +30638,7 @@
30066
30638
  /*@target web*/
30067
30639
  o.set("outputmode", obj.outputMode);
30068
30640
  o.set("enableplayer", obj.enablePlayer);
30641
+ o.set("playermode", obj.playerMode);
30069
30642
  o.set("enablecursor", obj.enableCursor);
30070
30643
  o.set("enableanimatedbeatcursor", obj.enableAnimatedBeatCursor);
30071
30644
  o.set("enableelementhighlighting", obj.enableElementHighlighting);
@@ -30101,6 +30674,9 @@
30101
30674
  case "enableplayer":
30102
30675
  obj.enablePlayer = v;
30103
30676
  return true;
30677
+ case "playermode":
30678
+ obj.playerMode = JsonHelper.parseEnum(v, exports.PlayerMode);
30679
+ return true;
30104
30680
  case "enablecursor":
30105
30681
  obj.enableCursor = v;
30106
30682
  return true;
@@ -30335,6 +30911,39 @@
30335
30911
  }
30336
30912
  }
30337
30913
 
30914
+ class SyncPointDataSerializer {
30915
+ static fromJson(obj, m) {
30916
+ if (!m) {
30917
+ return;
30918
+ }
30919
+ JsonHelper.forEach(m, (v, k) => SyncPointDataSerializer.setProperty(obj, k, v));
30920
+ }
30921
+ static toJson(obj) {
30922
+ if (!obj) {
30923
+ return null;
30924
+ }
30925
+ const o = new Map();
30926
+ o.set("baroccurence", obj.barOccurence);
30927
+ o.set("modifiedtempo", obj.modifiedTempo);
30928
+ o.set("millisecondoffset", obj.millisecondOffset);
30929
+ return o;
30930
+ }
30931
+ static setProperty(obj, property, v) {
30932
+ switch (property) {
30933
+ case "baroccurence":
30934
+ obj.barOccurence = v;
30935
+ return true;
30936
+ case "modifiedtempo":
30937
+ obj.modifiedTempo = v;
30938
+ return true;
30939
+ case "millisecondoffset":
30940
+ obj.millisecondOffset = v;
30941
+ return true;
30942
+ }
30943
+ return false;
30944
+ }
30945
+ }
30946
+
30338
30947
  class AutomationSerializer {
30339
30948
  static fromJson(obj, m) {
30340
30949
  if (!m) {
@@ -30350,6 +30959,9 @@
30350
30959
  o.set("islinear", obj.isLinear);
30351
30960
  o.set("type", obj.type);
30352
30961
  o.set("value", obj.value);
30962
+ if (obj.syncPointValue) {
30963
+ o.set("syncpointvalue", SyncPointDataSerializer.toJson(obj.syncPointValue));
30964
+ }
30353
30965
  o.set("ratioposition", obj.ratioPosition);
30354
30966
  o.set("text", obj.text);
30355
30967
  return o;
@@ -30365,6 +30977,15 @@
30365
30977
  case "value":
30366
30978
  obj.value = v;
30367
30979
  return true;
30980
+ case "syncpointvalue":
30981
+ if (v) {
30982
+ obj.syncPointValue = new SyncPointData();
30983
+ SyncPointDataSerializer.fromJson(obj.syncPointValue, v);
30984
+ }
30985
+ else {
30986
+ obj.syncPointValue = undefined;
30987
+ }
30988
+ return true;
30368
30989
  case "ratioposition":
30369
30990
  obj.ratioPosition = v;
30370
30991
  return true;
@@ -30430,6 +31051,9 @@
30430
31051
  o.set("section", SectionSerializer.toJson(obj.section));
30431
31052
  }
30432
31053
  o.set("tempoautomations", obj.tempoAutomations.map(i => AutomationSerializer.toJson(i)));
31054
+ if (obj.syncPoints !== undefined) {
31055
+ o.set("syncpoints", obj.syncPoints?.map(i => AutomationSerializer.toJson(i)));
31056
+ }
30433
31057
  if (obj.fermata !== null) {
30434
31058
  const m = new Map();
30435
31059
  o.set("fermata", m);
@@ -30496,6 +31120,16 @@
30496
31120
  obj.tempoAutomations.push(i);
30497
31121
  }
30498
31122
  return true;
31123
+ case "syncpoints":
31124
+ if (v) {
31125
+ obj.syncPoints = [];
31126
+ for (const o of v) {
31127
+ const i = new Automation();
31128
+ AutomationSerializer.fromJson(i, o);
31129
+ obj.addSyncPoint(i);
31130
+ }
31131
+ }
31132
+ return true;
30499
31133
  case "fermata":
30500
31134
  obj.fermata = new Map();
30501
31135
  JsonHelper.forEach(v, (v, k) => {
@@ -31787,6 +32421,31 @@
31787
32421
  }
31788
32422
  }
31789
32423
 
32424
+ class BackingTrackSerializer {
32425
+ static fromJson(obj, m) {
32426
+ if (!m) {
32427
+ return;
32428
+ }
32429
+ JsonHelper.forEach(m, (v, k) => BackingTrackSerializer.setProperty(obj, k, v));
32430
+ }
32431
+ static toJson(obj) {
32432
+ if (!obj) {
32433
+ return null;
32434
+ }
32435
+ const o = new Map();
32436
+ o.set("padding", obj.padding);
32437
+ return o;
32438
+ }
32439
+ static setProperty(obj, property, v) {
32440
+ switch (property) {
32441
+ case "padding":
32442
+ obj.padding = v;
32443
+ return true;
32444
+ }
32445
+ return false;
32446
+ }
32447
+ }
32448
+
31790
32449
  class HeaderFooterStyleSerializer {
31791
32450
  static fromJson(obj, m) {
31792
32451
  if (!m) {
@@ -31898,6 +32557,9 @@
31898
32557
  o.set("defaultsystemslayout", obj.defaultSystemsLayout);
31899
32558
  o.set("systemslayout", obj.systemsLayout);
31900
32559
  o.set("stylesheet", RenderStylesheetSerializer.toJson(obj.stylesheet));
32560
+ if (obj.backingTrack) {
32561
+ o.set("backingtrack", BackingTrackSerializer.toJson(obj.backingTrack));
32562
+ }
31901
32563
  if (obj.style) {
31902
32564
  o.set("style", ScoreStyleSerializer.toJson(obj.style));
31903
32565
  }
@@ -31966,6 +32628,15 @@
31966
32628
  case "stylesheet":
31967
32629
  RenderStylesheetSerializer.fromJson(obj.stylesheet, v);
31968
32630
  return true;
32631
+ case "backingtrack":
32632
+ if (v) {
32633
+ obj.backingTrack = new BackingTrack();
32634
+ BackingTrackSerializer.fromJson(obj.backingTrack, v);
32635
+ }
32636
+ else {
32637
+ obj.backingTrack = undefined;
32638
+ }
32639
+ return true;
31969
32640
  case "style":
31970
32641
  if (v) {
31971
32642
  obj.style = new ScoreStyle();
@@ -32142,7 +32813,9 @@
32142
32813
  case MidiEventType.ProgramChange:
32143
32814
  return new ProgramChangeEvent(track, tick, JsonHelper.getValue(midiEvent, 'channel'), JsonHelper.getValue(midiEvent, 'program'));
32144
32815
  case MidiEventType.TempoChange:
32145
- return new TempoChangeEvent(tick, JsonHelper.getValue(midiEvent, 'microSecondsPerQuarterNote'));
32816
+ const tempo = new TempoChangeEvent(tick, 0);
32817
+ tempo.beatsPerMinute = JsonHelper.getValue(midiEvent, 'beatsPerMinute');
32818
+ return tempo;
32146
32819
  case MidiEventType.PitchBend:
32147
32820
  return new PitchBendEvent(track, tick, JsonHelper.getValue(midiEvent, 'channel'), JsonHelper.getValue(midiEvent, 'value'));
32148
32821
  case MidiEventType.PerNotePitchBend:
@@ -32217,7 +32890,7 @@
32217
32890
  o.set('program', midiEvent.program);
32218
32891
  break;
32219
32892
  case MidiEventType.TempoChange:
32220
- o.set('microSecondsPerQuarterNote', midiEvent.microSecondsPerQuarterNote);
32893
+ o.set('beatsPerMinute', midiEvent.beatsPerMinute);
32221
32894
  break;
32222
32895
  case MidiEventType.PitchBend:
32223
32896
  o.set('channel', midiEvent.channel);
@@ -32444,7 +33117,9 @@
32444
33117
  endTime: e.endTime,
32445
33118
  currentTick: e.currentTick,
32446
33119
  endTick: e.endTick,
32447
- isSeek: e.isSeek
33120
+ isSeek: e.isSeek,
33121
+ originalTempo: e.originalTempo,
33122
+ modifiedTempo: e.modifiedTempo
32448
33123
  });
32449
33124
  }
32450
33125
  onPlayerStateChanged(e) {
@@ -32490,7 +33165,9 @@
32490
33165
  endTime: e.endTime,
32491
33166
  currentTick: e.currentTick,
32492
33167
  endTick: e.endTick,
32493
- isSeek: e.isSeek
33168
+ isSeek: e.isSeek,
33169
+ originalTempo: e.originalTempo,
33170
+ modifiedTempo: e.modifiedTempo
32494
33171
  });
32495
33172
  }
32496
33173
  onMidiLoadFailed(e) {
@@ -33799,8 +34476,9 @@
33799
34476
  }
33800
34477
  addTempo(tick, tempo) {
33801
34478
  // bpm -> microsecond per quarter note
33802
- const tempoInUsq = (60000000 / tempo) | 0;
33803
- this._midiFile.addEvent(new TempoChangeEvent(tick, tempoInUsq));
34479
+ const tempoEvent = new TempoChangeEvent(tick, 0);
34480
+ tempoEvent.beatsPerMinute = tempo;
34481
+ this._midiFile.addEvent(tempoEvent);
33804
34482
  }
33805
34483
  addBend(track, tick, channel, value) {
33806
34484
  if (value >= SynthConstants.MaxPitchWheel) {
@@ -35072,6 +35750,10 @@
35072
35750
  * Gets or sets whether transposition pitches should be applied to the individual midi events or not.
35073
35751
  */
35074
35752
  this.applyTranspositionPitches = true;
35753
+ /**
35754
+ * The computed sync points for synchronizing the midi file with an external backing track.
35755
+ */
35756
+ this.syncPoints = [];
35075
35757
  /**
35076
35758
  * Gets the transposition pitches for the individual midi channels.
35077
35759
  */
@@ -35098,13 +35780,17 @@
35098
35780
  let previousMasterBar = null;
35099
35781
  let currentTempo = this._score.tempo;
35100
35782
  // store the previous played bar for repeats
35783
+ const barOccurence = new Map();
35101
35784
  while (!controller.finished) {
35102
35785
  const index = controller.index;
35103
35786
  const bar = this._score.masterBars[index];
35104
35787
  const currentTick = controller.currentTick;
35105
35788
  controller.processCurrent();
35106
35789
  if (controller.shouldPlay) {
35107
- this.generateMasterBar(bar, previousMasterBar, currentTick, currentTempo);
35790
+ let occurence = barOccurence.has(index) ? barOccurence.get(index) : -1;
35791
+ occurence++;
35792
+ barOccurence.set(index, occurence);
35793
+ this.generateMasterBar(bar, previousMasterBar, currentTick, currentTempo, occurence);
35108
35794
  if (bar.tempoAutomations.length > 0) {
35109
35795
  currentTempo = bar.tempoAutomations[0].value;
35110
35796
  }
@@ -35173,7 +35859,7 @@
35173
35859
  const value = Math.max(-32768, Math.min(32767, data * 8 - 1));
35174
35860
  return Math.max(value, -1) + 1;
35175
35861
  }
35176
- generateMasterBar(masterBar, previousMasterBar, currentTick, currentTempo) {
35862
+ generateMasterBar(masterBar, previousMasterBar, currentTick, currentTempo, barOccurence) {
35177
35863
  // time signature
35178
35864
  if (!previousMasterBar ||
35179
35865
  previousMasterBar.timeSignatureDenominator !== masterBar.timeSignatureDenominator ||
@@ -35200,6 +35886,15 @@
35200
35886
  else {
35201
35887
  masterBarLookup.tempoChanges.push(new MasterBarTickLookupTempoChange(currentTick, currentTempo));
35202
35888
  }
35889
+ const syncPoints = masterBar.syncPoints;
35890
+ if (syncPoints) {
35891
+ for (const syncPoint of syncPoints) {
35892
+ if (syncPoint.syncPointValue.barOccurence === barOccurence) {
35893
+ const tick = currentTick + masterBarDuration * syncPoint.ratioPosition;
35894
+ this.syncPoints.push(new BackingTrackSyncPoint(tick, syncPoint.syncPointValue));
35895
+ }
35896
+ }
35897
+ }
35203
35898
  masterBarLookup.masterBar = masterBar;
35204
35899
  masterBarLookup.start = currentTick;
35205
35900
  masterBarLookup.end = masterBarLookup.start + masterBarDuration;
@@ -37397,6 +38092,213 @@
37397
38092
  }
37398
38093
  }
37399
38094
 
38095
+ class BackingTrackAudioSynthesizer {
38096
+ constructor() {
38097
+ this._midiEventQueue = new Queue();
38098
+ this.masterVolume = 1;
38099
+ this.metronomeVolume = 0;
38100
+ this.outSampleRate = 44100;
38101
+ this.currentTempo = 120;
38102
+ this.timeSignatureNumerator = 4;
38103
+ this.timeSignatureDenominator = 4;
38104
+ this.activeVoiceCount = 0;
38105
+ }
38106
+ noteOffAll(_immediate) {
38107
+ }
38108
+ resetSoft() {
38109
+ }
38110
+ resetPresets() {
38111
+ }
38112
+ loadPresets(_hydra, _instrumentPrograms, _percussionKeys, _append) {
38113
+ }
38114
+ setupMetronomeChannel(_metronomeVolume) {
38115
+ }
38116
+ synthesizeSilent(_sampleCount) {
38117
+ this.fakeSynthesize();
38118
+ }
38119
+ processMidiMessage(e) {
38120
+ }
38121
+ dispatchEvent(synthEvent) {
38122
+ this._midiEventQueue.enqueue(synthEvent);
38123
+ }
38124
+ synthesize(_buffer, _bufferPos, _sampleCount) {
38125
+ return this.fakeSynthesize();
38126
+ }
38127
+ fakeSynthesize() {
38128
+ const processedEvents = [];
38129
+ while (!this._midiEventQueue.isEmpty) {
38130
+ const m = this._midiEventQueue.dequeue();
38131
+ if (m.isMetronome && this.metronomeVolume > 0) ;
38132
+ else if (m.event) {
38133
+ this.processMidiMessage(m.event);
38134
+ }
38135
+ processedEvents.push(m);
38136
+ }
38137
+ return processedEvents;
38138
+ }
38139
+ applyTranspositionPitches(transpositionPitches) {
38140
+ }
38141
+ setChannelTranspositionPitch(channel, semitones) {
38142
+ }
38143
+ channelSetMute(channel, mute) {
38144
+ }
38145
+ channelSetSolo(channel, solo) {
38146
+ }
38147
+ resetChannelStates() {
38148
+ }
38149
+ channelSetMixVolume(channel, volume) {
38150
+ }
38151
+ hasSamplesForProgram(program) {
38152
+ return true;
38153
+ }
38154
+ hasSamplesForPercussion(key) {
38155
+ return true;
38156
+ }
38157
+ }
38158
+ class BackingTrackPlayer extends AlphaSynthBase {
38159
+ constructor(backingTrackOutput, bufferTimeInMilliseconds) {
38160
+ super(backingTrackOutput, new BackingTrackAudioSynthesizer(), bufferTimeInMilliseconds);
38161
+ this.synthesizer.output = backingTrackOutput;
38162
+ this._backingTrackOutput = backingTrackOutput;
38163
+ backingTrackOutput.timeUpdate.on(timePosition => {
38164
+ const alphaTabTimePosition = this.sequencer.mainTimePositionFromBackingTrack(timePosition, backingTrackOutput.backingTrackDuration);
38165
+ this.sequencer.fillMidiEventQueueToEndTime(alphaTabTimePosition);
38166
+ this.synthesizer.fakeSynthesize();
38167
+ this.updateTimePosition(alphaTabTimePosition, false);
38168
+ this.checkForFinish();
38169
+ });
38170
+ }
38171
+ updateMasterVolume(value) {
38172
+ super.updateMasterVolume(value);
38173
+ this._backingTrackOutput.masterVolume = value;
38174
+ }
38175
+ updatePlaybackSpeed(value) {
38176
+ super.updatePlaybackSpeed(value);
38177
+ this._backingTrackOutput.playbackRate = value;
38178
+ }
38179
+ onSampleRequest() {
38180
+ }
38181
+ loadMidiFile(midi) {
38182
+ if (!this.isSoundFontLoaded) {
38183
+ this.isSoundFontLoaded = true;
38184
+ this.soundFontLoaded.trigger();
38185
+ }
38186
+ super.loadMidiFile(midi);
38187
+ }
38188
+ updateTimePosition(timePosition, isSeek) {
38189
+ super.updateTimePosition(timePosition, isSeek);
38190
+ if (isSeek) {
38191
+ this._backingTrackOutput.seekTo(this.sequencer.mainTimePositionToBackingTrack(timePosition, this._backingTrackOutput.backingTrackDuration));
38192
+ }
38193
+ }
38194
+ loadBackingTrack(score, syncPoints) {
38195
+ const backingTrackInfo = score.backingTrack;
38196
+ if (backingTrackInfo) {
38197
+ this._backingTrackOutput.loadBackingTrack(backingTrackInfo);
38198
+ this.sequencer.mainUpdateSyncPoints(syncPoints);
38199
+ this.timePosition = 0;
38200
+ }
38201
+ }
38202
+ }
38203
+
38204
+ class ExternalMediaSynthOutput {
38205
+ constructor() {
38206
+ // fake rate
38207
+ this.sampleRate = 44100;
38208
+ this._padding = 0;
38209
+ this._seekPosition = 0;
38210
+ this.ready = new EventEmitter();
38211
+ this.samplesPlayed = new EventEmitterOfT();
38212
+ this.timeUpdate = new EventEmitterOfT();
38213
+ this.sampleRequest = new EventEmitter();
38214
+ }
38215
+ get handler() {
38216
+ return this._handler;
38217
+ }
38218
+ set handler(value) {
38219
+ if (value) {
38220
+ if (this._seekPosition !== 0) {
38221
+ value.seekTo(this._seekPosition);
38222
+ this._seekPosition = 0;
38223
+ }
38224
+ }
38225
+ this._handler = value;
38226
+ }
38227
+ get backingTrackDuration() {
38228
+ return this.handler?.backingTrackDuration ?? 0;
38229
+ }
38230
+ get playbackRate() {
38231
+ return this.handler?.playbackRate ?? 1;
38232
+ }
38233
+ set playbackRate(value) {
38234
+ const handler = this.handler;
38235
+ if (handler) {
38236
+ handler.playbackRate = value;
38237
+ }
38238
+ }
38239
+ get masterVolume() {
38240
+ return this.handler?.masterVolume ?? 1;
38241
+ }
38242
+ set masterVolume(value) {
38243
+ const handler = this.handler;
38244
+ if (handler) {
38245
+ handler.masterVolume = value;
38246
+ }
38247
+ }
38248
+ seekTo(time) {
38249
+ const handler = this.handler;
38250
+ if (handler) {
38251
+ handler.seekTo(time - this._padding);
38252
+ }
38253
+ else {
38254
+ this._seekPosition = time - this._padding;
38255
+ }
38256
+ }
38257
+ loadBackingTrack(backingTrack) {
38258
+ this._padding = backingTrack.padding;
38259
+ }
38260
+ open(_bufferTimeInMilliseconds) {
38261
+ this.ready.trigger();
38262
+ }
38263
+ updatePosition(currentTime) {
38264
+ this.timeUpdate.trigger(currentTime + this._padding);
38265
+ }
38266
+ play() {
38267
+ this.handler?.play();
38268
+ }
38269
+ destroy() {
38270
+ }
38271
+ pause() {
38272
+ this.handler?.pause();
38273
+ }
38274
+ addSamples(_samples) {
38275
+ }
38276
+ resetSamples() {
38277
+ }
38278
+ activate() {
38279
+ }
38280
+ async enumerateOutputDevices() {
38281
+ const empty = [];
38282
+ return empty;
38283
+ }
38284
+ async setOutputDevice(_device) {
38285
+ }
38286
+ async getOutputDevice() {
38287
+ return null;
38288
+ }
38289
+ }
38290
+ class ExternalMediaPlayer extends BackingTrackPlayer {
38291
+ get handler() {
38292
+ return this.output.handler;
38293
+ }
38294
+ set handler(value) {
38295
+ this.output.handler = value;
38296
+ }
38297
+ constructor(bufferTimeInMilliseconds) {
38298
+ super(new ExternalMediaSynthOutput(), bufferTimeInMilliseconds);
38299
+ }
38300
+ }
38301
+
37400
38302
  class SelectionInfo {
37401
38303
  constructor(beat) {
37402
38304
  this.bounds = null;
@@ -37410,6 +38312,12 @@
37410
38312
  * @csharp_public
37411
38313
  */
37412
38314
  class AlphaTabApiBase {
38315
+ /**
38316
+ * The actual player mode which is currently active (e.g. allows determining whether a backing track or the synthesizer is active).
38317
+ */
38318
+ get actualPlayerMode() {
38319
+ return this._actualPlayerMode;
38320
+ }
37413
38321
  /**
37414
38322
  * The score holding all information about the song being rendered
37415
38323
  * @category Properties - Core
@@ -37479,10 +38387,8 @@
37479
38387
  this._isDestroyed = false;
37480
38388
  this._score = null;
37481
38389
  this._tracks = [];
38390
+ this._actualPlayerMode = exports.PlayerMode.Disabled;
37482
38391
  this._tickCache = null;
37483
- /**
37484
- * Gets the alphaSynth player used for playback. This is the low-level API to the Midi synthesizer used for playback.
37485
- */
37486
38392
  /**
37487
38393
  * The alphaSynth player used for playback.
37488
38394
  * @remarks
@@ -38519,6 +39425,10 @@
38519
39425
  this.container = uiFacade.rootContainer;
38520
39426
  uiFacade.initialize(this, settings);
38521
39427
  Logger.logLevel = this.settings.core.logLevel;
39428
+ // backwards compatibility: remove in 2.0
39429
+ if (this.settings.player.playerMode === exports.PlayerMode.Disabled && this.settings.player.enablePlayer) {
39430
+ this.settings.player.playerMode = exports.PlayerMode.EnabledAutomatic;
39431
+ }
38522
39432
  Environment.printEnvironmentInfo(false);
38523
39433
  this.canvasElement = uiFacade.createCanvasElement();
38524
39434
  this.container.appendChild(this.canvasElement);
@@ -38562,7 +39472,7 @@
38562
39472
  this.appendRenderResult(null); // marks last element
38563
39473
  });
38564
39474
  this.renderer.error.on(this.onError.bind(this));
38565
- if (this.settings.player.enablePlayer) {
39475
+ if (this.settings.player.playerMode !== exports.PlayerMode.Disabled) {
38566
39476
  this.setupPlayer();
38567
39477
  }
38568
39478
  this.setupClickHandling();
@@ -38654,10 +39564,9 @@
38654
39564
  }
38655
39565
  this.renderer.updateSettings(this.settings);
38656
39566
  // enable/disable player if needed
38657
- if (this.settings.player.enablePlayer) {
38658
- this.setupPlayer();
38659
- if (score) {
38660
- this.player?.applyTranspositionPitches(MidiFileGenerator.buildTranspositionPitches(score, this.settings));
39567
+ if (this.settings.player.playerMode !== exports.PlayerMode.Disabled) {
39568
+ if (this.setupPlayer() && score) {
39569
+ this.loadMidiForScore();
38661
39570
  }
38662
39571
  }
38663
39572
  else {
@@ -39589,13 +40498,51 @@
39589
40498
  this.destroyCursors();
39590
40499
  }
39591
40500
  setupPlayer() {
40501
+ let mode = this.settings.player.playerMode;
40502
+ if (mode === exports.PlayerMode.EnabledAutomatic) {
40503
+ const score = this.score;
40504
+ if (!score) {
40505
+ return false;
40506
+ }
40507
+ if (score?.backingTrack?.rawAudioFile) {
40508
+ mode = exports.PlayerMode.EnabledBackingTrack;
40509
+ }
40510
+ else {
40511
+ mode = exports.PlayerMode.EnabledSynthesizer;
40512
+ }
40513
+ }
40514
+ if (mode !== this._actualPlayerMode) {
40515
+ this.destroyPlayer();
40516
+ }
39592
40517
  this.updateCursors();
39593
- if (this.player) {
39594
- return;
40518
+ this._actualPlayerMode = mode;
40519
+ switch (mode) {
40520
+ case exports.PlayerMode.Disabled:
40521
+ this.destroyPlayer();
40522
+ return false;
40523
+ case exports.PlayerMode.EnabledSynthesizer:
40524
+ if (this.player) {
40525
+ return true;
40526
+ }
40527
+ // new player needed
40528
+ this.player = this.uiFacade.createWorkerPlayer();
40529
+ break;
40530
+ case exports.PlayerMode.EnabledBackingTrack:
40531
+ if (this.player) {
40532
+ return true;
40533
+ }
40534
+ // new player needed
40535
+ this.player = this.uiFacade.createBackingTrackPlayer();
40536
+ break;
40537
+ case exports.PlayerMode.EnabledExternalMedia:
40538
+ if (this.player) {
40539
+ return true;
40540
+ }
40541
+ this.player = new ExternalMediaPlayer(this.settings.player.bufferTimeInMilliseconds);
40542
+ break;
39595
40543
  }
39596
- this.player = this.uiFacade.createWorkerPlayer();
39597
40544
  if (!this.player) {
39598
- return;
40545
+ return false;
39599
40546
  }
39600
40547
  this.player.ready.on(() => {
39601
40548
  this.loadMidiForScore();
@@ -39624,6 +40571,7 @@
39624
40571
  this.player.playbackRangeChanged.on(this.onPlaybackRangeChanged.bind(this));
39625
40572
  this.player.finished.on(this.onPlayerFinished.bind(this));
39626
40573
  this.setupPlayerEvents();
40574
+ return false;
39627
40575
  }
39628
40576
  loadMidiForScore() {
39629
40577
  if (!this.score) {
@@ -39645,6 +40593,7 @@
39645
40593
  const player = this.player;
39646
40594
  if (player) {
39647
40595
  player.loadMidiFile(midiFile);
40596
+ player.loadBackingTrack(score, generator.syncPoints);
39648
40597
  player.applyTranspositionPitches(generator.transpositionPitches);
39649
40598
  }
39650
40599
  }
@@ -40061,7 +41010,7 @@
40061
41010
  this._selectionWrapper = cursors.selectionWrapper;
40062
41011
  }
40063
41012
  if (this._currentBeat !== null) {
40064
- this.cursorUpdateBeat(this._currentBeat, false, this._previousTick > 10, true);
41013
+ this.cursorUpdateBeat(this._currentBeat, false, this._previousTick > 10, 1, true);
40065
41014
  }
40066
41015
  }
40067
41016
  else if (!this.settings.player.enableCursor && this._cursorWrapper) {
@@ -40076,13 +41025,14 @@
40076
41025
  // we need to update our position caches if we render a tablature
40077
41026
  this.renderer.postRenderFinished.on(() => {
40078
41027
  this._currentBeat = null;
40079
- this.cursorUpdateTick(this._previousTick, false, this._previousTick > 10);
41028
+ this.cursorUpdateTick(this._previousTick, false, 1, this._previousTick > 10);
40080
41029
  });
40081
41030
  if (this.player) {
40082
41031
  this.player.positionChanged.on(e => {
40083
41032
  this._previousTick = e.currentTick;
40084
41033
  this.uiFacade.beginInvoke(() => {
40085
- this.cursorUpdateTick(e.currentTick, false, false, e.isSeek);
41034
+ const cursorSpeed = e.modifiedTempo / e.originalTempo;
41035
+ this.cursorUpdateTick(e.currentTick, false, cursorSpeed, false, e.isSeek);
40086
41036
  });
40087
41037
  });
40088
41038
  this.player.stateChanged.on(e => {
@@ -40103,14 +41053,15 @@
40103
41053
  * @param stop
40104
41054
  * @param shouldScroll whether we should scroll to the bar (if scrolling is active)
40105
41055
  */
40106
- cursorUpdateTick(tick, stop, shouldScroll = false, forceUpdate = false) {
41056
+ cursorUpdateTick(tick, stop, cursorSpeed, shouldScroll = false, forceUpdate = false) {
41057
+ this._previousTick = tick;
40107
41058
  const cache = this._tickCache;
40108
41059
  if (cache) {
40109
41060
  const tracks = this._trackIndexLookup;
40110
41061
  if (tracks != null && tracks.size > 0) {
40111
41062
  const beat = cache.findBeat(tracks, tick, this._currentBeat);
40112
41063
  if (beat) {
40113
- this.cursorUpdateBeat(beat, stop, shouldScroll, forceUpdate || this.playerState === PlayerState.Paused);
41064
+ this.cursorUpdateBeat(beat, stop, shouldScroll, cursorSpeed, forceUpdate || this.playerState === PlayerState.Paused);
40114
41065
  }
40115
41066
  }
40116
41067
  }
@@ -40118,7 +41069,7 @@
40118
41069
  /**
40119
41070
  * updates the cursors to highlight the specified beat
40120
41071
  */
40121
- cursorUpdateBeat(lookupResult, stop, shouldScroll, forceUpdate = false) {
41072
+ cursorUpdateBeat(lookupResult, stop, shouldScroll, cursorSpeed, forceUpdate = false) {
40122
41073
  const beat = lookupResult.beat;
40123
41074
  const nextBeat = lookupResult.nextBeat?.beat ?? null;
40124
41075
  const duration = lookupResult.duration;
@@ -40150,7 +41101,7 @@
40150
41101
  this._previousCursorCache = cache;
40151
41102
  this._previousStateForCursor = this._playerState;
40152
41103
  this.uiFacade.beginInvoke(() => {
40153
- this.internalCursorUpdateBeat(beat, nextBeat, duration, stop, beatsToHighlight, cache, beatBoundings, shouldScroll, lookupResult.cursorMode);
41104
+ this.internalCursorUpdateBeat(beat, nextBeat, duration, stop, beatsToHighlight, cache, beatBoundings, shouldScroll, lookupResult.cursorMode, cursorSpeed);
40154
41105
  });
40155
41106
  }
40156
41107
  /**
@@ -40215,7 +41166,7 @@
40215
41166
  }
40216
41167
  }
40217
41168
  }
40218
- internalCursorUpdateBeat(beat, nextBeat, duration, stop, beatsToHighlight, cache, beatBoundings, shouldScroll, cursorMode) {
41169
+ internalCursorUpdateBeat(beat, nextBeat, duration, stop, beatsToHighlight, cache, beatBoundings, shouldScroll, cursorMode, cursorSpeed) {
40219
41170
  const barCursor = this._barCursor;
40220
41171
  const beatCursor = this._beatCursor;
40221
41172
  const barBoundings = beatBoundings.barBounds.masterBarBounds;
@@ -40224,12 +41175,29 @@
40224
41175
  if (barCursor) {
40225
41176
  barCursor.setBounds(barBounds.x, barBounds.y, barBounds.w, barBounds.h);
40226
41177
  }
41178
+ let nextBeatX = barBoundings.visualBounds.x + barBoundings.visualBounds.w;
41179
+ // get position of next beat on same system
41180
+ if (nextBeat && cursorMode === MidiTickLookupFindBeatResultCursorMode.ToNextBext) {
41181
+ // if we are moving within the same bar or to the next bar
41182
+ // transition to the next beat, otherwise transition to the end of the bar.
41183
+ const nextBeatBoundings = cache.findBeat(nextBeat);
41184
+ if (nextBeatBoundings &&
41185
+ nextBeatBoundings.barBounds.masterBarBounds.staffSystemBounds === barBoundings.staffSystemBounds) {
41186
+ nextBeatX = nextBeatBoundings.onNotesX;
41187
+ }
41188
+ }
41189
+ let startBeatX = beatBoundings.onNotesX;
40227
41190
  if (beatCursor) {
40228
- // move beat to start position immediately
41191
+ // relative positioning of the cursor
40229
41192
  if (this.settings.player.enableAnimatedBeatCursor) {
40230
- beatCursor.stopAnimation();
41193
+ const animationWidth = nextBeatX - beatBoundings.onNotesX;
41194
+ const relativePosition = this._previousTick - this._currentBeat.start;
41195
+ const ratioPosition = relativePosition / this._currentBeat.tickDuration;
41196
+ startBeatX = beatBoundings.onNotesX + animationWidth * ratioPosition;
41197
+ duration -= duration * ratioPosition;
41198
+ beatCursor.transitionToX(0, startBeatX);
40231
41199
  }
40232
- beatCursor.setBounds(beatBoundings.onNotesX, barBounds.y, 1, barBounds.h);
41200
+ beatCursor.setBounds(startBeatX, barBounds.y, 1, barBounds.h);
40233
41201
  }
40234
41202
  // if playing, animate the cursor to the next beat
40235
41203
  if (this.settings.player.enableElementHighlighting) {
@@ -40249,22 +41217,11 @@
40249
41217
  shouldNotifyBeatChange = true;
40250
41218
  }
40251
41219
  if (this.settings.player.enableAnimatedBeatCursor && beatCursor) {
40252
- let nextBeatX = barBoundings.visualBounds.x + barBoundings.visualBounds.w;
40253
- // get position of next beat on same system
40254
- if (nextBeat && cursorMode === MidiTickLookupFindBeatResultCursorMode.ToNextBext) {
40255
- // if we are moving within the same bar or to the next bar
40256
- // transition to the next beat, otherwise transition to the end of the bar.
40257
- const nextBeatBoundings = cache.findBeat(nextBeat);
40258
- if (nextBeatBoundings &&
40259
- nextBeatBoundings.barBounds.masterBarBounds.staffSystemBounds === barBoundings.staffSystemBounds) {
40260
- nextBeatX = nextBeatBoundings.onNotesX;
40261
- }
40262
- }
40263
41220
  if (isPlayingUpdate) {
40264
41221
  // we need to put the transition to an own animation frame
40265
41222
  // otherwise the stop animation above is not applied.
40266
41223
  this.uiFacade.beginInvoke(() => {
40267
- beatCursor.transitionToX(duration / this.playbackSpeed, nextBeatX);
41224
+ beatCursor.transitionToX(duration / cursorSpeed, nextBeatX);
40268
41225
  });
40269
41226
  }
40270
41227
  }
@@ -40295,7 +41252,7 @@
40295
41252
  if (this._isDestroyed) {
40296
41253
  return;
40297
41254
  }
40298
- if (this.settings.player.enablePlayer &&
41255
+ if (this.settings.player.playerMode !== exports.PlayerMode.Disabled &&
40299
41256
  this.settings.player.enableCursor &&
40300
41257
  this.settings.player.enableUserInteraction) {
40301
41258
  this._selectionStart = new SelectionInfo(beat);
@@ -40337,7 +41294,7 @@
40337
41294
  if (this._isDestroyed) {
40338
41295
  return;
40339
41296
  }
40340
- if (this.settings.player.enablePlayer &&
41297
+ if (this.settings.player.playerMode !== exports.PlayerMode.Disabled &&
40341
41298
  this.settings.player.enableCursor &&
40342
41299
  this.settings.player.enableUserInteraction) {
40343
41300
  if (this._selectionEnd) {
@@ -40358,7 +41315,7 @@
40358
41315
  // move to selection start
40359
41316
  this._currentBeat = null; // reset current beat so it is updating the cursor
40360
41317
  if (this._playerState === PlayerState.Paused) {
40361
- this.cursorUpdateTick(this._tickCache.getBeatStart(this._selectionStart.beat), false);
41318
+ this.cursorUpdateTick(this._tickCache.getBeatStart(this._selectionStart.beat), false, 1);
40362
41319
  }
40363
41320
  this.tickPosition = realMasterBarStart + this._selectionStart.beat.playbackStart;
40364
41321
  // set playback range
@@ -40470,7 +41427,7 @@
40470
41427
  });
40471
41428
  this.renderer.postRenderFinished.on(() => {
40472
41429
  if (!this._selectionStart ||
40473
- !this.settings.player.enablePlayer ||
41430
+ this.settings.player.playerMode === exports.PlayerMode.Disabled ||
40474
41431
  !this.settings.player.enableCursor ||
40475
41432
  !this.settings.player.enableUserInteraction) {
40476
41433
  return;
@@ -40548,6 +41505,9 @@
40548
41505
  }
40549
41506
  this.scoreLoaded.trigger(score);
40550
41507
  this.uiFacade.triggerEvent(this.container, 'scoreLoaded', score);
41508
+ if (this.setupPlayer()) {
41509
+ this.loadMidiForScore();
41510
+ }
40551
41511
  }
40552
41512
  onResize(e) {
40553
41513
  if (this._isDestroyed) {
@@ -41288,52 +42248,14 @@
41288
42248
  }
41289
42249
  }
41290
42250
  /**
42251
+ * Some shared web audio stuff.
41291
42252
  * @target web
41292
42253
  */
41293
- class AlphaSynthWebAudioOutputBase {
41294
- constructor() {
41295
- this._context = null;
41296
- this._buffer = null;
41297
- this._source = null;
41298
- this.ready = new EventEmitter();
41299
- this.samplesPlayed = new EventEmitterOfT();
41300
- this.sampleRequest = new EventEmitter();
41301
- this._knownDevices = [];
41302
- }
41303
- get sampleRate() {
41304
- return this._context ? this._context.sampleRate : AlphaSynthWebAudioOutputBase.PreferredSampleRate;
42254
+ class WebAudioHelper {
42255
+ static findKnownDevice(sinkId) {
42256
+ return WebAudioHelper._knownDevices.find(d => d.deviceId === sinkId);
41305
42257
  }
41306
- activate(resumedCallback) {
41307
- if (!this._context) {
41308
- this._context = this.createAudioContext();
41309
- }
41310
- if (this._context.state === 'suspended' || this._context.state === 'interrupted') {
41311
- Logger.debug('WebAudio', 'Audio Context is suspended, trying resume');
41312
- this._context.resume().then(() => {
41313
- Logger.debug('WebAudio', `Audio Context resume success: state=${this._context?.state}, sampleRate:${this._context?.sampleRate}`);
41314
- if (resumedCallback) {
41315
- resumedCallback();
41316
- }
41317
- }, reason => {
41318
- Logger.warning('WebAudio', `Audio Context resume failed: state=${this._context?.state}, sampleRate:${this._context?.sampleRate}, reason=${reason}`);
41319
- });
41320
- }
41321
- }
41322
- patchIosSampleRate() {
41323
- const ua = navigator.userAgent;
41324
- if (ua.indexOf('iPhone') !== -1 || ua.indexOf('iPad') !== -1) {
41325
- const context = this.createAudioContext();
41326
- const buffer = context.createBuffer(1, 1, AlphaSynthWebAudioOutputBase.PreferredSampleRate);
41327
- const dummy = context.createBufferSource();
41328
- dummy.buffer = buffer;
41329
- dummy.connect(context.destination);
41330
- dummy.start(0);
41331
- dummy.disconnect(0);
41332
- // tslint:disable-next-line: no-floating-promises
41333
- context.close();
41334
- }
41335
- }
41336
- createAudioContext() {
42258
+ static createAudioContext() {
41337
42259
  if ('AudioContext' in Environment.globalThis) {
41338
42260
  return new AudioContext();
41339
42261
  }
@@ -41342,73 +42264,18 @@
41342
42264
  }
41343
42265
  throw new AlphaTabError(exports.AlphaTabErrorType.General, 'AudioContext not found');
41344
42266
  }
41345
- open(bufferTimeInMilliseconds) {
41346
- this.patchIosSampleRate();
41347
- this._context = this.createAudioContext();
41348
- const ctx = this._context;
41349
- if (ctx.state === 'suspended') {
41350
- this.registerResumeHandler();
41351
- }
41352
- }
41353
- registerResumeHandler() {
41354
- this._resumeHandler = (() => {
41355
- this.activate(() => {
41356
- this.unregisterResumeHandler();
41357
- });
41358
- }).bind(this);
41359
- document.body.addEventListener('touchend', this._resumeHandler, false);
41360
- document.body.addEventListener('click', this._resumeHandler, false);
41361
- }
41362
- unregisterResumeHandler() {
41363
- const resumeHandler = this._resumeHandler;
41364
- if (resumeHandler) {
41365
- document.body.removeEventListener('touchend', resumeHandler, false);
41366
- document.body.removeEventListener('click', resumeHandler, false);
41367
- }
41368
- }
41369
- play() {
41370
- const ctx = this._context;
41371
- this.activate();
41372
- // create an empty buffer source (silence)
41373
- this._buffer = ctx.createBuffer(2, AlphaSynthWebAudioOutputBase.BufferSize, ctx.sampleRate);
41374
- this._source = ctx.createBufferSource();
41375
- this._source.buffer = this._buffer;
41376
- this._source.loop = true;
41377
- }
41378
- pause() {
41379
- if (this._source) {
41380
- this._source.stop(0);
41381
- this._source.disconnect();
41382
- }
41383
- this._source = null;
41384
- }
41385
- destroy() {
41386
- this.pause();
41387
- this._context?.close();
41388
- this._context = null;
41389
- this.unregisterResumeHandler();
41390
- }
41391
- onSamplesPlayed(numberOfSamples) {
41392
- this.samplesPlayed.trigger(numberOfSamples);
41393
- }
41394
- onSampleRequest() {
41395
- this.sampleRequest.trigger();
41396
- }
41397
- onReady() {
41398
- this.ready.trigger();
41399
- }
41400
- async checkSinkIdSupport() {
42267
+ static async checkSinkIdSupport() {
41401
42268
  // https://caniuse.com/mdn-api_audiocontext_sinkid
41402
- const context = this._context ?? this.createAudioContext();
42269
+ const context = WebAudioHelper.createAudioContext();
41403
42270
  if (!('setSinkId' in context)) {
41404
42271
  Logger.warning('WebAudio', 'Browser does not support changing the output device');
41405
42272
  return false;
41406
42273
  }
41407
42274
  return true;
41408
42275
  }
41409
- async enumerateOutputDevices() {
42276
+ static async enumerateOutputDevices() {
41410
42277
  try {
41411
- if (!(await this.checkSinkIdSupport())) {
42278
+ if (!(await WebAudioHelper.checkSinkIdSupport())) {
41412
42279
  return [];
41413
42280
  }
41414
42281
  // Request permissions
@@ -41449,7 +42316,7 @@
41449
42316
  if (defaultDevice) {
41450
42317
  defaultDevice.isDefault = true;
41451
42318
  }
41452
- this._knownDevices = final;
42319
+ WebAudioHelper._knownDevices = final;
41453
42320
  return final;
41454
42321
  }
41455
42322
  catch (e) {
@@ -41457,8 +42324,113 @@
41457
42324
  return [];
41458
42325
  }
41459
42326
  }
42327
+ }
42328
+ WebAudioHelper._knownDevices = [];
42329
+ /**
42330
+ * @target web
42331
+ */
42332
+ class AlphaSynthWebAudioOutputBase {
42333
+ constructor() {
42334
+ this._context = null;
42335
+ this._buffer = null;
42336
+ this._source = null;
42337
+ this.ready = new EventEmitter();
42338
+ this.samplesPlayed = new EventEmitterOfT();
42339
+ this.sampleRequest = new EventEmitter();
42340
+ }
42341
+ get sampleRate() {
42342
+ return this._context ? this._context.sampleRate : AlphaSynthWebAudioOutputBase.PreferredSampleRate;
42343
+ }
42344
+ activate(resumedCallback) {
42345
+ if (!this._context) {
42346
+ this._context = WebAudioHelper.createAudioContext();
42347
+ }
42348
+ if (this._context.state === 'suspended' || this._context.state === 'interrupted') {
42349
+ Logger.debug('WebAudio', 'Audio Context is suspended, trying resume');
42350
+ this._context.resume().then(() => {
42351
+ Logger.debug('WebAudio', `Audio Context resume success: state=${this._context?.state}, sampleRate:${this._context?.sampleRate}`);
42352
+ if (resumedCallback) {
42353
+ resumedCallback();
42354
+ }
42355
+ }, reason => {
42356
+ Logger.warning('WebAudio', `Audio Context resume failed: state=${this._context?.state}, sampleRate:${this._context?.sampleRate}, reason=${reason}`);
42357
+ });
42358
+ }
42359
+ }
42360
+ patchIosSampleRate() {
42361
+ const ua = navigator.userAgent;
42362
+ if (ua.indexOf('iPhone') !== -1 || ua.indexOf('iPad') !== -1) {
42363
+ const context = WebAudioHelper.createAudioContext();
42364
+ const buffer = context.createBuffer(1, 1, AlphaSynthWebAudioOutputBase.PreferredSampleRate);
42365
+ const dummy = context.createBufferSource();
42366
+ dummy.buffer = buffer;
42367
+ dummy.connect(context.destination);
42368
+ dummy.start(0);
42369
+ dummy.disconnect(0);
42370
+ // tslint:disable-next-line: no-floating-promises
42371
+ context.close();
42372
+ }
42373
+ }
42374
+ open(bufferTimeInMilliseconds) {
42375
+ this.patchIosSampleRate();
42376
+ this._context = WebAudioHelper.createAudioContext();
42377
+ const ctx = this._context;
42378
+ if (ctx.state === 'suspended') {
42379
+ this.registerResumeHandler();
42380
+ }
42381
+ }
42382
+ registerResumeHandler() {
42383
+ this._resumeHandler = (() => {
42384
+ this.activate(() => {
42385
+ this.unregisterResumeHandler();
42386
+ });
42387
+ }).bind(this);
42388
+ document.body.addEventListener('touchend', this._resumeHandler, false);
42389
+ document.body.addEventListener('click', this._resumeHandler, false);
42390
+ }
42391
+ unregisterResumeHandler() {
42392
+ const resumeHandler = this._resumeHandler;
42393
+ if (resumeHandler) {
42394
+ document.body.removeEventListener('touchend', resumeHandler, false);
42395
+ document.body.removeEventListener('click', resumeHandler, false);
42396
+ }
42397
+ }
42398
+ play() {
42399
+ const ctx = this._context;
42400
+ this.activate();
42401
+ // create an empty buffer source (silence)
42402
+ this._buffer = ctx.createBuffer(2, AlphaSynthWebAudioOutputBase.BufferSize, ctx.sampleRate);
42403
+ this._source = ctx.createBufferSource();
42404
+ this._source.buffer = this._buffer;
42405
+ this._source.loop = true;
42406
+ }
42407
+ pause() {
42408
+ if (this._source) {
42409
+ this._source.stop(0);
42410
+ this._source.disconnect();
42411
+ }
42412
+ this._source = null;
42413
+ }
42414
+ destroy() {
42415
+ this.pause();
42416
+ this._context?.close();
42417
+ this._context = null;
42418
+ this.unregisterResumeHandler();
42419
+ }
42420
+ onSamplesPlayed(numberOfSamples) {
42421
+ this.samplesPlayed.trigger(numberOfSamples);
42422
+ }
42423
+ onSampleRequest() {
42424
+ this.sampleRequest.trigger();
42425
+ }
42426
+ onReady() {
42427
+ this.ready.trigger();
42428
+ }
42429
+ enumerateOutputDevices() {
42430
+ return WebAudioHelper.enumerateOutputDevices();
42431
+ }
41460
42432
  async setOutputDevice(device) {
41461
- if (!(await this.checkSinkIdSupport())) {
42433
+ if (!(await WebAudioHelper.checkSinkIdSupport())) {
41462
42434
  return;
41463
42435
  }
41464
42436
  // https://developer.mozilla.org/en-US/docs/Web/API/AudioContext/setSinkId
@@ -41470,7 +42442,7 @@
41470
42442
  }
41471
42443
  }
41472
42444
  async getOutputDevice() {
41473
- if (!(await this.checkSinkIdSupport())) {
42445
+ if (!(await WebAudioHelper.checkSinkIdSupport())) {
41474
42446
  return null;
41475
42447
  }
41476
42448
  // https://developer.mozilla.org/en-US/docs/Web/API/AudioContext/sinkId
@@ -41479,7 +42451,7 @@
41479
42451
  return null;
41480
42452
  }
41481
42453
  // fast path -> cached devices list
41482
- let device = this._knownDevices.find(d => d.deviceId === sinkId);
42454
+ let device = WebAudioHelper.findKnownDevice(sinkId);
41483
42455
  if (device) {
41484
42456
  return device;
41485
42457
  }
@@ -41927,7 +42899,7 @@
41927
42899
  case 'alphaSynth.positionChanged':
41928
42900
  this._timePosition = data.currentTime;
41929
42901
  this._tickPosition = data.currentTick;
41930
- this.positionChanged.trigger(new PositionChangedEventArgs(data.currentTime, data.endTime, data.currentTick, data.endTick, data.isSeek));
42902
+ this.positionChanged.trigger(new PositionChangedEventArgs(data.currentTime, data.endTime, data.currentTick, data.endTick, data.isSeek, data.originalTempo, data.modifiedTempo));
41931
42903
  break;
41932
42904
  case 'alphaSynth.midiEventsPlayed':
41933
42905
  this.midiEventsPlayed.trigger(new MidiEventsPlayedEventArgs(data.events.map(JsonConverter.jsObjectToMidiEvent)));
@@ -41951,7 +42923,7 @@
41951
42923
  break;
41952
42924
  case 'alphaSynth.midiLoaded':
41953
42925
  this.checkReadyForPlayback();
41954
- this.midiLoaded.trigger(new PositionChangedEventArgs(data.currentTime, data.endTime, data.currentTick, data.endTick, data.isSeek));
42926
+ this.midiLoaded.trigger(new PositionChangedEventArgs(data.currentTime, data.endTime, data.currentTick, data.endTick, data.isSeek, data.originalTempo, data.modifiedTempo));
41955
42927
  break;
41956
42928
  case 'alphaSynth.midiLoadFailed':
41957
42929
  this.checkReadyForPlayback();
@@ -42001,6 +42973,8 @@
42001
42973
  this._outputIsReady = true;
42002
42974
  this.checkReady();
42003
42975
  }
42976
+ loadBackingTrack(_score) {
42977
+ }
42004
42978
  }
42005
42979
 
42006
42980
  /**
@@ -42368,6 +43342,123 @@
42368
43342
  }
42369
43343
  }
42370
43344
 
43345
+ /**
43346
+ * @target web
43347
+ */
43348
+ class AudioElementBackingTrackSynthOutput {
43349
+ constructor() {
43350
+ // fake rate
43351
+ this.sampleRate = 44100;
43352
+ this._padding = 0;
43353
+ this._updateInterval = 0;
43354
+ this.ready = new EventEmitter();
43355
+ this.samplesPlayed = new EventEmitterOfT();
43356
+ this.timeUpdate = new EventEmitterOfT();
43357
+ this.sampleRequest = new EventEmitter();
43358
+ }
43359
+ get backingTrackDuration() {
43360
+ const duration = this.audioElement.duration ?? 0;
43361
+ return Number.isFinite(duration) ? duration * 1000 : 0;
43362
+ }
43363
+ get playbackRate() {
43364
+ return this.audioElement.playbackRate;
43365
+ }
43366
+ set playbackRate(value) {
43367
+ this.audioElement.playbackRate = value;
43368
+ }
43369
+ get masterVolume() {
43370
+ return this.audioElement.volume;
43371
+ }
43372
+ set masterVolume(value) {
43373
+ this.audioElement.volume = value;
43374
+ }
43375
+ seekTo(time) {
43376
+ this.audioElement.currentTime = time / 1000 - this._padding;
43377
+ }
43378
+ loadBackingTrack(backingTrack) {
43379
+ if (this.audioElement?.src) {
43380
+ URL.revokeObjectURL(this.audioElement.src);
43381
+ }
43382
+ this._padding = backingTrack.padding / 1000;
43383
+ const blob = new Blob([backingTrack.rawAudioFile]);
43384
+ this.audioElement.src = URL.createObjectURL(blob);
43385
+ }
43386
+ open(_bufferTimeInMilliseconds) {
43387
+ const audioElement = document.createElement('audio');
43388
+ audioElement.style.display = 'none';
43389
+ document.body.appendChild(audioElement);
43390
+ audioElement.addEventListener('timeupdate', () => {
43391
+ this.updatePosition();
43392
+ });
43393
+ this.audioElement = audioElement;
43394
+ this.ready.trigger();
43395
+ }
43396
+ updatePosition() {
43397
+ const timePos = (this.audioElement.currentTime + this._padding) * 1000;
43398
+ this.timeUpdate.trigger(timePos);
43399
+ }
43400
+ play() {
43401
+ this.audioElement.play();
43402
+ this._updateInterval = window.setInterval(() => {
43403
+ this.updatePosition();
43404
+ }, 50);
43405
+ }
43406
+ destroy() {
43407
+ const audioElement = this.audioElement;
43408
+ if (audioElement) {
43409
+ document.body.removeChild(audioElement);
43410
+ }
43411
+ }
43412
+ pause() {
43413
+ this.audioElement.pause();
43414
+ window.clearInterval(this._updateInterval);
43415
+ }
43416
+ addSamples(_samples) {
43417
+ }
43418
+ resetSamples() {
43419
+ }
43420
+ activate() {
43421
+ }
43422
+ async enumerateOutputDevices() {
43423
+ return WebAudioHelper.enumerateOutputDevices();
43424
+ }
43425
+ async setOutputDevice(device) {
43426
+ if (!(await WebAudioHelper.checkSinkIdSupport())) {
43427
+ return;
43428
+ }
43429
+ // https://developer.mozilla.org/en-US/docs/Web/API/AudioContext/setSinkId
43430
+ if (!device) {
43431
+ await this.audioElement.setSinkId('');
43432
+ }
43433
+ else {
43434
+ await this.audioElement.setSinkId(device.deviceId);
43435
+ }
43436
+ }
43437
+ async getOutputDevice() {
43438
+ if (!(await WebAudioHelper.checkSinkIdSupport())) {
43439
+ return null;
43440
+ }
43441
+ // https://developer.mozilla.org/en-US/docs/Web/API/AudioContext/sinkId
43442
+ const sinkId = this.audioElement.sinkId;
43443
+ if (typeof sinkId !== 'string' || sinkId === '' || sinkId === 'default') {
43444
+ return null;
43445
+ }
43446
+ // fast path -> cached devices list
43447
+ let device = WebAudioHelper.findKnownDevice(sinkId);
43448
+ if (device) {
43449
+ return device;
43450
+ }
43451
+ // slow path -> enumerate devices
43452
+ const allDevices = await this.enumerateOutputDevices();
43453
+ device = allDevices.find(d => d.deviceId === sinkId);
43454
+ if (device) {
43455
+ return device;
43456
+ }
43457
+ Logger.warning('WebAudio', 'Could not find output device in device list', sinkId, allDevices);
43458
+ return null;
43459
+ }
43460
+ }
43461
+
42371
43462
  /**
42372
43463
  * @target web
42373
43464
  */
@@ -43004,6 +44095,9 @@
43004
44095
  window.requestAnimationFrame(step);
43005
44096
  }
43006
44097
  }
44098
+ createBackingTrackPlayer() {
44099
+ return new BackingTrackPlayer(new AudioElementBackingTrackSynthOutput(), this._api.settings.player.bufferTimeInMilliseconds);
44100
+ }
43007
44101
  }
43008
44102
 
43009
44103
  /**
@@ -43146,7 +44240,7 @@
43146
44240
  settings.core.file = null;
43147
44241
  settings.core.tracks = null;
43148
44242
  settings.player.enableCursor = false;
43149
- settings.player.enablePlayer = false;
44243
+ settings.player.playerMode = exports.PlayerMode.Disabled;
43150
44244
  settings.player.enableElementHighlighting = false;
43151
44245
  settings.player.enableUserInteraction = false;
43152
44246
  settings.player.soundFont = null;
@@ -57107,96 +58201,6 @@
57107
58201
  }
57108
58202
  }
57109
58203
 
57110
- /**
57111
- * A very basic polyfill of the ResizeObserver which triggers
57112
- * a the callback on window resize for all registered targets.
57113
- * @target web
57114
- */
57115
- class ResizeObserverPolyfill {
57116
- constructor(callback) {
57117
- this._targets = new Set();
57118
- this._callback = callback;
57119
- window.addEventListener('resize', this.onWindowResize.bind(this), false);
57120
- }
57121
- observe(target) {
57122
- this._targets.add(target);
57123
- }
57124
- unobserve(target) {
57125
- this._targets.delete(target);
57126
- }
57127
- disconnect() {
57128
- this._targets.clear();
57129
- }
57130
- onWindowResize() {
57131
- const entries = [];
57132
- for (const t of this._targets) {
57133
- entries.push({
57134
- target: t,
57135
- // not used by alphaTab
57136
- contentRect: undefined,
57137
- borderBoxSize: undefined,
57138
- contentBoxSize: [],
57139
- devicePixelContentBoxSize: []
57140
- });
57141
- }
57142
- this._callback(entries, this);
57143
- }
57144
- }
57145
-
57146
- /**
57147
- * A polyfill of the InsersectionObserver
57148
- * @target web
57149
- */
57150
- class IntersectionObserverPolyfill {
57151
- constructor(callback) {
57152
- this._elements = [];
57153
- let timer = null;
57154
- const oldCheck = this.check.bind(this);
57155
- this.check = () => {
57156
- if (!timer) {
57157
- timer = setTimeout(() => {
57158
- oldCheck();
57159
- timer = null;
57160
- }, 100);
57161
- }
57162
- };
57163
- this._callback = callback;
57164
- window.addEventListener('resize', this.check, true);
57165
- document.addEventListener('scroll', this.check, true);
57166
- }
57167
- observe(target) {
57168
- if (this._elements.indexOf(target) >= 0) {
57169
- return;
57170
- }
57171
- this._elements.push(target);
57172
- this.check();
57173
- }
57174
- unobserve(target) {
57175
- this._elements = this._elements.filter(item => {
57176
- return item !== target;
57177
- });
57178
- }
57179
- check() {
57180
- const entries = [];
57181
- for (const element of this._elements) {
57182
- const rect = element.getBoundingClientRect();
57183
- const isVisible = rect.top + rect.height >= 0 &&
57184
- rect.top <= window.innerHeight &&
57185
- rect.left + rect.width >= 0 &&
57186
- rect.left <= window.innerWidth;
57187
- if (isVisible) {
57188
- entries.push({
57189
- target: element,
57190
- isIntersecting: true
57191
- });
57192
- }
57193
- }
57194
- if (entries.length) {
57195
- this._callback(entries, this);
57196
- }
57197
- }
57198
- }
57199
-
57200
58204
  /******************************************************************************
57201
58205
  Copyright (c) Microsoft Corporation.
57202
58206
 
@@ -59312,9 +60316,9 @@
59312
60316
  print(`build date: ${VersionInfo.date}`);
59313
60317
  }
59314
60318
  }
59315
- VersionInfo.version = '1.6.0-alpha.1401';
59316
- VersionInfo.date = '2025-05-07T12:40:48.955Z';
59317
- VersionInfo.commit = 'e58a9704e560b3344b8fe39a2b2f46a2ee3bb5b1';
60319
+ VersionInfo.version = '1.6.0-alpha.1405';
60320
+ VersionInfo.date = '2025-05-10T17:25:30.743Z';
60321
+ VersionInfo.commit = 'a9f729a65e195d4fec684444cd2c2a259dc9729b';
59318
60322
 
59319
60323
  /**
59320
60324
  * A factory for custom layout engines.
@@ -59785,29 +60789,6 @@
59785
60789
  if (Environment.webPlatform === exports.WebPlatform.Browser || Environment.webPlatform === exports.WebPlatform.BrowserModule) {
59786
60790
  Environment.registerJQueryPlugin();
59787
60791
  Environment.HighDpiFactor = window.devicePixelRatio;
59788
- // ResizeObserver API does not yet exist so long on Safari (only start 2020 with iOS Safari 13.7 and Desktop 13.1)
59789
- // so we better add a polyfill for it
59790
- if (!('ResizeObserver' in Environment.globalThis)) {
59791
- Environment.globalThis.ResizeObserver = ResizeObserverPolyfill;
59792
- }
59793
- // IntersectionObserver API does not on older iOS versions
59794
- // so we better add a polyfill for it
59795
- if (!('IntersectionObserver' in Environment.globalThis)) {
59796
- Environment.globalThis.IntersectionObserver = IntersectionObserverPolyfill;
59797
- }
59798
- if (!('replaceChildren' in Element.prototype)) {
59799
- Element.prototype.replaceChildren = function (...nodes) {
59800
- this.innerHTML = '';
59801
- this.append(...nodes);
59802
- };
59803
- Document.prototype.replaceChildren = Element.prototype.replaceChildren;
59804
- DocumentFragment.prototype.replaceChildren = Element.prototype.replaceChildren;
59805
- }
59806
- if (!('replaceAll' in String.prototype)) {
59807
- String.prototype.replaceAll = function (str, newStr) {
59808
- return this.replace(new RegExp(str, 'g'), newStr);
59809
- };
59810
- }
59811
60792
  }
59812
60793
  Environment.createWebWorker = createWebWorker;
59813
60794
  Environment.createAudioWorklet = createAudioWorklet;
@@ -63539,6 +64520,7 @@
63539
64520
  get AccidentalType () { return AccidentalType; },
63540
64521
  Automation,
63541
64522
  get AutomationType () { return AutomationType; },
64523
+ BackingTrack,
63542
64524
  Bar,
63543
64525
  get BarLineStyle () { return BarLineStyle; },
63544
64526
  BarStyle,
@@ -63601,6 +64583,7 @@
63601
64583
  Staff,
63602
64584
  SustainPedalMarker,
63603
64585
  get SustainPedalMarkerType () { return SustainPedalMarkerType; },
64586
+ SyncPointData,
63604
64587
  Track,
63605
64588
  get TrackNameMode () { return TrackNameMode; },
63606
64589
  get TrackNameOrientation () { return TrackNameOrientation; },