@coderline/alphatab 1.6.0-alpha.1399 → 1.6.0-alpha.1403
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/alphaTab.core.min.mjs +2 -2
- package/dist/alphaTab.core.mjs +1395 -413
- package/dist/alphaTab.d.ts +648 -40
- package/dist/alphaTab.js +1394 -412
- package/dist/alphaTab.min.js +2 -2
- package/dist/alphaTab.min.mjs +2 -2
- package/dist/alphaTab.mjs +1 -3
- package/dist/alphaTab.vite.js +1 -3
- package/dist/alphaTab.vite.mjs +1 -3
- package/dist/alphaTab.webpack.js +1 -3
- package/dist/alphaTab.webpack.mjs +1 -3
- package/dist/alphaTab.worker.min.mjs +2 -2
- package/dist/alphaTab.worker.mjs +1 -3
- package/dist/alphaTab.worklet.min.mjs +2 -2
- package/dist/alphaTab.worklet.mjs +1 -3
- package/package.json +1 -1
package/dist/alphaTab.core.mjs
CHANGED
@@ -1,5 +1,5 @@
 /*!
- * alphaTab v1.6.0-alpha.
+ * alphaTab v1.6.0-alpha.1403 (develop, build 1403)
  *
  * Copyright © 2025, Daniel Kuschny and Contributors, All rights reserved.
  *
@@ -49,7 +49,127 @@
  * @license
  */
 
-
+/**
+ * A very basic polyfill of the ResizeObserver which triggers
+ * a the callback on window resize for all registered targets.
+ * @target web
+ */
+class ResizeObserverPolyfill {
+    constructor(callback) {
+        this._targets = new Set();
+        this._callback = callback;
+        window.addEventListener('resize', this.onWindowResize.bind(this), false);
+    }
+    observe(target) {
+        this._targets.add(target);
+    }
+    unobserve(target) {
+        this._targets.delete(target);
+    }
+    disconnect() {
+        this._targets.clear();
+    }
+    onWindowResize() {
+        const entries = [];
+        for (const t of this._targets) {
+            entries.push({
+                target: t,
+                // not used by alphaTab
+                contentRect: undefined,
+                borderBoxSize: undefined,
+                contentBoxSize: [],
+                devicePixelContentBoxSize: []
+            });
+        }
+        this._callback(entries, this);
+    }
+}
+
+/**
+ * A polyfill of the InsersectionObserver
+ * @target web
+ */
+class IntersectionObserverPolyfill {
+    constructor(callback) {
+        this._elements = [];
+        let timer = null;
+        const oldCheck = this.check.bind(this);
+        this.check = () => {
+            if (!timer) {
+                timer = setTimeout(() => {
+                    oldCheck();
+                    timer = null;
+                }, 100);
+            }
+        };
+        this._callback = callback;
+        window.addEventListener('resize', this.check, true);
+        document.addEventListener('scroll', this.check, true);
+    }
+    observe(target) {
+        if (this._elements.indexOf(target) >= 0) {
+            return;
+        }
+        this._elements.push(target);
+        this.check();
+    }
+    unobserve(target) {
+        this._elements = this._elements.filter(item => {
+            return item !== target;
+        });
+    }
+    check() {
+        const entries = [];
+        for (const element of this._elements) {
+            const rect = element.getBoundingClientRect();
+            const isVisible = rect.top + rect.height >= 0 &&
+                rect.top <= window.innerHeight &&
+                rect.left + rect.width >= 0 &&
+                rect.left <= window.innerWidth;
+            if (isVisible) {
+                entries.push({
+                    target: element,
+                    isIntersecting: true
+                });
+            }
+        }
+        if (entries.length) {
+            this._callback(entries, this);
+        }
+    }
+}
+
+/*@target web*/
+(() => {
+    if (typeof Symbol.dispose === 'undefined') {
+        Symbol.dispose = Symbol('Symbol.dispose');
+    }
+    if (typeof window !== 'undefined') {
+        // ResizeObserver API does not yet exist so long on Safari (only start 2020 with iOS Safari 13.7 and Desktop 13.1)
+        // so we better add a polyfill for it
+        if (!('ResizeObserver' in globalThis)) {
+            globalThis.ResizeObserver = ResizeObserverPolyfill;
+        }
+        // IntersectionObserver API does not on older iOS versions
+        // so we better add a polyfill for it
+        if (!('IntersectionObserver' in globalThis)) {
+            globalThis.IntersectionObserver = IntersectionObserverPolyfill;
+        }
+        if (!('replaceChildren' in Element.prototype)) {
+            Element.prototype.replaceChildren = function (...nodes) {
+                this.innerHTML = '';
+                this.append(...nodes);
+            };
+            Document.prototype.replaceChildren = Element.prototype.replaceChildren;
+            DocumentFragment.prototype.replaceChildren = Element.prototype.replaceChildren;
+        }
+    }
+    if (!('replaceAll' in String.prototype)) {
+        String.prototype.replaceAll = function (str, newStr) {
+            return this.replace(new RegExp(str, 'g'), newStr);
+        };
+    }
+})();
 
 /**
  * Lists all layout modes that are supported.
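Note: the ResizeObserverPolyfill above only reacts to window resize events and reports entries without size data, so consumers have to measure the target themselves. A minimal usage sketch (the element selector and callback are illustrative, not taken from the diff):

    // illustrative use of the (possibly polyfilled) ResizeObserver
    const container = document.querySelector('.alphaTab'); // hypothetical element
    const observer = new ResizeObserver(entries => {
        for (const entry of entries) {
            // with the polyfill, contentRect/contentBoxSize are not filled, so measure manually
            const rect = entry.target.getBoundingClientRect();
            console.log('resized to', rect.width, rect.height);
        }
    });
    observer.observe(container);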
@@ -1273,7 +1393,37 @@ var AutomationType;
      * Balance change.
      */
     AutomationType[AutomationType["Balance"] = 3] = "Balance";
+    /**
+     * A sync point for synchronizing the internal time axis with an external audio track.
+     */
+    AutomationType[AutomationType["SyncPoint"] = 4] = "SyncPoint";
 })(AutomationType || (AutomationType = {}));
+/**
+ * Represents the data of a sync point for synchronizing the internal time axis with
+ * an external audio file.
+ * @cloneable
+ * @json
+ * @json_strict
+ */
+class SyncPointData {
+    constructor() {
+        /**
+         * Indicates for which repeat occurence this sync point is valid (e.g. 0 on the first time played, 1 on the second time played)
+         */
+        this.barOccurence = 0;
+        /**
+         * The modified tempo at which the cursor should move (aka. the tempo played within the external audio track).
+         * This information is used together with the {@link originalTempo} to calculate how much faster/slower the
+         * cursor playback is performed to align with the audio track.
+         */
+        this.modifiedTempo = 0;
+        /**
+         * The uadio offset marking the position within the audio track in milliseconds.
+         * This information is used to regularly sync (or on seeking) to match a given external audio time axis with the internal time axis.
+         */
+        this.millisecondOffset = 0;
+    }
+}
 /**
  * Automations are used to change the behaviour of a song.
  * @cloneable
@@ -2572,6 +2722,16 @@ class MasterBar {
         }
         return null;
     }
+    /**
+     * Adds the given sync point to the list of sync points for this bar.
+     * @param syncPoint The sync point to add.
+     */
+    addSyncPoint(syncPoint) {
+        if (!this.syncPoints) {
+            this.syncPoints = [];
+        }
+        this.syncPoints.push(syncPoint);
+    }
 }
 MasterBar.MaxAlternateEndings = 8;
 
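Note: a minimal sketch of how the new sync point model can be filled programmatically using the pieces shown above. The `alphaTab.model` paths and the concrete numbers are assumptions for illustration; only `SyncPointData`, `AutomationType.SyncPoint` and `MasterBar.addSyncPoint` come from this diff:

    // hypothetical: attach a sync point to the first master bar of a loaded score
    const automation = new alphaTab.model.Automation();
    automation.type = alphaTab.model.AutomationType.SyncPoint;
    automation.ratioPosition = 0;            // at the start of the bar
    const data = new alphaTab.model.SyncPointData();
    data.barOccurence = 0;                   // first playthrough of this bar
    data.millisecondOffset = 1500;           // position within the external audio (ms)
    data.modifiedTempo = 118;                // tempo actually heard in the audio track
    automation.syncPointValue = data;
    score.masterBars[0].addSyncPoint(automation);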
@@ -5666,6 +5826,21 @@ class NoteCloner {
     }
 }
 
+// <auto-generated>
+// This code was auto-generated.
+// Changes to this file may cause incorrect behavior and will be lost if
+// the code is regenerated.
+// </auto-generated>
+class SyncPointDataCloner {
+    static clone(original) {
+        const clone = new SyncPointData();
+        clone.barOccurence = original.barOccurence;
+        clone.modifiedTempo = original.modifiedTempo;
+        clone.millisecondOffset = original.millisecondOffset;
+        return clone;
+    }
+}
+
 // <auto-generated>
 // This code was auto-generated.
 // Changes to this file may cause incorrect behavior and will be lost if
@@ -5677,6 +5852,7 @@ class AutomationCloner {
         clone.isLinear = original.isLinear;
         clone.type = original.type;
         clone.value = original.value;
+        clone.syncPointValue = original.syncPointValue ? SyncPointDataCloner.clone(original.syncPointValue) : undefined;
         clone.ratioPosition = original.ratioPosition;
         clone.text = original.text;
         return clone;
@@ -14122,6 +14298,21 @@ class XmlDocument extends XmlNode {
     }
 }
 
+/**
+ * Holds information about the backing track which can be played instead of synthesized audio.
+ * @json
+ * @json_strict
+ */
+class BackingTrack {
+    constructor() {
+        /**
+         * The number of milliseconds the audio should be shifted to align with the song.
+         * (e.g. negative values allow skipping potential silent parts at the start of the file and directly start with the first note).
+         */
+        this.padding = 0;
+    }
+}
+
 /**
  * This structure represents a duration within a gpif
  */
@@ -14214,6 +14405,9 @@ class GpifParser {
                     case 'MasterTrack':
                         this.parseMasterTrackNode(n);
                         break;
+                    case 'BackingTrack':
+                        this.parseBackingTrackNode(n);
+                        break;
                     case 'Tracks':
                         this.parseTracksNode(n);
                         break;
@@ -14235,6 +14429,9 @@ class GpifParser {
                     case 'Rhythms':
                         this.parseRhythms(n);
                         break;
+                    case 'Assets':
+                        this.parseAssets(n);
+                        break;
                 }
             }
         }
@@ -14242,6 +14439,37 @@
             throw new UnsupportedFormatError('Root node of XML was not GPIF');
         }
     }
+    parseAssets(element) {
+        for (const c of element.childElements()) {
+            switch (c.localName) {
+                case 'Asset':
+                    if (c.getAttribute('id') === this._backingTrackAssetId) {
+                        this.parseBackingTrackAsset(c);
+                    }
+                    break;
+            }
+        }
+    }
+    parseBackingTrackAsset(element) {
+        let embeddedFilePath = '';
+        for (const c of element.childElements()) {
+            switch (c.localName) {
+                case 'EmbeddedFilePath':
+                    embeddedFilePath = c.innerText;
+                    break;
+            }
+        }
+        const loadAsset = this.loadAsset;
+        if (loadAsset) {
+            const assetData = loadAsset(embeddedFilePath);
+            if (assetData) {
+                this.score.backingTrack.rawAudioFile = assetData;
+            }
+            else {
+                this.score.backingTrack = undefined;
+            }
+        }
+    }
     //
     // <Score>...</Score>
     //
@@ -14322,7 +14550,41 @@ class GpifParser {
         if (!text) {
             return [];
         }
-        return text
+        return text
+            .split(separator)
+            .map(t => t.trim())
+            .filter(t => t.length > 0);
+    }
+    //
+    // <BackingTrack>...</BackingTrack>
+    //
+    parseBackingTrackNode(node) {
+        const backingTrack = new BackingTrack();
+        let enabled = false;
+        let source = '';
+        let assetId = '';
+        for (const c of node.childElements()) {
+            switch (c.localName) {
+                case 'Enabled':
+                    enabled = c.innerText === 'true';
+                    break;
+                case 'Source':
+                    source = c.innerText;
+                    break;
+                case 'AssetId':
+                    assetId = c.innerText;
+                    break;
+                case 'FramePadding':
+                    backingTrack.padding = GpifParser.parseIntSafe(c.innerText, 0) / GpifParser.SampleRate * 1000;
+                    break;
+            }
+        }
+        // only local (contained backing tracks are supported)
+        // remote / youtube links seem to come in future releases according to the gpif tags.
+        if (enabled && source === 'Local') {
+            this.score.backingTrack = backingTrack;
+            this._backingTrackAssetId = assetId; // when the Asset tag is parsed this ID is used to load the raw data
+        }
     }
     //
     // <MasterTrack>...</MasterTrack>
@@ -14360,6 +14622,7 @@ class GpifParser {
         let textValue = null;
         let reference = 0;
         let text = null;
+        let syncPointValue = undefined;
         for (const c of node.childElements()) {
             switch (c.localName) {
                 case 'Type':
@@ -14378,6 +14641,28 @@
                     if (c.firstElement && c.firstElement.nodeType === XmlNodeType.CDATA) {
                         textValue = c.innerText;
                     }
+                    else if (c.firstElement &&
+                        c.firstElement.nodeType === XmlNodeType.Element &&
+                        type === 'SyncPoint') {
+                        syncPointValue = new SyncPointData();
+                        for (const vc of c.childElements()) {
+                            switch (vc.localName) {
+                                case 'BarIndex':
+                                    barIndex = GpifParser.parseIntSafe(vc.innerText, 0);
+                                    break;
+                                case 'BarOccurrence':
+                                    syncPointValue.barOccurence = GpifParser.parseIntSafe(vc.innerText, 0);
+                                    break;
+                                case 'ModifiedTempo':
+                                    syncPointValue.modifiedTempo = GpifParser.parseFloatSafe(vc.innerText, 0);
+                                    break;
+                                case 'FrameOffset':
+                                    const frameOffset = GpifParser.parseFloatSafe(vc.innerText, 0);
+                                    syncPointValue.millisecondOffset = (frameOffset / GpifParser.SampleRate) * 1000;
+                                    break;
+                            }
+                        }
+                    }
                     else {
                         const parts = GpifParser.splitSafe(c.innerText);
                         // Issue 391: Some GPX files might have
@@ -14405,6 +14690,13 @@
             case 'Tempo':
                 automation = Automation.buildTempoAutomation(isLinear, ratioPosition, numberValue, reference);
                 break;
+            case 'SyncPoint':
+                automation = new Automation();
+                automation.type = AutomationType.SyncPoint;
+                automation.isLinear = isLinear;
+                automation.ratioPosition = ratioPosition;
+                automation.syncPointValue = syncPointValue;
+                break;
             case 'Sound':
                 if (textValue && sounds && sounds.has(textValue)) {
                     automation = Automation.buildInstrumentAutomation(isLinear, ratioPosition, sounds.get(textValue).program);
@@ -16486,14 +16778,19 @@
         const masterBar = this.score.masterBars[barNumber];
         for (let i = 0, j = automations.length; i < j; i++) {
             const automation = automations[i];
-
-
-
-
-
+            switch (automation.type) {
+                case AutomationType.Tempo:
+                    if (barNumber === 0) {
+                        this.score.tempo = automation.value | 0;
+                        if (automation.text) {
+                            this.score.tempoLabel = automation.text;
+                        }
                     }
-
-
+                    masterBar.tempoAutomations.push(automation);
+                    break;
+                case AutomationType.SyncPoint:
+                    masterBar.addSyncPoint(automation);
+                    break;
             }
         }
     }
@@ -16510,6 +16807,10 @@ GpifParser.BendPointPositionFactor = BendPoint.MaxPosition / 100.0;
  * Internal Range: 1 per quarter note
  */
 GpifParser.BendPointValueFactor = 1 / 25.0;
+// test have shown that Guitar Pro seem to always work with 44100hz for the frame offsets,
+// they are NOT using the sample rate of the input file.
+// Downsampling a 44100hz ogg to 8000hz and using it in as audio track resulted in the same frame offset when placing sync points.
+GpifParser.SampleRate = 44100;
 
 // PartConfiguration File Format Notes.
 // Based off Guitar Pro 8
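Note: with the fixed 44100 Hz assumption, GPIF frame values convert to milliseconds as frames / 44100 * 1000, which is exactly what the FramePadding and FrameOffset handling above does. A small worked example with a made-up frame count:

    // how the parser maps a GPIF frame value to milliseconds
    const sampleRate = 44100;                 // GpifParser.SampleRate
    const frameOffset = 66150;                // hypothetical <FrameOffset> value
    const millisecondOffset = (frameOffset / sampleRate) * 1000; // 1500 ms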
@@ -17376,7 +17677,9 @@ class Gp7To8Importer extends ScoreImporter {
         let binaryStylesheetData = null;
         let partConfigurationData = null;
         let layoutConfigurationData = null;
+        const entryLookup = new Map();
         for (const entry of entries) {
+            entryLookup.set(entry.fullName, entry);
             switch (entry.fileName) {
                 case 'score.gpif':
                     xml = IOHelper.toString(entry.data, this.settings.importer.encoding);
@@ -17399,6 +17702,12 @@ class Gp7To8Importer extends ScoreImporter {
         // the score information as XML we need to parse.
         Logger.debug(this.name, 'Start Parsing score.gpif');
         const gpifParser = new GpifParser();
+        gpifParser.loadAsset = (fileName) => {
+            if (entryLookup.has(fileName)) {
+                return entryLookup.get(fileName).data;
+            }
+            return undefined;
+        };
         gpifParser.parseXml(xml, this.settings);
         Logger.debug(this.name, 'score.gpif parsed');
         const score = gpifParser.score;
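Note: `loadAsset` is a callback the importer assigns so the GPIF parser can resolve an embedded file path (e.g. the backing track audio) to raw bytes. A sketch of the same wiring reduced to its essentials; `GpifParser` is an internal class and the `entriesByName` map is a stand-in for the archive entries the importer builds above:

    // hypothetical: resolve embedded asset paths from already extracted archive entries
    const entriesByName = new Map(); // fileName -> Uint8Array, filled elsewhere
    const parser = new GpifParser();
    parser.loadAsset = fileName =>
        entriesByName.has(fileName) ? entriesByName.get(fileName) : undefined;
    parser.parseXml(xml, settings);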
@@ -21864,8 +22173,24 @@ class ProgramChangeEvent extends MidiEvent {
  * Represents a change of the tempo in the song.
  */
 class TempoChangeEvent extends MidiEvent {
+    /**
+     * The tempo in microseconds per quarter note (aka USQ). A time format typically for midi.
+     */
+    get microSecondsPerQuarterNote() {
+        return 60000000 / this.beatsPerMinute;
+    }
+    /**
+     * The tempo in microseconds per quarter note (aka USQ). A time format typically for midi.
+     */
+    set microSecondsPerQuarterNote(value) {
+        this.beatsPerMinute = 60000000 / value;
+    }
     constructor(tick, microSecondsPerQuarterNote) {
         super(0, tick, MidiEventType.TempoChange);
+        /**
+         * The tempo in beats per minute
+         */
+        this.beatsPerMinute = 0;
         this.microSecondsPerQuarterNote = microSecondsPerQuarterNote;
     }
     writeTo(s) {
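Note: the new getter/setter pair is a plain unit conversion between beats per minute and microseconds per quarter note (usq = 60000000 / bpm). An illustrative round trip:

    // 500000 microseconds per quarter note corresponds to 120 BPM
    const e = new TempoChangeEvent(0, 500000);
    console.log(e.beatsPerMinute);             // 120
    e.beatsPerMinute = 90;
    console.log(e.microSecondsPerQuarterNote); // 666666.66...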
@@ -21948,6 +22273,17 @@ class SynthEvent {
     }
 }
 
+/**
+ * Rerpresents a point to sync the alphaTab time axis with an external backing track.
+ */
+class BackingTrackSyncPoint {
+    constructor(tick, data) {
+        this.tick = 0;
+        this.tick = tick;
+        this.data = data;
+    }
+}
+
 class MidiFileSequencerTempoChange {
     constructor(bpm, ticks, time) {
         this.bpm = bpm;
@@ -21955,9 +22291,17 @@ class MidiFileSequencerTempoChange {
         this.time = time;
     }
 }
+class BackingTrackSyncPointWithTime extends BackingTrackSyncPoint {
+    constructor(tick, data, time) {
+        super(tick, data);
+        this.time = time;
+    }
+}
 class MidiSequencerState {
     constructor() {
         this.tempoChanges = [];
+        this.tempoChangeIndex = 0;
+        this.syncPoints = [];
         this.firstProgramEventPerChannel = new Map();
         this.firstTimeSignatureNumerator = 0;
         this.firstTimeSignatureDenominator = 0;
@@ -21965,11 +22309,15 @@ class MidiSequencerState {
         this.division = MidiUtils.QuarterTime;
         this.eventIndex = 0;
         this.currentTime = 0;
+        this.currentTick = 0;
+        this.syncPointIndex = 0;
         this.playbackRange = null;
         this.playbackRangeStartTime = 0;
         this.playbackRangeEndTime = 0;
         this.endTick = 0;
         this.endTime = 0;
+        this.currentTempo = 0;
+        this.modifiedTempo = 0;
     }
 }
 /**
@@ -22022,6 +22370,12 @@ class MidiFileSequencer {
     get currentEndTime() {
         return this._currentState.endTime / this.playbackSpeed;
     }
+    get currentTempo() {
+        return this._currentState.currentTempo;
+    }
+    get modifiedTempo() {
+        return this._currentState.modifiedTempo * this.playbackSpeed;
+    }
     mainSeek(timePosition) {
         // map to speed=1
         timePosition *= this.playbackSpeed;
@@ -22041,6 +22395,8 @@ class MidiFileSequencer {
         // we have to restart the midi to make sure we get the right state: instruments, volume, pan, etc
         this._mainState.currentTime = 0;
         this._mainState.eventIndex = 0;
+        this._mainState.syncPointIndex = 0;
+        this._mainState.tempoChangeIndex = 0;
         if (this.isPlayingMain) {
             const metronomeVolume = this._synthesizer.metronomeVolume;
             this._synthesizer.noteOffAll(true);
@@ -22115,7 +22471,7 @@ class MidiFileSequencer {
             }
             if (mEvent.type === MidiEventType.TempoChange) {
                 const meta = mEvent;
-                bpm =
+                bpm = meta.beatsPerMinute;
                 state.tempoChanges.push(new MidiFileSequencerTempoChange(bpm, absTick, absTime));
                 metronomeLengthInMillis = metronomeLengthInTicks * (60000.0 / (bpm * midiFile.division));
             }
@@ -22149,6 +22505,8 @@ class MidiFileSequencer {
                 }
             }
         }
+        state.currentTempo = state.tempoChanges.length > 0 ? state.tempoChanges[0].bpm : bpm;
+        state.modifiedTempo = state.currentTempo;
         state.synthData.sort((a, b) => {
             if (a.time > b.time) {
                 return 1;
@@ -22165,6 +22523,35 @@ class MidiFileSequencer {
     fillMidiEventQueue() {
         return this.fillMidiEventQueueLimited(-1);
     }
+    fillMidiEventQueueToEndTime(endTime) {
+        while (this._mainState.currentTime < endTime) {
+            if (this.fillMidiEventQueueLimited(endTime - this._mainState.currentTime)) {
+                this._synthesizer.synthesizeSilent(SynthConstants.MicroBufferSize);
+            }
+        }
+        let anyEventsDispatched = false;
+        this._currentState.currentTime = endTime;
+        while (this._currentState.eventIndex < this._currentState.synthData.length &&
+            this._currentState.synthData[this._currentState.eventIndex].time < this._currentState.currentTime) {
+            const synthEvent = this._currentState.synthData[this._currentState.eventIndex];
+            this._synthesizer.dispatchEvent(synthEvent);
+            while (this._currentState.syncPointIndex < this._currentState.syncPoints.length &&
+                this._currentState.syncPoints[this._currentState.syncPointIndex].tick < synthEvent.event.tick) {
+                this._currentState.modifiedTempo =
+                    this._currentState.syncPoints[this._currentState.syncPointIndex].data.modifiedTempo;
+                this._currentState.syncPointIndex++;
+            }
+            while (this._currentState.tempoChangeIndex < this._currentState.tempoChanges.length &&
+                this._currentState.tempoChanges[this._currentState.tempoChangeIndex].time <= synthEvent.time) {
+                this._currentState.currentTempo =
+                    this._currentState.tempoChanges[this._currentState.tempoChangeIndex].bpm;
+                this._currentState.tempoChangeIndex++;
+            }
+            this._currentState.eventIndex++;
+            anyEventsDispatched = true;
+        }
+        return anyEventsDispatched;
+    }
     fillMidiEventQueueLimited(maxMilliseconds) {
         let millisecondsPerBuffer = (SynthConstants.MicroBufferSize / this._synthesizer.outSampleRate) * 1000 * this.playbackSpeed;
         let endTime = this.internalEndTime;
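Note: unlike fillMidiEventQueueLimited, this new method advances the sequencer to an absolute end time and, while dispatching the queued events, keeps currentTempo (from tempo changes) and modifiedTempo (from sync points) up to date. A hedged sketch of a call pattern where an external clock drives the time axis; the function and variable names outside the diff are assumptions:

    // hypothetical driver for an externally clocked time axis
    function onExternalTimeUpdate(sequencer, externalTimeMs) {
        // dispatch all queued MIDI events up to the externally reported position
        sequencer.fillMidiEventQueueToEndTime(externalTimeMs);
    }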
@@ -22192,9 +22579,87 @@ class MidiFileSequencer {
     mainTimePositionToTickPosition(timePosition) {
         return this.timePositionToTickPositionWithSpeed(this._mainState, timePosition, this.playbackSpeed);
     }
+    mainUpdateSyncPoints(syncPoints) {
+        const state = this._mainState;
+        syncPoints.sort((a, b) => a.tick - b.tick); // just in case
+        state.syncPoints = new Array(syncPoints.length);
+        if (syncPoints.length >= 0) {
+            let bpm = 120;
+            let absTick = 0;
+            let absTime = 0.0;
+            let previousTick = 0;
+            let tempoChangeIndex = 0;
+            for (let i = 0; i < syncPoints.length; i++) {
+                const p = syncPoints[i];
+                const deltaTick = p.tick - previousTick;
+                absTick += deltaTick;
+                absTime += deltaTick * (60000.0 / (bpm * state.division));
+                state.syncPoints[i] = new BackingTrackSyncPointWithTime(p.tick, p.data, absTime);
+                previousTick = p.tick;
+                while (tempoChangeIndex < state.tempoChanges.length &&
+                    state.tempoChanges[tempoChangeIndex].ticks <= absTick) {
+                    bpm = state.tempoChanges[tempoChangeIndex].bpm;
+                    tempoChangeIndex++;
+                }
+            }
+        }
+        state.syncPointIndex = 0;
+    }
     currentTimePositionToTickPosition(timePosition) {
         return this.timePositionToTickPositionWithSpeed(this._currentState, timePosition, this.playbackSpeed);
     }
+    mainTimePositionFromBackingTrack(timePosition, backingTrackLength) {
+        const mainState = this._mainState;
+        const syncPoints = mainState.syncPoints;
+        if (timePosition < 0 || syncPoints.length === 0) {
+            return timePosition;
+        }
+        let syncPointIndex = timePosition >= syncPoints[mainState.syncPointIndex].data.millisecondOffset ? mainState.syncPointIndex : 0;
+        while (syncPointIndex + 1 < syncPoints.length &&
+            syncPoints[syncPointIndex + 1].data.millisecondOffset <= timePosition) {
+            syncPointIndex++;
+        }
+        const currentSyncPoint = syncPoints[syncPointIndex];
+        const timeDiff = timePosition - currentSyncPoint.data.millisecondOffset;
+        let alphaTabTimeDiff;
+        if (syncPointIndex + 1 < syncPoints.length) {
+            const nextSyncPoint = syncPoints[syncPointIndex + 1];
+            const relativeTimeDiff = timeDiff / (nextSyncPoint.data.millisecondOffset - currentSyncPoint.data.millisecondOffset);
+            alphaTabTimeDiff = (nextSyncPoint.time - currentSyncPoint.time) * relativeTimeDiff;
+        }
+        else {
+            const relativeTimeDiff = timeDiff / (backingTrackLength - currentSyncPoint.data.millisecondOffset);
+            alphaTabTimeDiff = (mainState.endTime - currentSyncPoint.time) * relativeTimeDiff;
+        }
+        return (currentSyncPoint.time + alphaTabTimeDiff) / this.playbackSpeed;
+    }
+    mainTimePositionToBackingTrack(timePosition, backingTrackLength) {
+        const mainState = this._mainState;
+        const syncPoints = mainState.syncPoints;
+        if (timePosition < 0 || syncPoints.length === 0) {
+            return timePosition;
+        }
+        timePosition *= this.playbackSpeed;
+        let syncPointIndex = timePosition >= syncPoints[mainState.syncPointIndex].time ? mainState.syncPointIndex : 0;
+        while (syncPointIndex + 1 < syncPoints.length && syncPoints[syncPointIndex + 1].time <= timePosition) {
+            syncPointIndex++;
+        }
+        const currentSyncPoint = syncPoints[syncPointIndex];
+        const alphaTabTimeDiff = timePosition - currentSyncPoint.time;
+        let backingTrackPos;
+        if (syncPointIndex + 1 < syncPoints.length) {
+            const nextSyncPoint = syncPoints[syncPointIndex + 1];
+            const relativeAlphaTabTimeDiff = alphaTabTimeDiff / (nextSyncPoint.time - currentSyncPoint.time);
+            const backingTrackDiff = nextSyncPoint.data.millisecondOffset - currentSyncPoint.data.millisecondOffset;
+            backingTrackPos = currentSyncPoint.data.millisecondOffset + backingTrackDiff * relativeAlphaTabTimeDiff;
+        }
+        else {
+            const relativeAlphaTabTimeDiff = alphaTabTimeDiff / (mainState.endTime - currentSyncPoint.time);
+            const frameDiff = backingTrackLength - currentSyncPoint.data.millisecondOffset;
+            backingTrackPos = currentSyncPoint.data.millisecondOffset + frameDiff * relativeAlphaTabTimeDiff;
+        }
+        return backingTrackPos;
+    }
     tickPositionToTimePositionWithSpeed(state, tickPosition, playbackSpeed) {
         let timePosition = 0.0;
         let bpm = 120.0;
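Note: both mapping functions interpolate linearly between neighboring sync points (and between the last sync point and the end of the song/audio). A worked example of mainTimePositionFromBackingTrack with two hypothetical sync points, 500 ms audio ↔ 0 ms internal and 2400 ms audio ↔ 2000 ms internal:

    // an audio position of 1450 ms lies (1450 - 500) / (2400 - 500) = 0.5 of the way
    // between the sync points, so the internal position is 0 + 0.5 * (2000 - 0) = 1000 ms
    // (before dividing by playbackSpeed)
    const audioMs = 1450;
    const relative = (audioMs - 500) / (2400 - 500); // 0.5
    const internalMs = 0 + relative * (2000 - 0);    // 1000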
@@ -22304,6 +22769,8 @@ class MidiFileSequencer {
         });
         state.endTime = metronomeTime;
         state.endTick = metronomeTick;
+        state.currentTempo = bpm;
+        state.modifiedTempo = bpm;
         this._countInState = state;
     }
 }
@@ -22349,12 +22816,22 @@ class PositionChangedEventArgs {
      * @param endTick The end tick.
      * @param isSeek Whether the time was seeked.
      */
-    constructor(currentTime, endTime, currentTick, endTick, isSeek) {
+    constructor(currentTime, endTime, currentTick, endTick, isSeek, originalTempo, modifiedTempo) {
+        /**
+         * The original tempo in which alphaTab internally would be playing right now.
+         */
+        this.originalTempo = 0;
+        /**
+         * The modified tempo in which the actual playback is happening (e.g. due to playback speed or external audio synchronization)
+         */
+        this.modifiedTempo = 0;
         this.currentTime = currentTime;
         this.endTime = endTime;
         this.currentTick = currentTick;
         this.endTick = endTick;
         this.isSeek = isSeek;
+        this.originalTempo = originalTempo;
+        this.modifiedTempo = modifiedTempo;
     }
 }
 
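Note: consumers of the position events can now read both tempo values from the event args. A hedged sketch using the high-level API; `api` is an assumed AlphaTabApi instance and only the two tempo fields are introduced by this diff:

    // illustrative: show original vs. effective tempo during playback
    api.playerPositionChanged.on(e => {
        console.log(`score tempo ${e.originalTempo} bpm, playing at ${e.modifiedTempo} bpm`);
    });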
@@ -26452,7 +26929,7 @@ class TinySoundFont {
                 break;
             case MidiEventType.TempoChange:
                 const tempoChange = e;
-                this.currentTempo =
+                this.currentTempo = tempoChange.beatsPerMinute;
                 break;
             case MidiEventType.PitchBend:
                 const pitchBend = e;
@@ -27604,15 +28081,15 @@ class PlaybackRangeChangedEventArgs {
 }
 
 /**
- * This is the
+ * This is the base class for synthesizer components which can be used to
  * play a {@link MidiFile} via a {@link ISynthOutput}.
  */
-class
+class AlphaSynthBase {
     get output() {
         return this._output;
     }
     get isReadyForPlayback() {
-        return this.isReady && this.
+        return this.isReady && this.isSoundFontLoaded && this._isMidiLoaded;
     }
     get logLevel() {
         return Logger.logLevel;
@@ -27621,11 +28098,14 @@ class AlphaSynth {
         Logger.logLevel = value;
     }
     get masterVolume() {
-        return this.
+        return this.synthesizer.masterVolume;
     }
     set masterVolume(value) {
         value = Math.max(value, SynthConstants.MinVolume);
-        this.
+        this.updateMasterVolume(value);
+    }
+    updateMasterVolume(value) {
+        this.synthesizer.masterVolume = value;
     }
     get metronomeVolume() {
         return this._metronomeVolume;
@@ -27633,7 +28113,7 @@ class AlphaSynth {
     set metronomeVolume(value) {
         value = Math.max(value, SynthConstants.MinVolume);
         this._metronomeVolume = value;
-        this.
+        this.synthesizer.metronomeVolume = value;
     }
     get countInVolume() {
         return this._countInVolume;
@@ -27649,19 +28129,22 @@ class AlphaSynth {
         this._midiEventsPlayedFilter = new Set(value);
     }
     get playbackSpeed() {
-        return this.
+        return this.sequencer.playbackSpeed;
     }
     set playbackSpeed(value) {
         value = ModelUtils.clamp(value, SynthConstants.MinPlaybackSpeed, SynthConstants.MaxPlaybackSpeed);
-
-
+        this.updatePlaybackSpeed(value);
+    }
+    updatePlaybackSpeed(value) {
+        const oldSpeed = this.sequencer.playbackSpeed;
+        this.sequencer.playbackSpeed = value;
         this.timePosition = this.timePosition * (oldSpeed / value);
     }
     get tickPosition() {
         return this._tickPosition;
     }
     set tickPosition(value) {
-        this.timePosition = this.
+        this.timePosition = this.sequencer.mainTickPositionToTimePosition(value);
     }
     get timePosition() {
         return this._timePosition;
@@ -27669,30 +28152,30 @@ class AlphaSynth {
     set timePosition(value) {
         Logger.debug('AlphaSynth', `Seeking to position ${value}ms (main)`);
         // tell the sequencer to jump to the given position
-        this.
+        this.sequencer.mainSeek(value);
         // update the internal position
         this.updateTimePosition(value, true);
         // tell the output to reset the already synthesized buffers and request data again
-        if (this.
+        if (this.sequencer.isPlayingMain) {
             this._notPlayedSamples = 0;
             this.output.resetSamples();
         }
     }
     get playbackRange() {
-        return this.
+        return this.sequencer.mainPlaybackRange;
     }
     set playbackRange(value) {
-        this.
+        this.sequencer.mainPlaybackRange = value;
         if (value) {
             this.tickPosition = value.startTick;
         }
         this.playbackRangeChanged.trigger(new PlaybackRangeChangedEventArgs(value));
     }
     get isLooping() {
-        return this.
+        return this.sequencer.isLooping;
     }
     set isLooping(value) {
-        this.
+        this.sequencer.isLooping = value;
     }
     destroy() {
         Logger.debug('AlphaSynth', 'Destroying player');
@@ -27700,11 +28183,11 @@ class AlphaSynth {
         this.output.destroy();
     }
     /**
-     * Initializes a new instance of the {@link
+     * Initializes a new instance of the {@link AlphaSynthBase} class.
      * @param output The output to use for playing the generated samples.
      */
-    constructor(output, bufferTimeInMilliseconds) {
-        this.
+    constructor(output, synthesizer, bufferTimeInMilliseconds) {
+        this.isSoundFontLoaded = false;
         this._isMidiLoaded = false;
         this._tickPosition = 0;
         this._timePosition = 0;
@@ -27733,8 +28216,8 @@ class AlphaSynth {
         Logger.debug('AlphaSynth', 'Creating output');
         this._output = output;
         Logger.debug('AlphaSynth', 'Creating synthesizer');
-        this.
-        this.
+        this.synthesizer = synthesizer;
+        this.sequencer = new MidiFileSequencer(this.synthesizer);
         Logger.debug('AlphaSynth', 'Opening output');
         this.output.ready.on(() => {
             this.isReady = true;
@@ -27742,42 +28225,45 @@ class AlphaSynth {
             this.checkReadyForPlayback();
         });
         this.output.sampleRequest.on(() => {
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+            this.onSampleRequest();
+        });
+        this.output.samplesPlayed.on(this.onSamplesPlayed.bind(this));
+        this.output.open(bufferTimeInMilliseconds);
+    }
+    onSampleRequest() {
+        if (this.state === PlayerState.Playing &&
+            (!this.sequencer.isFinished || this.synthesizer.activeVoiceCount > 0)) {
+            let samples = new Float32Array(SynthConstants.MicroBufferSize * SynthConstants.MicroBufferCount * SynthConstants.AudioChannels);
+            let bufferPos = 0;
+            for (let i = 0; i < SynthConstants.MicroBufferCount; i++) {
+                // synthesize buffer
+                this.sequencer.fillMidiEventQueue();
+                const synthesizedEvents = this.synthesizer.synthesize(samples, bufferPos, SynthConstants.MicroBufferSize);
+                bufferPos += SynthConstants.MicroBufferSize * SynthConstants.AudioChannels;
+                // push all processed events into the queue
+                // for informing users about played events
+                for (const e of synthesizedEvents) {
+                    if (this._midiEventsPlayedFilter.has(e.event.type)) {
+                        this._playedEventsQueue.enqueue(e);
                     }
                 }
-                //
-                if (
-
+                // tell sequencer to check whether its work is done
+                if (this.sequencer.isFinished) {
+                    break;
                 }
-                this._notPlayedSamples += samples.length;
-                this.output.addSamples(samples);
             }
-
-
-
-            this.output.addSamples(samples);
+            // send it to output
+            if (bufferPos < samples.length) {
+                samples = samples.subarray(0, bufferPos);
             }
-
-
-
+            this._notPlayedSamples += samples.length;
+            this.output.addSamples(samples);
+        }
+        else {
+            // Tell output that there is no data left for it.
+            const samples = new Float32Array(0);
+            this.output.addSamples(samples);
+        }
     }
     play() {
@@ -27787,20 +28273,20 @@ class AlphaSynth {
         this.playInternal();
         if (this._countInVolume > 0) {
             Logger.debug('AlphaSynth', 'Starting countin');
-            this.
-            this.
+            this.sequencer.startCountIn();
+            this.synthesizer.setupMetronomeChannel(this._countInVolume);
             this.updateTimePosition(0, true);
         }
         this.output.play();
         return true;
     }
     playInternal() {
-        if (this.
+        if (this.sequencer.isPlayingOneTimeMidi) {
             Logger.debug('AlphaSynth', 'Cancelling one time midi');
             this.stopOneTimeMidi();
         }
         Logger.debug('AlphaSynth', 'Starting playback');
-        this.
+        this.synthesizer.setupMetronomeChannel(this.metronomeVolume);
         this._synthStopping = false;
         this.state = PlayerState.Playing;
         this.stateChanged.trigger(new PlayerStateChangedEventArgs(this.state, false));
@@ -27813,7 +28299,7 @@ class AlphaSynth {
         this.state = PlayerState.Paused;
         this.stateChanged.trigger(new PlayerStateChangedEventArgs(this.state, false));
         this.output.pause();
-        this.
+        this.synthesizer.noteOffAll(false);
     }
     playPause() {
         if (this.state !== PlayerState.Paused || !this._isMidiLoaded) {
@@ -27831,21 +28317,21 @@ class AlphaSynth {
         this.state = PlayerState.Paused;
         this.output.pause();
         this._notPlayedSamples = 0;
-        this.
-        this.
-        this.tickPosition = this.
+        this.sequencer.stop();
+        this.synthesizer.noteOffAll(true);
+        this.tickPosition = this.sequencer.mainPlaybackRange ? this.sequencer.mainPlaybackRange.startTick : 0;
         this.stateChanged.trigger(new PlayerStateChangedEventArgs(this.state, true));
     }
     playOneTimeMidiFile(midi) {
-        if (this.
+        if (this.sequencer.isPlayingOneTimeMidi) {
             this.stopOneTimeMidi();
         }
         else {
             // pause current playback.
             this.pause();
         }
-        this.
-        this.
+        this.sequencer.loadOneTimeMidi(midi);
+        this.synthesizer.noteOffAll(true);
         // update the internal position
         this.updateTimePosition(0, true);
         // tell the output to reset the already synthesized buffers and request data again
@@ -27855,9 +28341,9 @@ class AlphaSynth {
     }
     resetSoundFonts() {
         this.stop();
-        this.
+        this.synthesizer.resetPresets();
         this._loadedSoundFonts = [];
-        this.
+        this.isSoundFontLoaded = false;
         this.soundFontLoaded.trigger();
     }
     loadSoundFont(data, append) {
@@ -27871,7 +28357,7 @@ class AlphaSynth {
             this._loadedSoundFonts = [];
         }
         this._loadedSoundFonts.push(soundFont);
-        this.
+        this.isSoundFontLoaded = true;
         this.soundFontLoaded.trigger();
         Logger.debug('AlphaSynth', 'soundFont successfully loaded');
         this.checkReadyForPlayback();
@@ -27883,12 +28369,12 @@ class AlphaSynth {
     }
     checkReadyForPlayback() {
         if (this.isReadyForPlayback) {
-            this.
-            const programs = this.
-            const percussionKeys = this.
+            this.synthesizer.setupMetronomeChannel(this.metronomeVolume);
+            const programs = this.sequencer.instrumentPrograms;
+            const percussionKeys = this.sequencer.percussionKeys;
             let append = false;
             for (const soundFont of this._loadedSoundFonts) {
-                this.
+                this.synthesizer.loadPresets(soundFont, programs, percussionKeys, append);
                 append = true;
             }
             this.readyForPlayback.trigger();
@@ -27902,9 +28388,9 @@ class AlphaSynth {
         this.stop();
         try {
             Logger.debug('AlphaSynth', 'Loading midi from model');
-            this.
+            this.sequencer.loadMidi(midi);
            this._isMidiLoaded = true;
-            this.midiLoaded.trigger(new PositionChangedEventArgs(0, this.
+            this.midiLoaded.trigger(new PositionChangedEventArgs(0, this.sequencer.currentEndTime, 0, this.sequencer.currentEndTick, false, this.sequencer.currentTempo, this.sequencer.modifiedTempo));
             Logger.debug('AlphaSynth', 'Midi successfully loaded');
             this.checkReadyForPlayback();
             this.tickPosition = 0;
@@ -27915,29 +28401,29 @@ class AlphaSynth {
         }
     }
     applyTranspositionPitches(transpositionPitches) {
-        this.
+        this.synthesizer.applyTranspositionPitches(transpositionPitches);
     }
     setChannelTranspositionPitch(channel, semitones) {
-        this.
+        this.synthesizer.setChannelTranspositionPitch(channel, semitones);
     }
     setChannelMute(channel, mute) {
-        this.
+        this.synthesizer.channelSetMute(channel, mute);
     }
     resetChannelStates() {
-        this.
+        this.synthesizer.resetChannelStates();
     }
     setChannelSolo(channel, solo) {
-        this.
+        this.synthesizer.channelSetSolo(channel, solo);
     }
     setChannelVolume(channel, volume) {
         volume = Math.max(volume, SynthConstants.MinVolume);
-        this.
+        this.synthesizer.channelSetMixVolume(channel, volume);
     }
     onSamplesPlayed(sampleCount) {
         if (sampleCount === 0) {
             return;
         }
-        const playedMillis = (sampleCount / this.
+        const playedMillis = (sampleCount / this.synthesizer.outSampleRate) * 1000;
         this._notPlayedSamples -= sampleCount * SynthConstants.AudioChannels;
         this.updateTimePosition(this._timePosition + playedMillis, false);
         this.checkForFinish();
@@ -27945,76 +28431,85 @@ class AlphaSynth {
     checkForFinish() {
         let startTick = 0;
         let endTick = 0;
-        if (this.playbackRange && this.
+        if (this.playbackRange && this.sequencer.isPlayingMain) {
            startTick = this.playbackRange.startTick;
            endTick = this.playbackRange.endTick;
        }
        else {
-            endTick = this.
-        }
-        if (this._tickPosition >= endTick
-
-            if (this.
-
-                this.
-
-
-
-
-
-
-                this.
-
-
-
-
-
-                this.
-
-
+            endTick = this.sequencer.currentEndTick;
+        }
+        if (this._tickPosition >= endTick) {
+            // fully done with playback of remaining samples?
+            if (this._notPlayedSamples <= 0) {
+                this._notPlayedSamples = 0;
+                if (this.sequencer.isPlayingCountIn) {
+                    Logger.debug('AlphaSynth', 'Finished playback (count-in)');
+                    this.sequencer.resetCountIn();
+                    this.timePosition = this.sequencer.currentTime;
+                    this.playInternal();
+                    this.output.resetSamples();
+                }
+                else if (this.sequencer.isPlayingOneTimeMidi) {
+                    Logger.debug('AlphaSynth', 'Finished playback (one time)');
+                    this.output.resetSamples();
+                    this.state = PlayerState.Paused;
+                    this.stopOneTimeMidi();
+                }
+                else if (this.isLooping) {
+                    Logger.debug('AlphaSynth', 'Finished playback (main looping)');
+                    this.finished.trigger();
+                    this.tickPosition = startTick;
+                    this._synthStopping = false;
+                }
+                else if (this.synthesizer.activeVoiceCount > 0) {
+                    // smooth stop
+                    if (!this._synthStopping) {
+                        Logger.debug('AlphaSynth', 'Signaling synth to stop all voices (all samples played)');
+                        this.synthesizer.noteOffAll(true);
+                        this._synthStopping = true;
+                    }
+                }
+                else {
+                    this._synthStopping = false;
+                    Logger.debug('AlphaSynth', 'Finished playback (main)');
+                    this.finished.trigger();
+                    this.stop();
+                }
             }
-            else
-            //
+            else {
+                // the output still has to play some samples, signal the synth to stop
+                // to eventually bring the voices down to 0 and stop playing
                 if (!this._synthStopping) {
-
+                    Logger.debug('AlphaSynth', 'Signaling synth to stop all voices (not all samples played)');
+                    this.synthesizer.noteOffAll(true);
                     this._synthStopping = true;
                 }
             }
-        else {
-            this._synthStopping = false;
-            Logger.debug('AlphaSynth', 'Finished playback (main)');
-            this.finished.trigger();
-            this.stop();
-        }
+        }
         }
     }
     stopOneTimeMidi() {
         this.output.pause();
-        this.
-        this.
-        this.timePosition = this.
+        this.synthesizer.noteOffAll(true);
+        this.sequencer.resetOneTimeMidi();
+        this.timePosition = this.sequencer.currentTime;
     }
     updateTimePosition(timePosition, isSeek) {
         // update the real positions
         let currentTime = timePosition;
         this._timePosition = currentTime;
-        let currentTick = this.
+        let currentTick = this.sequencer.currentTimePositionToTickPosition(currentTime);
         this._tickPosition = currentTick;
-        const endTime = this.
-        const endTick = this.
+        const endTime = this.sequencer.currentEndTime;
+        const endTick = this.sequencer.currentEndTick;
         // on fade outs we can have some milliseconds longer, ensure we don't report this
         if (currentTime > endTime) {
             currentTime = endTime;
             currentTick = endTick;
         }
-        const mode = this.
-
-
-
-            : 'one-time';
-        Logger.debug('AlphaSynth', `Position changed: (time: ${currentTime}/${endTime}, tick: ${currentTick}/${endTick}, Active Voices: ${this._synthesizer.activeVoiceCount} (${mode})`);
-        if (this._sequencer.isPlayingMain) {
-            this.positionChanged.trigger(new PositionChangedEventArgs(currentTime, endTime, currentTick, endTick, isSeek));
+        const mode = this.sequencer.isPlayingMain ? 'main' : this.sequencer.isPlayingCountIn ? 'count-in' : 'one-time';
+        Logger.debug('AlphaSynth', `Position changed: (time: ${currentTime}/${endTime}, tick: ${currentTick}/${endTick}, Active Voices: ${this.synthesizer.activeVoiceCount} (${mode}), Tempo original: ${this.sequencer.currentTempo}, Tempo modified: ${this.sequencer.modifiedTempo})`);
+        if (this.sequencer.isPlayingMain) {
+            this.positionChanged.trigger(new PositionChangedEventArgs(currentTime, endTime, currentTick, endTick, isSeek, this.sequencer.currentTempo, this.sequencer.modifiedTempo));
         }
         // build events which were actually played
         if (isSeek) {
@@ -28035,13 +28530,28 @@ class AlphaSynth {
      * @internal
      */
     hasSamplesForProgram(program) {
-        return this.
+        return this.synthesizer.hasSamplesForProgram(program);
     }
     /**
      * @internal
     */
     hasSamplesForPercussion(key) {
-        return this.
+        return this.synthesizer.hasSamplesForPercussion(key);
+    }
+    loadBackingTrack(_score, _syncPoints) {
+    }
+}
+/**
+ * This is the main synthesizer component which can be used to
+ * play a {@link MidiFile} via a {@link ISynthOutput}.
+ */
+class AlphaSynth extends AlphaSynthBase {
+    /**
+     * Initializes a new instance of the {@link AlphaSynth} class.
+     * @param output The output to use for playing the generated samples.
+     */
+    constructor(output, bufferTimeInMilliseconds) {
+        super(output, new TinySoundFont(output.sampleRate), bufferTimeInMilliseconds);
     }
 }
 
@@ -29288,6 +29798,35 @@ var PlayerOutputMode;
|
|
|
29288
29798
|
*/
|
|
29289
29799
|
PlayerOutputMode[PlayerOutputMode["WebAudioScriptProcessor"] = 1] = "WebAudioScriptProcessor";
|
|
29290
29800
|
})(PlayerOutputMode || (PlayerOutputMode = {}));
|
|
29801
|
+
/**
|
|
29802
|
+
* Lists the different modes how the internal alphaTab player (and related cursor behavior) is working.
|
|
29803
|
+
*/
|
|
29804
|
+
var PlayerMode;
|
|
29805
|
+
(function (PlayerMode) {
|
|
29806
|
+
/**
|
|
29807
|
+
* The player functionality is fully disabled.
|
|
29808
|
+
*/
|
|
29809
|
+
PlayerMode[PlayerMode["Disabled"] = 0] = "Disabled";
|
|
29810
|
+
/**
|
|
29811
|
+
* The player functionality is enabled.
|
|
29812
|
+
* If the loaded file provides a backing track, it is used for playback.
|
|
29813
|
+
* If no backing track is provided, the midi synthesizer is used.
|
|
29814
|
+
*/
|
|
29815
|
+
PlayerMode[PlayerMode["EnabledAutomatic"] = 1] = "EnabledAutomatic";
|
|
29816
|
+
/**
|
|
29817
|
+
* The player functionality is enabled and the synthesizer is used (even if a backing track is embedded in the file).
|
|
29818
|
+
*/
|
|
29819
|
+
PlayerMode[PlayerMode["EnabledSynthesizer"] = 2] = "EnabledSynthesizer";
|
|
29820
|
+
/**
|
|
29821
|
+
* The player functionality is enabled. If the input data model has no backing track configured, the player might not work as expected (as playback completes instantly).
|
|
29822
|
+
*/
|
|
29823
|
+
PlayerMode[PlayerMode["EnabledBackingTrack"] = 3] = "EnabledBackingTrack";
|
|
29824
|
+
/**
|
|
29825
|
+
* The player functionality is enabled and an external audio/video source is used as the time axis.
|
|
29826
|
+
* The related player APIs need to be used to update the current position of the external audio source within alphaTab.
|
|
29827
|
+
*/
|
|
29828
|
+
PlayerMode[PlayerMode["EnabledExternalMedia"] = 4] = "EnabledExternalMedia";
|
|
29829
|
+
})(PlayerMode || (PlayerMode = {}));
|
|
29291
29830
|
/**
|
|
29292
29831
|
* The player settings control how the audio playback and UI is behaving.
|
|
29293
29832
|
* @json
|
|
@@ -29334,6 +29873,7 @@ class PlayerSettings {
|
|
|
29334
29873
|
* @since 0.9.6
|
|
29335
29874
|
* @defaultValue `false`
|
|
29336
29875
|
* @category Player
|
|
29876
|
+
* @deprecated Use {@link playerMode} instead.
|
|
29337
29877
|
* @remarks
|
|
29338
29878
|
* This setting configures whether the player feature is enabled or not. Depending on the platform, enabling the player requires some additional actions by the developer.
|
|
29339
29879
|
* For the JavaScript version the [player.soundFont](/docs/reference/settings/player/soundfont) property must be set to the URL of the sound font that should be used or it must be loaded manually via API.
|
|
@@ -29342,6 +29882,37 @@ class PlayerSettings {
|
|
|
29342
29882
|
* AlphaTab does not ship a default UI for the player. The API must be hooked up to some UI controls to allow the user to interact with the player.
|
|
29343
29883
|
*/
|
|
29344
29884
|
this.enablePlayer = false;
|
|
29885
|
+
/**
|
|
29886
|
+
* Whether the player should be enabled and which mode it should use.
|
|
29887
|
+
* @since 1.6.0
|
|
29888
|
+
* @defaultValue `PlayerMode.Disabled`
|
|
29889
|
+
* @category Player
|
|
29890
|
+
* @remarks
|
|
29891
|
+
* This setting configures whether the player feature is enabled or not. Depending on the platform, enabling the player requires some additional actions by the developer.
|
|
29892
|
+
*
|
|
29893
|
+
* **Synthesizer**
|
|
29894
|
+
*
|
|
29895
|
+
* If the synthesizer is used (via {@link PlayerMode.EnabledAutomatic} or {@link PlayerMode.EnabledSynthesizer}) a sound font is needed so that the midi synthesizer can produce the audio samples.
|
|
29896
|
+
*
|
|
29897
|
+
* For the JavaScript version the [player.soundFont](/docs/reference/settings/player/soundfont) property must be set to the URL of the sound font that should be used or it must be loaded manually via API.
|
|
29898
|
+
* For .net the soundfont must be loaded manually.
|
|
29899
|
+
*
|
|
29900
|
+
* **Backing Track**
|
|
29901
|
+
*
|
|
29902
|
+
* If the input file already provides a built-in backing track, no additional data needs to be loaded (everything is filled from the input file).
|
|
29903
|
+
* Otherwise the `score.backingTrack` needs to be filled before loading and the related sync points need to be configured.
|
|
29904
|
+
*
|
|
29905
|
+
* **External Media**
|
|
29906
|
+
*
|
|
29907
|
+
* For synchronizing alphaTab with external media, no data needs to be loaded into alphaTab. The sync points configured on the MasterBars are used
|
|
29908
|
+
* as a reference to synchronize the external media with the internal time axis. The related APIs on the AlphaTabApi object then need to be used
|
|
29909
|
+
* to update the playback state and external audio position during playback.
|
|
29910
|
+
*
|
|
29911
|
+
* **User Interface**
|
|
29912
|
+
*
|
|
29913
|
+
* AlphaTab does not ship a default UI for the player. The API must be hooked up to some UI controls to allow the user to interact with the player.
|
|
29914
|
+
*/
|
|
29915
|
+
this.playerMode = PlayerMode.Disabled;
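A minimal sketch of how this setting might be wired up from the JavaScript API. The `AlphaTabApi` constructor and the `player.soundFont` requirement are taken from the remarks above; the exact export location of the `PlayerMode` enum and the soundfont path are assumptions.
const api = new alphaTab.AlphaTabApi(document.querySelector('#alphaTab'), {
    player: {
        // assumption: PlayerMode is exported at the alphaTab root like the other settings enums
        playerMode: alphaTab.PlayerMode.EnabledAutomatic,
        // only needed when the synthesizer ends up being used (EnabledAutomatic / EnabledSynthesizer)
        soundFont: 'path/to/soundfont.sf2'
    }
});
With `EnabledAutomatic`, files that ship a backing track play that audio and everything else falls back to the midi synthesizer; the legacy `enablePlayer` flag is mapped to `EnabledAutomatic` by the backwards-compatibility block further down in this diff.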
|
|
29345
29916
|
/**
|
|
29346
29917
|
* Whether playback cursors should be displayed.
|
|
29347
29918
|
* @since 0.9.6
|
|
@@ -30047,6 +30618,7 @@ class PlayerSettingsSerializer {
|
|
|
30047
30618
|
/*@target web*/
|
|
30048
30619
|
o.set("outputmode", obj.outputMode);
|
|
30049
30620
|
o.set("enableplayer", obj.enablePlayer);
|
|
30621
|
+
o.set("playermode", obj.playerMode);
|
|
30050
30622
|
o.set("enablecursor", obj.enableCursor);
|
|
30051
30623
|
o.set("enableanimatedbeatcursor", obj.enableAnimatedBeatCursor);
|
|
30052
30624
|
o.set("enableelementhighlighting", obj.enableElementHighlighting);
|
|
@@ -30082,6 +30654,9 @@ class PlayerSettingsSerializer {
|
|
|
30082
30654
|
case "enableplayer":
|
|
30083
30655
|
obj.enablePlayer = v;
|
|
30084
30656
|
return true;
|
|
30657
|
+
case "playermode":
|
|
30658
|
+
obj.playerMode = JsonHelper.parseEnum(v, PlayerMode);
|
|
30659
|
+
return true;
|
|
30085
30660
|
case "enablecursor":
|
|
30086
30661
|
obj.enableCursor = v;
|
|
30087
30662
|
return true;
|
|
@@ -30316,6 +30891,39 @@ class SectionSerializer {
|
|
|
30316
30891
|
}
|
|
30317
30892
|
}
|
|
30318
30893
|
|
|
30894
|
+
class SyncPointDataSerializer {
|
|
30895
|
+
static fromJson(obj, m) {
|
|
30896
|
+
if (!m) {
|
|
30897
|
+
return;
|
|
30898
|
+
}
|
|
30899
|
+
JsonHelper.forEach(m, (v, k) => SyncPointDataSerializer.setProperty(obj, k, v));
|
|
30900
|
+
}
|
|
30901
|
+
static toJson(obj) {
|
|
30902
|
+
if (!obj) {
|
|
30903
|
+
return null;
|
|
30904
|
+
}
|
|
30905
|
+
const o = new Map();
|
|
30906
|
+
o.set("baroccurence", obj.barOccurence);
|
|
30907
|
+
o.set("modifiedtempo", obj.modifiedTempo);
|
|
30908
|
+
o.set("millisecondoffset", obj.millisecondOffset);
|
|
30909
|
+
return o;
|
|
30910
|
+
}
|
|
30911
|
+
static setProperty(obj, property, v) {
|
|
30912
|
+
switch (property) {
|
|
30913
|
+
case "baroccurence":
|
|
30914
|
+
obj.barOccurence = v;
|
|
30915
|
+
return true;
|
|
30916
|
+
case "modifiedtempo":
|
|
30917
|
+
obj.modifiedTempo = v;
|
|
30918
|
+
return true;
|
|
30919
|
+
case "millisecondoffset":
|
|
30920
|
+
obj.millisecondOffset = v;
|
|
30921
|
+
return true;
|
|
30922
|
+
}
|
|
30923
|
+
return false;
|
|
30924
|
+
}
|
|
30925
|
+
}
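Based purely on the keys written by `SyncPointDataSerializer` above and by `AutomationSerializer`/`MasterBarSerializer` below, a serialized sync point inside a master bar's `syncpoints` array would roughly take this shape (illustrative values, alongside the usual automation keys such as `islinear`, `type`, `value` and `text`; the meaning of the individual fields is inferred from how the midi generator and sequencer use them):
const syncPointJson = {
    ratioposition: 0.5,          // position within the bar, as a fraction of the bar duration
    syncpointvalue: {
        baroccurence: 1,         // which repeat/occurrence of the bar this point applies to
        modifiedtempo: 118.5,    // tempo the external audio actually plays at this point
        millisecondoffset: 12500 // presumably the position within the external audio, in milliseconds
    }
};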
|
|
30926
|
+
|
|
30319
30927
|
class AutomationSerializer {
|
|
30320
30928
|
static fromJson(obj, m) {
|
|
30321
30929
|
if (!m) {
|
|
@@ -30331,6 +30939,9 @@ class AutomationSerializer {
|
|
|
30331
30939
|
o.set("islinear", obj.isLinear);
|
|
30332
30940
|
o.set("type", obj.type);
|
|
30333
30941
|
o.set("value", obj.value);
|
|
30942
|
+
if (obj.syncPointValue) {
|
|
30943
|
+
o.set("syncpointvalue", SyncPointDataSerializer.toJson(obj.syncPointValue));
|
|
30944
|
+
}
|
|
30334
30945
|
o.set("ratioposition", obj.ratioPosition);
|
|
30335
30946
|
o.set("text", obj.text);
|
|
30336
30947
|
return o;
|
|
@@ -30346,6 +30957,15 @@ class AutomationSerializer {
|
|
|
30346
30957
|
case "value":
|
|
30347
30958
|
obj.value = v;
|
|
30348
30959
|
return true;
|
|
30960
|
+
case "syncpointvalue":
|
|
30961
|
+
if (v) {
|
|
30962
|
+
obj.syncPointValue = new SyncPointData();
|
|
30963
|
+
SyncPointDataSerializer.fromJson(obj.syncPointValue, v);
|
|
30964
|
+
}
|
|
30965
|
+
else {
|
|
30966
|
+
obj.syncPointValue = undefined;
|
|
30967
|
+
}
|
|
30968
|
+
return true;
|
|
30349
30969
|
case "ratioposition":
|
|
30350
30970
|
obj.ratioPosition = v;
|
|
30351
30971
|
return true;
|
|
@@ -30411,6 +31031,9 @@ class MasterBarSerializer {
|
|
|
30411
31031
|
o.set("section", SectionSerializer.toJson(obj.section));
|
|
30412
31032
|
}
|
|
30413
31033
|
o.set("tempoautomations", obj.tempoAutomations.map(i => AutomationSerializer.toJson(i)));
|
|
31034
|
+
if (obj.syncPoints !== undefined) {
|
|
31035
|
+
o.set("syncpoints", obj.syncPoints?.map(i => AutomationSerializer.toJson(i)));
|
|
31036
|
+
}
|
|
30414
31037
|
if (obj.fermata !== null) {
|
|
30415
31038
|
const m = new Map();
|
|
30416
31039
|
o.set("fermata", m);
|
|
@@ -30477,6 +31100,16 @@ class MasterBarSerializer {
|
|
|
30477
31100
|
obj.tempoAutomations.push(i);
|
|
30478
31101
|
}
|
|
30479
31102
|
return true;
|
|
31103
|
+
case "syncpoints":
|
|
31104
|
+
if (v) {
|
|
31105
|
+
obj.syncPoints = [];
|
|
31106
|
+
for (const o of v) {
|
|
31107
|
+
const i = new Automation();
|
|
31108
|
+
AutomationSerializer.fromJson(i, o);
|
|
31109
|
+
obj.addSyncPoint(i);
|
|
31110
|
+
}
|
|
31111
|
+
}
|
|
31112
|
+
return true;
|
|
30480
31113
|
case "fermata":
|
|
30481
31114
|
obj.fermata = new Map();
|
|
30482
31115
|
JsonHelper.forEach(v, (v, k) => {
|
|
@@ -31768,6 +32401,31 @@ class RenderStylesheetSerializer {
|
|
|
31768
32401
|
}
|
|
31769
32402
|
}
|
|
31770
32403
|
|
|
32404
|
+
class BackingTrackSerializer {
|
|
32405
|
+
static fromJson(obj, m) {
|
|
32406
|
+
if (!m) {
|
|
32407
|
+
return;
|
|
32408
|
+
}
|
|
32409
|
+
JsonHelper.forEach(m, (v, k) => BackingTrackSerializer.setProperty(obj, k, v));
|
|
32410
|
+
}
|
|
32411
|
+
static toJson(obj) {
|
|
32412
|
+
if (!obj) {
|
|
32413
|
+
return null;
|
|
32414
|
+
}
|
|
32415
|
+
const o = new Map();
|
|
32416
|
+
o.set("padding", obj.padding);
|
|
32417
|
+
return o;
|
|
32418
|
+
}
|
|
32419
|
+
static setProperty(obj, property, v) {
|
|
32420
|
+
switch (property) {
|
|
32421
|
+
case "padding":
|
|
32422
|
+
obj.padding = v;
|
|
32423
|
+
return true;
|
|
32424
|
+
}
|
|
32425
|
+
return false;
|
|
32426
|
+
}
|
|
32427
|
+
}
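A rough sketch of how a backing track could be attached to a score programmatically. `BackingTrack` being exported with the other model classes is an assumption, as is the `audioFileBytes` placeholder; `rawAudioFile` holding the raw audio bytes and `padding` being a millisecond offset are inferred from `AudioElementBackingTrackSynthOutput` further down, which wraps the bytes in a `Blob` and divides the padding by 1000.
const backingTrack = new alphaTab.model.BackingTrack(); // assumption: exported under alphaTab.model
backingTrack.rawAudioFile = audioFileBytes;             // hypothetical Uint8Array with the audio file contents
backingTrack.padding = 0;                               // audio offset in milliseconds
score.backingTrack = backingTrack;
// the sync points on the master bars then map bar positions to millisecond offsets in this audio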
|
|
32428
|
+
|
|
31771
32429
|
class HeaderFooterStyleSerializer {
|
|
31772
32430
|
static fromJson(obj, m) {
|
|
31773
32431
|
if (!m) {
|
|
@@ -31879,6 +32537,9 @@ class ScoreSerializer {
|
|
|
31879
32537
|
o.set("defaultsystemslayout", obj.defaultSystemsLayout);
|
|
31880
32538
|
o.set("systemslayout", obj.systemsLayout);
|
|
31881
32539
|
o.set("stylesheet", RenderStylesheetSerializer.toJson(obj.stylesheet));
|
|
32540
|
+
if (obj.backingTrack) {
|
|
32541
|
+
o.set("backingtrack", BackingTrackSerializer.toJson(obj.backingTrack));
|
|
32542
|
+
}
|
|
31882
32543
|
if (obj.style) {
|
|
31883
32544
|
o.set("style", ScoreStyleSerializer.toJson(obj.style));
|
|
31884
32545
|
}
|
|
@@ -31947,6 +32608,15 @@ class ScoreSerializer {
|
|
|
31947
32608
|
case "stylesheet":
|
|
31948
32609
|
RenderStylesheetSerializer.fromJson(obj.stylesheet, v);
|
|
31949
32610
|
return true;
|
|
32611
|
+
case "backingtrack":
|
|
32612
|
+
if (v) {
|
|
32613
|
+
obj.backingTrack = new BackingTrack();
|
|
32614
|
+
BackingTrackSerializer.fromJson(obj.backingTrack, v);
|
|
32615
|
+
}
|
|
32616
|
+
else {
|
|
32617
|
+
obj.backingTrack = undefined;
|
|
32618
|
+
}
|
|
32619
|
+
return true;
|
|
31950
32620
|
case "style":
|
|
31951
32621
|
if (v) {
|
|
31952
32622
|
obj.style = new ScoreStyle();
|
|
@@ -32123,7 +32793,9 @@ class JsonConverter {
|
|
|
32123
32793
|
case MidiEventType.ProgramChange:
|
|
32124
32794
|
return new ProgramChangeEvent(track, tick, JsonHelper.getValue(midiEvent, 'channel'), JsonHelper.getValue(midiEvent, 'program'));
|
|
32125
32795
|
case MidiEventType.TempoChange:
|
|
32126
|
-
|
|
32796
|
+
const tempo = new TempoChangeEvent(tick, 0);
|
|
32797
|
+
tempo.beatsPerMinute = JsonHelper.getValue(midiEvent, 'beatsPerMinute');
|
|
32798
|
+
return tempo;
|
|
32127
32799
|
case MidiEventType.PitchBend:
|
|
32128
32800
|
return new PitchBendEvent(track, tick, JsonHelper.getValue(midiEvent, 'channel'), JsonHelper.getValue(midiEvent, 'value'));
|
|
32129
32801
|
case MidiEventType.PerNotePitchBend:
|
|
@@ -32198,7 +32870,7 @@ class JsonConverter {
|
|
|
32198
32870
|
o.set('program', midiEvent.program);
|
|
32199
32871
|
break;
|
|
32200
32872
|
case MidiEventType.TempoChange:
|
|
32201
|
-
o.set('
|
|
32873
|
+
o.set('beatsPerMinute', midiEvent.beatsPerMinute);
|
|
32202
32874
|
break;
|
|
32203
32875
|
case MidiEventType.PitchBend:
|
|
32204
32876
|
o.set('channel', midiEvent.channel);
|
|
@@ -32425,7 +33097,9 @@ class AlphaSynthWebWorker {
|
|
|
32425
33097
|
endTime: e.endTime,
|
|
32426
33098
|
currentTick: e.currentTick,
|
|
32427
33099
|
endTick: e.endTick,
|
|
32428
|
-
isSeek: e.isSeek
|
|
33100
|
+
isSeek: e.isSeek,
|
|
33101
|
+
originalTempo: e.originalTempo,
|
|
33102
|
+
modifiedTempo: e.modifiedTempo
|
|
32429
33103
|
});
|
|
32430
33104
|
}
|
|
32431
33105
|
onPlayerStateChanged(e) {
|
|
@@ -32471,7 +33145,9 @@ class AlphaSynthWebWorker {
|
|
|
32471
33145
|
endTime: e.endTime,
|
|
32472
33146
|
currentTick: e.currentTick,
|
|
32473
33147
|
endTick: e.endTick,
|
|
32474
|
-
isSeek: e.isSeek
|
|
33148
|
+
isSeek: e.isSeek,
|
|
33149
|
+
originalTempo: e.originalTempo,
|
|
33150
|
+
modifiedTempo: e.modifiedTempo
|
|
32475
33151
|
});
|
|
32476
33152
|
}
|
|
32477
33153
|
onMidiLoadFailed(e) {
|
|
@@ -33780,8 +34456,9 @@ class AlphaSynthMidiFileHandler {
|
|
|
33780
34456
|
}
|
|
33781
34457
|
addTempo(tick, tempo) {
|
|
33782
34458
|
// bpm -> microsecond per quarter note
|
|
33783
|
-
const
|
|
33784
|
-
|
|
34459
|
+
const tempoEvent = new TempoChangeEvent(tick, 0);
|
|
34460
|
+
tempoEvent.beatsPerMinute = tempo;
|
|
34461
|
+
this._midiFile.addEvent(tempoEvent);
|
|
33785
34462
|
}
|
|
33786
34463
|
addBend(track, tick, channel, value) {
|
|
33787
34464
|
if (value >= SynthConstants.MaxPitchWheel) {
|
|
@@ -35053,6 +35730,10 @@ class MidiFileGenerator {
|
|
|
35053
35730
|
* Gets or sets whether transposition pitches should be applied to the individual midi events or not.
|
|
35054
35731
|
*/
|
|
35055
35732
|
this.applyTranspositionPitches = true;
|
|
35733
|
+
/**
|
|
35734
|
+
* The computed sync points for synchronizing the midi file with an external backing track.
|
|
35735
|
+
*/
|
|
35736
|
+
this.syncPoints = [];
|
|
35056
35737
|
/**
|
|
35057
35738
|
* Gets the transposition pitches for the individual midi channels.
|
|
35058
35739
|
*/
|
|
@@ -35079,13 +35760,17 @@ class MidiFileGenerator {
|
|
|
35079
35760
|
let previousMasterBar = null;
|
|
35080
35761
|
let currentTempo = this._score.tempo;
|
|
35081
35762
|
// store the previous played bar for repeats
|
|
35763
|
+
const barOccurence = new Map();
|
|
35082
35764
|
while (!controller.finished) {
|
|
35083
35765
|
const index = controller.index;
|
|
35084
35766
|
const bar = this._score.masterBars[index];
|
|
35085
35767
|
const currentTick = controller.currentTick;
|
|
35086
35768
|
controller.processCurrent();
|
|
35087
35769
|
if (controller.shouldPlay) {
|
|
35088
|
-
|
|
35770
|
+
let occurence = barOccurence.has(index) ? barOccurence.get(index) : -1;
|
|
35771
|
+
occurence++;
|
|
35772
|
+
barOccurence.set(index, occurence);
|
|
35773
|
+
this.generateMasterBar(bar, previousMasterBar, currentTick, currentTempo, occurence);
|
|
35089
35774
|
if (bar.tempoAutomations.length > 0) {
|
|
35090
35775
|
currentTempo = bar.tempoAutomations[0].value;
|
|
35091
35776
|
}
|
|
@@ -35154,7 +35839,7 @@ class MidiFileGenerator {
|
|
|
35154
35839
|
const value = Math.max(-32768, Math.min(32767, data * 8 - 1));
|
|
35155
35840
|
return Math.max(value, -1) + 1;
|
|
35156
35841
|
}
|
|
35157
|
-
generateMasterBar(masterBar, previousMasterBar, currentTick, currentTempo) {
|
|
35842
|
+
generateMasterBar(masterBar, previousMasterBar, currentTick, currentTempo, barOccurence) {
|
|
35158
35843
|
// time signature
|
|
35159
35844
|
if (!previousMasterBar ||
|
|
35160
35845
|
previousMasterBar.timeSignatureDenominator !== masterBar.timeSignatureDenominator ||
|
|
@@ -35181,6 +35866,15 @@ class MidiFileGenerator {
|
|
|
35181
35866
|
else {
|
|
35182
35867
|
masterBarLookup.tempoChanges.push(new MasterBarTickLookupTempoChange(currentTick, currentTempo));
|
|
35183
35868
|
}
|
|
35869
|
+
const syncPoints = masterBar.syncPoints;
|
|
35870
|
+
if (syncPoints) {
|
|
35871
|
+
for (const syncPoint of syncPoints) {
|
|
35872
|
+
if (syncPoint.syncPointValue.barOccurence === barOccurence) {
|
|
35873
|
+
const tick = currentTick + masterBarDuration * syncPoint.ratioPosition;
|
|
35874
|
+
this.syncPoints.push(new BackingTrackSyncPoint(tick, syncPoint.syncPointValue));
|
|
35875
|
+
}
|
|
35876
|
+
}
|
|
35877
|
+
}
|
|
35184
35878
|
masterBarLookup.masterBar = masterBar;
|
|
35185
35879
|
masterBarLookup.start = currentTick;
|
|
35186
35880
|
masterBarLookup.end = masterBarLookup.start + masterBarDuration;
|
|
@@ -37378,6 +38072,213 @@ class ActiveBeatsChangedEventArgs {
|
|
|
37378
38072
|
}
|
|
37379
38073
|
}
|
|
37380
38074
|
|
|
38075
|
+
class BackingTrackAudioSynthesizer {
|
|
38076
|
+
constructor() {
|
|
38077
|
+
this._midiEventQueue = new Queue();
|
|
38078
|
+
this.masterVolume = 1;
|
|
38079
|
+
this.metronomeVolume = 0;
|
|
38080
|
+
this.outSampleRate = 44100;
|
|
38081
|
+
this.currentTempo = 120;
|
|
38082
|
+
this.timeSignatureNumerator = 4;
|
|
38083
|
+
this.timeSignatureDenominator = 4;
|
|
38084
|
+
this.activeVoiceCount = 0;
|
|
38085
|
+
}
|
|
38086
|
+
noteOffAll(_immediate) {
|
|
38087
|
+
}
|
|
38088
|
+
resetSoft() {
|
|
38089
|
+
}
|
|
38090
|
+
resetPresets() {
|
|
38091
|
+
}
|
|
38092
|
+
loadPresets(_hydra, _instrumentPrograms, _percussionKeys, _append) {
|
|
38093
|
+
}
|
|
38094
|
+
setupMetronomeChannel(_metronomeVolume) {
|
|
38095
|
+
}
|
|
38096
|
+
synthesizeSilent(_sampleCount) {
|
|
38097
|
+
this.fakeSynthesize();
|
|
38098
|
+
}
|
|
38099
|
+
processMidiMessage(e) {
|
|
38100
|
+
}
|
|
38101
|
+
dispatchEvent(synthEvent) {
|
|
38102
|
+
this._midiEventQueue.enqueue(synthEvent);
|
|
38103
|
+
}
|
|
38104
|
+
synthesize(_buffer, _bufferPos, _sampleCount) {
|
|
38105
|
+
return this.fakeSynthesize();
|
|
38106
|
+
}
|
|
38107
|
+
fakeSynthesize() {
|
|
38108
|
+
const processedEvents = [];
|
|
38109
|
+
while (!this._midiEventQueue.isEmpty) {
|
|
38110
|
+
const m = this._midiEventQueue.dequeue();
|
|
38111
|
+
if (m.isMetronome && this.metronomeVolume > 0) ;
|
|
38112
|
+
else if (m.event) {
|
|
38113
|
+
this.processMidiMessage(m.event);
|
|
38114
|
+
}
|
|
38115
|
+
processedEvents.push(m);
|
|
38116
|
+
}
|
|
38117
|
+
return processedEvents;
|
|
38118
|
+
}
|
|
38119
|
+
applyTranspositionPitches(transpositionPitches) {
|
|
38120
|
+
}
|
|
38121
|
+
setChannelTranspositionPitch(channel, semitones) {
|
|
38122
|
+
}
|
|
38123
|
+
channelSetMute(channel, mute) {
|
|
38124
|
+
}
|
|
38125
|
+
channelSetSolo(channel, solo) {
|
|
38126
|
+
}
|
|
38127
|
+
resetChannelStates() {
|
|
38128
|
+
}
|
|
38129
|
+
channelSetMixVolume(channel, volume) {
|
|
38130
|
+
}
|
|
38131
|
+
hasSamplesForProgram(program) {
|
|
38132
|
+
return true;
|
|
38133
|
+
}
|
|
38134
|
+
hasSamplesForPercussion(key) {
|
|
38135
|
+
return true;
|
|
38136
|
+
}
|
|
38137
|
+
}
|
|
38138
|
+
class BackingTrackPlayer extends AlphaSynthBase {
|
|
38139
|
+
constructor(backingTrackOutput, bufferTimeInMilliseconds) {
|
|
38140
|
+
super(backingTrackOutput, new BackingTrackAudioSynthesizer(), bufferTimeInMilliseconds);
|
|
38141
|
+
this.synthesizer.output = backingTrackOutput;
|
|
38142
|
+
this._backingTrackOutput = backingTrackOutput;
|
|
38143
|
+
backingTrackOutput.timeUpdate.on(timePosition => {
|
|
38144
|
+
const alphaTabTimePosition = this.sequencer.mainTimePositionFromBackingTrack(timePosition, backingTrackOutput.backingTrackDuration);
|
|
38145
|
+
this.sequencer.fillMidiEventQueueToEndTime(alphaTabTimePosition);
|
|
38146
|
+
this.synthesizer.fakeSynthesize();
|
|
38147
|
+
this.updateTimePosition(alphaTabTimePosition, false);
|
|
38148
|
+
this.checkForFinish();
|
|
38149
|
+
});
|
|
38150
|
+
}
|
|
38151
|
+
updateMasterVolume(value) {
|
|
38152
|
+
super.updateMasterVolume(value);
|
|
38153
|
+
this._backingTrackOutput.masterVolume = value;
|
|
38154
|
+
}
|
|
38155
|
+
updatePlaybackSpeed(value) {
|
|
38156
|
+
super.updatePlaybackSpeed(value);
|
|
38157
|
+
this._backingTrackOutput.playbackRate = value;
|
|
38158
|
+
}
|
|
38159
|
+
onSampleRequest() {
|
|
38160
|
+
}
|
|
38161
|
+
loadMidiFile(midi) {
|
|
38162
|
+
if (!this.isSoundFontLoaded) {
|
|
38163
|
+
this.isSoundFontLoaded = true;
|
|
38164
|
+
this.soundFontLoaded.trigger();
|
|
38165
|
+
}
|
|
38166
|
+
super.loadMidiFile(midi);
|
|
38167
|
+
}
|
|
38168
|
+
updateTimePosition(timePosition, isSeek) {
|
|
38169
|
+
super.updateTimePosition(timePosition, isSeek);
|
|
38170
|
+
if (isSeek) {
|
|
38171
|
+
this._backingTrackOutput.seekTo(this.sequencer.mainTimePositionToBackingTrack(timePosition, this._backingTrackOutput.backingTrackDuration));
|
|
38172
|
+
}
|
|
38173
|
+
}
|
|
38174
|
+
loadBackingTrack(score, syncPoints) {
|
|
38175
|
+
const backingTrackInfo = score.backingTrack;
|
|
38176
|
+
if (backingTrackInfo) {
|
|
38177
|
+
this._backingTrackOutput.loadBackingTrack(backingTrackInfo);
|
|
38178
|
+
this.sequencer.mainUpdateSyncPoints(syncPoints);
|
|
38179
|
+
this.timePosition = 0;
|
|
38180
|
+
}
|
|
38181
|
+
}
|
|
38182
|
+
}
|
|
38183
|
+
|
|
38184
|
+
class ExternalMediaSynthOutput {
|
|
38185
|
+
constructor() {
|
|
38186
|
+
// fake rate
|
|
38187
|
+
this.sampleRate = 44100;
|
|
38188
|
+
this._padding = 0;
|
|
38189
|
+
this._seekPosition = 0;
|
|
38190
|
+
this.ready = new EventEmitter();
|
|
38191
|
+
this.samplesPlayed = new EventEmitterOfT();
|
|
38192
|
+
this.timeUpdate = new EventEmitterOfT();
|
|
38193
|
+
this.sampleRequest = new EventEmitter();
|
|
38194
|
+
}
|
|
38195
|
+
get handler() {
|
|
38196
|
+
return this._handler;
|
|
38197
|
+
}
|
|
38198
|
+
set handler(value) {
|
|
38199
|
+
if (value) {
|
|
38200
|
+
if (this._seekPosition !== 0) {
|
|
38201
|
+
value.seekTo(this._seekPosition);
|
|
38202
|
+
this._seekPosition = 0;
|
|
38203
|
+
}
|
|
38204
|
+
}
|
|
38205
|
+
this._handler = value;
|
|
38206
|
+
}
|
|
38207
|
+
get backingTrackDuration() {
|
|
38208
|
+
return this.handler?.backingTrackDuration ?? 0;
|
|
38209
|
+
}
|
|
38210
|
+
get playbackRate() {
|
|
38211
|
+
return this.handler?.playbackRate ?? 1;
|
|
38212
|
+
}
|
|
38213
|
+
set playbackRate(value) {
|
|
38214
|
+
const handler = this.handler;
|
|
38215
|
+
if (handler) {
|
|
38216
|
+
handler.playbackRate = value;
|
|
38217
|
+
}
|
|
38218
|
+
}
|
|
38219
|
+
get masterVolume() {
|
|
38220
|
+
return this.handler?.masterVolume ?? 1;
|
|
38221
|
+
}
|
|
38222
|
+
set masterVolume(value) {
|
|
38223
|
+
const handler = this.handler;
|
|
38224
|
+
if (handler) {
|
|
38225
|
+
handler.masterVolume = value;
|
|
38226
|
+
}
|
|
38227
|
+
}
|
|
38228
|
+
seekTo(time) {
|
|
38229
|
+
const handler = this.handler;
|
|
38230
|
+
if (handler) {
|
|
38231
|
+
handler.seekTo(time - this._padding);
|
|
38232
|
+
}
|
|
38233
|
+
else {
|
|
38234
|
+
this._seekPosition = time - this._padding;
|
|
38235
|
+
}
|
|
38236
|
+
}
|
|
38237
|
+
loadBackingTrack(backingTrack) {
|
|
38238
|
+
this._padding = backingTrack.padding;
|
|
38239
|
+
}
|
|
38240
|
+
open(_bufferTimeInMilliseconds) {
|
|
38241
|
+
this.ready.trigger();
|
|
38242
|
+
}
|
|
38243
|
+
updatePosition(currentTime) {
|
|
38244
|
+
this.timeUpdate.trigger(currentTime + this._padding);
|
|
38245
|
+
}
|
|
38246
|
+
play() {
|
|
38247
|
+
this.handler?.play();
|
|
38248
|
+
}
|
|
38249
|
+
destroy() {
|
|
38250
|
+
}
|
|
38251
|
+
pause() {
|
|
38252
|
+
this.handler?.pause();
|
|
38253
|
+
}
|
|
38254
|
+
addSamples(_samples) {
|
|
38255
|
+
}
|
|
38256
|
+
resetSamples() {
|
|
38257
|
+
}
|
|
38258
|
+
activate() {
|
|
38259
|
+
}
|
|
38260
|
+
async enumerateOutputDevices() {
|
|
38261
|
+
const empty = [];
|
|
38262
|
+
return empty;
|
|
38263
|
+
}
|
|
38264
|
+
async setOutputDevice(_device) {
|
|
38265
|
+
}
|
|
38266
|
+
async getOutputDevice() {
|
|
38267
|
+
return null;
|
|
38268
|
+
}
|
|
38269
|
+
}
|
|
38270
|
+
class ExternalMediaPlayer extends BackingTrackPlayer {
|
|
38271
|
+
get handler() {
|
|
38272
|
+
return this.output.handler;
|
|
38273
|
+
}
|
|
38274
|
+
set handler(value) {
|
|
38275
|
+
this.output.handler = value;
|
|
38276
|
+
}
|
|
38277
|
+
constructor(bufferTimeInMilliseconds) {
|
|
38278
|
+
super(new ExternalMediaSynthOutput(), bufferTimeInMilliseconds);
|
|
38279
|
+
}
|
|
38280
|
+
}
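A rough sketch of what a handler for `PlayerMode.EnabledExternalMedia` could look like, based solely on the members `ExternalMediaSynthOutput` above calls (`play`, `pause`, `seekTo`, `playbackRate`, `masterVolume`, `backingTrackDuration`); times appear to be in milliseconds. How the handler is attached and how position updates are pushed into alphaTab goes through the external-media APIs mentioned in the `PlayerMode` docs and is not shown here, so treat this purely as an interface sketch backed by an HTML video element.
const video = document.querySelector('video');
const handler = {
    get backingTrackDuration() { return (video.duration || 0) * 1000; },
    get playbackRate() { return video.playbackRate; },
    set playbackRate(value) { video.playbackRate = value; },
    get masterVolume() { return video.volume; },
    set masterVolume(value) { video.volume = value; },
    seekTo(timeInMs) { video.currentTime = timeInMs / 1000; },
    play() { video.play(); },
    pause() { video.pause(); }
};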
|
|
38281
|
+
|
|
37381
38282
|
class SelectionInfo {
|
|
37382
38283
|
constructor(beat) {
|
|
37383
38284
|
this.bounds = null;
|
|
@@ -37391,6 +38292,12 @@ class SelectionInfo {
|
|
|
37391
38292
|
* @csharp_public
|
|
37392
38293
|
*/
|
|
37393
38294
|
class AlphaTabApiBase {
|
|
38295
|
+
/**
|
|
38296
|
+
* The player mode which is actually active (e.g. to determine whether a backing track or the synthesizer is used).
|
|
38297
|
+
*/
|
|
38298
|
+
get actualPlayerMode() {
|
|
38299
|
+
return this._actualPlayerMode;
|
|
38300
|
+
}
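A small sketch of how the getter might be used once the player has been (re)initialized for the current score; the `PlayerMode` export location is an assumption as noted earlier.
if (api.actualPlayerMode === alphaTab.PlayerMode.EnabledBackingTrack) {
    // the loaded file brought its own audio, the synthesizer is not used
} else if (api.actualPlayerMode === alphaTab.PlayerMode.EnabledSynthesizer) {
    // midi synthesis via the loaded soundfont
}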
|
|
37394
38301
|
/**
|
|
37395
38302
|
* The score holding all information about the song being rendered
|
|
37396
38303
|
* @category Properties - Core
|
|
@@ -37460,10 +38367,8 @@ class AlphaTabApiBase {
|
|
|
37460
38367
|
this._isDestroyed = false;
|
|
37461
38368
|
this._score = null;
|
|
37462
38369
|
this._tracks = [];
|
|
38370
|
+
this._actualPlayerMode = PlayerMode.Disabled;
|
|
37463
38371
|
this._tickCache = null;
|
|
37464
|
-
/**
|
|
37465
|
-
* Gets the alphaSynth player used for playback. This is the low-level API to the Midi synthesizer used for playback.
|
|
37466
|
-
*/
|
|
37467
38372
|
/**
|
|
37468
38373
|
* The alphaSynth player used for playback.
|
|
37469
38374
|
* @remarks
|
|
@@ -38500,6 +39405,10 @@ class AlphaTabApiBase {
|
|
|
38500
39405
|
this.container = uiFacade.rootContainer;
|
|
38501
39406
|
uiFacade.initialize(this, settings);
|
|
38502
39407
|
Logger.logLevel = this.settings.core.logLevel;
|
|
39408
|
+
// backwards compatibility: remove in 2.0
|
|
39409
|
+
if (this.settings.player.playerMode === PlayerMode.Disabled && this.settings.player.enablePlayer) {
|
|
39410
|
+
this.settings.player.playerMode = PlayerMode.EnabledAutomatic;
|
|
39411
|
+
}
|
|
38503
39412
|
Environment.printEnvironmentInfo(false);
|
|
38504
39413
|
this.canvasElement = uiFacade.createCanvasElement();
|
|
38505
39414
|
this.container.appendChild(this.canvasElement);
|
|
@@ -38543,7 +39452,7 @@ class AlphaTabApiBase {
|
|
|
38543
39452
|
this.appendRenderResult(null); // marks last element
|
|
38544
39453
|
});
|
|
38545
39454
|
this.renderer.error.on(this.onError.bind(this));
|
|
38546
|
-
if (this.settings.player.
|
|
39455
|
+
if (this.settings.player.playerMode !== PlayerMode.Disabled) {
|
|
38547
39456
|
this.setupPlayer();
|
|
38548
39457
|
}
|
|
38549
39458
|
this.setupClickHandling();
|
|
@@ -38635,10 +39544,9 @@ class AlphaTabApiBase {
|
|
|
38635
39544
|
}
|
|
38636
39545
|
this.renderer.updateSettings(this.settings);
|
|
38637
39546
|
// enable/disable player if needed
|
|
38638
|
-
if (this.settings.player.
|
|
38639
|
-
this.setupPlayer()
|
|
38640
|
-
|
|
38641
|
-
this.player?.applyTranspositionPitches(MidiFileGenerator.buildTranspositionPitches(score, this.settings));
|
|
39547
|
+
if (this.settings.player.playerMode !== PlayerMode.Disabled) {
|
|
39548
|
+
if (this.setupPlayer() && score) {
|
|
39549
|
+
this.loadMidiForScore();
|
|
38642
39550
|
}
|
|
38643
39551
|
}
|
|
38644
39552
|
else {
|
|
@@ -39570,13 +40478,51 @@ class AlphaTabApiBase {
|
|
|
39570
40478
|
this.destroyCursors();
|
|
39571
40479
|
}
|
|
39572
40480
|
setupPlayer() {
|
|
40481
|
+
let mode = this.settings.player.playerMode;
|
|
40482
|
+
if (mode === PlayerMode.EnabledAutomatic) {
|
|
40483
|
+
const score = this.score;
|
|
40484
|
+
if (!score) {
|
|
40485
|
+
return false;
|
|
40486
|
+
}
|
|
40487
|
+
if (score?.backingTrack?.rawAudioFile) {
|
|
40488
|
+
mode = PlayerMode.EnabledBackingTrack;
|
|
40489
|
+
}
|
|
40490
|
+
else {
|
|
40491
|
+
mode = PlayerMode.EnabledSynthesizer;
|
|
40492
|
+
}
|
|
40493
|
+
}
|
|
40494
|
+
if (mode !== this._actualPlayerMode) {
|
|
40495
|
+
this.destroyPlayer();
|
|
40496
|
+
}
|
|
39573
40497
|
this.updateCursors();
|
|
39574
|
-
|
|
39575
|
-
|
|
40498
|
+
this._actualPlayerMode = mode;
|
|
40499
|
+
switch (mode) {
|
|
40500
|
+
case PlayerMode.Disabled:
|
|
40501
|
+
this.destroyPlayer();
|
|
40502
|
+
return false;
|
|
40503
|
+
case PlayerMode.EnabledSynthesizer:
|
|
40504
|
+
if (this.player) {
|
|
40505
|
+
return true;
|
|
40506
|
+
}
|
|
40507
|
+
// new player needed
|
|
40508
|
+
this.player = this.uiFacade.createWorkerPlayer();
|
|
40509
|
+
break;
|
|
40510
|
+
case PlayerMode.EnabledBackingTrack:
|
|
40511
|
+
if (this.player) {
|
|
40512
|
+
return true;
|
|
40513
|
+
}
|
|
40514
|
+
// new player needed
|
|
40515
|
+
this.player = this.uiFacade.createBackingTrackPlayer();
|
|
40516
|
+
break;
|
|
40517
|
+
case PlayerMode.EnabledExternalMedia:
|
|
40518
|
+
if (this.player) {
|
|
40519
|
+
return true;
|
|
40520
|
+
}
|
|
40521
|
+
this.player = new ExternalMediaPlayer(this.settings.player.bufferTimeInMilliseconds);
|
|
40522
|
+
break;
|
|
39576
40523
|
}
|
|
39577
|
-
this.player = this.uiFacade.createWorkerPlayer();
|
|
39578
40524
|
if (!this.player) {
|
|
39579
|
-
return;
|
|
40525
|
+
return false;
|
|
39580
40526
|
}
|
|
39581
40527
|
this.player.ready.on(() => {
|
|
39582
40528
|
this.loadMidiForScore();
|
|
@@ -39605,6 +40551,7 @@ class AlphaTabApiBase {
|
|
|
39605
40551
|
this.player.playbackRangeChanged.on(this.onPlaybackRangeChanged.bind(this));
|
|
39606
40552
|
this.player.finished.on(this.onPlayerFinished.bind(this));
|
|
39607
40553
|
this.setupPlayerEvents();
|
|
40554
|
+
return false;
|
|
39608
40555
|
}
|
|
39609
40556
|
loadMidiForScore() {
|
|
39610
40557
|
if (!this.score) {
|
|
@@ -39626,6 +40573,7 @@ class AlphaTabApiBase {
|
|
|
39626
40573
|
const player = this.player;
|
|
39627
40574
|
if (player) {
|
|
39628
40575
|
player.loadMidiFile(midiFile);
|
|
40576
|
+
player.loadBackingTrack(score, generator.syncPoints);
|
|
39629
40577
|
player.applyTranspositionPitches(generator.transpositionPitches);
|
|
39630
40578
|
}
|
|
39631
40579
|
}
|
|
@@ -40042,7 +40990,7 @@ class AlphaTabApiBase {
|
|
|
40042
40990
|
this._selectionWrapper = cursors.selectionWrapper;
|
|
40043
40991
|
}
|
|
40044
40992
|
if (this._currentBeat !== null) {
|
|
40045
|
-
this.cursorUpdateBeat(this._currentBeat, false, this._previousTick > 10, true);
|
|
40993
|
+
this.cursorUpdateBeat(this._currentBeat, false, this._previousTick > 10, 1, true);
|
|
40046
40994
|
}
|
|
40047
40995
|
}
|
|
40048
40996
|
else if (!this.settings.player.enableCursor && this._cursorWrapper) {
|
|
@@ -40057,13 +41005,14 @@ class AlphaTabApiBase {
|
|
|
40057
41005
|
// we need to update our position caches if we render a tablature
|
|
40058
41006
|
this.renderer.postRenderFinished.on(() => {
|
|
40059
41007
|
this._currentBeat = null;
|
|
40060
|
-
this.cursorUpdateTick(this._previousTick, false, this._previousTick > 10);
|
|
41008
|
+
this.cursorUpdateTick(this._previousTick, false, 1, this._previousTick > 10);
|
|
40061
41009
|
});
|
|
40062
41010
|
if (this.player) {
|
|
40063
41011
|
this.player.positionChanged.on(e => {
|
|
40064
41012
|
this._previousTick = e.currentTick;
|
|
40065
41013
|
this.uiFacade.beginInvoke(() => {
|
|
40066
|
-
|
|
41014
|
+
const cursorSpeed = e.modifiedTempo / e.originalTempo;
|
|
41015
|
+
this.cursorUpdateTick(e.currentTick, false, cursorSpeed, false, e.isSeek);
|
|
40067
41016
|
});
|
|
40068
41017
|
});
|
|
40069
41018
|
this.player.stateChanged.on(e => {
|
|
@@ -40084,14 +41033,15 @@ class AlphaTabApiBase {
|
|
|
40084
41033
|
* @param stop
|
|
40085
41034
|
* @param shouldScroll whether we should scroll to the bar (if scrolling is active)
|
|
40086
41035
|
*/
|
|
40087
|
-
cursorUpdateTick(tick, stop, shouldScroll = false, forceUpdate = false) {
|
|
41036
|
+
cursorUpdateTick(tick, stop, cursorSpeed, shouldScroll = false, forceUpdate = false) {
|
|
41037
|
+
this._previousTick = tick;
|
|
40088
41038
|
const cache = this._tickCache;
|
|
40089
41039
|
if (cache) {
|
|
40090
41040
|
const tracks = this._trackIndexLookup;
|
|
40091
41041
|
if (tracks != null && tracks.size > 0) {
|
|
40092
41042
|
const beat = cache.findBeat(tracks, tick, this._currentBeat);
|
|
40093
41043
|
if (beat) {
|
|
40094
|
-
this.cursorUpdateBeat(beat, stop, shouldScroll, forceUpdate || this.playerState === PlayerState.Paused);
|
|
41044
|
+
this.cursorUpdateBeat(beat, stop, shouldScroll, cursorSpeed, forceUpdate || this.playerState === PlayerState.Paused);
|
|
40095
41045
|
}
|
|
40096
41046
|
}
|
|
40097
41047
|
}
|
|
@@ -40099,7 +41049,7 @@ class AlphaTabApiBase {
|
|
|
40099
41049
|
/**
|
|
40100
41050
|
* updates the cursors to highlight the specified beat
|
|
40101
41051
|
*/
|
|
40102
|
-
cursorUpdateBeat(lookupResult, stop, shouldScroll, forceUpdate = false) {
|
|
41052
|
+
cursorUpdateBeat(lookupResult, stop, shouldScroll, cursorSpeed, forceUpdate = false) {
|
|
40103
41053
|
const beat = lookupResult.beat;
|
|
40104
41054
|
const nextBeat = lookupResult.nextBeat?.beat ?? null;
|
|
40105
41055
|
const duration = lookupResult.duration;
|
|
@@ -40131,7 +41081,7 @@ class AlphaTabApiBase {
|
|
|
40131
41081
|
this._previousCursorCache = cache;
|
|
40132
41082
|
this._previousStateForCursor = this._playerState;
|
|
40133
41083
|
this.uiFacade.beginInvoke(() => {
|
|
40134
|
-
this.internalCursorUpdateBeat(beat, nextBeat, duration, stop, beatsToHighlight, cache, beatBoundings, shouldScroll, lookupResult.cursorMode);
|
|
41084
|
+
this.internalCursorUpdateBeat(beat, nextBeat, duration, stop, beatsToHighlight, cache, beatBoundings, shouldScroll, lookupResult.cursorMode, cursorSpeed);
|
|
40135
41085
|
});
|
|
40136
41086
|
}
|
|
40137
41087
|
/**
|
|
@@ -40196,7 +41146,7 @@ class AlphaTabApiBase {
|
|
|
40196
41146
|
}
|
|
40197
41147
|
}
|
|
40198
41148
|
}
|
|
40199
|
-
internalCursorUpdateBeat(beat, nextBeat, duration, stop, beatsToHighlight, cache, beatBoundings, shouldScroll, cursorMode) {
|
|
41149
|
+
internalCursorUpdateBeat(beat, nextBeat, duration, stop, beatsToHighlight, cache, beatBoundings, shouldScroll, cursorMode, cursorSpeed) {
|
|
40200
41150
|
const barCursor = this._barCursor;
|
|
40201
41151
|
const beatCursor = this._beatCursor;
|
|
40202
41152
|
const barBoundings = beatBoundings.barBounds.masterBarBounds;
|
|
@@ -40205,12 +41155,29 @@ class AlphaTabApiBase {
|
|
|
40205
41155
|
if (barCursor) {
|
|
40206
41156
|
barCursor.setBounds(barBounds.x, barBounds.y, barBounds.w, barBounds.h);
|
|
40207
41157
|
}
|
|
41158
|
+
let nextBeatX = barBoundings.visualBounds.x + barBoundings.visualBounds.w;
|
|
41159
|
+
// get position of next beat on same system
|
|
41160
|
+
if (nextBeat && cursorMode === MidiTickLookupFindBeatResultCursorMode.ToNextBext) {
|
|
41161
|
+
// if we are moving within the same bar or to the next bar
|
|
41162
|
+
// transition to the next beat, otherwise transition to the end of the bar.
|
|
41163
|
+
const nextBeatBoundings = cache.findBeat(nextBeat);
|
|
41164
|
+
if (nextBeatBoundings &&
|
|
41165
|
+
nextBeatBoundings.barBounds.masterBarBounds.staffSystemBounds === barBoundings.staffSystemBounds) {
|
|
41166
|
+
nextBeatX = nextBeatBoundings.onNotesX;
|
|
41167
|
+
}
|
|
41168
|
+
}
|
|
41169
|
+
let startBeatX = beatBoundings.onNotesX;
|
|
40208
41170
|
if (beatCursor) {
|
|
40209
|
-
//
|
|
41171
|
+
// relative positioning of the cursor
|
|
40210
41172
|
if (this.settings.player.enableAnimatedBeatCursor) {
|
|
40211
|
-
|
|
41173
|
+
const animationWidth = nextBeatX - beatBoundings.onNotesX;
|
|
41174
|
+
const relativePosition = this._previousTick - this._currentBeat.start;
|
|
41175
|
+
const ratioPosition = relativePosition / this._currentBeat.tickDuration;
|
|
41176
|
+
startBeatX = beatBoundings.onNotesX + animationWidth * ratioPosition;
|
|
41177
|
+
duration -= duration * ratioPosition;
|
|
41178
|
+
beatCursor.transitionToX(0, startBeatX);
|
|
40212
41179
|
}
|
|
40213
|
-
beatCursor.setBounds(
|
|
41180
|
+
beatCursor.setBounds(startBeatX, barBounds.y, 1, barBounds.h);
|
|
40214
41181
|
}
|
|
40215
41182
|
// if playing, animate the cursor to the next beat
|
|
40216
41183
|
if (this.settings.player.enableElementHighlighting) {
|
|
@@ -40230,22 +41197,11 @@ class AlphaTabApiBase {
|
|
|
40230
41197
|
shouldNotifyBeatChange = true;
|
|
40231
41198
|
}
|
|
40232
41199
|
if (this.settings.player.enableAnimatedBeatCursor && beatCursor) {
|
|
40233
|
-
let nextBeatX = barBoundings.visualBounds.x + barBoundings.visualBounds.w;
|
|
40234
|
-
// get position of next beat on same system
|
|
40235
|
-
if (nextBeat && cursorMode === MidiTickLookupFindBeatResultCursorMode.ToNextBext) {
|
|
40236
|
-
// if we are moving within the same bar or to the next bar
|
|
40237
|
-
// transition to the next beat, otherwise transition to the end of the bar.
|
|
40238
|
-
const nextBeatBoundings = cache.findBeat(nextBeat);
|
|
40239
|
-
if (nextBeatBoundings &&
|
|
40240
|
-
nextBeatBoundings.barBounds.masterBarBounds.staffSystemBounds === barBoundings.staffSystemBounds) {
|
|
40241
|
-
nextBeatX = nextBeatBoundings.onNotesX;
|
|
40242
|
-
}
|
|
40243
|
-
}
|
|
40244
41200
|
if (isPlayingUpdate) {
|
|
40245
41201
|
// we need to put the transition to an own animation frame
|
|
40246
41202
|
// otherwise the stop animation above is not applied.
|
|
40247
41203
|
this.uiFacade.beginInvoke(() => {
|
|
40248
|
-
beatCursor.transitionToX(duration /
|
|
41204
|
+
beatCursor.transitionToX(duration / cursorSpeed, nextBeatX);
|
|
40249
41205
|
});
|
|
40250
41206
|
}
|
|
40251
41207
|
}
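A small worked example of the cursor speed introduced in this change (numbers are illustrative): `cursorSpeed` is computed in the `positionChanged` handler above as `modifiedTempo / originalTempo`, and the beat animation duration passed to `transitionToX` is divided by it.
const originalTempo = 120;                             // tempo written in the score
const modifiedTempo = 90;                              // tempo reported for the backing track / sync points
const cursorSpeed = modifiedTempo / originalTempo;     // 0.75
const beatDuration = 1000;                             // ms the beat would take at the original tempo
const animationDuration = beatDuration / cursorSpeed;  // ≈ 1333 ms, matching the slower audio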
|
|
@@ -40276,7 +41232,7 @@ class AlphaTabApiBase {
|
|
|
40276
41232
|
if (this._isDestroyed) {
|
|
40277
41233
|
return;
|
|
40278
41234
|
}
|
|
40279
|
-
if (this.settings.player.
|
|
41235
|
+
if (this.settings.player.playerMode !== PlayerMode.Disabled &&
|
|
40280
41236
|
this.settings.player.enableCursor &&
|
|
40281
41237
|
this.settings.player.enableUserInteraction) {
|
|
40282
41238
|
this._selectionStart = new SelectionInfo(beat);
|
|
@@ -40318,7 +41274,7 @@ class AlphaTabApiBase {
|
|
|
40318
41274
|
if (this._isDestroyed) {
|
|
40319
41275
|
return;
|
|
40320
41276
|
}
|
|
40321
|
-
if (this.settings.player.
|
|
41277
|
+
if (this.settings.player.playerMode !== PlayerMode.Disabled &&
|
|
40322
41278
|
this.settings.player.enableCursor &&
|
|
40323
41279
|
this.settings.player.enableUserInteraction) {
|
|
40324
41280
|
if (this._selectionEnd) {
|
|
@@ -40339,7 +41295,7 @@ class AlphaTabApiBase {
|
|
|
40339
41295
|
// move to selection start
|
|
40340
41296
|
this._currentBeat = null; // reset current beat so it is updating the cursor
|
|
40341
41297
|
if (this._playerState === PlayerState.Paused) {
|
|
40342
|
-
this.cursorUpdateTick(this._tickCache.getBeatStart(this._selectionStart.beat), false);
|
|
41298
|
+
this.cursorUpdateTick(this._tickCache.getBeatStart(this._selectionStart.beat), false, 1);
|
|
40343
41299
|
}
|
|
40344
41300
|
this.tickPosition = realMasterBarStart + this._selectionStart.beat.playbackStart;
|
|
40345
41301
|
// set playback range
|
|
@@ -40451,7 +41407,7 @@ class AlphaTabApiBase {
|
|
|
40451
41407
|
});
|
|
40452
41408
|
this.renderer.postRenderFinished.on(() => {
|
|
40453
41409
|
if (!this._selectionStart ||
|
|
40454
|
-
|
|
41410
|
+
this.settings.player.playerMode === PlayerMode.Disabled ||
|
|
40455
41411
|
!this.settings.player.enableCursor ||
|
|
40456
41412
|
!this.settings.player.enableUserInteraction) {
|
|
40457
41413
|
return;
|
|
@@ -40529,6 +41485,9 @@ class AlphaTabApiBase {
|
|
|
40529
41485
|
}
|
|
40530
41486
|
this.scoreLoaded.trigger(score);
|
|
40531
41487
|
this.uiFacade.triggerEvent(this.container, 'scoreLoaded', score);
|
|
41488
|
+
if (this.setupPlayer()) {
|
|
41489
|
+
this.loadMidiForScore();
|
|
41490
|
+
}
|
|
40532
41491
|
}
|
|
40533
41492
|
onResize(e) {
|
|
40534
41493
|
if (this._isDestroyed) {
|
|
@@ -41269,52 +42228,14 @@ class AlphaSynthWebAudioSynthOutputDevice {
|
|
|
41269
42228
|
}
|
|
41270
42229
|
}
|
|
41271
42230
|
/**
|
|
42231
|
+
* Shared web audio helper functionality used by the different web outputs.
|
|
41272
42232
|
* @target web
|
|
41273
42233
|
*/
|
|
41274
|
-
class
|
|
41275
|
-
|
|
41276
|
-
|
|
41277
|
-
this._buffer = null;
|
|
41278
|
-
this._source = null;
|
|
41279
|
-
this.ready = new EventEmitter();
|
|
41280
|
-
this.samplesPlayed = new EventEmitterOfT();
|
|
41281
|
-
this.sampleRequest = new EventEmitter();
|
|
41282
|
-
this._knownDevices = [];
|
|
41283
|
-
}
|
|
41284
|
-
get sampleRate() {
|
|
41285
|
-
return this._context ? this._context.sampleRate : AlphaSynthWebAudioOutputBase.PreferredSampleRate;
|
|
41286
|
-
}
|
|
41287
|
-
activate(resumedCallback) {
|
|
41288
|
-
if (!this._context) {
|
|
41289
|
-
this._context = this.createAudioContext();
|
|
41290
|
-
}
|
|
41291
|
-
if (this._context.state === 'suspended' || this._context.state === 'interrupted') {
|
|
41292
|
-
Logger.debug('WebAudio', 'Audio Context is suspended, trying resume');
|
|
41293
|
-
this._context.resume().then(() => {
|
|
41294
|
-
Logger.debug('WebAudio', `Audio Context resume success: state=${this._context?.state}, sampleRate:${this._context?.sampleRate}`);
|
|
41295
|
-
if (resumedCallback) {
|
|
41296
|
-
resumedCallback();
|
|
41297
|
-
}
|
|
41298
|
-
}, reason => {
|
|
41299
|
-
Logger.warning('WebAudio', `Audio Context resume failed: state=${this._context?.state}, sampleRate:${this._context?.sampleRate}, reason=${reason}`);
|
|
41300
|
-
});
|
|
41301
|
-
}
|
|
42234
|
+
class WebAudioHelper {
|
|
42235
|
+
static findKnownDevice(sinkId) {
|
|
42236
|
+
return WebAudioHelper._knownDevices.find(d => d.deviceId === sinkId);
|
|
41302
42237
|
}
|
|
41303
|
-
|
|
41304
|
-
const ua = navigator.userAgent;
|
|
41305
|
-
if (ua.indexOf('iPhone') !== -1 || ua.indexOf('iPad') !== -1) {
|
|
41306
|
-
const context = this.createAudioContext();
|
|
41307
|
-
const buffer = context.createBuffer(1, 1, AlphaSynthWebAudioOutputBase.PreferredSampleRate);
|
|
41308
|
-
const dummy = context.createBufferSource();
|
|
41309
|
-
dummy.buffer = buffer;
|
|
41310
|
-
dummy.connect(context.destination);
|
|
41311
|
-
dummy.start(0);
|
|
41312
|
-
dummy.disconnect(0);
|
|
41313
|
-
// tslint:disable-next-line: no-floating-promises
|
|
41314
|
-
context.close();
|
|
41315
|
-
}
|
|
41316
|
-
}
|
|
41317
|
-
createAudioContext() {
|
|
42238
|
+
static createAudioContext() {
|
|
41318
42239
|
if ('AudioContext' in Environment.globalThis) {
|
|
41319
42240
|
return new AudioContext();
|
|
41320
42241
|
}
|
|
@@ -41323,73 +42244,18 @@ class AlphaSynthWebAudioOutputBase {
|
|
|
41323
42244
|
}
|
|
41324
42245
|
throw new AlphaTabError(AlphaTabErrorType.General, 'AudioContext not found');
|
|
41325
42246
|
}
|
|
41326
|
-
|
|
41327
|
-
this.patchIosSampleRate();
|
|
41328
|
-
this._context = this.createAudioContext();
|
|
41329
|
-
const ctx = this._context;
|
|
41330
|
-
if (ctx.state === 'suspended') {
|
|
41331
|
-
this.registerResumeHandler();
|
|
41332
|
-
}
|
|
41333
|
-
}
|
|
41334
|
-
registerResumeHandler() {
|
|
41335
|
-
this._resumeHandler = (() => {
|
|
41336
|
-
this.activate(() => {
|
|
41337
|
-
this.unregisterResumeHandler();
|
|
41338
|
-
});
|
|
41339
|
-
}).bind(this);
|
|
41340
|
-
document.body.addEventListener('touchend', this._resumeHandler, false);
|
|
41341
|
-
document.body.addEventListener('click', this._resumeHandler, false);
|
|
41342
|
-
}
|
|
41343
|
-
unregisterResumeHandler() {
|
|
41344
|
-
const resumeHandler = this._resumeHandler;
|
|
41345
|
-
if (resumeHandler) {
|
|
41346
|
-
document.body.removeEventListener('touchend', resumeHandler, false);
|
|
41347
|
-
document.body.removeEventListener('click', resumeHandler, false);
|
|
41348
|
-
}
|
|
41349
|
-
}
|
|
41350
|
-
play() {
|
|
41351
|
-
const ctx = this._context;
|
|
41352
|
-
this.activate();
|
|
41353
|
-
// create an empty buffer source (silence)
|
|
41354
|
-
this._buffer = ctx.createBuffer(2, AlphaSynthWebAudioOutputBase.BufferSize, ctx.sampleRate);
|
|
41355
|
-
this._source = ctx.createBufferSource();
|
|
41356
|
-
this._source.buffer = this._buffer;
|
|
41357
|
-
this._source.loop = true;
|
|
41358
|
-
}
|
|
41359
|
-
pause() {
|
|
41360
|
-
if (this._source) {
|
|
41361
|
-
this._source.stop(0);
|
|
41362
|
-
this._source.disconnect();
|
|
41363
|
-
}
|
|
41364
|
-
this._source = null;
|
|
41365
|
-
}
|
|
41366
|
-
destroy() {
|
|
41367
|
-
this.pause();
|
|
41368
|
-
this._context?.close();
|
|
41369
|
-
this._context = null;
|
|
41370
|
-
this.unregisterResumeHandler();
|
|
41371
|
-
}
|
|
41372
|
-
onSamplesPlayed(numberOfSamples) {
|
|
41373
|
-
this.samplesPlayed.trigger(numberOfSamples);
|
|
41374
|
-
}
|
|
41375
|
-
onSampleRequest() {
|
|
41376
|
-
this.sampleRequest.trigger();
|
|
41377
|
-
}
|
|
41378
|
-
onReady() {
|
|
41379
|
-
this.ready.trigger();
|
|
41380
|
-
}
|
|
41381
|
-
async checkSinkIdSupport() {
|
|
42247
|
+
static async checkSinkIdSupport() {
|
|
41382
42248
|
// https://caniuse.com/mdn-api_audiocontext_sinkid
|
|
41383
|
-
const context =
|
|
42249
|
+
const context = WebAudioHelper.createAudioContext();
|
|
41384
42250
|
if (!('setSinkId' in context)) {
|
|
41385
42251
|
Logger.warning('WebAudio', 'Browser does not support changing the output device');
|
|
41386
42252
|
return false;
|
|
41387
42253
|
}
|
|
41388
42254
|
return true;
|
|
41389
42255
|
}
|
|
41390
|
-
async enumerateOutputDevices() {
|
|
42256
|
+
static async enumerateOutputDevices() {
|
|
41391
42257
|
try {
|
|
41392
|
-
if (!(await
|
|
42258
|
+
if (!(await WebAudioHelper.checkSinkIdSupport())) {
|
|
41393
42259
|
return [];
|
|
41394
42260
|
}
|
|
41395
42261
|
// Request permissions
|
|
@@ -41430,7 +42296,7 @@ class AlphaSynthWebAudioOutputBase {
|
|
|
41430
42296
|
if (defaultDevice) {
|
|
41431
42297
|
defaultDevice.isDefault = true;
|
|
41432
42298
|
}
|
|
41433
|
-
|
|
42299
|
+
WebAudioHelper._knownDevices = final;
|
|
41434
42300
|
return final;
|
|
41435
42301
|
}
|
|
41436
42302
|
catch (e) {
|
|
@@ -41438,8 +42304,113 @@ class AlphaSynthWebAudioOutputBase {
|
|
|
41438
42304
|
return [];
|
|
41439
42305
|
}
|
|
41440
42306
|
}
|
|
42307
|
+
}
|
|
42308
|
+
WebAudioHelper._knownDevices = [];
|
|
42309
|
+
/**
|
|
42310
|
+
* @target web
|
|
42311
|
+
*/
|
|
42312
|
+
class AlphaSynthWebAudioOutputBase {
|
|
42313
|
+
constructor() {
|
|
42314
|
+
this._context = null;
|
|
42315
|
+
this._buffer = null;
|
|
42316
|
+
this._source = null;
|
|
42317
|
+
this.ready = new EventEmitter();
|
|
42318
|
+
this.samplesPlayed = new EventEmitterOfT();
|
|
42319
|
+
this.sampleRequest = new EventEmitter();
|
|
42320
|
+
}
|
|
42321
|
+
get sampleRate() {
|
|
42322
|
+
return this._context ? this._context.sampleRate : AlphaSynthWebAudioOutputBase.PreferredSampleRate;
|
|
42323
|
+
}
|
|
42324
|
+
activate(resumedCallback) {
|
|
42325
|
+
if (!this._context) {
|
|
42326
|
+
this._context = WebAudioHelper.createAudioContext();
|
|
42327
|
+
}
|
|
42328
|
+
if (this._context.state === 'suspended' || this._context.state === 'interrupted') {
|
|
42329
|
+
Logger.debug('WebAudio', 'Audio Context is suspended, trying resume');
|
|
42330
|
+
this._context.resume().then(() => {
|
|
42331
|
+
Logger.debug('WebAudio', `Audio Context resume success: state=${this._context?.state}, sampleRate:${this._context?.sampleRate}`);
|
|
42332
|
+
if (resumedCallback) {
|
|
42333
|
+
resumedCallback();
|
|
42334
|
+
}
|
|
42335
|
+
}, reason => {
|
|
42336
|
+
Logger.warning('WebAudio', `Audio Context resume failed: state=${this._context?.state}, sampleRate:${this._context?.sampleRate}, reason=${reason}`);
|
|
42337
|
+
});
|
|
42338
|
+
}
|
|
42339
|
+
}
|
|
42340
|
+
patchIosSampleRate() {
|
|
42341
|
+
const ua = navigator.userAgent;
|
|
42342
|
+
if (ua.indexOf('iPhone') !== -1 || ua.indexOf('iPad') !== -1) {
|
|
42343
|
+
const context = WebAudioHelper.createAudioContext();
|
|
42344
|
+
const buffer = context.createBuffer(1, 1, AlphaSynthWebAudioOutputBase.PreferredSampleRate);
|
|
42345
|
+
const dummy = context.createBufferSource();
|
|
42346
|
+
dummy.buffer = buffer;
|
|
42347
|
+
dummy.connect(context.destination);
|
|
42348
|
+
dummy.start(0);
|
|
42349
|
+
dummy.disconnect(0);
|
|
42350
|
+
// tslint:disable-next-line: no-floating-promises
|
|
42351
|
+
context.close();
|
|
42352
|
+
}
|
|
42353
|
+
}
|
|
42354
|
+
open(bufferTimeInMilliseconds) {
|
|
42355
|
+
this.patchIosSampleRate();
|
|
42356
|
+
this._context = WebAudioHelper.createAudioContext();
|
|
42357
|
+
const ctx = this._context;
|
|
42358
|
+
if (ctx.state === 'suspended') {
|
|
42359
|
+
this.registerResumeHandler();
|
|
42360
|
+
}
|
|
42361
|
+
}
|
|
42362
|
+
registerResumeHandler() {
|
|
42363
|
+
this._resumeHandler = (() => {
|
|
42364
|
+
this.activate(() => {
|
|
42365
|
+
this.unregisterResumeHandler();
|
|
42366
|
+
});
|
|
42367
|
+
}).bind(this);
|
|
42368
|
+
document.body.addEventListener('touchend', this._resumeHandler, false);
|
|
42369
|
+
document.body.addEventListener('click', this._resumeHandler, false);
|
|
42370
|
+
}
|
|
42371
|
+
unregisterResumeHandler() {
|
|
42372
|
+
const resumeHandler = this._resumeHandler;
|
|
42373
|
+
if (resumeHandler) {
|
|
42374
|
+
document.body.removeEventListener('touchend', resumeHandler, false);
|
|
42375
|
+
document.body.removeEventListener('click', resumeHandler, false);
|
|
42376
|
+
}
|
|
42377
|
+
}
|
|
42378
|
+
play() {
|
|
42379
|
+
const ctx = this._context;
|
|
42380
|
+
this.activate();
|
|
42381
|
+
// create an empty buffer source (silence)
|
|
42382
|
+
this._buffer = ctx.createBuffer(2, AlphaSynthWebAudioOutputBase.BufferSize, ctx.sampleRate);
|
|
42383
|
+
this._source = ctx.createBufferSource();
|
|
42384
|
+
this._source.buffer = this._buffer;
|
|
42385
|
+
this._source.loop = true;
|
|
42386
|
+
}
|
|
42387
|
+
pause() {
|
|
42388
|
+
if (this._source) {
|
|
42389
|
+
this._source.stop(0);
|
|
42390
|
+
this._source.disconnect();
|
|
42391
|
+
}
|
|
42392
|
+
this._source = null;
|
|
42393
|
+
}
|
|
42394
|
+
destroy() {
|
|
42395
|
+
this.pause();
|
|
42396
|
+
this._context?.close();
|
|
42397
|
+
this._context = null;
|
|
42398
|
+
this.unregisterResumeHandler();
|
|
42399
|
+
}
|
|
42400
|
+
onSamplesPlayed(numberOfSamples) {
|
|
42401
|
+
this.samplesPlayed.trigger(numberOfSamples);
|
|
42402
|
+
}
|
|
42403
|
+
onSampleRequest() {
|
|
42404
|
+
this.sampleRequest.trigger();
|
|
42405
|
+
}
|
|
42406
|
+
onReady() {
|
|
42407
|
+
this.ready.trigger();
|
|
42408
|
+
}
|
|
42409
|
+
enumerateOutputDevices() {
|
|
42410
|
+
return WebAudioHelper.enumerateOutputDevices();
|
|
42411
|
+
}
|
|
41441
42412
|
async setOutputDevice(device) {
|
|
41442
|
-
if (!(await
|
|
42413
|
+
if (!(await WebAudioHelper.checkSinkIdSupport())) {
|
|
41443
42414
|
return;
|
|
41444
42415
|
}
|
|
41445
42416
|
// https://developer.mozilla.org/en-US/docs/Web/API/AudioContext/setSinkId
|
|
@@ -41451,7 +42422,7 @@ class AlphaSynthWebAudioOutputBase {
|
|
|
41451
42422
|
}
|
|
41452
42423
|
}
|
|
41453
42424
|
async getOutputDevice() {
|
|
41454
|
-
if (!(await
|
|
42425
|
+
if (!(await WebAudioHelper.checkSinkIdSupport())) {
|
|
41455
42426
|
return null;
|
|
41456
42427
|
}
|
|
41457
42428
|
// https://developer.mozilla.org/en-US/docs/Web/API/AudioContext/sinkId
|
|
@@ -41460,7 +42431,7 @@ class AlphaSynthWebAudioOutputBase {
|
|
|
41460
42431
|
return null;
|
|
41461
42432
|
}
|
|
41462
42433
|
// fast path -> cached devices list
|
|
41463
|
-
let device =
|
|
42434
|
+
let device = WebAudioHelper.findKnownDevice(sinkId);
|
|
41464
42435
|
if (device) {
|
|
41465
42436
|
return device;
|
|
41466
42437
|
}
|
|
@@ -41908,7 +42879,7 @@ class AlphaSynthWebWorkerApi {
|
|
|
41908
42879
|
case 'alphaSynth.positionChanged':
|
|
41909
42880
|
this._timePosition = data.currentTime;
|
|
41910
42881
|
this._tickPosition = data.currentTick;
|
|
41911
|
-
this.positionChanged.trigger(new PositionChangedEventArgs(data.currentTime, data.endTime, data.currentTick, data.endTick, data.isSeek));
|
|
42882
|
+
this.positionChanged.trigger(new PositionChangedEventArgs(data.currentTime, data.endTime, data.currentTick, data.endTick, data.isSeek, data.originalTempo, data.modifiedTempo));
|
|
41912
42883
|
break;
|
|
41913
42884
|
case 'alphaSynth.midiEventsPlayed':
|
|
41914
42885
|
this.midiEventsPlayed.trigger(new MidiEventsPlayedEventArgs(data.events.map(JsonConverter.jsObjectToMidiEvent)));
|
|
@@ -41932,7 +42903,7 @@ class AlphaSynthWebWorkerApi {
|
|
|
41932
42903
|
break;
|
|
41933
42904
|
case 'alphaSynth.midiLoaded':
|
|
41934
42905
|
this.checkReadyForPlayback();
|
|
41935
|
-
this.midiLoaded.trigger(new PositionChangedEventArgs(data.currentTime, data.endTime, data.currentTick, data.endTick, data.isSeek));
|
|
42906
|
+
this.midiLoaded.trigger(new PositionChangedEventArgs(data.currentTime, data.endTime, data.currentTick, data.endTick, data.isSeek, data.originalTempo, data.modifiedTempo));
|
|
41936
42907
|
break;
|
|
41937
42908
|
case 'alphaSynth.midiLoadFailed':
|
|
41938
42909
|
this.checkReadyForPlayback();
|
|
@@ -41982,6 +42953,8 @@ class AlphaSynthWebWorkerApi {
|
|
|
41982
42953
|
this._outputIsReady = true;
|
|
41983
42954
|
this.checkReady();
|
|
41984
42955
|
}
|
|
42956
|
+
loadBackingTrack(_score) {
|
|
42957
|
+
}
|
|
41985
42958
|
}
|
|
41986
42959
|
|
|
41987
42960
|
/**
|
|
@@ -42349,6 +43322,123 @@ class ScalableHtmlElementContainer extends HtmlElementContainer {
|
|
|
42349
43322
|
}
|
|
42350
43323
|
}
|
|
42351
43324
|
|
|
43325
|
+
/**
|
|
43326
|
+
* @target web
|
|
43327
|
+
*/
|
|
43328
|
+
class AudioElementBackingTrackSynthOutput {
|
|
43329
|
+
constructor() {
|
|
43330
|
+
// fake rate
|
|
43331
|
+
this.sampleRate = 44100;
|
|
43332
|
+
this._padding = 0;
|
|
43333
|
+
this._updateInterval = 0;
|
|
43334
|
+
this.ready = new EventEmitter();
+        this.samplesPlayed = new EventEmitterOfT();
+        this.timeUpdate = new EventEmitterOfT();
+        this.sampleRequest = new EventEmitter();
+    }
+    get backingTrackDuration() {
+        const duration = this.audioElement.duration ?? 0;
+        return Number.isFinite(duration) ? duration * 1000 : 0;
+    }
+    get playbackRate() {
+        return this.audioElement.playbackRate;
+    }
+    set playbackRate(value) {
+        this.audioElement.playbackRate = value;
+    }
+    get masterVolume() {
+        return this.audioElement.volume;
+    }
+    set masterVolume(value) {
+        this.audioElement.volume = value;
+    }
+    seekTo(time) {
+        this.audioElement.currentTime = time / 1000 - this._padding;
+    }
+    loadBackingTrack(backingTrack) {
+        if (this.audioElement?.src) {
+            URL.revokeObjectURL(this.audioElement.src);
+        }
+        this._padding = backingTrack.padding / 1000;
+        const blob = new Blob([backingTrack.rawAudioFile]);
+        this.audioElement.src = URL.createObjectURL(blob);
+    }
+    open(_bufferTimeInMilliseconds) {
+        const audioElement = document.createElement('audio');
+        audioElement.style.display = 'none';
+        document.body.appendChild(audioElement);
+        audioElement.addEventListener('timeupdate', () => {
+            this.updatePosition();
+        });
+        this.audioElement = audioElement;
+        this.ready.trigger();
+    }
+    updatePosition() {
+        const timePos = (this.audioElement.currentTime + this._padding) * 1000;
+        this.timeUpdate.trigger(timePos);
+    }
+    play() {
+        this.audioElement.play();
+        this._updateInterval = window.setInterval(() => {
+            this.updatePosition();
+        }, 50);
+    }
+    destroy() {
+        const audioElement = this.audioElement;
+        if (audioElement) {
+            document.body.removeChild(audioElement);
+        }
+    }
+    pause() {
+        this.audioElement.pause();
+        window.clearInterval(this._updateInterval);
+    }
+    addSamples(_samples) {
+    }
+    resetSamples() {
+    }
+    activate() {
+    }
+    async enumerateOutputDevices() {
+        return WebAudioHelper.enumerateOutputDevices();
+    }
+    async setOutputDevice(device) {
+        if (!(await WebAudioHelper.checkSinkIdSupport())) {
+            return;
+        }
+        // https://developer.mozilla.org/en-US/docs/Web/API/AudioContext/setSinkId
+        if (!device) {
+            await this.audioElement.setSinkId('');
+        }
+        else {
+            await this.audioElement.setSinkId(device.deviceId);
+        }
+    }
+    async getOutputDevice() {
+        if (!(await WebAudioHelper.checkSinkIdSupport())) {
+            return null;
+        }
+        // https://developer.mozilla.org/en-US/docs/Web/API/AudioContext/sinkId
+        const sinkId = this.audioElement.sinkId;
+        if (typeof sinkId !== 'string' || sinkId === '' || sinkId === 'default') {
+            return null;
+        }
+        // fast path -> cached devices list
+        let device = WebAudioHelper.findKnownDevice(sinkId);
+        if (device) {
+            return device;
+        }
+        // slow path -> enumerate devices
+        const allDevices = await this.enumerateOutputDevices();
+        device = allDevices.find(d => d.deviceId === sinkId);
+        if (device) {
+            return device;
+        }
+        Logger.warning('WebAudio', 'Could not find output device in device list', sinkId, allDevices);
+        return null;
+    }
+}
+
 /**
  * @target web
  */
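
The new audio-element based output above keeps every public time value in milliseconds while the underlying HTMLAudioElement works in seconds, offset by the backing track's leading padding (see seekTo and updatePosition). A minimal sketch of just that conversion; the helper class and its names are illustrative, not part of the package:

    // Illustrative helper: millisecond song position <-> second-based element time,
    // shifted by the backing track padding, mirroring seekTo()/updatePosition() above.
    class PaddedAudioClock {
        constructor(audioElement, paddingInMilliseconds) {
            this._audioElement = audioElement;
            this._paddingInSeconds = paddingInMilliseconds / 1000;
        }
        // song position (ms) -> element time (s)
        seekTo(timeInMilliseconds) {
            this._audioElement.currentTime = timeInMilliseconds / 1000 - this._paddingInSeconds;
        }
        // element time (s) -> song position (ms)
        get currentTimeInMilliseconds() {
            return (this._audioElement.currentTime + this._paddingInSeconds) * 1000;
        }
    }
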
@@ -42985,6 +44075,9 @@ class BrowserUiFacade {
             window.requestAnimationFrame(step);
         }
     }
+    createBackingTrackPlayer() {
+        return new BackingTrackPlayer(new AudioElementBackingTrackSynthOutput(), this._api.settings.player.bufferTimeInMilliseconds);
+    }
 }
 
 /**
@@ -43127,7 +44220,7 @@ class AlphaTabApi extends AlphaTabApiBase {
         settings.core.file = null;
         settings.core.tracks = null;
         settings.player.enableCursor = false;
-        settings.player.
+        settings.player.playerMode = PlayerMode.Disabled;
         settings.player.enableElementHighlighting = false;
         settings.player.enableUserInteraction = false;
         settings.player.soundFont = null;
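
The changed line replaces the old player flag with the new PlayerMode enum when playback should stay off. A hedged sketch of the same setting from user code, assuming the script build that exposes the alphaTab global (element selector and variable names are illustrative):

    // Sketch: create an alphaTab instance with the player explicitly disabled.
    const settings = new alphaTab.Settings();
    settings.player.playerMode = alphaTab.PlayerMode.Disabled;
    const api = new alphaTab.AlphaTabApi(document.querySelector('#alphaTab'), settings);
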
@@ -57088,96 +58181,6 @@ class CapellaImporter extends ScoreImporter {
     }
 }
 
-/**
- * A very basic polyfill of the ResizeObserver which triggers
- * a the callback on window resize for all registered targets.
- * @target web
- */
-class ResizeObserverPolyfill {
-    constructor(callback) {
-        this._targets = new Set();
-        this._callback = callback;
-        window.addEventListener('resize', this.onWindowResize.bind(this), false);
-    }
-    observe(target) {
-        this._targets.add(target);
-    }
-    unobserve(target) {
-        this._targets.delete(target);
-    }
-    disconnect() {
-        this._targets.clear();
-    }
-    onWindowResize() {
-        const entries = [];
-        for (const t of this._targets) {
-            entries.push({
-                target: t,
-                // not used by alphaTab
-                contentRect: undefined,
-                borderBoxSize: undefined,
-                contentBoxSize: [],
-                devicePixelContentBoxSize: []
-            });
-        }
-        this._callback(entries, this);
-    }
-}
-
-/**
- * A polyfill of the InsersectionObserver
- * @target web
- */
-class IntersectionObserverPolyfill {
-    constructor(callback) {
-        this._elements = [];
-        let timer = null;
-        const oldCheck = this.check.bind(this);
-        this.check = () => {
-            if (!timer) {
-                timer = setTimeout(() => {
-                    oldCheck();
-                    timer = null;
-                }, 100);
-            }
-        };
-        this._callback = callback;
-        window.addEventListener('resize', this.check, true);
-        document.addEventListener('scroll', this.check, true);
-    }
-    observe(target) {
-        if (this._elements.indexOf(target) >= 0) {
-            return;
-        }
-        this._elements.push(target);
-        this.check();
-    }
-    unobserve(target) {
-        this._elements = this._elements.filter(item => {
-            return item !== target;
-        });
-    }
-    check() {
-        const entries = [];
-        for (const element of this._elements) {
-            const rect = element.getBoundingClientRect();
-            const isVisible = rect.top + rect.height >= 0 &&
-                rect.top <= window.innerHeight &&
-                rect.left + rect.width >= 0 &&
-                rect.left <= window.innerWidth;
-            if (isVisible) {
-                entries.push({
-                    target: element,
-                    isIntersecting: true
-                });
-            }
-        }
-        if (entries.length) {
-            this._callback(entries, this);
-        }
-    }
-}
-
 /******************************************************************************
 Copyright (c) Microsoft Corporation.
 
@@ -59293,9 +60296,9 @@ class VersionInfo {
         print(`build date: ${VersionInfo.date}`);
     }
 }
-VersionInfo.version = '1.6.0-alpha.
-VersionInfo.date = '2025-05-
-VersionInfo.commit = '
+VersionInfo.version = '1.6.0-alpha.1403';
+VersionInfo.date = '2025-05-09T02:06:22.101Z';
+VersionInfo.commit = '3644a11f557063573413de459c607a1f9c302a6a';
 
 /**
  * A factory for custom layout engines.
@@ -59766,29 +60769,6 @@ class Environment {
         if (Environment.webPlatform === WebPlatform.Browser || Environment.webPlatform === WebPlatform.BrowserModule) {
             Environment.registerJQueryPlugin();
             Environment.HighDpiFactor = window.devicePixelRatio;
-            // ResizeObserver API does not yet exist so long on Safari (only start 2020 with iOS Safari 13.7 and Desktop 13.1)
-            // so we better add a polyfill for it
-            if (!('ResizeObserver' in Environment.globalThis)) {
-                Environment.globalThis.ResizeObserver = ResizeObserverPolyfill;
-            }
-            // IntersectionObserver API does not on older iOS versions
-            // so we better add a polyfill for it
-            if (!('IntersectionObserver' in Environment.globalThis)) {
-                Environment.globalThis.IntersectionObserver = IntersectionObserverPolyfill;
-            }
-            if (!('replaceChildren' in Element.prototype)) {
-                Element.prototype.replaceChildren = function (...nodes) {
-                    this.innerHTML = '';
-                    this.append(...nodes);
-                };
-                Document.prototype.replaceChildren = Element.prototype.replaceChildren;
-                DocumentFragment.prototype.replaceChildren = Element.prototype.replaceChildren;
-            }
-            if (!('replaceAll' in String.prototype)) {
-                String.prototype.replaceAll = function (str, newStr) {
-                    return this.replace(new RegExp(str, 'g'), newStr);
-                };
-            }
         }
         Environment.createWebWorker = createWebWorker;
         Environment.createAudioWorklet = createAudioWorklet;
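
The block removed here followed the usual detect-then-polyfill pattern before the polyfills were relocated in the bundle. A minimal standalone sketch of that pattern; the NoopResizeObserver class is a hypothetical stand-in, not the package's polyfill:

    // Sketch: only install fallbacks where the native APIs are missing.
    class NoopResizeObserver { // hypothetical minimal stand-in
        constructor(callback) { this._callback = callback; }
        observe() {}
        unobserve() {}
        disconnect() {}
    }
    if (!('ResizeObserver' in globalThis)) {
        globalThis.ResizeObserver = NoopResizeObserver;
    }
    if (!('replaceAll' in String.prototype)) {
        // same regex-based fallback as in the removed lines above
        String.prototype.replaceAll = function (str, newStr) {
            return this.replace(new RegExp(str, 'g'), newStr);
        };
    }
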
@@ -63520,6 +64500,7 @@ const _barrel$3 = /*#__PURE__*/Object.freeze(/*#__PURE__*/Object.defineProperty(
     get AccidentalType () { return AccidentalType; },
     Automation,
     get AutomationType () { return AutomationType; },
+    BackingTrack,
     Bar,
     get BarLineStyle () { return BarLineStyle; },
     BarStyle,
@@ -63582,6 +64563,7 @@ const _barrel$3 = /*#__PURE__*/Object.freeze(/*#__PURE__*/Object.defineProperty(
     Staff,
     SustainPedalMarker,
     get SustainPedalMarkerType () { return SustainPedalMarkerType; },
+    SyncPointData,
     Track,
     get TrackNameMode () { return TrackNameMode; },
     get TrackNameOrientation () { return TrackNameOrientation; },
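
These two hunks register the new BackingTrack and SyncPointData types in the model barrel, so they surface on the public model namespace. A hedged access sketch (constructor arguments, if any, are not shown in this diff and are assumed to be optional here):

    // Sketch: the new model types become reachable through the exported `model` barrel.
    import { model } from '@coderline/alphatab';

    const backingTrack = new model.BackingTrack();   // added to the barrel in this diff
    const syncPointData = new model.SyncPointData(); // added to the barrel in this diff
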
@@ -63646,4 +64628,4 @@ const _jsonbarrel = /*#__PURE__*/Object.freeze(/*#__PURE__*/Object.definePropert
     __proto__: null
 }, Symbol.toStringTag, { value: 'Module' }));
 
-export { AlphaTabApi, AlphaTabApiBase, AlphaTabError, AlphaTabErrorType, ConsoleLogger, CoreSettings, DisplaySettings, Environment, FileLoadError, FingeringMode, FormatError, ImporterSettings, LayoutMode, LogLevel, Logger, NotationElement, NotationMode, NotationSettings, PlayerOutputMode, PlayerSettings, ProgressEventArgs, RenderEngineFactory, RenderingResources, ResizeEventArgs, ScrollMode, Settings, SlidePlaybackSettings, StaveProfile, SystemsLayoutMode, TabRhythmMode, VibratoPlaybackSettings, WebPlatform, _barrel$5 as exporter, _barrel$7 as importer, _barrel$6 as io, _jsonbarrel as json, VersionInfo as meta, _barrel$4 as midi, _barrel$3 as model, _barrel$1 as platform, _barrel$2 as rendering, _barrel as synth };
+export { AlphaTabApi, AlphaTabApiBase, AlphaTabError, AlphaTabErrorType, ConsoleLogger, CoreSettings, DisplaySettings, Environment, FileLoadError, FingeringMode, FormatError, ImporterSettings, LayoutMode, LogLevel, Logger, NotationElement, NotationMode, NotationSettings, PlayerMode, PlayerOutputMode, PlayerSettings, ProgressEventArgs, RenderEngineFactory, RenderingResources, ResizeEventArgs, ScrollMode, Settings, SlidePlaybackSettings, StaveProfile, SystemsLayoutMode, TabRhythmMode, VibratoPlaybackSettings, WebPlatform, _barrel$5 as exporter, _barrel$7 as importer, _barrel$6 as io, _jsonbarrel as json, VersionInfo as meta, _barrel$4 as midi, _barrel$3 as model, _barrel$1 as platform, _barrel$2 as rendering, _barrel as synth };
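
The final change simply adds PlayerMode to the module's named exports. A hedged ESM counterpart to the earlier global-namespace sketch (resolution of the package entry point by your bundler is assumed):

    // Sketch: PlayerMode is now importable directly from the ESM entry point.
    import { AlphaTabApi, PlayerMode, Settings } from '@coderline/alphatab';

    const settings = new Settings();
    settings.player.playerMode = PlayerMode.Disabled; // the only member this diff confirms
    const api = new AlphaTabApi(document.querySelector('#alphaTab'), settings);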