supersonic-scsynth 0.1.9 → 0.2.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +1 -1
- package/dist/supersonic.js +808 -288
- package/dist/wasm/manifest.json +3 -3
- package/dist/wasm/scsynth-nrt.wasm +0 -0
- package/dist/workers/debug_worker.js +16 -3
- package/dist/workers/osc_in_worker.js +15 -2
- package/dist/workers/osc_out_prescheduler_worker.js +575 -0
- package/dist/workers/osc_out_worker.js +46 -183
- package/dist/workers/ring_buffer_worker_base.js +305 -0
- package/dist/workers/scsynth_audio_worklet.js +44 -23
- package/dist/workers/system_worker.js +64 -0
- package/package.json +1 -1
package/dist/supersonic.js
CHANGED
@@ -769,11 +769,15 @@ var ScsynthOSC = class {
  constructor() {
  this.workers = {
  oscOut: null,
+ // Scheduler worker (now also writes directly to ring buffer)
  oscIn: null,
  debug: null
  };
  this.callbacks = {
-
+ onRawOSC: null,
+ // Raw binary OSC callback
+ onParsedOSC: null,
+ // Parsed OSC callback
  onDebugMessage: null,
  onError: null,
  onInitialized: null
@@ -795,12 +799,12 @@ var ScsynthOSC = class {
  this.ringBufferBase = ringBufferBase;
  this.bufferConstants = bufferConstants;
  try {
- this.workers.oscOut = new Worker("./dist/workers/
+ this.workers.oscOut = new Worker("./dist/workers/osc_out_prescheduler_worker.js");
  this.workers.oscIn = new Worker("./dist/workers/osc_in_worker.js");
  this.workers.debug = new Worker("./dist/workers/debug_worker.js");
  this.setupWorkerHandlers();
  const initPromises = [
- this.initWorker(this.workers.oscOut, "OSC
+ this.initWorker(this.workers.oscOut, "OSC SCHEDULER+WRITER"),
  this.initWorker(this.workers.oscIn, "OSC IN"),
  this.initWorker(this.workers.debug, "DEBUG")
  ];
@@ -851,19 +855,24 @@ var ScsynthOSC = class {
  const data = event.data;
  switch (data.type) {
  case "messages":
-
-
-
-
-
-
-
-
-
-
+ data.messages.forEach((msg) => {
+ if (!msg.oscData) return;
+ if (this.callbacks.onRawOSC) {
+ this.callbacks.onRawOSC({
+ oscData: msg.oscData,
+ sequence: msg.sequence
+ });
+ }
+ if (this.callbacks.onParsedOSC) {
+ try {
+ const options = { metadata: false, unpackSingleArgs: false };
+ const decoded = osc_default.readPacket(msg.oscData, options);
+ this.callbacks.onParsedOSC(decoded);
+ } catch (e) {
+ console.error("[ScsynthOSC] Failed to decode OSC message:", e, msg);
  }
- }
- }
+ }
+ });
  break;
  case "error":
  console.error("[ScsynthOSC] OSC IN error:", data.error);
@@ -906,23 +915,24 @@ var ScsynthOSC = class {
  /**
  * Send OSC data (message or bundle)
  * - OSC messages are sent immediately
- * - OSC bundles are scheduled based on
+ * - OSC bundles are scheduled based on audioTimeS (target audio time)
  *
  * @param {Uint8Array} oscData - Binary OSC data (message or bundle)
- * @param {Object} options - Optional metadata (editorId, runTag,
+ * @param {Object} options - Optional metadata (editorId, runTag, audioTimeS, currentTimeS)
  */
  send(oscData, options = {}) {
  if (!this.initialized) {
  console.error("[ScsynthOSC] Not initialized");
  return;
  }
- const { editorId = 0, runTag = "",
+ const { editorId = 0, runTag = "", audioTimeS = null, currentTimeS = null } = options;
  this.workers.oscOut.postMessage({
  type: "send",
  oscData,
  editorId,
  runTag,
-
+ audioTimeS,
+ currentTimeS
  });
  }
  /**
@@ -1021,10 +1031,16 @@ var ScsynthOSC = class {
  });
  }
  /**
- * Set callback for OSC messages received from scsynth
+ * Set callback for raw binary OSC messages received from scsynth
  */
-
- this.callbacks.
+ onRawOSC(callback) {
+ this.callbacks.onRawOSC = callback;
+ }
+ /**
+ * Set callback for parsed OSC messages received from scsynth
+ */
+ onParsedOSC(callback) {
+ this.callbacks.onParsedOSC = callback;
  }
  /**
  * Set callback for debug messages
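The hunks above replace the single OSC receive callback with separate raw and parsed paths: onRawOSC delivers the untouched binary packet (plus a sequence number), while onParsedOSC delivers the result of osc_default.readPacket. A minimal usage sketch of the new API, assuming `osc` is an already-initialized ScsynthOSC instance and `logStore` is a placeholder for application code:

    osc.onRawOSC(({ oscData, sequence }) => {
      // Untouched bytes from scsynth; useful for recording or forwarding.
      logStore.push({ sequence, bytes: oscData });
    });
    osc.onParsedOSC((msg) => {
      // Decoded packet; msg.address is the OSC address, msg.args the arguments.
      if (msg.address === "/status.reply") {
        console.log("scsynth status:", msg.args);
      }
    });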
@@ -1584,8 +1600,289 @@ var MemPool = class {
  var __blockDataAddress = (blockAddress) => blockAddress > 0 ? blockAddress + SIZEOF_MEM_BLOCK : 0;
  var __blockSelfAddress = (dataAddress) => dataAddress > 0 ? dataAddress - SIZEOF_MEM_BLOCK : 0;

+ // js/timing_constants.js
+ var NTP_EPOCH_OFFSET = 2208988800;
+ var DRIFT_UPDATE_INTERVAL_MS = 15e3;
+
  // js/supersonic.js
- var
+ var BufferManager = class {
+ constructor(options) {
+ const {
+ audioContext,
+ sharedBuffer,
+ bufferPool,
+ allocatedBuffers,
+ resolveAudioPath,
+ registerPendingOp
+ } = options;
+ this.audioContext = audioContext;
+ this.sharedBuffer = sharedBuffer;
+ this.bufferPool = bufferPool;
+ this.allocatedBuffers = allocatedBuffers;
+ this.resolveAudioPath = resolveAudioPath;
+ this.registerPendingOp = registerPendingOp;
+ this.bufferLocks = /* @__PURE__ */ new Map();
+ this.GUARD_BEFORE = 3;
+ this.GUARD_AFTER = 1;
+ this.MAX_BUFFERS = 1024;
+ }
+ #validateBufferNumber(bufnum) {
+ if (!Number.isInteger(bufnum) || bufnum < 0 || bufnum >= this.MAX_BUFFERS) {
+ throw new Error(`Invalid buffer number ${bufnum} (must be 0-${this.MAX_BUFFERS - 1})`);
+ }
+ }
+ async prepareFromFile(params) {
+ const {
+ bufnum,
+ path,
+ startFrame = 0,
+ numFrames = 0,
+ channels = null
+ } = params;
+ this.#validateBufferNumber(bufnum);
+ let allocatedPtr = null;
+ let pendingToken = null;
+ let allocationRegistered = false;
+ const releaseLock = await this.#acquireBufferLock(bufnum);
+ let lockReleased = false;
+ try {
+ await this.#awaitPendingReplacement(bufnum);
+ const resolvedPath = this.resolveAudioPath(path);
+ const response = await fetch(resolvedPath);
+ if (!response.ok) {
+ throw new Error(`Failed to fetch ${resolvedPath}: ${response.status} ${response.statusText}`);
+ }
+ const arrayBuffer = await response.arrayBuffer();
+ const audioBuffer = await this.audioContext.decodeAudioData(arrayBuffer);
+ const start = Math.max(0, Math.floor(startFrame || 0));
+ const availableFrames = audioBuffer.length - start;
+ const framesRequested = numFrames && numFrames > 0 ? Math.min(Math.floor(numFrames), availableFrames) : availableFrames;
+ if (framesRequested <= 0) {
+ throw new Error(`No audio frames available for buffer ${bufnum} from ${path}`);
+ }
+ const selectedChannels = this.#normalizeChannels(channels, audioBuffer.numberOfChannels);
+ const numChannels = selectedChannels.length;
+ const totalSamples = framesRequested * numChannels + (this.GUARD_BEFORE + this.GUARD_AFTER) * numChannels;
+ allocatedPtr = this.#malloc(totalSamples);
+ const interleaved = new Float32Array(totalSamples);
+ const dataOffset = this.GUARD_BEFORE * numChannels;
+ for (let frame = 0; frame < framesRequested; frame++) {
+ for (let ch = 0; ch < numChannels; ch++) {
+ const sourceChannel = selectedChannels[ch];
+ const channelData = audioBuffer.getChannelData(sourceChannel);
+ interleaved[dataOffset + frame * numChannels + ch] = channelData[start + frame];
+ }
+ }
+ this.#writeToSharedBuffer(allocatedPtr, interleaved);
+ const sizeBytes = interleaved.length * 4;
+ const { uuid, allocationComplete } = this.#registerPending(bufnum);
+ pendingToken = uuid;
+ this.#recordAllocation(bufnum, allocatedPtr, sizeBytes, uuid, allocationComplete);
+ allocationRegistered = true;
+ const managedCompletion = this.#attachFinalizer(bufnum, uuid, allocationComplete);
+ releaseLock();
+ lockReleased = true;
+ return {
+ ptr: allocatedPtr,
+ numFrames: framesRequested,
+ numChannels,
+ sampleRate: audioBuffer.sampleRate,
+ uuid,
+ allocationComplete: managedCompletion
+ };
+ } catch (error) {
+ if (allocationRegistered && pendingToken) {
+ this.#finalizeReplacement(bufnum, pendingToken, false);
+ } else if (allocatedPtr) {
+ this.bufferPool.free(allocatedPtr);
+ }
+ throw error;
+ } finally {
+ if (!lockReleased) {
+ releaseLock();
+ }
+ }
+ }
+ async prepareEmpty(params) {
+ const {
+ bufnum,
+ numFrames,
+ numChannels = 1,
+ sampleRate = null
+ } = params;
+ this.#validateBufferNumber(bufnum);
+ let allocationRegistered = false;
+ let pendingToken = null;
+ let allocatedPtr = null;
+ if (!Number.isFinite(numFrames) || numFrames <= 0) {
+ throw new Error(`/b_alloc requires a positive number of frames (got ${numFrames})`);
+ }
+ if (!Number.isFinite(numChannels) || numChannels <= 0) {
+ throw new Error(`/b_alloc requires a positive channel count (got ${numChannels})`);
+ }
+ const roundedFrames = Math.floor(numFrames);
+ const roundedChannels = Math.floor(numChannels);
+ const totalSamples = roundedFrames * roundedChannels + (this.GUARD_BEFORE + this.GUARD_AFTER) * roundedChannels;
+ const releaseLock = await this.#acquireBufferLock(bufnum);
+ let lockReleased = false;
+ try {
+ await this.#awaitPendingReplacement(bufnum);
+ allocatedPtr = this.#malloc(totalSamples);
+ const interleaved = new Float32Array(totalSamples);
+ this.#writeToSharedBuffer(allocatedPtr, interleaved);
+ const sizeBytes = interleaved.length * 4;
+ const { uuid, allocationComplete } = this.#registerPending(bufnum);
+ pendingToken = uuid;
+ this.#recordAllocation(bufnum, allocatedPtr, sizeBytes, uuid, allocationComplete);
+ allocationRegistered = true;
+ const managedCompletion = this.#attachFinalizer(bufnum, uuid, allocationComplete);
+ releaseLock();
+ lockReleased = true;
+ return {
+ ptr: allocatedPtr,
+ numFrames: roundedFrames,
+ numChannels: roundedChannels,
+ sampleRate: sampleRate || this.audioContext.sampleRate,
+ uuid,
+ allocationComplete: managedCompletion
+ };
+ } catch (error) {
+ if (allocationRegistered && pendingToken) {
+ this.#finalizeReplacement(bufnum, pendingToken, false);
+ } else if (allocatedPtr) {
+ this.bufferPool.free(allocatedPtr);
+ }
+ throw error;
+ } finally {
+ if (!lockReleased) {
+ releaseLock();
+ }
+ }
+ }
+ #normalizeChannels(requestedChannels, fileChannels) {
+ if (!requestedChannels || requestedChannels.length === 0) {
+ return Array.from({ length: fileChannels }, (_, i) => i);
+ }
+ requestedChannels.forEach((channel) => {
+ if (!Number.isInteger(channel) || channel < 0 || channel >= fileChannels) {
+ throw new Error(`Channel ${channel} is out of range (file has ${fileChannels} channels)`);
+ }
+ });
+ return requestedChannels;
+ }
+ #malloc(totalSamples) {
+ const bytesNeeded = totalSamples * 4;
+ const ptr = this.bufferPool.malloc(bytesNeeded);
+ if (ptr === 0) {
+ const stats = this.bufferPool.stats();
+ const availableMB = ((stats.available || 0) / (1024 * 1024)).toFixed(2);
+ const totalMB = ((stats.total || 0) / (1024 * 1024)).toFixed(2);
+ const requestedMB = (bytesNeeded / (1024 * 1024)).toFixed(2);
+ throw new Error(
+ `Buffer pool allocation failed: requested ${requestedMB}MB, available ${availableMB}MB of ${totalMB}MB total`
+ );
+ }
+ return ptr;
+ }
+ #writeToSharedBuffer(ptr, data) {
+ const heap = new Float32Array(this.sharedBuffer, ptr, data.length);
+ heap.set(data);
+ }
+ #registerPending(bufnum) {
+ if (!this.registerPendingOp) {
+ return {
+ uuid: crypto.randomUUID(),
+ allocationComplete: Promise.resolve()
+ };
+ }
+ const uuid = crypto.randomUUID();
+ const allocationComplete = this.registerPendingOp(uuid, bufnum);
+ return { uuid, allocationComplete };
+ }
+ async #acquireBufferLock(bufnum) {
+ const prev = this.bufferLocks.get(bufnum) || Promise.resolve();
+ let releaseLock;
+ const current = new Promise((resolve) => {
+ releaseLock = resolve;
+ });
+ this.bufferLocks.set(bufnum, prev.then(() => current));
+ await prev;
+ return () => {
+ if (releaseLock) {
+ releaseLock();
+ releaseLock = null;
+ }
+ if (this.bufferLocks.get(bufnum) === current) {
+ this.bufferLocks.delete(bufnum);
+ }
+ };
+ }
+ #recordAllocation(bufnum, ptr, sizeBytes, pendingToken, pendingPromise) {
+ const previousEntry = this.allocatedBuffers.get(bufnum);
+ const entry = {
+ ptr,
+ size: sizeBytes,
+ pendingToken,
+ pendingPromise,
+ previousAllocation: previousEntry ? { ptr: previousEntry.ptr, size: previousEntry.size } : null
+ };
+ this.allocatedBuffers.set(bufnum, entry);
+ return entry;
+ }
+ async #awaitPendingReplacement(bufnum) {
+ const existing = this.allocatedBuffers.get(bufnum);
+ if (existing && existing.pendingToken && existing.pendingPromise) {
+ try {
+ await existing.pendingPromise;
+ } catch {
+ }
+ }
+ }
+ #attachFinalizer(bufnum, pendingToken, promise) {
+ if (!promise || typeof promise.then !== "function") {
+ this.#finalizeReplacement(bufnum, pendingToken, true);
+ return Promise.resolve();
+ }
+ return promise.then((value) => {
+ this.#finalizeReplacement(bufnum, pendingToken, true);
+ return value;
+ }).catch((error) => {
+ this.#finalizeReplacement(bufnum, pendingToken, false);
+ throw error;
+ });
+ }
+ #finalizeReplacement(bufnum, pendingToken, success) {
+ const entry = this.allocatedBuffers.get(bufnum);
+ if (!entry || entry.pendingToken !== pendingToken) {
+ return;
+ }
+ const previous = entry.previousAllocation;
+ if (success) {
+ entry.pendingToken = null;
+ entry.pendingPromise = null;
+ entry.previousAllocation = null;
+ if (previous?.ptr) {
+ this.bufferPool.free(previous.ptr);
+ }
+ return;
+ }
+ if (entry.ptr) {
+ this.bufferPool.free(entry.ptr);
+ }
+ entry.pendingPromise = null;
+ if (previous?.ptr) {
+ this.allocatedBuffers.set(bufnum, {
+ ptr: previous.ptr,
+ size: previous.size,
+ pendingToken: null,
+ previousAllocation: null
+ });
+ } else {
+ this.allocatedBuffers.delete(bufnum);
+ }
+ }
+ };
+ var SuperSonic = class _SuperSonic {
  // Expose OSC utilities as static methods
  static osc = {
  encode: (message) => osc_default.writePacket(message),
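The new BufferManager serializes work on a given buffer number with a promise-chain lock: #acquireBufferLock awaits the previous holder's promise and hands back a release function, so concurrent prepareFromFile/prepareEmpty calls for the same bufnum run one at a time. A standalone sketch of that pattern (illustrative only, not part of the package's exported API):

    // Async mutex keyed by id, mirroring the promise-chain approach used above.
    const locks = new Map();
    function acquire(id) {
      const prev = locks.get(id) || Promise.resolve();
      let release;
      const current = new Promise((resolve) => { release = resolve; });
      locks.set(id, current);
      // Resolves to a release function once every earlier holder has released.
      return prev.then(() => () => {
        release();
        if (locks.get(id) === current) locks.delete(id);
      });
    }
    // const release = await acquire(bufnum);
    // try { /* critical section */ } finally { release(); }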
@@ -1604,11 +1901,14 @@ var SuperSonic = class {
  this.wasmModule = null;
  this.wasmInstance = null;
  this.bufferPool = null;
+ this.bufferManager = null;
+ this.loadedSynthDefs = /* @__PURE__ */ new Set();
  this.pendingBufferOps = /* @__PURE__ */ new Map();
- this.wasmTimeOffset = null;
  this._timeOffsetPromise = null;
  this._resolveTimeOffset = null;
- this.
+ this._localClockOffsetTimer = null;
+ this.onOSC = null;
+ this.onMessage = null;
  this.onMessageSent = null;
  this.onMetricsUpdate = null;
  this.onStatusUpdate = null;
@@ -1695,21 +1995,6 @@ var SuperSonic = class {
  });
  console.log("[SuperSonic] Buffer pool initialized: 128MB at offset 64MB");
  }
- /**
- * Calculate time offset (AudioContext → NTP conversion)
- * Called when AudioContext is in 'running' state to ensure accurate timing
- */
- #calculateTimeOffset() {
- const SECONDS_1900_TO_1970 = 2208988800;
- const audioContextTime = this.audioContext.currentTime;
- const unixSeconds = Date.now() / 1e3;
- this.wasmTimeOffset = SECONDS_1900_TO_1970 + unixSeconds - audioContextTime;
- if (this._resolveTimeOffset) {
- this._resolveTimeOffset(this.wasmTimeOffset);
- this._resolveTimeOffset = null;
- }
- return this.wasmTimeOffset;
- }
  /**
  * Initialize AudioContext and set up time offset calculation
  */
@@ -1729,14 +2014,17 @@ var SuperSonic = class {
  document.addEventListener("click", resumeContext, { once: true });
  document.addEventListener("touchstart", resumeContext, { once: true });
  }
- this.audioContext
-
-
-
+ return this.audioContext;
+ }
+ #initializeBufferManager() {
+ this.bufferManager = new BufferManager({
+ audioContext: this.audioContext,
+ sharedBuffer: this.sharedBuffer,
+ bufferPool: this.bufferPool,
+ allocatedBuffers: this.allocatedBuffers,
+ resolveAudioPath: (path) => this._resolveAudioPath(path),
+ registerPendingOp: (uuid, bufnum, timeoutMs) => this.#createPendingBufferOperation(uuid, bufnum, timeoutMs)
  });
- if (this.audioContext.state === "running") {
- this.#calculateTimeOffset();
- }
  }
  /**
  * Load WASM manifest to get the current hashed filename
@@ -1782,12 +2070,10 @@ var SuperSonic = class {
  type: "init",
  sharedBuffer: this.sharedBuffer
  });
- const timeOffset = await this._timeOffsetPromise;
  this.workletNode.port.postMessage({
  type: "loadWasm",
  wasmBytes,
- wasmMemory: this.wasmMemory
- timeOffset
+ wasmMemory: this.wasmMemory
  });
  await this.#waitForWorkletInit();
  }
@@ -1796,15 +2082,20 @@ var SuperSonic = class {
  */
  async #initializeOSC() {
  this.osc = new ScsynthOSC();
- this.osc.
+ this.osc.onRawOSC((msg) => {
+ if (this.onOSC) {
+ this.onOSC(msg);
+ }
+ });
+ this.osc.onParsedOSC((msg) => {
  if (msg.address === "/buffer/freed") {
  this._handleBufferFreed(msg.args);
  } else if (msg.address === "/buffer/allocated") {
  this._handleBufferAllocated(msg.args);
  }
- if (this.
+ if (this.onMessage) {
  this.stats.messagesReceived++;
- this.
+ this.onMessage(msg);
  }
  });
  this.osc.onDebugMessage((msg) => {
@@ -1867,6 +2158,7 @@ var SuperSonic = class {
  this.checkCapabilities();
  this.#initializeSharedMemory();
  this.#initializeAudioContext();
+ this.#initializeBufferManager();
  const wasmBytes = await this.#loadWasm();
  await this.#initializeAudioWorklet(wasmBytes);
  await this.#initializeOSC();
@@ -1911,10 +2203,20 @@ var SuperSonic = class {
  console.warn("[SuperSonic] Warning: ringBufferBase not provided by worklet");
  }
  if (event.data.bufferConstants !== void 0) {
+ console.log("[SuperSonic] Received bufferConstants from worklet");
  this.bufferConstants = event.data.bufferConstants;
+ console.log("[SuperSonic] Initializing NTP timing");
+ this.initializeNTPTiming();
+ this.#startDriftOffsetTimer();
+ console.log("[SuperSonic] Resolving time offset promise, _resolveTimeOffset=", this._resolveTimeOffset);
+ if (this._resolveTimeOffset) {
+ this._resolveTimeOffset();
+ this._resolveTimeOffset = null;
+ }
  } else {
  console.warn("[SuperSonic] Warning: bufferConstants not provided by worklet");
  }
+ console.log("[SuperSonic] Calling resolve() for worklet initialization");
  resolve();
  } else {
  reject(new Error(event.data.error || "AudioWorklet initialization failed"));
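This hunk wires the new shared-memory timing in during worklet initialization: initializeNTPTiming stores the NTP time that corresponds to AudioContext time zero, and #startDriftOffsetTimer keeps a drift correction up to date (both are defined near the end of this file). A worked example with rounded, made-up numbers, following those formulas:

    // Suppose the wall clock reads 1,700,000,000 s (Unix) and the AudioContext
    // has been running for 2.5 s when bufferConstants arrive from the worklet.
    const NTP_EPOCH_OFFSET = 2208988800;               // seconds from 1900 to 1970
    const currentNTP = 1700000000 + NTP_EPOCH_OFFSET;  // 3,908,988,800 s on the NTP scale
    const ntpStartTime = currentNTP - 2.5;             // NTP time at AudioContext time 0
    // A bundle time-tagged for NTP time ntpStartTime + 3.5 (with drift and global
    // offsets both zero) therefore maps back to 3.5 s on the AudioContext clock.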
@@ -1997,12 +2299,7 @@ var SuperSonic = class {
  * sonic.send('/n_set', 1000, 'freq', 440.0, 'amp', 0.5);
  */
  async send(address, ...args) {
-
- throw new Error("SuperSonic not initialized. Call init() first.");
- }
- if (this._isBufferAllocationCommand(address)) {
- return await this._handleBufferCommand(address, args);
- }
+ this.#ensureInitialized("send OSC messages");
  const oscArgs = args.map((arg) => {
  if (typeof arg === "string") {
  return { type: "s", value: arg };
@@ -2014,108 +2311,9 @@ var SuperSonic = class {
  throw new Error(`Unsupported argument type: ${typeof arg}`);
  }
  });
- const message = {
-
-
- };
- const oscData = osc_default.writePacket(message);
- this.sendOSC(oscData);
- }
- _isBufferAllocationCommand(address) {
- return [
- "/b_allocRead",
- "/b_allocReadChannel",
- "/b_read",
- "/b_readChannel"
- // NOTE: /b_alloc and /b_free are NOT intercepted
- ].includes(address);
- }
- async _handleBufferCommand(address, args) {
- switch (address) {
- case "/b_allocRead":
- return await this._allocReadBuffer(...args);
- case "/b_allocReadChannel":
- return await this._allocReadChannelBuffer(...args);
- case "/b_read":
- return await this._readBuffer(...args);
- case "/b_readChannel":
- return await this._readChannelBuffer(...args);
- }
- }
- /**
- * /b_allocRead bufnum path [startFrame numFrames completion]
- */
- async _allocReadBuffer(bufnum, path, startFrame = 0, numFrames = 0, completionMsg = null) {
- let allocatedPtr = null;
- const GUARD_BEFORE = 3;
- const GUARD_AFTER = 1;
- try {
- const url = this._resolveAudioPath(path);
- const response = await fetch(url);
- if (!response.ok) {
- throw new Error(`HTTP ${response.status}: ${response.statusText}`);
- }
- const arrayBuffer = await response.arrayBuffer();
- const audioBuffer = await this.audioContext.decodeAudioData(arrayBuffer);
- const actualStartFrame = startFrame || 0;
- const actualNumFrames = numFrames || audioBuffer.length - actualStartFrame;
- const framesToRead = Math.min(actualNumFrames, audioBuffer.length - actualStartFrame);
- if (framesToRead <= 0) {
- throw new Error(`Invalid frame range: start=${actualStartFrame}, numFrames=${actualNumFrames}, fileLength=${audioBuffer.length}`);
- }
- const numChannels = audioBuffer.numberOfChannels;
- const guardSamples = (GUARD_BEFORE + GUARD_AFTER) * numChannels;
- const interleavedData = new Float32Array(framesToRead * numChannels + guardSamples);
- const dataOffset = GUARD_BEFORE * numChannels;
- for (let frame = 0; frame < framesToRead; frame++) {
- for (let ch = 0; ch < numChannels; ch++) {
- const channelData = audioBuffer.getChannelData(ch);
- interleavedData[dataOffset + frame * numChannels + ch] = channelData[actualStartFrame + frame];
- }
- }
- const bytesNeeded = interleavedData.length * 4;
- allocatedPtr = this.bufferPool.malloc(bytesNeeded);
- if (allocatedPtr === 0) {
- throw new Error("Buffer pool allocation failed (out of memory)");
- }
- const wasmHeap = new Float32Array(
- this.sharedBuffer,
- allocatedPtr,
- interleavedData.length
- );
- wasmHeap.set(interleavedData);
- this.allocatedBuffers.set(bufnum, {
- ptr: allocatedPtr,
- size: bytesNeeded
- });
- const uuid = crypto.randomUUID();
- const allocationComplete = new Promise((resolve, reject) => {
- const timeout = setTimeout(() => {
- this.pendingBufferOps.delete(uuid);
- reject(new Error(`Timeout waiting for buffer ${bufnum} allocation`));
- }, 5e3);
- this.pendingBufferOps.set(uuid, { resolve, reject, timeout });
- });
- await this.send(
- "/b_allocPtr",
- bufnum,
- allocatedPtr,
- framesToRead,
- numChannels,
- audioBuffer.sampleRate,
- uuid
- );
- await allocationComplete;
- if (completionMsg) {
- }
- } catch (error) {
- if (allocatedPtr) {
- this.bufferPool.free(allocatedPtr);
- this.allocatedBuffers.delete(bufnum);
- }
- console.error(`[SuperSonic] Buffer ${bufnum} load failed:`, error);
- throw error;
- }
+ const message = { address, args: oscArgs };
+ const oscData = _SuperSonic.osc.encode(message);
+ return this.sendOSC(oscData);
  }
  /**
  * Resolve audio file path to full URL
@@ -2131,17 +2329,45 @@ var SuperSonic = class {
  }
  return this.sampleBaseURL + scPath;
  }
+ #ensureInitialized(actionDescription = "perform this operation") {
+ if (!this.initialized) {
+ throw new Error(`SuperSonic not initialized. Call init() before attempting to ${actionDescription}.`);
+ }
+ }
+ #createPendingBufferOperation(uuid, bufnum, timeoutMs = 3e4) {
+ return new Promise((resolve, reject) => {
+ const timeout = setTimeout(() => {
+ this.pendingBufferOps.delete(uuid);
+ reject(new Error(`Buffer ${bufnum} allocation timeout (${timeoutMs}ms)`));
+ }, timeoutMs);
+ this.pendingBufferOps.set(uuid, { resolve, reject, timeout });
+ });
+ }
  /**
  * Handle /buffer/freed message from WASM
  */
  _handleBufferFreed(args) {
  const bufnum = args[0];
- const
+ const freedPtr = args[1];
  const bufferInfo = this.allocatedBuffers.get(bufnum);
- if (bufferInfo) {
+ if (!bufferInfo) {
+ if (typeof freedPtr === "number" && freedPtr !== 0) {
+ this.bufferPool.free(freedPtr);
+ }
+ return;
+ }
+ if (typeof freedPtr === "number" && freedPtr === bufferInfo.ptr) {
  this.bufferPool.free(bufferInfo.ptr);
  this.allocatedBuffers.delete(bufnum);
+ return;
+ }
+ if (typeof freedPtr === "number" && bufferInfo.previousAllocation && bufferInfo.previousAllocation.ptr === freedPtr) {
+ this.bufferPool.free(freedPtr);
+ bufferInfo.previousAllocation = null;
+ return;
  }
+ this.bufferPool.free(bufferInfo.ptr);
+ this.allocatedBuffers.delete(bufnum);
  }
  /**
  * Handle /buffer/allocated message with UUID correlation
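#createPendingBufferOperation gives each allocation a UUID-keyed entry of { resolve, reject, timeout }; the corresponding /buffer/allocated handler (only partially visible in this diff) is expected to look the UUID up, clear the timeout, and settle the promise. A condensed sketch of that resolve side, with an assumed argument layout:

    // Illustrative only; the real handler is _handleBufferAllocated.
    function settlePendingOp(pendingBufferOps, args) {
      const uuid = String(args[args.length - 1]);  // assumed: UUID is the last argument
      const pending = pendingBufferOps.get(uuid);
      if (!pending) return;                        // unknown UUID or already timed out
      clearTimeout(pending.timeout);
      pending.resolve(args);
      pendingBufferOps.delete(uuid);
    }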
@@ -2156,134 +2382,26 @@ var SuperSonic = class {
  this.pendingBufferOps.delete(uuid);
  }
  }
- /**
- * /b_allocReadChannel bufnum path [startFrame numFrames channel1 channel2 ... completion]
- * Load specific channels from an audio file
- */
- async _allocReadChannelBuffer(bufnum, path, startFrame = 0, numFrames = 0, ...channelsAndCompletion) {
- let allocatedPtr = null;
- const GUARD_BEFORE = 3;
- const GUARD_AFTER = 1;
- try {
- const channels = [];
- let completionMsg = null;
- for (let i = 0; i < channelsAndCompletion.length; i++) {
- if (typeof channelsAndCompletion[i] === "number" && Number.isInteger(channelsAndCompletion[i])) {
- channels.push(channelsAndCompletion[i]);
- } else {
- completionMsg = channelsAndCompletion[i];
- break;
- }
- }
- const url = this._resolveAudioPath(path);
- const response = await fetch(url);
- if (!response.ok) {
- throw new Error(`HTTP ${response.status}: ${response.statusText}`);
- }
- const arrayBuffer = await response.arrayBuffer();
- const audioBuffer = await this.audioContext.decodeAudioData(arrayBuffer);
- const actualStartFrame = startFrame || 0;
- const actualNumFrames = numFrames || audioBuffer.length - actualStartFrame;
- const framesToRead = Math.min(actualNumFrames, audioBuffer.length - actualStartFrame);
- if (framesToRead <= 0) {
- throw new Error(`Invalid frame range: start=${actualStartFrame}, numFrames=${actualNumFrames}, fileLength=${audioBuffer.length}`);
- }
- const fileChannels = audioBuffer.numberOfChannels;
- const selectedChannels = channels.length > 0 ? channels : Array.from({ length: fileChannels }, (_, i) => i);
- for (const ch of selectedChannels) {
- if (ch < 0 || ch >= fileChannels) {
- throw new Error(`Invalid channel ${ch} (file has ${fileChannels} channels)`);
- }
- }
- const numChannels = selectedChannels.length;
- const guardSamples = (GUARD_BEFORE + GUARD_AFTER) * numChannels;
- const interleavedData = new Float32Array(framesToRead * numChannels + guardSamples);
- const dataOffset = GUARD_BEFORE * numChannels;
- for (let frame = 0; frame < framesToRead; frame++) {
- for (let ch = 0; ch < numChannels; ch++) {
- const fileChannel = selectedChannels[ch];
- const channelData = audioBuffer.getChannelData(fileChannel);
- interleavedData[dataOffset + frame * numChannels + ch] = channelData[actualStartFrame + frame];
- }
- }
- const bytesNeeded = interleavedData.length * 4;
- allocatedPtr = this.bufferPool.malloc(bytesNeeded);
- if (allocatedPtr === 0) {
- throw new Error("Buffer pool allocation failed (out of memory)");
- }
- const wasmHeap = new Float32Array(this.sharedBuffer, allocatedPtr, interleavedData.length);
- wasmHeap.set(interleavedData);
- this.allocatedBuffers.set(bufnum, { ptr: allocatedPtr, size: bytesNeeded });
- await this.send("/b_allocPtr", bufnum, allocatedPtr, framesToRead, numChannels, audioBuffer.sampleRate);
- if (completionMsg) {
- }
- } catch (error) {
- if (allocatedPtr) {
- this.bufferPool.free(allocatedPtr);
- this.allocatedBuffers.delete(bufnum);
- }
- console.error(`[SuperSonic] Buffer ${bufnum} load failed:`, error);
- throw error;
- }
- }
- /**
- * /b_read bufnum path [startFrame numFrames bufStartFrame leaveOpen completion]
- * Read file into existing buffer
- */
- async _readBuffer(bufnum, path, startFrame = 0, numFrames = 0, bufStartFrame = 0, leaveOpen = 0, completionMsg = null) {
- console.warn("[SuperSonic] /b_read requires pre-allocated buffer - not yet implemented");
- throw new Error("/b_read not yet implemented (requires /b_alloc first)");
- }
- /**
- * /b_readChannel bufnum path [startFrame numFrames bufStartFrame leaveOpen channel1 channel2 ... completion]
- * Read specific channels into existing buffer
- */
- async _readChannelBuffer(bufnum, path, startFrame = 0, numFrames = 0, bufStartFrame = 0, leaveOpen = 0, ...channelsAndCompletion) {
- console.warn("[SuperSonic] /b_readChannel requires pre-allocated buffer - not yet implemented");
- throw new Error("/b_readChannel not yet implemented (requires /b_alloc first)");
- }
  /**
  * Send pre-encoded OSC bytes to scsynth
  * @param {ArrayBuffer|Uint8Array} oscData - Pre-encoded OSC data
  * @param {Object} options - Send options
  */
- sendOSC(oscData, options = {}) {
-
-
-
- let uint8Data;
- if (oscData instanceof ArrayBuffer) {
- uint8Data = new Uint8Array(oscData);
- } else if (oscData instanceof Uint8Array) {
- uint8Data = oscData;
- } else {
- throw new Error("oscData must be ArrayBuffer or Uint8Array");
- }
+ async sendOSC(oscData, options = {}) {
+ this.#ensureInitialized("send OSC data");
+ const uint8Data = this.#toUint8Array(oscData);
+ const preparedData = await this.#prepareOutboundPacket(uint8Data);
  this.stats.messagesSent++;
  if (this.onMessageSent) {
- this.onMessageSent(
- }
-
-
-
-
-
- console.warn("[SuperSonic] Time offset not yet calculated, calculating now");
- this.#calculateTimeOffset();
- }
- const view = new DataView(uint8Data.buffer, uint8Data.byteOffset);
- const ntpSeconds = view.getUint32(8, false);
- const ntpFraction = view.getUint32(12, false);
- if (!(ntpSeconds === 0 && (ntpFraction === 0 || ntpFraction === 1))) {
- const ntpTimeS = ntpSeconds + ntpFraction / 4294967296;
- const audioTimeS = ntpTimeS - this.wasmTimeOffset;
- const currentAudioTimeS = this.audioContext.currentTime;
- const latencyS = 0.05;
- waitTimeMs = (audioTimeS - currentAudioTimeS - latencyS) * 1e3;
- }
- }
+ this.onMessageSent(preparedData);
+ }
+ const timing = this.#calculateBundleWait(preparedData);
+ const sendOptions = { ...options };
+ if (timing) {
+ sendOptions.audioTimeS = timing.audioTimeS;
+ sendOptions.currentTimeS = timing.currentTimeS;
  }
- this.osc.send(
+ this.osc.send(preparedData, sendOptions);
  }
  /**
  * Get current status
@@ -2301,6 +2419,7 @@ var SuperSonic = class {
  */
  async destroy() {
  console.log("[SuperSonic] Destroying...");
+ this.#stopDriftOffsetTimer();
  if (this.osc) {
  this.osc.terminate();
  this.osc = null;
@@ -2313,10 +2432,32 @@ var SuperSonic = class {
  await this.audioContext.close();
  this.audioContext = null;
  }
+ for (const [uuid, pending] of this.pendingBufferOps.entries()) {
+ clearTimeout(pending.timeout);
+ pending.reject(new Error("SuperSonic instance destroyed"));
+ }
+ this.pendingBufferOps.clear();
  this.sharedBuffer = null;
  this.initialized = false;
+ this.bufferManager = null;
+ this.allocatedBuffers.clear();
+ this.loadedSynthDefs.clear();
  console.log("[SuperSonic] Destroyed");
  }
+ /**
+ * Wait until NTP timing has been established.
+ * Note: NTP calculation is now done internally in C++ process_audio().
+ * Returns 0 for backward compatibility.
+ */
+ async waitForTimeSync() {
+ if (!this.bufferConstants) {
+ if (this._timeOffsetPromise) {
+ await this._timeOffsetPromise;
+ }
+ }
+ const ntpStartView = new Float64Array(this.sharedBuffer, this.ringBufferBase + this.bufferConstants.NTP_START_TIME_START, 1);
+ return ntpStartView[0];
+ }
  /**
  * Load a sample into a buffer and wait for confirmation
  * @param {number} bufnum - Buffer number
@@ -2324,10 +2465,23 @@ var SuperSonic = class {
  * @returns {Promise} Resolves when buffer is ready
  */
  async loadSample(bufnum, path, startFrame = 0, numFrames = 0) {
-
-
-
-
+ this.#ensureInitialized("load samples");
+ const bufferInfo = await this.#requireBufferManager().prepareFromFile({
+ bufnum,
+ path,
+ startFrame,
+ numFrames
+ });
+ await this.send(
+ "/b_allocPtr",
+ bufnum,
+ bufferInfo.ptr,
+ bufferInfo.numFrames,
+ bufferInfo.numChannels,
+ bufferInfo.sampleRate,
+ bufferInfo.uuid
+ );
+ return bufferInfo.allocationComplete;
  }
  /**
  * Load a binary synthdef file and send it to scsynth
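loadSample now routes through BufferManager.prepareFromFile, sends a /b_allocPtr that points scsynth at the shared-memory copy, and returns the allocation-complete promise. A minimal usage sketch, assuming a default-constructed SuperSonic instance; the sample path, synthdef name, and node ID are placeholders:

    const sonic = new SuperSonic();
    await sonic.init();
    // Resolves once scsynth confirms buffer 0 is registered.
    await sonic.loadSample(0, "samples/kick.wav");
    await sonic.send("/s_new", "playbuf_mono", 1001, 0, 0, "bufnum", 0);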
@@ -2347,7 +2501,11 @@ var SuperSonic = class {
  }
  const arrayBuffer = await response.arrayBuffer();
  const synthdefData = new Uint8Array(arrayBuffer);
- this.send("/d_recv", synthdefData);
+ await this.send("/d_recv", synthdefData);
+ const synthName = this.#extractSynthDefName(path);
+ if (synthName) {
+ this.loadedSynthDefs.add(synthName);
+ }
  console.log(`[SuperSonic] Loaded synthdef from ${path} (${synthdefData.length} bytes)`);
  } catch (error) {
  console.error("[SuperSonic] Failed to load synthdef:", error);
@@ -2446,6 +2604,368 @@ var SuperSonic = class {
  }
  return this.bufferPool.stats();
  }
+ getDiagnostics() {
+ this.#ensureInitialized("get diagnostics");
+ const poolStats = this.bufferPool?.stats ? this.bufferPool.stats() : null;
+ let bytesActive = 0;
+ let pendingCount = 0;
+ for (const entry of this.allocatedBuffers.values()) {
+ if (!entry) continue;
+ bytesActive += entry.size || 0;
+ if (entry.pendingToken) {
+ pendingCount++;
+ }
+ }
+ return {
+ buffers: {
+ active: this.allocatedBuffers.size,
+ pending: pendingCount,
+ bytesActive,
+ pool: poolStats ? {
+ total: poolStats.total || 0,
+ available: poolStats.available || 0,
+ freeBytes: poolStats.free?.size || 0,
+ freeBlocks: poolStats.free?.count || 0,
+ usedBytes: poolStats.used?.size || 0,
+ usedBlocks: poolStats.used?.count || 0
+ } : null
+ },
+ synthdefs: {
+ count: this.loadedSynthDefs.size
+ }
+ };
+ }
+ /**
+ * Initialize NTP timing (write-once)
+ * Sets the NTP start time when AudioContext started
+ * @private
+ */
+ initializeNTPTiming() {
+ if (!this.bufferConstants || !this.audioContext) {
+ return;
+ }
+ const perfTimeMs = performance.timeOrigin + performance.now();
+ const currentNTP = perfTimeMs / 1e3 + NTP_EPOCH_OFFSET;
+ const currentAudioCtx = this.audioContext.currentTime;
+ const ntpStartTime = currentNTP - currentAudioCtx;
+ const ntpStartView = new Float64Array(
+ this.sharedBuffer,
+ this.ringBufferBase + this.bufferConstants.NTP_START_TIME_START,
+ 1
+ );
+ ntpStartView[0] = ntpStartTime;
+ this._initialNTPStartTime = ntpStartTime;
+ console.log(`[SuperSonic] NTP timing initialized: start=${ntpStartTime.toFixed(6)}s (current NTP=${currentNTP.toFixed(3)}, AudioCtx=${currentAudioCtx.toFixed(3)}), ringBufferBase=${this.ringBufferBase}`);
+ }
+ /**
+ * Update drift offset (AudioContext → NTP drift correction)
+ * CRITICAL: This REPLACES the drift value, does not accumulate
+ * @private
+ */
+ updateDriftOffset() {
+ if (!this.bufferConstants || !this.audioContext || this._initialNTPStartTime === void 0) {
+ return;
+ }
+ const perfTimeMs = performance.timeOrigin + performance.now();
+ const currentNTP = perfTimeMs / 1e3 + NTP_EPOCH_OFFSET;
+ const currentAudioCtx = this.audioContext.currentTime;
+ const currentNTPStartTime = currentNTP - currentAudioCtx;
+ const driftSeconds = currentNTPStartTime - this._initialNTPStartTime;
+ const driftMs = Math.round(driftSeconds * 1e3);
+ const driftView = new Int32Array(
+ this.sharedBuffer,
+ this.ringBufferBase + this.bufferConstants.DRIFT_OFFSET_START,
+ 1
+ );
+ Atomics.store(driftView, 0, driftMs);
+ console.log(`[SuperSonic] Drift offset updated: ${driftMs}ms (current NTP start=${currentNTPStartTime.toFixed(6)}, initial=${this._initialNTPStartTime.toFixed(6)})`);
+ }
+ /**
+ * Get current drift offset in milliseconds
+ * @returns {number} Current drift in milliseconds
+ */
+ getDriftOffset() {
+ if (!this.bufferConstants) {
+ return 0;
+ }
+ const driftView = new Int32Array(
+ this.sharedBuffer,
+ this.ringBufferBase + this.bufferConstants.DRIFT_OFFSET_START,
+ 1
+ );
+ return Atomics.load(driftView, 0);
+ }
+ /**
+ * Start periodic drift offset updates
+ * @private
+ */
+ #startDriftOffsetTimer() {
+ this.#stopDriftOffsetTimer();
+ this._driftOffsetTimer = setInterval(() => {
+ this.updateDriftOffset();
+ }, DRIFT_UPDATE_INTERVAL_MS);
+ console.log(`[SuperSonic] Started drift offset correction (every ${DRIFT_UPDATE_INTERVAL_MS}ms)`);
+ }
+ /**
+ * Stop periodic drift offset updates
+ * @private
+ */
+ #stopDriftOffsetTimer() {
+ if (this._driftOffsetTimer) {
+ clearInterval(this._driftOffsetTimer);
+ this._driftOffsetTimer = null;
+ }
+ }
+ #extractSynthDefName(path) {
+ if (!path || typeof path !== "string") {
+ return null;
+ }
+ const lastSegment = path.split("/").filter(Boolean).pop() || path;
+ return lastSegment.replace(/\.scsyndef$/i, "");
+ }
+ #toUint8Array(data) {
+ if (data instanceof Uint8Array) {
+ return data;
+ }
+ if (data instanceof ArrayBuffer) {
+ return new Uint8Array(data);
+ }
+ throw new Error("oscData must be ArrayBuffer or Uint8Array");
+ }
+ async #prepareOutboundPacket(uint8Data) {
+ const decodeOptions = { metadata: true, unpackSingleArgs: false };
+ try {
+ const decodedPacket = _SuperSonic.osc.decode(uint8Data, decodeOptions);
+ const { packet, changed } = await this.#rewritePacket(decodedPacket);
+ if (!changed) {
+ return uint8Data;
+ }
+ return _SuperSonic.osc.encode(packet);
+ } catch (error) {
+ console.error("[SuperSonic] Failed to prepare OSC packet:", error);
+ throw error;
+ }
+ }
+ async #rewritePacket(packet) {
+ if (packet && packet.address) {
+ const { message, changed } = await this.#rewriteMessage(packet);
+ return { packet: message, changed };
+ }
+ if (this.#isBundle(packet)) {
+ const subResults = await Promise.all(
+ packet.packets.map((subPacket) => this.#rewritePacket(subPacket))
+ );
+ const changed = subResults.some((result) => result.changed);
+ if (!changed) {
+ return { packet, changed: false };
+ }
+ const rewrittenPackets = subResults.map((result) => result.packet);
+ return {
+ packet: {
+ timeTag: packet.timeTag,
+ packets: rewrittenPackets
+ },
+ changed: true
+ };
+ }
+ return { packet, changed: false };
+ }
+ async #rewriteMessage(message) {
+ switch (message.address) {
+ case "/b_alloc":
+ return {
+ message: await this.#rewriteAlloc(message),
+ changed: true
+ };
+ case "/b_allocRead":
+ return {
+ message: await this.#rewriteAllocRead(message),
+ changed: true
+ };
+ case "/b_allocReadChannel":
+ return {
+ message: await this.#rewriteAllocReadChannel(message),
+ changed: true
+ };
+ default:
+ return { message, changed: false };
+ }
+ }
+ async #rewriteAllocRead(message) {
+ const bufferManager = this.#requireBufferManager();
+ const bufnum = this.#requireIntArg(message.args, 0, "/b_allocRead requires a buffer number");
+ const path = this.#requireStringArg(message.args, 1, "/b_allocRead requires a file path");
+ const startFrame = this.#optionalIntArg(message.args, 2, 0);
+ const numFrames = this.#optionalIntArg(message.args, 3, 0);
+ const bufferInfo = await bufferManager.prepareFromFile({
+ bufnum,
+ path,
+ startFrame,
+ numFrames
+ });
+ this.#detachAllocationPromise(bufferInfo.allocationComplete, `/b_allocRead ${bufnum}`);
+ return this.#buildAllocPtrMessage(bufnum, bufferInfo);
+ }
+ async #rewriteAllocReadChannel(message) {
+ const bufferManager = this.#requireBufferManager();
+ const bufnum = this.#requireIntArg(message.args, 0, "/b_allocReadChannel requires a buffer number");
+ const path = this.#requireStringArg(message.args, 1, "/b_allocReadChannel requires a file path");
+ const startFrame = this.#optionalIntArg(message.args, 2, 0);
+ const numFrames = this.#optionalIntArg(message.args, 3, 0);
+ const channels = [];
+ for (let i = 4; i < (message.args?.length || 0); i++) {
+ if (!this.#isNumericArg(message.args[i])) {
+ break;
+ }
+ channels.push(Math.floor(this.#getArgValue(message.args[i])));
+ }
+ const bufferInfo = await bufferManager.prepareFromFile({
+ bufnum,
+ path,
+ startFrame,
+ numFrames,
+ channels: channels.length > 0 ? channels : null
+ });
+ this.#detachAllocationPromise(bufferInfo.allocationComplete, `/b_allocReadChannel ${bufnum}`);
+ return this.#buildAllocPtrMessage(bufnum, bufferInfo);
+ }
+ async #rewriteAlloc(message) {
+ const bufferManager = this.#requireBufferManager();
+ const bufnum = this.#requireIntArg(message.args, 0, "/b_alloc requires a buffer number");
+ const numFrames = this.#requireIntArg(message.args, 1, "/b_alloc requires a frame count");
+ let argIndex = 2;
+ let numChannels = 1;
+ let sampleRate = this.audioContext?.sampleRate || 44100;
+ if (this.#isNumericArg(this.#argAt(message.args, argIndex))) {
+ numChannels = Math.max(1, this.#optionalIntArg(message.args, argIndex, 1));
+ argIndex++;
+ }
+ if (this.#argAt(message.args, argIndex)?.type === "b") {
+ argIndex++;
+ }
+ if (this.#isNumericArg(this.#argAt(message.args, argIndex))) {
+ sampleRate = this.#getArgValue(this.#argAt(message.args, argIndex));
+ }
+ const bufferInfo = await bufferManager.prepareEmpty({
+ bufnum,
+ numFrames,
+ numChannels,
+ sampleRate
+ });
+ this.#detachAllocationPromise(bufferInfo.allocationComplete, `/b_alloc ${bufnum}`);
+ return this.#buildAllocPtrMessage(bufnum, bufferInfo);
+ }
+ #buildAllocPtrMessage(bufnum, bufferInfo) {
+ return {
+ address: "/b_allocPtr",
+ args: [
+ this.#intArg(bufnum),
+ this.#intArg(bufferInfo.ptr),
+ this.#intArg(bufferInfo.numFrames),
+ this.#intArg(bufferInfo.numChannels),
+ this.#floatArg(bufferInfo.sampleRate),
+ this.#stringArg(bufferInfo.uuid)
+ ]
+ };
+ }
+ #intArg(value) {
+ return { type: "i", value: Math.floor(value) };
+ }
+ #floatArg(value) {
+ return { type: "f", value };
+ }
+ #stringArg(value) {
+ return { type: "s", value: String(value) };
+ }
+ #argAt(args, index) {
+ if (!Array.isArray(args)) {
+ return void 0;
+ }
+ return args[index];
+ }
+ #getArgValue(arg) {
+ if (arg === void 0 || arg === null) {
+ return void 0;
+ }
+ return typeof arg === "object" && Object.prototype.hasOwnProperty.call(arg, "value") ? arg.value : arg;
+ }
+ #requireIntArg(args, index, errorMessage) {
+ const value = this.#getArgValue(this.#argAt(args, index));
+ if (!Number.isFinite(value)) {
+ throw new Error(errorMessage);
+ }
+ return Math.floor(value);
+ }
+ #optionalIntArg(args, index, defaultValue = 0) {
+ const value = this.#getArgValue(this.#argAt(args, index));
+ if (!Number.isFinite(value)) {
+ return defaultValue;
+ }
+ return Math.floor(value);
+ }
+ #requireStringArg(args, index, errorMessage) {
+ const value = this.#getArgValue(this.#argAt(args, index));
+ if (typeof value !== "string") {
+ throw new Error(errorMessage);
+ }
+ return value;
+ }
+ #isNumericArg(arg) {
+ if (!arg) {
+ return false;
+ }
+ const value = this.#getArgValue(arg);
+ return Number.isFinite(value);
+ }
+ #detachAllocationPromise(promise, context) {
+ if (!promise || typeof promise.catch !== "function") {
+ return;
+ }
+ promise.catch((error) => {
+ console.error(`[SuperSonic] ${context} allocation failed:`, error);
+ });
+ }
+ #requireBufferManager() {
+ if (!this.bufferManager) {
+ throw new Error("Buffer manager not ready. Call init() before issuing buffer commands.");
+ }
+ return this.bufferManager;
+ }
+ #isBundle(packet) {
+ return packet && packet.timeTag !== void 0 && Array.isArray(packet.packets);
+ }
+ #calculateBundleWait(uint8Data) {
+ if (uint8Data.length < 16) {
+ return null;
+ }
+ const header = String.fromCharCode.apply(null, uint8Data.slice(0, 8));
+ if (header !== "#bundle\0") {
+ return null;
+ }
+ const ntpStartView = new Float64Array(this.sharedBuffer, this.ringBufferBase + this.bufferConstants.NTP_START_TIME_START, 1);
+ const ntpStartTime = ntpStartView[0];
+ if (ntpStartTime === 0) {
+ console.warn("[SuperSonic] NTP start time not yet initialized");
+ return null;
+ }
+ const driftView = new Int32Array(this.sharedBuffer, this.ringBufferBase + this.bufferConstants.DRIFT_OFFSET_START, 1);
+ const driftMs = Atomics.load(driftView, 0);
+ const driftSeconds = driftMs / 1e3;
+ const globalView = new Int32Array(this.sharedBuffer, this.ringBufferBase + this.bufferConstants.GLOBAL_OFFSET_START, 1);
+ const globalMs = Atomics.load(globalView, 0);
+ const globalSeconds = globalMs / 1e3;
+ const totalOffset = ntpStartTime + driftSeconds + globalSeconds;
+ const view = new DataView(uint8Data.buffer, uint8Data.byteOffset);
+ const ntpSeconds = view.getUint32(8, false);
+ const ntpFraction = view.getUint32(12, false);
+ if (ntpSeconds === 0 && (ntpFraction === 0 || ntpFraction === 1)) {
+ return null;
+ }
+ const ntpTimeS = ntpSeconds + ntpFraction / 4294967296;
+ const audioTimeS = ntpTimeS - totalOffset;
+ const currentTimeS = this.audioContext.currentTime;
+ return { audioTimeS, currentTimeS };
+ }
  };
  export {
  SuperSonic