lox-airplay-sender 0.3.1 → 0.3.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/core/audioOut.d.ts +1 -0
- package/dist/core/audioOut.js +24 -5
- package/dist/core/rtsp.js +2 -1
- package/dist/esm/core/audioOut.js +24 -5
- package/dist/esm/core/rtsp.js +2 -1
- package/dist/esm/utils/config.js +2 -0
- package/dist/utils/config.d.ts +2 -0
- package/dist/utils/config.js +2 -0
- package/package.json +1 -1
package/dist/core/audioOut.d.ts
CHANGED
package/dist/core/audioOut.js
CHANGED
```diff
@@ -4,6 +4,7 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
 };
 Object.defineProperty(exports, "__esModule", { value: true });
 const node_events_1 = require("node:events");
+const node_perf_hooks_1 = require("node:perf_hooks");
 const config_1 = __importDefault(require("../utils/config"));
 const numUtil_1 = require("../utils/numUtil");
 const SEQ_NUM_WRAP = Math.pow(2, 16);
@@ -14,7 +15,8 @@ const SEQ_NUM_WRAP = Math.pow(2, 16);
 class AudioOut extends node_events_1.EventEmitter {
     lastSeq = -1;
     hasAirTunes = false;
-    rtpTimeRef =
+    rtpTimeRef = 0;
+    monotonicRef = 0;
     startTimeMs;
     latencyFrames = 0;
     latencyApplied = false;
@@ -29,7 +31,10 @@ class AudioOut extends node_events_1.EventEmitter {
             typeof startTimeMs === 'number' && Number.isFinite(startTimeMs)
                 ? startTimeMs
                 : undefined;
-
+        const wallToMonoOffset = Date.now() - node_perf_hooks_1.performance.now();
+        // Anchor the RTP clock to a monotonic base to avoid NTP slews.
+        this.rtpTimeRef = (this.startTimeMs ?? Date.now()) - wallToMonoOffset;
+        this.monotonicRef = node_perf_hooks_1.performance.now();
         devices.on('airtunes_devices', (hasAirTunes) => {
             this.hasAirTunes = hasAirTunes;
         });
@@ -42,21 +47,35 @@ class AudioOut extends node_events_1.EventEmitter {
             packet.timestamp = (0, numUtil_1.low32)(seq * config_1.default.frames_per_packet + 2 * config_1.default.sampling_rate);
             if (this.hasAirTunes && seq % config_1.default.sync_period === 0) {
                 this.emit('need_sync', seq);
+                const nowMs = node_perf_hooks_1.performance.now();
                 const expectedTimeMs = this.rtpTimeRef +
                     ((seq * config_1.default.frames_per_packet) / config_1.default.sampling_rate) * 1000;
-                const deltaMs =
+                const deltaMs = nowMs - expectedTimeMs;
                 this.emit('metrics', { type: 'sync', seq, deltaMs, latencyFrames: this.latencyFrames });
             }
             this.emit('packet', packet);
             packet.release();
         };
+        const frameDurationMs = (config_1.default.frames_per_packet / config_1.default.sampling_rate) * 1000;
         const syncAudio = () => {
-            const
+            const nowMs = node_perf_hooks_1.performance.now();
+            const elapsed = nowMs - this.rtpTimeRef;
             if (elapsed < 0) {
                 setTimeout(syncAudio, Math.min(config_1.default.stream_latency, Math.abs(elapsed)));
                 return;
             }
-
+            let currentSeq = Math.floor((elapsed * config_1.default.sampling_rate) / (config_1.default.frames_per_packet * 1000));
+            // If we're lagging behind significantly, jump forward to avoid long hitches.
+            const expectedTimeMs = this.rtpTimeRef + currentSeq * frameDurationMs;
+            const deltaMs = nowMs - expectedTimeMs;
+            if (deltaMs > config_1.default.jump_forward_threshold_ms) {
+                const jumpSeq = Math.ceil((config_1.default.jump_forward_lead_ms * config_1.default.sampling_rate) /
+                    (config_1.default.frames_per_packet * 1000));
+                const newSeq = currentSeq + jumpSeq;
+                this.rtpTimeRef = nowMs - newSeq * frameDurationMs;
+                this.lastSeq = newSeq - 1;
+                currentSeq = newSeq;
+            }
             for (let i = this.lastSeq + 1; i <= currentSeq; i += 1) {
                 sendPacket(i);
             }
```
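The audioOut.js hunks above do two things: they anchor packet pacing to Node's monotonic clock (performance.now()) instead of Date.now(), so NTP steps or slews of the wall clock cannot skew scheduling, and they add a catch-up jump that re-anchors the reference when the sender falls far behind. The sketch below restates that technique in isolation. It is a minimal illustration, not the package's API: the PacketPacer class, its method names, and the numeric constants are assumptions (352 frames per packet at 44.1 kHz is the usual AirTunes framing), and the catch-up condition is a simplified variant keyed on how much audio is overdue since the last dispatch.

```js
// Illustrative sketch only -- not lox-airplay-sender's exported API.
const { performance } = require('node:perf_hooks');

const FRAMES_PER_PACKET = 352;          // assumption: AirTunes-typical framing
const SAMPLING_RATE = 44100;            // assumption
const JUMP_FORWARD_THRESHOLD_MS = 180;  // mirrors jump_forward_threshold_ms
const JUMP_FORWARD_LEAD_MS = 220;       // mirrors jump_forward_lead_ms
const FRAME_DURATION_MS = (FRAMES_PER_PACKET / SAMPLING_RATE) * 1000;

class PacketPacer {
  constructor(startTimeMs) {
    // Re-express the wall-clock start time on the monotonic timeline so a
    // later NTP adjustment of Date.now() cannot move the pacing reference.
    const wallToMonoOffset = Date.now() - performance.now();
    this.rtpTimeRef = (startTimeMs ?? Date.now()) - wallToMonoOffset;
    this.lastSeq = -1;
  }

  // Sequence numbers that should be sent right now, in order.
  duePackets() {
    const nowMs = performance.now();
    const elapsed = nowMs - this.rtpTimeRef;
    if (elapsed < 0) {
      return []; // start time is still in the future
    }
    let currentSeq = Math.floor(elapsed / FRAME_DURATION_MS);
    // Simplified catch-up condition: how much audio is overdue since the last
    // dispatch. Jump forward and re-anchor instead of bursting stale packets.
    const backlogMs = (currentSeq - this.lastSeq) * FRAME_DURATION_MS;
    if (backlogMs > JUMP_FORWARD_THRESHOLD_MS) {
      currentSeq += Math.ceil(JUMP_FORWARD_LEAD_MS / FRAME_DURATION_MS);
      this.rtpTimeRef = nowMs - currentSeq * FRAME_DURATION_MS;
      this.lastSeq = currentSeq - 1;
    }
    const due = [];
    for (let seq = this.lastSeq + 1; seq <= currentSeq; seq += 1) {
      due.push(seq);
    }
    this.lastSeq = currentSeq;
    return due;
  }
}

// Usage: poll about once per packet duration and send whatever is due.
const pacer = new PacketPacer(Date.now());
const timer = setInterval(() => {
  for (const seq of pacer.duePackets()) {
    // hand `seq` to the real packet sender here
  }
}, Math.max(1, Math.round(FRAME_DURATION_MS)));
timer.unref(); // let the sketch exit if nothing else keeps the loop alive
```

The same monotonic reference is what makes the metrics event in the diff meaningful: deltaMs is the gap between when a sync packet is actually emitted and where the reference says it should sit.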
package/dist/core/rtsp.js
CHANGED
```diff
@@ -1636,7 +1636,8 @@ Client.prototype.processData = function (blob, rawData) {
             ;
             break;
         case SETPROGRESS:
-
+            // After reporting progress, stay in PLAYING; avoid forcing FLUSH on every update.
+            this.status = PLAYING;
             break;
         case SETDAAP:
             this.status = PLAYING;
```
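The rtsp.js change is small but behavioral: after a progress update (SETPROGRESS) is acknowledged, the client's status is explicitly set back to PLAYING instead of being left for later logic to reinterpret, which is what could force a FLUSH on every update. The fragment below is a generic illustration of that pattern; the MiniClient shape is invented and does not reproduce the package's Client.

```js
// Illustrative only -- the real Client lives in rtsp.js and has many more states.
const PLAYING = 'PLAYING';

class MiniClient {
  constructor() {
    this.status = PLAYING;
  }
  // Called once the response to the given request type has been parsed.
  handleResponse(requestType) {
    switch (requestType) {
      case 'SETPROGRESS':
        // Progress reports are informational: return to PLAYING so later
        // processing never treats an update as a flush/teardown boundary.
        this.status = PLAYING;
        break;
      case 'SETDAAP':
        this.status = PLAYING;
        break;
      default:
        break;
    }
    return this.status;
  }
}

const client = new MiniClient();
console.log(client.handleResponse('SETPROGRESS')); // 'PLAYING' -- playback keeps going
```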
package/dist/esm/core/audioOut.js
CHANGED
```diff
@@ -4,6 +4,7 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
 };
 Object.defineProperty(exports, "__esModule", { value: true });
 const node_events_1 = require("node:events");
+const node_perf_hooks_1 = require("node:perf_hooks");
 const config_1 = __importDefault(require("../utils/config"));
 const numUtil_1 = require("../utils/numUtil");
 const SEQ_NUM_WRAP = Math.pow(2, 16);
@@ -14,7 +15,8 @@ const SEQ_NUM_WRAP = Math.pow(2, 16);
 class AudioOut extends node_events_1.EventEmitter {
     lastSeq = -1;
     hasAirTunes = false;
-    rtpTimeRef =
+    rtpTimeRef = 0;
+    monotonicRef = 0;
     startTimeMs;
     latencyFrames = 0;
     latencyApplied = false;
@@ -29,7 +31,10 @@ class AudioOut extends node_events_1.EventEmitter {
             typeof startTimeMs === 'number' && Number.isFinite(startTimeMs)
                 ? startTimeMs
                 : undefined;
-
+        const wallToMonoOffset = Date.now() - node_perf_hooks_1.performance.now();
+        // Anchor the RTP clock to a monotonic base to avoid NTP slews.
+        this.rtpTimeRef = (this.startTimeMs ?? Date.now()) - wallToMonoOffset;
+        this.monotonicRef = node_perf_hooks_1.performance.now();
         devices.on('airtunes_devices', (hasAirTunes) => {
             this.hasAirTunes = hasAirTunes;
         });
@@ -42,21 +47,35 @@ class AudioOut extends node_events_1.EventEmitter {
             packet.timestamp = (0, numUtil_1.low32)(seq * config_1.default.frames_per_packet + 2 * config_1.default.sampling_rate);
             if (this.hasAirTunes && seq % config_1.default.sync_period === 0) {
                 this.emit('need_sync', seq);
+                const nowMs = node_perf_hooks_1.performance.now();
                 const expectedTimeMs = this.rtpTimeRef +
                     ((seq * config_1.default.frames_per_packet) / config_1.default.sampling_rate) * 1000;
-                const deltaMs =
+                const deltaMs = nowMs - expectedTimeMs;
                 this.emit('metrics', { type: 'sync', seq, deltaMs, latencyFrames: this.latencyFrames });
             }
             this.emit('packet', packet);
             packet.release();
         };
+        const frameDurationMs = (config_1.default.frames_per_packet / config_1.default.sampling_rate) * 1000;
         const syncAudio = () => {
-            const
+            const nowMs = node_perf_hooks_1.performance.now();
+            const elapsed = nowMs - this.rtpTimeRef;
             if (elapsed < 0) {
                 setTimeout(syncAudio, Math.min(config_1.default.stream_latency, Math.abs(elapsed)));
                 return;
             }
-
+            let currentSeq = Math.floor((elapsed * config_1.default.sampling_rate) / (config_1.default.frames_per_packet * 1000));
+            // If we're lagging behind significantly, jump forward to avoid long hitches.
+            const expectedTimeMs = this.rtpTimeRef + currentSeq * frameDurationMs;
+            const deltaMs = nowMs - expectedTimeMs;
+            if (deltaMs > config_1.default.jump_forward_threshold_ms) {
+                const jumpSeq = Math.ceil((config_1.default.jump_forward_lead_ms * config_1.default.sampling_rate) /
+                    (config_1.default.frames_per_packet * 1000));
+                const newSeq = currentSeq + jumpSeq;
+                this.rtpTimeRef = nowMs - newSeq * frameDurationMs;
+                this.lastSeq = newSeq - 1;
+                currentSeq = newSeq;
+            }
             for (let i = this.lastSeq + 1; i <= currentSeq; i += 1) {
                 sendPacket(i);
             }
```
package/dist/esm/core/rtsp.js
CHANGED
```diff
@@ -1636,7 +1636,8 @@ Client.prototype.processData = function (blob, rawData) {
             ;
             break;
         case SETPROGRESS:
-
+            // After reporting progress, stay in PLAYING; avoid forcing FLUSH on every update.
+            this.status = PLAYING;
             break;
         case SETDAAP:
             this.status = PLAYING;
```
package/dist/esm/utils/config.js
CHANGED
```diff
@@ -27,6 +27,8 @@ exports.config = {
     rtsp_retry_jitter_ms: 150,
     control_sync_base_delay_ms: 2,
     control_sync_jitter_ms: 3,
+    jump_forward_threshold_ms: 180,
+    jump_forward_lead_ms: 220,
     device_magic: (0, numUtil_1.randomInt)(9),
     ntp_epoch: 0x83aa7e80,
     iv_base64: 'ePRBLI0XN5ArFaaz7ncNZw',
```
package/dist/utils/config.d.ts
CHANGED
```diff
@@ -22,6 +22,8 @@ export interface AirplayConfig {
     rtsp_retry_jitter_ms: number;
     control_sync_base_delay_ms: number;
     control_sync_jitter_ms: number;
+    jump_forward_threshold_ms: number;
+    jump_forward_lead_ms: number;
     device_magic: number;
     ntp_epoch: number;
     iv_base64: string;
```
package/dist/utils/config.js
CHANGED
```diff
@@ -27,6 +27,8 @@ exports.config = {
     rtsp_retry_jitter_ms: 150,
     control_sync_base_delay_ms: 2,
     control_sync_jitter_ms: 3,
+    jump_forward_threshold_ms: 180,
+    jump_forward_lead_ms: 220,
     device_magic: (0, numUtil_1.randomInt)(9),
     ntp_epoch: 0x83aa7e80,
     iv_base64: 'ePRBLI0XN5ArFaaz7ncNZw',
```
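For a sense of scale on the two new config keys (jump_forward_threshold_ms and jump_forward_lead_ms): assuming the usual AirTunes framing of 352 frames per packet at 44.1 kHz (an assumption; those values are defined elsewhere in config and are not part of this diff), one packet carries roughly 8 ms of audio, so the jump logic in audioOut.js skips ahead by about 28 packets once it decides to re-anchor. A quick back-of-envelope check:

```js
// Back-of-envelope check of the new thresholds. frames_per_packet and
// sampling_rate are assumed AirTunes-typical values, not taken from this diff.
const config = {
  frames_per_packet: 352,        // assumption
  sampling_rate: 44100,          // assumption
  jump_forward_threshold_ms: 180,
  jump_forward_lead_ms: 220,
};

const frameDurationMs = (config.frames_per_packet / config.sampling_rate) * 1000;
const thresholdPackets = config.jump_forward_threshold_ms / frameDurationMs;
const jumpPackets = Math.ceil(
  (config.jump_forward_lead_ms * config.sampling_rate) / (config.frames_per_packet * 1000));

console.log(frameDurationMs.toFixed(2));  // ~7.98 ms of audio per packet
console.log(thresholdPackets.toFixed(1)); // ~22.6 packets of lag at the threshold
console.log(jumpPackets);                 // 28 packets (~223 ms) jumped forward
```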