rollback-netcode 0.0.4

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (83)
  1. package/LICENSE +21 -0
  2. package/README.md +140 -0
  3. package/dist/debug.d.ts +29 -0
  4. package/dist/debug.d.ts.map +1 -0
  5. package/dist/debug.js +56 -0
  6. package/dist/debug.js.map +1 -0
  7. package/dist/index.d.ts +62 -0
  8. package/dist/index.d.ts.map +1 -0
  9. package/dist/index.js +57 -0
  10. package/dist/index.js.map +1 -0
  11. package/dist/protocol/encoding.d.ts +80 -0
  12. package/dist/protocol/encoding.d.ts.map +1 -0
  13. package/dist/protocol/encoding.js +992 -0
  14. package/dist/protocol/encoding.js.map +1 -0
  15. package/dist/protocol/messages.d.ts +271 -0
  16. package/dist/protocol/messages.d.ts.map +1 -0
  17. package/dist/protocol/messages.js +114 -0
  18. package/dist/protocol/messages.js.map +1 -0
  19. package/dist/rollback/engine.d.ts +261 -0
  20. package/dist/rollback/engine.d.ts.map +1 -0
  21. package/dist/rollback/engine.js +543 -0
  22. package/dist/rollback/engine.js.map +1 -0
  23. package/dist/rollback/input-buffer.d.ts +225 -0
  24. package/dist/rollback/input-buffer.d.ts.map +1 -0
  25. package/dist/rollback/input-buffer.js +483 -0
  26. package/dist/rollback/input-buffer.js.map +1 -0
  27. package/dist/rollback/snapshot-buffer.d.ts +119 -0
  28. package/dist/rollback/snapshot-buffer.d.ts.map +1 -0
  29. package/dist/rollback/snapshot-buffer.js +256 -0
  30. package/dist/rollback/snapshot-buffer.js.map +1 -0
  31. package/dist/session/desync-manager.d.ts +106 -0
  32. package/dist/session/desync-manager.d.ts.map +1 -0
  33. package/dist/session/desync-manager.js +136 -0
  34. package/dist/session/desync-manager.js.map +1 -0
  35. package/dist/session/lag-monitor.d.ts +69 -0
  36. package/dist/session/lag-monitor.d.ts.map +1 -0
  37. package/dist/session/lag-monitor.js +74 -0
  38. package/dist/session/lag-monitor.js.map +1 -0
  39. package/dist/session/message-builders.d.ts +86 -0
  40. package/dist/session/message-builders.d.ts.map +1 -0
  41. package/dist/session/message-builders.js +199 -0
  42. package/dist/session/message-builders.js.map +1 -0
  43. package/dist/session/message-router.d.ts +61 -0
  44. package/dist/session/message-router.d.ts.map +1 -0
  45. package/dist/session/message-router.js +105 -0
  46. package/dist/session/message-router.js.map +1 -0
  47. package/dist/session/player-manager.d.ts +100 -0
  48. package/dist/session/player-manager.d.ts.map +1 -0
  49. package/dist/session/player-manager.js +160 -0
  50. package/dist/session/player-manager.js.map +1 -0
  51. package/dist/session/session.d.ts +379 -0
  52. package/dist/session/session.d.ts.map +1 -0
  53. package/dist/session/session.js +1294 -0
  54. package/dist/session/session.js.map +1 -0
  55. package/dist/session/topology.d.ts +66 -0
  56. package/dist/session/topology.d.ts.map +1 -0
  57. package/dist/session/topology.js +72 -0
  58. package/dist/session/topology.js.map +1 -0
  59. package/dist/transport/adapter.d.ts +99 -0
  60. package/dist/transport/adapter.d.ts.map +1 -0
  61. package/dist/transport/adapter.js +8 -0
  62. package/dist/transport/adapter.js.map +1 -0
  63. package/dist/transport/local.d.ts +192 -0
  64. package/dist/transport/local.d.ts.map +1 -0
  65. package/dist/transport/local.js +435 -0
  66. package/dist/transport/local.js.map +1 -0
  67. package/dist/transport/transforming.d.ts +177 -0
  68. package/dist/transport/transforming.d.ts.map +1 -0
  69. package/dist/transport/transforming.js +407 -0
  70. package/dist/transport/transforming.js.map +1 -0
  71. package/dist/transport/webrtc.d.ts +285 -0
  72. package/dist/transport/webrtc.d.ts.map +1 -0
  73. package/dist/transport/webrtc.js +734 -0
  74. package/dist/transport/webrtc.js.map +1 -0
  75. package/dist/types.d.ts +394 -0
  76. package/dist/types.d.ts.map +1 -0
  77. package/dist/types.js +256 -0
  78. package/dist/types.js.map +1 -0
  79. package/dist/utils/rate-limiter.d.ts +59 -0
  80. package/dist/utils/rate-limiter.d.ts.map +1 -0
  81. package/dist/utils/rate-limiter.js +93 -0
  82. package/dist/utils/rate-limiter.js.map +1 -0
  83. package/package.json +61 -0
@@ -0,0 +1,1294 @@
1
+ /**
2
+ * High-level session manager that orchestrates the rollback engine and transport.
3
+ *
4
+ * Handles room management, player join/leave, message routing, and desync detection.
5
+ */
6
+ import { DEFAULT_SESSION_CONFIG, ErrorSource, PauseReason, PlayerConnectionState, PlayerRole, SessionState, Topology, asPlayerId, asTick, playerIdToPeerId, validatePlayerId, validateSessionConfig, } from "../types.js";
7
+ import { createDebugLogger } from "../debug.js";
8
+ import { encodeMessage } from "../protocol/encoding.js";
9
+ import { isReliableMessage, } from "../protocol/messages.js";
10
+ import { RollbackEngine, } from "../rollback/engine.js";
11
+ import { RateLimiter } from "../utils/rate-limiter.js";
12
+ import { DesyncManager } from "./desync-manager.js";
13
+ import { DEFAULT_LAG_REPORT_COOLDOWN_TICKS, LagMonitor, } from "./lag-monitor.js";
14
+ import { createDisconnectReport, createDropPlayer, createHash, createInput, createJoinAccept, createJoinReject, createJoinRequest, createLagReport, createPause, createPing, createPlayerJoined, createPlayerLeft, createPong, createResume, createResumeCountdown, createStateSync, createSync, createSyncRequest, } from "./message-builders.js";
15
+ import { MessageRouter } from "./message-router.js";
16
+ import { PlayerManager } from "./player-manager.js";
17
+ import { createTopologyStrategy } from "./topology.js";
18
/** Interval for cleaning up stale rate limit entries (in milliseconds; 1 minute) */
const RATE_LIMIT_CLEANUP_INTERVAL_MS = 60000;
20
/**
 * Generate a cryptographically random room ID.
 *
 * Preference order:
 *   1. crypto.randomUUID() — first 8 hex characters of a v4 UUID.
 *   2. crypto.getRandomValues() — 8 characters drawn from [a-z0-9].
 *   3. Math.random() — last resort only; NOT cryptographically secure.
 *
 * @returns {string} An 8-character room code.
 */
function generateRoomId() {
    if (typeof crypto !== "undefined" && crypto.randomUUID) {
        // Take first 8 characters of UUID for shorter room codes
        // (the first hyphen in a UUID appears at index 8, so this is pure hex).
        return crypto.randomUUID().slice(0, 8);
    }
    const chars = "abcdefghijklmnopqrstuvwxyz0123456789";
    if (typeof crypto !== "undefined" && crypto.getRandomValues) {
        // Secure fallback when randomUUID is unavailable but Web Crypto is.
        // The modulo introduces a tiny bias (256 % 36 !== 0), acceptable for
        // short room codes that are not secrets of cryptographic strength.
        const bytes = new Uint8Array(8);
        crypto.getRandomValues(bytes);
        let id = "";
        for (const byte of bytes) {
            id += chars[byte % chars.length];
        }
        return id;
    }
    // Last-resort fallback for environments without any crypto support.
    // Math.random() is predictable; room IDs generated here are guessable.
    let id = "";
    for (let i = 0; i < 8; i++) {
        id += chars[Math.floor(Math.random() * chars.length)];
    }
    return id;
}
39
+ /**
40
+ * Session manager that coordinates the rollback engine with network transport.
41
+ */
42
+ export class Session {
43
    // --- Core collaborators (assigned in the constructor) ---
    game; // game integration object supplied via options.game
    transport; // network transport adapter supplied via options.transport
    config; // merged configuration (DEFAULT_SESSION_CONFIG + options.config)
    engine; // RollbackEngine instance created in the constructor
    eventHandlers = new Map(); // event name -> Set of handler callbacks
    _localPlayerId; // validated local player ID (from options or transport peer ID)
    topologyStrategy; // strategy object built from config.topology
    debug; // debug logger built from config.debug
    // Extracted components
    playerManager; // player roster (join/leave ticks, roles, connection state)
    desyncManager; // hash comparison / desync authority logic
    lagMonitor; // tracks lag thresholds and report cooldowns
    joinRateLimiter; // rate limiter for incoming join requests
    messageRouter; // decodes raw transport bytes and dispatches to handlers
    // --- Session state ---
    _state = SessionState.Disconnected; // exposed via the `state` getter
    _isHost = false; // exposed via the `isHost` getter
    _roomId = null; // current room ID, or null when not in a room
    _localRole = PlayerRole.Player; // exposed via the `localRole` getter
    lastHashBroadcastTick = asTick(-1); // last tick a state hash was broadcast
    inputRedundancy; // copied from config.inputRedundancy in the constructor
    /** Timer for periodic rate limit cleanup */
    rateLimitCleanupTimer = null;
    /** Buffer for deferred hash comparison (processed after rollback in tick()) */
    pendingHashMessages = [];
    /** Tracks players whose playerJoined event has been emitted to prevent duplicates during resimulation */
    emittedJoinEvents = new Set();
    /** Number of RTT samples to keep for averaging */
    static RTT_SAMPLE_COUNT = 5;
    /** RTT tracking: peerId -> { pendingPings: Map<timestamp, sendTime>, rtt: number } */
    peerRttData = new Map();
73
    /**
     * Create a new session.
     *
     * Wires together the player manager, desync manager, lag monitor, join
     * rate limiter, message router, and rollback engine, and installs the
     * transport callbacks. No network activity happens here; call
     * createRoom()/joinRoom() afterwards.
     *
     * @param options - { game, transport, config?, localPlayerId?, inputPredictor? }
     * @throws ValidationError if config values are invalid
     */
    constructor(options) {
        this.game = options.game;
        this.transport = options.transport;
        // Caller overrides win over defaults (shallow merge).
        this.config = { ...DEFAULT_SESSION_CONFIG, ...options.config };
        // Validate configuration
        validateSessionConfig(this.config);
        // Fall back to the transport's peer ID when no explicit player ID given.
        this._localPlayerId =
            options.localPlayerId ?? asPlayerId(this.transport.localPeerId);
        // Validate local player ID
        validatePlayerId(this._localPlayerId);
        // Create topology strategy
        this.topologyStrategy = createTopologyStrategy(this.config.topology);
        // Create debug logger
        this.debug = createDebugLogger(this.config.debug);
        // Warn about Mesh topology with many players
        if (this.config.topology === Topology.Mesh && this.config.maxPlayers > 4) {
            console.warn("[rollback-netcode] Mesh topology with >4 players is not recommended. " +
                "Mesh requires N×(N-1)/2 connections which scales poorly. " +
                "Consider using Star topology for better performance.");
        }
        // Initialize extracted components
        this.playerManager = new PlayerManager();
        this.desyncManager = new DesyncManager({
            topology: this.config.topology,
            desyncAuthority: this.config.desyncAuthority,
            hashInterval: this.config.hashInterval,
        });
        this.lagMonitor = new LagMonitor({
            threshold: this.config.lagReportThreshold,
            cooldownTicks: DEFAULT_LAG_REPORT_COOLDOWN_TICKS,
        });
        this.joinRateLimiter = new RateLimiter({
            maxRequests: this.config.joinRateLimitRequests,
            windowMs: this.config.joinRateLimitWindowMs,
        });
        this.inputRedundancy = this.config.inputRedundancy;
        // Create message router with handlers.
        // Note: createMessageHandlers() captures `this` via arrow functions,
        // and decode failures are funneled into handleDecodeError.
        this.messageRouter = new MessageRouter(this.createMessageHandlers(), this.handleDecodeError.bind(this));
        const engineConfig = {
            game: this.game,
            localPlayerId: this._localPlayerId,
            snapshotHistorySize: this.config.snapshotHistorySize,
            maxSpeculationTicks: this.config.maxSpeculationTicks,
            // During resimulation, emit playerJoined when crossing a player's joinTick
            // so the game layer can re-add players that were lost during snapshot restore.
            // Skip if we've already emitted for this player (prevents duplicate events).
            onPlayerAddDuringResimulation: (playerId, _tick) => {
                if (this.emittedJoinEvents.has(playerId)) {
                    return; // Already emitted, skip duplicate
                }
                const playerInfo = this.playerManager.getPlayer(playerId);
                if (playerInfo) {
                    this.emittedJoinEvents.add(playerId);
                    this.emit("playerJoined", playerInfo);
                }
            },
            // During resimulation, emit playerLeft when crossing a player's leaveTick
            onPlayerRemoveDuringResimulation: (playerId, _tick) => {
                const playerInfo = this.playerManager.getPlayer(playerId);
                if (playerInfo) {
                    this.emit("playerLeft", playerInfo);
                }
            },
            // When rollback occurs, clear emittedJoinEvents for players whose joinTick
            // is after the restore tick, since they need to be re-added during resimulation
            onRollback: (restoreTick) => {
                for (const playerId of this.emittedJoinEvents) {
                    const playerInfo = this.playerManager.getPlayer(playerId);
                    if (playerInfo &&
                        playerInfo.joinTick !== null &&
                        playerInfo.joinTick > restoreTick) {
                        this.emittedJoinEvents.delete(playerId);
                    }
                }
            },
        };
        // Optional custom input predictor (engine falls back to its own default
        // when absent — presumably; confirm against RollbackEngine).
        if (options.inputPredictor) {
            engineConfig.inputPredictor = options.inputPredictor;
        }
        this.engine = new RollbackEngine(engineConfig);
        // Initialize local player info
        this.playerManager.addPlayer({
            id: this._localPlayerId,
            connectionState: PlayerConnectionState.Connected,
            joinTick: null,
            leaveTick: null,
            isHost: false,
            role: PlayerRole.Player,
        });
        // Setup transport callbacks
        this.transport.onMessage = this.handleMessage.bind(this);
        this.transport.onConnect = this.handlePeerConnect.bind(this);
        this.transport.onDisconnect = this.handlePeerDisconnect.bind(this);
        // Setup keepalive callback if transport supports it
        const transportWithKeepalive = this.transport;
        if ("onKeepalivePing" in transportWithKeepalive) {
            transportWithKeepalive.onKeepalivePing = (peerId) => {
                this.sendPing(peerId);
            };
        }
        // Start periodic rate limit cleanup (unref to not block process exit)
        this.rateLimitCleanupTimer = setInterval(() => {
            this.joinRateLimiter.cleanup();
        }, RATE_LIMIT_CLEANUP_INTERVAL_MS);
        // unref() exists on Node timers but not in browsers — guard before calling.
        if (this.rateLimitCleanupTimer.unref) {
            this.rateLimitCleanupTimer.unref();
        }
    }
185
+ /**
186
+ * Create message handlers bound to this session.
187
+ */
188
+ createMessageHandlers() {
189
+ return {
190
+ onInput: (msg) => this.handleInputMessage(msg),
191
+ onHash: (msg) => this.handleHashMessage(msg),
192
+ onSync: (msg) => this.handleSyncMessage(msg),
193
+ onSyncRequest: (msg) => this.handleSyncRequest(msg),
194
+ onJoinRequest: (msg, peerId) => this.handleJoinRequest(peerId, msg),
195
+ onJoinAccept: (msg) => this.handleJoinAccept(msg),
196
+ onJoinReject: (msg) => this.handleJoinReject(msg),
197
+ onPlayerJoined: (msg) => this.handlePlayerJoined(msg),
198
+ onPlayerLeft: (msg) => this.handlePlayerLeft(msg),
199
+ onPause: (msg) => this.handlePause(msg),
200
+ onResume: (msg) => this.handleResume(msg),
201
+ onPing: (msg, peerId) => this.handlePing(peerId, msg),
202
+ onPong: (msg, peerId) => this.handlePong(peerId, msg),
203
+ onDisconnectReport: (msg) => this.handleDisconnectReport(msg),
204
+ onLagReport: (msg) => this.handleLagReport(msg),
205
+ onResumeCountdown: (msg) => this.handleResumeCountdown(msg),
206
+ onDropPlayer: (msg) => this.handleDropPlayer(msg),
207
+ };
208
+ }
209
    /**
     * Current session state.
     */
    get state() {
        return this._state;
    }
    /**
     * Map of all players in the session (read-only view from the PlayerManager).
     */
    get players() {
        return this.playerManager.asReadonlyMap();
    }
    /**
     * Local player's ID.
     */
    get localPlayerId() {
        return this._localPlayerId;
    }
    /**
     * Whether this session is the host.
     */
    get isHost() {
        return this._isHost;
    }
    /**
     * Current room ID (null if not in a room).
     */
    get roomId() {
        return this._roomId;
    }
    /**
     * Local player's role in the session.
     */
    get localRole() {
        return this._localRole;
    }
245
+ /**
246
+ * Set the local player's role.
247
+ * Must be called before joining a room or starting the game.
248
+ *
249
+ * @param role - The role to set ('pilot' or 'spectator')
250
+ * @throws Error if called while in a room
251
+ */
252
+ setLocalRole(role) {
253
+ if (this._state !== SessionState.Disconnected) {
254
+ throw new Error("Cannot change role while in a room");
255
+ }
256
+ this._localRole = role;
257
+ const localPlayer = this.playerManager.getPlayer(this.localPlayerId);
258
+ if (localPlayer) {
259
+ localPlayer.role = role;
260
+ }
261
+ }
262
    /**
     * Current tick from the engine.
     */
    get currentTick() {
        return this.engine.currentTick;
    }
    /**
     * Confirmed tick from the engine.
     */
    get confirmedTick() {
        return this.engine.confirmedTick;
    }
274
+ /**
275
+ * Get connection quality metrics for a player.
276
+ *
277
+ * @param playerId - The player's ID
278
+ * @returns Connection metrics or null if not available
279
+ */
280
+ getPlayerMetrics(playerId) {
281
+ // Local player doesn't have network metrics
282
+ if (playerId === this.localPlayerId) {
283
+ return null;
284
+ }
285
+ const peerId = playerIdToPeerId(playerId);
286
+ // Check if transport supports metrics
287
+ if (this.transport.getConnectionMetrics) {
288
+ return this.transport.getConnectionMetrics(peerId);
289
+ }
290
+ return null;
291
+ }
292
+ /**
293
+ * Destroy the session and clean up resources.
294
+ * Call this when the session is no longer needed.
295
+ */
296
+ destroy() {
297
+ this.leaveRoom();
298
+ // Stop rate limit cleanup timer
299
+ if (this.rateLimitCleanupTimer) {
300
+ clearInterval(this.rateLimitCleanupTimer);
301
+ this.rateLimitCleanupTimer = null;
302
+ }
303
+ // Clear component state
304
+ this.joinRateLimiter.clear();
305
+ this.lagMonitor.clear();
306
+ this.desyncManager.clear();
307
+ this.pendingHashMessages = [];
308
+ // Remove transport callbacks
309
+ this.transport.onMessage = null;
310
+ this.transport.onConnect = null;
311
+ this.transport.onDisconnect = null;
312
+ }
313
+ /**
314
+ * Create a new room and become the host.
315
+ *
316
+ * @returns The room ID
317
+ */
318
+ async createRoom() {
319
+ if (this._state !== SessionState.Disconnected) {
320
+ throw new Error("Already in a room or connecting");
321
+ }
322
+ this._roomId = generateRoomId();
323
+ this._isHost = true;
324
+ // Update local player to be host
325
+ const localPlayer = this.playerManager.getPlayer(this.localPlayerId);
326
+ if (localPlayer) {
327
+ localPlayer.isHost = true;
328
+ localPlayer.joinTick = asTick(0);
329
+ }
330
+ this.setState(SessionState.Lobby);
331
+ return this._roomId;
332
+ }
333
    /**
     * Join an existing room.
     *
     * Connects to the host over the transport, then sends a join request
     * carrying the local player ID and role. The host's reply is processed
     * by the onJoinAccept/onJoinReject message handlers.
     *
     * @param roomId - The room ID to join
     * @param hostPeerId - The host's peer ID
     * @throws Error when already in a room or connecting
     */
    async joinRoom(roomId, hostPeerId) {
        if (this._state !== SessionState.Disconnected) {
            throw new Error("Already in a room or connecting");
        }
        this._roomId = roomId;
        this._isHost = false;
        this.setState(SessionState.Connecting);
        // Connect to host
        await this.transport.connect(hostPeerId);
        // Send join request with role
        this.sendToHost(createJoinRequest(this.localPlayerId, this._localRole));
    }
351
    /**
     * Leave the current room.
     *
     * Notifies peers (when playing), disconnects all transport links, resets
     * room/engine state, and re-registers the local player so the session can
     * be reused. Safe to call when already disconnected (no-op).
     */
    leaveRoom() {
        if (this._state === SessionState.Disconnected) {
            return;
        }
        // Notify other players
        if (this._state === SessionState.Playing) {
            this.broadcast(createPlayerLeft(this.localPlayerId, this.engine.currentTick), true);
        }
        // Disconnect from all peers
        this.transport.disconnectAll();
        // Reset state
        this._roomId = null;
        this._isHost = false;
        this.playerManager.clear();
        this.emittedJoinEvents.clear();
        this.engine.reset();
        // Re-add local player
        // NOTE(review): the role is reset to PlayerRole.Player here even when
        // _localRole was set to something else via setLocalRole() — confirm
        // this reset is intentional.
        this.playerManager.addPlayer({
            id: this.localPlayerId,
            connectionState: PlayerConnectionState.Connected,
            joinTick: null,
            leaveTick: null,
            isHost: false,
            role: PlayerRole.Player,
        });
        this.setState(SessionState.Disconnected);
    }
381
    /**
     * Start the game (host only).
     * Transitions from lobby to playing state.
     *
     * Sets joinTick = 0 for every player, registers input-contributing
     * players with the engine, broadcasts an initial state sync, then
     * enters Playing and emits "gameStart".
     *
     * @throws Error when called by a non-host or outside the lobby state
     */
    start() {
        if (!this._isHost) {
            throw new Error("Only the host can start the game");
        }
        if (this._state !== SessionState.Lobby) {
            throw new Error("Can only start from lobby state");
        }
        // Set join ticks for all players, but only add players to engine
        const startTick = asTick(0);
        for (const player of this.playerManager) {
            player.joinTick = startTick;
            // Only players contribute inputs - spectators run simulation but don't send inputs
            if (player.role === PlayerRole.Player) {
                this.engine.addPlayer(player.id, startTick);
            }
        }
        // Send state sync to all players
        const state = this.engine.getState();
        this.broadcast(createStateSync(state.tick, state.state, this.engine.getCurrentHash(), state.playerTimeline), true);
        this.setState(SessionState.Playing);
        this.emit("gameStart");
    }
407
    /**
     * Pause the game (host only).
     *
     * Broadcasts a Pause message and enters the Paused state.
     * No-op unless currently Playing.
     *
     * @param reason - Why the game is pausing (defaults to PauseReason.PlayerRequest)
     * @throws Error when called by a non-host
     */
    pause(reason = PauseReason.PlayerRequest) {
        if (!this._isHost) {
            throw new Error("Only the host can pause");
        }
        if (this._state !== SessionState.Playing) {
            return;
        }
        this.broadcast(createPause(this.localPlayerId, this.engine.currentTick, reason), true);
        this.setState(SessionState.Paused);
    }
    /**
     * Resume the game (host only).
     *
     * Broadcasts a Resume message and re-enters Playing.
     * No-op unless currently Paused.
     *
     * @throws Error when called by a non-host
     */
    resume() {
        if (!this._isHost) {
            throw new Error("Only the host can resume");
        }
        if (this._state !== SessionState.Paused) {
            return;
        }
        this.broadcast(createResume(this.localPlayerId, this.engine.currentTick), true);
        this.setState(SessionState.Playing);
    }
433
    /**
     * Send a resume countdown to all players (host only).
     * Use this before calling resume() to give players time to prepare.
     *
     * No-op unless currently Paused. Also emits "resumeCountdown" locally.
     *
     * @param secondsRemaining - Number of seconds until resume
     * @throws Error when called by a non-host
     */
    sendResumeCountdown(secondsRemaining) {
        if (!this._isHost) {
            throw new Error("Only the host can send resume countdown");
        }
        if (this._state !== SessionState.Paused) {
            return;
        }
        this.broadcast(createResumeCountdown(secondsRemaining), true);
        // Also emit locally so host's game layer can react
        this.emit("resumeCountdown", secondsRemaining);
    }
450
    /**
     * Drop a player from the game (host only).
     * Use this to remove a disconnected or lagging player and allow the game to continue.
     *
     * Marks the player disconnected locally, broadcasts DropPlayer to peers,
     * and emits "playerDropped". Unknown player IDs are silently ignored.
     *
     * @param playerId - The player to drop
     * @param metadata - Optional metadata (e.g., AI replacement info)
     * @throws Error when called by a non-host
     */
    dropPlayer(playerId, metadata) {
        if (!this._isHost) {
            throw new Error("Only the host can drop players");
        }
        const player = this.playerManager.getPlayer(playerId);
        if (!player) {
            return;
        }
        // Mark player as disconnected locally
        this.markPlayerDisconnected(playerId);
        // Broadcast DropPlayer to all other players
        this.broadcast(createDropPlayer(playerId, metadata), true);
        // Emit locally
        this.emit("playerDropped", playerId, metadata);
    }
472
    /**
     * Advance the simulation by one tick.
     * Call this at your game's tick rate (e.g., 60 times per second).
     *
     * Order of operations: apply/broadcast local input, run the engine tick
     * (which may roll back and resimulate), surface engine errors, then run
     * the deferred hash comparison, periodic hash broadcast, and lag checks.
     *
     * @param localInput - The local player's input for this tick (required for player roles, ignored for spectators)
     * @returns Tick result with rollback info
     * @throws Error when a player role provides no input
     */
    tick(localInput) {
        // Outside of Playing, do nothing and report the current tick unchanged.
        if (this._state !== SessionState.Playing) {
            return {
                tick: this.engine.currentTick,
                rolledBack: false,
            };
        }
        const currentTick = this.engine.currentTick;
        // Only players send inputs
        if (this._localRole === PlayerRole.Player) {
            if (!localInput) {
                throw new Error("Players must provide input");
            }
            // Set local input
            this.engine.setLocalInput(currentTick, localInput);
            // Broadcast local input to all peers
            this.broadcastInput(currentTick, localInput);
        }
        // Run the engine tick (both pilots and spectators run simulation)
        const result = this.engine.tick();
        // Handle rollback errors (e.g., "cannot rollback: no snapshots available")
        // A failed rollback means we couldn't correct a misprediction, which may lead to desync
        if (result.error) {
            this.debug.warn("Rollback error", {
                tick: result.tick,
                error: result.error.message,
            });
            this.emitError(result.error, {
                source: ErrorSource.Engine,
                recoverable: true,
                details: {
                    tick: result.tick,
                    rollbackTarget: result.error.tick,
                },
            });
            // Non-hosts should request sync to recover from potential desync
            if (!this._isHost) {
                this.requestSync();
            }
        }
        // Log rollback if it occurred
        if (result.rolledBack && result.rollbackTicks !== undefined) {
            this.debug.log("Rollback triggered", {
                tick: result.tick,
                rollbackTicks: result.rollbackTicks,
            });
            // Clear pending hashes for rolled-back ticks
            // Both our state and the sender's state may have changed
            const rollbackToTick = asTick(result.tick - result.rollbackTicks);
            this.pendingHashMessages = this.pendingHashMessages.filter((h) => h.tick < rollbackToTick);
        }
        // Process deferred hash comparisons (after rollback has corrected state)
        this.processPendingHashComparisons();
        // Periodic hash broadcast for desync detection
        this.maybeBroadcastHash();
        // Periodic lag check and report (guests only, in host-authority mode)
        this.checkAndReportLag();
        return result;
    }
538
+ /**
539
+ * Request state sync from host (for desync recovery).
540
+ */
541
+ requestSync() {
542
+ if (this._isHost) {
543
+ return; // Host doesn't need to request sync
544
+ }
545
+ this.sendToHost(createSyncRequest(this.localPlayerId, this.engine.currentTick, this.engine.getCurrentHash()));
546
+ }
547
+ /**
548
+ * Register an event handler.
549
+ */
550
+ on(event, handler) {
551
+ if (!this.eventHandlers.has(event)) {
552
+ this.eventHandlers.set(event, new Set());
553
+ }
554
+ this.eventHandlers.get(event)?.add(handler);
555
+ }
556
+ /**
557
+ * Remove an event handler.
558
+ */
559
+ off(event, handler) {
560
+ this.eventHandlers.get(event)?.delete(handler);
561
+ }
562
+ /**
563
+ * Remove all event handlers.
564
+ *
565
+ * Optionally specify an event type to only remove handlers for that event.
566
+ * Call this during cleanup to prevent memory leaks.
567
+ *
568
+ * @param event - Optional event type to clear handlers for
569
+ */
570
+ removeAllListeners(event) {
571
+ if (event !== undefined) {
572
+ this.eventHandlers.delete(event);
573
+ }
574
+ else {
575
+ this.eventHandlers.clear();
576
+ }
577
+ }
578
    /**
     * Emit an event to all registered handlers.
     *
     * Handler exceptions are caught so one faulty subscriber cannot break the
     * others; caught exceptions are surfaced via emitError, except when the
     * failing handler was itself an "error" handler (to avoid infinite
     * recursion).
     */
    emit(event, ...args) {
        const handlers = this.eventHandlers.get(event);
        if (handlers) {
            for (const handler of handlers) {
                try {
                    handler(...args);
                }
                catch (handlerError) {
                    // Avoid infinite recursion: don't emit error events for errors in error handlers
                    if (event !== "error") {
                        // Non-Error throwables are wrapped so subscribers always get an Error.
                        this.emitError(handlerError instanceof Error
                            ? handlerError
                            : new Error(String(handlerError)), {
                            source: ErrorSource.Session,
                            recoverable: true,
                            details: { event },
                        });
                    }
                }
            }
        }
    }
    /**
     * Emit an error event with context.
     *
     * @param error - The error to report
     * @param context - Structured context ({ source, recoverable, details })
     */
    emitError(error, context) {
        this.emit("error", error, context);
    }
609
    /**
     * Valid state transitions for the session state machine.
     * Consulted by setState(); any transition missing here throws.
     *
     * State machine:
     * ```
     * Disconnected ──→ Connecting ──→ Lobby ──→ Playing ⇄ Paused
     *      ↑               │            │          │         │
     *      └───────────────┴────────────┴─────────┴─────────┘
     *             (any state can go to Disconnected)
     * ```
     */
    static VALID_TRANSITIONS = new Map([
        [
            SessionState.Disconnected,
            // Lobby is reachable directly via createRoom() (no Connecting phase for the host)
            new Set([SessionState.Connecting, SessionState.Lobby]),
        ],
        [
            SessionState.Connecting,
            new Set([
                SessionState.Lobby,
                SessionState.Playing,
                SessionState.Disconnected,
            ]),
        ],
        [
            SessionState.Lobby,
            new Set([SessionState.Playing, SessionState.Disconnected]),
        ],
        [
            SessionState.Playing,
            new Set([SessionState.Paused, SessionState.Disconnected]),
        ],
        [
            SessionState.Paused,
            new Set([SessionState.Playing, SessionState.Disconnected]),
        ],
    ]);
646
    /**
     * Update session state and emit event.
     * Validates that the transition is allowed by the state machine.
     *
     * Emits "stateChange" with (newState, oldState). No-op when the state is
     * unchanged.
     *
     * @throws Error if the transition is not valid
     */
    setState(newState) {
        const oldState = this._state;
        if (oldState === newState)
            return;
        // Validate transition - fail fast on invalid transitions
        const validNextStates = Session.VALID_TRANSITIONS.get(oldState);
        if (!validNextStates?.has(newState)) {
            throw new Error(`Invalid state transition: ${SessionState[oldState]} → ${SessionState[newState]}`);
        }
        this._state = newState;
        this.emit("stateChange", newState, oldState);
    }
663
    /**
     * Handle decode error from message router.
     * Surfaces the failure as a recoverable protocol error event.
     */
    handleDecodeError(error, peerId, dataLength) {
        this.emitError(error, {
            source: ErrorSource.Protocol,
            recoverable: true,
            details: { peerId, dataLength },
        });
    }
    /**
     * Handle incoming message from transport.
     *
     * @param peerId - The sending peer's ID
     * @param data - Raw encoded message payload
     */
    handleMessage(peerId, data) {
        // Record peer response for keepalive tracking (optional transport hook)
        const transportWithMetrics = this.transport;
        transportWithMetrics.recordPeerResponse?.(peerId);
        // Route through message router
        this.messageRouter.route(peerId, data);
    }
683
    /**
     * Handle peer connection.
     * Marks the corresponding player as connected if it is already known.
     */
    handlePeerConnect(peerId) {
        this.debug.log("Peer connected", { peerId });
        // Update player state if known
        const playerId = asPlayerId(peerId);
        this.playerManager.markConnected(playerId);
    }
    /**
     * Handle peer disconnection.
     *
     * In host-authority mode, guests only forward a disconnect report to the
     * host; in all other configurations the disconnect is applied locally.
     */
    handlePeerDisconnect(peerId) {
        this.debug.log("Peer disconnected", { peerId });
        // Clean up RTT tracking data
        this.peerRttData.delete(peerId);
        const playerId = asPlayerId(peerId);
        // In host-authority mode, guests report to host instead of handling locally
        if (this.desyncManager.isHostAuthority && !this._isHost) {
            this.sendToHost(createDisconnectReport(playerId));
            return;
        }
        // Host or star topology or peer mode: handle directly
        this.markPlayerDisconnected(playerId);
    }
708
    /**
     * Mark a player as disconnected and handle cleanup.
     *
     * While Playing this also records the leave tick, removes input-
     * contributing players from the engine, emits "playerLeft", and — when
     * acting as host in host-authority mode — broadcasts PlayerLeft to all.
     * Unknown player IDs are ignored.
     */
    markPlayerDisconnected(playerId) {
        const player = this.playerManager.getPlayer(playerId);
        if (!player)
            return;
        player.connectionState = PlayerConnectionState.Disconnected;
        if (this._state === SessionState.Playing) {
            player.leaveTick = this.engine.currentTick;
            // Only remove players from engine (spectators were never added)
            if (player.role === PlayerRole.Player) {
                this.engine.removePlayer(playerId, player.leaveTick);
            }
            // Clear join event tracking so if player rejoins later, we emit again
            this.emittedJoinEvents.delete(playerId);
            this.emit("playerLeft", player);
            // If host in host-authority mode, broadcast PlayerLeft to all
            if (this._isHost && this.desyncManager.isHostAuthority) {
                this.broadcast(createPlayerLeft(playerId, player.leaveTick), true);
            }
        }
    }
731
    /**
     * Handle disconnect report from a guest (host only, mesh+host-authority mode).
     * Silently ignored on non-hosts.
     */
    handleDisconnectReport(message) {
        if (!this._isHost)
            return;
        this.debug.log("Received disconnect report", {
            disconnectedPeerId: message.disconnectedPeerId,
        });
        this.markPlayerDisconnected(message.disconnectedPeerId);
    }
    /**
     * Handle lag report from a guest (host only).
     * Silently ignored on non-hosts.
     */
    handleLagReport(message) {
        if (!this._isHost)
            return;
        // Emit event for game layer to handle (pause decision, kick UI, etc.)
        this.emit("lagReport", message.laggyPlayerId, message.ticksBehind);
    }
751
    /**
     * Check for lagging players and report to host (guests only, when lag threshold is set).
     *
     * For each connected remote player, compares the engine's confirmed tick
     * for that player against the current tick; when the LagMonitor's
     * threshold/cooldown logic allows, a LagReport is sent to the host.
     */
    checkAndReportLag() {
        // Only guests report lag in host-authority mode
        if (this._isHost)
            return;
        if (!this.lagMonitor.isEnabled)
            return;
        const currentTick = this.engine.currentTick;
        // Check each remote player's confirmed tick
        for (const player of this.playerManager) {
            if (player.id === this.localPlayerId)
                continue;
            if (player.role !== PlayerRole.Player)
                continue;
            if (player.connectionState !== PlayerConnectionState.Connected)
                continue;
            // undefined means the engine has no confirmed tick for this player yet
            const confirmedTick = this.engine.getConfirmedTickForPlayer(player.id);
            if (confirmedTick === undefined)
                continue;
            const ticksBehind = currentTick - confirmedTick;
            // checkPlayer returns a report object only when threshold/cooldown permit
            const lagReport = this.lagMonitor.checkPlayer(player.id, ticksBehind, currentTick);
            if (lagReport) {
                this.sendToHost(createLagReport(lagReport.laggyPlayerId, lagReport.ticksBehind));
                this.debug.log("Sent lag report", {
                    laggyPlayerId: lagReport.laggyPlayerId,
                    ticksBehind: lagReport.ticksBehind,
                });
            }
        }
    }
783
/**
 * Handle resume countdown message.
 *
 * Pure pass-through: forwards the remaining seconds to the game layer so
 * it can render a countdown before play resumes.
 */
handleResumeCountdown(message) {
    this.emit("resumeCountdown", message.secondsRemaining);
}
789
+ /**
790
+ * Handle drop player message.
791
+ */
792
+ handleDropPlayer(message) {
793
+ this.markPlayerDisconnected(message.playerId);
794
+ this.emit("playerDropped", message.playerId, message.metadata);
795
+ }
796
+ /**
797
+ * Handle input message from remote player.
798
+ */
799
+ handleInputMessage(message) {
800
+ for (const { tick, input } of message.inputs) {
801
+ this.engine.receiveRemoteInput(message.playerId, tick, input);
802
+ }
803
+ // Log once per message, not per input (reduces noise)
804
+ if (message.inputs.length > 0) {
805
+ this.debug.trace("Inputs received", {
806
+ playerId: message.playerId,
807
+ count: message.inputs.length,
808
+ ticks: message.inputs.map((i) => i.tick),
809
+ });
810
+ }
811
+ // Check if we should relay inputs based on topology
812
+ if (this.topologyStrategy.shouldRelayInput(message.playerId, this._isHost)) {
813
+ const targets = this.topologyStrategy.getRelayTargets(message.playerId, this.transport.connectedPeers);
814
+ if (targets.length > 0) {
815
+ const encoded = encodeMessage(message);
816
+ for (const peerId of targets) {
817
+ this.transport.send(peerId, encoded, false);
818
+ }
819
+ }
820
+ }
821
+ }
822
+ /**
823
+ * Handle hash message for desync detection.
824
+ */
825
+ handleHashMessage(message) {
826
+ // Host-authority mode: host collects and compares immediately
827
+ if (this.desyncManager.isHostAuthority) {
828
+ if (this._isHost) {
829
+ this.recordHashAndCheckDesync(message.tick, message.playerId, message.hash);
830
+ }
831
+ // Guests don't compare hashes in host-authority mode
832
+ return;
833
+ }
834
+ // Peer mode: defer comparison until after rollback in tick()
835
+ // Store the hash for later comparison
836
+ this.pendingHashMessages.push({
837
+ tick: message.tick,
838
+ playerId: message.playerId,
839
+ hash: message.hash,
840
+ });
841
+ }
842
/**
 * Process pending hash comparisons after rollback has corrected state.
 * Called from tick() after engine.tick() completes.
 *
 * A hash is only compared once its tick is confirmed (all remote inputs
 * received): an unconfirmed tick could still be rolled back and
 * resimulated, changing our local hash and producing a false desync.
 * Unconfirmed hashes are carried over to the next call; hashes older than
 * 2x hashInterval are silently dropped to bound memory.
 */
processPendingHashComparisons() {
    if (this.pendingHashMessages.length === 0) {
        return;
    }
    const currentTick = this.engine.currentTick;
    const confirmedTick = this.engine.confirmedTick;
    const remaining = [];
    // Prune threshold: discard hashes older than 2x hashInterval
    // These are too old to be useful and prevent memory growth
    const pruneThreshold = asTick(Math.max(0, currentTick - this.config.hashInterval * 2));
    for (const pending of this.pendingHashMessages) {
        // Prune old hashes that are no longer relevant
        if (pending.tick < pruneThreshold) {
            continue;
        }
        // Only compare if the hash tick is confirmed (all inputs received)
        // This ensures we won't roll back and change our hash for this tick
        if (pending.tick > confirmedTick) {
            // Not confirmed yet, keep for later
            remaining.push(pending);
            continue;
        }
        // Compare hashes (our state should now be correct after rollback)
        const localHash = this.engine.getHash(pending.tick);
        const desyncResult = this.desyncManager.checkPeerDesync(pending.tick, localHash, pending.hash, pending.playerId);
        if (desyncResult) {
            this.debug.warn("Desync detected", {
                tick: pending.tick,
                localHash: desyncResult.localHash,
                remoteHash: desyncResult.remoteHash,
                remotePlayer: pending.playerId,
            });
            this.emit("desync", pending.tick, desyncResult.localHash, desyncResult.remoteHash);
            // Request sync if we're not the host (the host serves sync requests,
            // see handleSyncRequest)
            if (!this._isHost) {
                this.requestSync();
            }
        }
    }
    this.pendingHashMessages = remaining;
}
887
/**
 * Handle sync message (state synchronization).
 *
 * Resets the engine to the sender's authoritative state, drops queued
 * hash comparisons (they were computed against the pre-sync state), and
 * reconciles the local player list with the authoritative player
 * timeline. If we were still Connecting/Lobby, receiving state doubles
 * as the game-start signal.
 */
handleSyncMessage(message) {
    this.engine.setState(message.tick, message.state, message.playerTimeline);
    // Clear pending hash comparisons since our state has been reset
    this.pendingHashMessages = [];
    // Update player list from timeline
    for (const entry of message.playerTimeline) {
        // Determine connection state from leaveTick
        const connectionState = entry.leaveTick !== null
            ? PlayerConnectionState.Disconnected
            : PlayerConnectionState.Connected;
        if (!this.playerManager.hasPlayer(entry.playerId)) {
            // Add new player
            this.playerManager.addPlayer({
                id: entry.playerId,
                connectionState,
                joinTick: entry.joinTick,
                leaveTick: entry.leaveTick,
                isHost: false,
                role: PlayerRole.Player, // Default to player; role info may come from elsewhere
            });
        }
        else {
            // Update existing player's state from authoritative timeline
            // This handles the case where JoinAccept added the player before StateSync
            const existingPlayer = this.playerManager.getPlayer(entry.playerId);
            if (existingPlayer) {
                existingPlayer.connectionState = connectionState;
                existingPlayer.joinTick = entry.joinTick;
                existingPlayer.leaveTick = entry.leaveTick;
            }
        }
    }
    // A StateSync that arrives before (or instead of) JoinAccept still
    // transitions us into active play
    if (this._state === SessionState.Connecting ||
        this._state === SessionState.Lobby) {
        this.setState(SessionState.Playing);
        this.emit("gameStart");
    }
}
928
+ /**
929
+ * Handle sync request (host only).
930
+ *
931
+ * IMPORTANT: When a sync is requested, we broadcast the authoritative state
932
+ * to ALL players, not just the requester. This ensures all players are
933
+ * synchronized to the same tick, preventing tick divergence issues that
934
+ * can occur with latency:
935
+ *
936
+ * Without broadcast-to-all:
937
+ * 1. Player-2 desyncs, requests sync from host at tick X
938
+ * 2. With latency, host responds with state at tick X+3
939
+ * 3. Player-2 resets to tick X+3, but player-1/3 are at tick X+6
940
+ * 4. Player-1/3's input redundancy doesn't cover tick X+3
941
+ * 5. Player-2 can't advance confirmedTick -> max speculation -> deadlock
942
+ *
943
+ * With broadcast-to-all:
944
+ * - All players reset to the same tick simultaneously
945
+ * - No tick divergence, no deadlock
946
+ */
947
+ handleSyncRequest(message) {
948
+ if (!this._isHost)
949
+ return;
950
+ const state = this.engine.getState();
951
+ const syncMsg = createSync(state.tick, state.state, this.engine.getCurrentHash(), state.playerTimeline);
952
+ // Broadcast sync to ALL players to prevent tick divergence
953
+ // The requesting player needs the state, but other players also need
954
+ // to synchronize to avoid getting stuck at max speculation
955
+ this.broadcast(syncMsg, true);
956
+ // Reset host's buffers and tick counters to match the synced state
957
+ // (game state is already correct, so use resetForSync instead of setState
958
+ // to avoid unnecessary serialize/deserialize round-trip)
959
+ this.engine.resetForSync(state.tick, state.playerTimeline);
960
+ this.pendingHashMessages = [];
961
+ }
962
/**
 * Handle join request (host only).
 *
 * Order matters here:
 *  1. rate-limit / capacity checks (reject early),
 *  2. JoinAccept to the requester,
 *  3. register the player locally (engine only for real players),
 *  4. emit "playerJoined" BEFORE StateSync so the game layer can add the
 *     player to its state before that state is serialized,
 *  5. StateSync to the joiner (mid-game joins need the full state),
 *  6. PlayerJoined broadcast to everyone else.
 *
 * @param peerId - Transport peer the request arrived from.
 * @param message - JoinRequest carrying the player's id and optional role.
 */
handleJoinRequest(peerId, message) {
    if (!this._isHost || !this._roomId)
        return;
    const playerId = message.playerId;
    // Check rate limiting
    if (this.joinRateLimiter.checkAndRecord(peerId)) {
        this.sendToPeer(peerId, createJoinReject(playerId, "Too many join requests, please wait"), true);
        return;
    }
    // Check if room is full (only count connected players, not disconnected ones)
    if (this.playerManager.getConnectedPlayers().length >= this.config.maxPlayers) {
        this.sendToPeer(peerId, createJoinReject(playerId, "Room is full"), true);
        return;
    }
    // Accept the join
    this.sendToPeer(peerId, createJoinAccept(playerId, this._roomId, { tickRate: this.config.tickRate, maxPlayers: this.config.maxPlayers }, this.playerManager.getPlayerIds()), true);
    // Add player; joinTick is only meaningful once the game is running
    const playerRole = message.role ?? PlayerRole.Player;
    const playerInfo = {
        id: playerId,
        connectionState: PlayerConnectionState.Connected,
        joinTick: this._state === SessionState.Playing ? this.engine.currentTick : null,
        leaveTick: null,
        isHost: false,
        role: playerRole,
    };
    this.playerManager.addPlayer(playerInfo);
    // Only add players to the engine (spectators don't contribute inputs)
    if (this._state === SessionState.Playing &&
        playerInfo.joinTick !== null &&
        playerRole === PlayerRole.Player) {
        this.engine.addPlayer(playerId, playerInfo.joinTick);
    }
    // Emit playerJoined BEFORE sending StateSync so the game layer can
    // add the new player to its state before serialization.
    // Track that we've emitted to prevent duplicate events during resimulation.
    this.emittedJoinEvents.add(playerId);
    this.emit("playerJoined", playerInfo);
    // Send current game state to the joining player if game is in progress
    // This is critical for mid-game joins - the new player needs the full state
    if (this._state === SessionState.Playing) {
        const state = this.engine.getState();
        this.sendToPeer(peerId, createStateSync(state.tick, state.state, this.engine.getCurrentHash(), state.playerTimeline), true);
    }
    // Notify other players about the new join
    if (this._state === SessionState.Playing && playerInfo.joinTick !== null) {
        this.broadcast(createPlayerJoined(playerId, playerRole, playerInfo.joinTick), true);
    }
}
1014
+ /**
1015
+ * Handle join accept.
1016
+ */
1017
+ handleJoinAccept(message) {
1018
+ // Only transition to Lobby if we're still in Connecting state.
1019
+ // If we're already in Playing (from receiving StateSync first due to message reordering),
1020
+ // we should not go backwards to Lobby.
1021
+ if (this._state === SessionState.Connecting) {
1022
+ this.setState(SessionState.Lobby);
1023
+ }
1024
+ // Add existing players
1025
+ for (const playerId of message.players) {
1026
+ if (!this.playerManager.hasPlayer(playerId)) {
1027
+ this.playerManager.addPlayer({
1028
+ id: playerId,
1029
+ connectionState: PlayerConnectionState.Connected,
1030
+ joinTick: null,
1031
+ leaveTick: null,
1032
+ isHost: playerId === message.players[0], // First player is host
1033
+ role: PlayerRole.Player, // Default; actual role will come from PlayerJoined
1034
+ });
1035
+ }
1036
+ }
1037
+ }
1038
+ /**
1039
+ * Handle join reject.
1040
+ */
1041
+ handleJoinReject(message) {
1042
+ this.setState(SessionState.Disconnected);
1043
+ this.emitError(new Error(`Join rejected: ${message.reason}`), {
1044
+ source: ErrorSource.Session,
1045
+ recoverable: false,
1046
+ details: { reason: message.reason, playerId: message.playerId },
1047
+ });
1048
+ }
1049
+ /**
1050
+ * Handle player joined notification.
1051
+ */
1052
+ handlePlayerJoined(message) {
1053
+ const playerInfo = {
1054
+ id: message.playerId,
1055
+ connectionState: PlayerConnectionState.Connected,
1056
+ joinTick: message.joinTick,
1057
+ leaveTick: null,
1058
+ isHost: false,
1059
+ role: message.role,
1060
+ };
1061
+ this.playerManager.addPlayer(playerInfo);
1062
+ // Only add players to the engine (spectators don't contribute inputs)
1063
+ if (message.role === PlayerRole.Player) {
1064
+ this.engine.addPlayer(message.playerId, message.joinTick);
1065
+ }
1066
+ // Track that we've emitted to prevent duplicate events during resimulation
1067
+ this.emittedJoinEvents.add(message.playerId);
1068
+ this.emit("playerJoined", playerInfo);
1069
+ }
1070
+ /**
1071
+ * Handle player left notification.
1072
+ */
1073
+ handlePlayerLeft(message) {
1074
+ const player = this.playerManager.getPlayer(message.playerId);
1075
+ if (player) {
1076
+ player.leaveTick = message.leaveTick;
1077
+ player.connectionState = PlayerConnectionState.Disconnected;
1078
+ this.engine.removePlayer(message.playerId, message.leaveTick);
1079
+ // Clear join event tracking so if player rejoins later, we emit again
1080
+ this.emittedJoinEvents.delete(message.playerId);
1081
+ this.emit("playerLeft", player);
1082
+ // In Star topology, host must relay PlayerLeft to other peers
1083
+ // (similar to how PlayerJoined is broadcast)
1084
+ if (this._isHost && this.config.topology === Topology.Star) {
1085
+ this.broadcast(createPlayerLeft(message.playerId, message.leaveTick), true);
1086
+ }
1087
+ }
1088
+ }
1089
/**
 * Handle pause message.
 *
 * The message carries no payload; its arrival alone moves the session
 * into the Paused state.
 */
handlePause(_message) {
    this.setState(SessionState.Paused);
}
1095
/**
 * Handle resume message.
 *
 * The message carries no payload; its arrival alone moves the session
 * back into the Playing state.
 */
handleResume(_message) {
    this.setState(SessionState.Playing);
}
1101
+ /**
1102
+ * Handle ping message - respond with pong.
1103
+ */
1104
+ handlePing(peerId, message) {
1105
+ // Respond with pong containing the original timestamp
1106
+ this.transport.send(peerId, encodeMessage(createPong(message.timestamp)), false);
1107
+ }
1108
+ /**
1109
+ * Handle pong message - calculate RTT.
1110
+ */
1111
+ handlePong(peerId, message) {
1112
+ const data = this.peerRttData.get(peerId);
1113
+ if (!data)
1114
+ return;
1115
+ const sentAt = data.pendingPings.get(message.timestamp);
1116
+ if (sentAt === undefined)
1117
+ return;
1118
+ // Calculate RTT
1119
+ const rtt = Date.now() - sentAt;
1120
+ data.pendingPings.delete(message.timestamp);
1121
+ // Update running average
1122
+ data.samples.push(rtt);
1123
+ if (data.samples.length > Session.RTT_SAMPLE_COUNT) {
1124
+ data.samples.shift();
1125
+ }
1126
+ data.rtt = data.samples.reduce((a, b) => a + b, 0) / data.samples.length;
1127
+ }
1128
+ /**
1129
+ * Send a ping to a peer for RTT measurement.
1130
+ *
1131
+ * @param peerId - The peer to ping
1132
+ */
1133
+ sendPing(peerId) {
1134
+ const timestamp = Date.now();
1135
+ this.transport.send(peerId, encodeMessage(createPing(timestamp)), false);
1136
+ // Track the ping locally for RTT calculation
1137
+ let data = this.peerRttData.get(peerId);
1138
+ if (!data) {
1139
+ data = { pendingPings: new Map(), rtt: 0, samples: [] };
1140
+ this.peerRttData.set(peerId, data);
1141
+ }
1142
+ data.pendingPings.set(timestamp, timestamp);
1143
+ }
1144
+ /**
1145
+ * Get the measured RTT to a peer in milliseconds.
1146
+ * Returns 0 if no RTT data is available yet.
1147
+ *
1148
+ * @param peerId - The peer to get RTT for
1149
+ */
1150
+ getRtt(peerId) {
1151
+ return this.peerRttData.get(peerId)?.rtt ?? 0;
1152
+ }
1153
+ /**
1154
+ * Broadcast input to all peers with redundancy.
1155
+ */
1156
+ broadcastInput(tick, input) {
1157
+ const inputs = [];
1158
+ // Add current tick
1159
+ inputs.push({ tick, input });
1160
+ // Add previous ticks for redundancy
1161
+ for (let i = 1; i < this.inputRedundancy; i++) {
1162
+ const prevTick = asTick(tick - i);
1163
+ if (prevTick >= 0) {
1164
+ const prevInput = this.engine.getLocalInput(prevTick);
1165
+ if (prevInput) {
1166
+ inputs.push({ tick: prevTick, input: prevInput });
1167
+ }
1168
+ }
1169
+ }
1170
+ this.broadcast(createInput(this.localPlayerId, inputs), false);
1171
+ }
1172
/**
 * Maybe broadcast hash for desync detection.
 *
 * IMPORTANT: We only broadcast hashes for CONFIRMED ticks, not just completed ticks.
 * A completed tick may have used predicted inputs that turn out to be wrong.
 * Broadcasting a hash based on predictions would cause false desync detections
 * when compared against a peer who has the actual inputs.
 *
 * We find the most recent hash-interval tick that is confirmed and broadcast that.
 */
maybeBroadcastHash() {
    const confirmedTick = this.engine.confirmedTick;
    // Find the most recent hash-interval tick that is confirmed
    // For example, if confirmedTick is 37 and hashInterval is 30, we want tick 30
    const hashTick = asTick(Math.floor(confirmedTick / this.config.hashInterval) *
        this.config.hashInterval);
    // Only broadcast if:
    // - hashTick is positive (we have something to hash)
    // - We haven't already broadcast this tick
    if (hashTick > 0 && hashTick !== this.lastHashBroadcastTick) {
        this.lastHashBroadcastTick = hashTick;
        // Get the hash from the snapshot at hashTick
        const hash = this.engine.getHash(hashTick);
        if (hash === undefined) {
            // Snapshot not available (pruned) - skip this hash.
            // NOTE(review): lastHashBroadcastTick was already recorded above, so
            // this tick is skipped permanently. Presumably intentional — a pruned
            // snapshot will not reappear, so retrying would be futile.
            return;
        }
        const hashMsg = createHash(this.localPlayerId, hashTick, hash);
        // Host-authority in mesh: guests send to host only, host collects
        if (this.desyncManager.isHostAuthority) {
            if (this._isHost) {
                // Host records own hash and checks for desync
                this.recordHashAndCheckDesync(hashTick, this.localPlayerId, hash);
            }
            else {
                // Guest sends hash to host only
                this.sendToHost(hashMsg);
            }
        }
        else {
            // Peer mode or star topology: broadcast to all
            this.broadcast(hashMsg, true);
        }
    }
}
1217
/**
 * Record a hash from a player and check for desync (host-authority mode).
 *
 * Hashes are accumulated per tick until one has arrived from every active
 * player; only then are they compared against the host's own hash. On any
 * mismatch the host broadcasts its authoritative state to ALL players
 * (not just the desynced one) to keep every peer on the same tick — see
 * handleSyncRequest for why partial syncs deadlock.
 */
recordHashAndCheckDesync(tick, playerId, hash) {
    if (!this._isHost)
        return;
    // Record the hash
    this.desyncManager.recordHash(tick, playerId, hash);
    // Check if we have hashes from all active players
    const activePlayers = this.playerManager.getActivePlayers();
    if (!this.desyncManager.hasAllHashes(tick, activePlayers.length)) {
        return; // Still waiting for more hashes
    }
    // Check for desyncs
    const desyncs = this.desyncManager.checkDesyncs(tick, this.localPlayerId);
    if (desyncs.length > 0) {
        // Log all detected desyncs
        for (const desync of desyncs) {
            this.debug.warn("Desync detected by host", {
                tick: desync.tick,
                playerId: desync.desyncedPlayerId,
                hostHash: desync.referenceHash,
                playerHash: desync.playerHash,
            });
            this.emit("desync", desync.tick, desync.referenceHash, desync.playerHash);
        }
        // Broadcast authoritative state to ALL players to prevent tick divergence
        // (see handleSyncRequest for detailed explanation of why this is necessary)
        const state = this.engine.getState();
        const syncMsg = createSync(state.tick, state.state, this.engine.getCurrentHash(), state.playerTimeline);
        this.broadcast(syncMsg, true);
        // Reset host's buffers and tick counters to match the synced state
        // (use resetForSync instead of setState to avoid unnecessary deserialize)
        this.engine.resetForSync(state.tick, state.playerTimeline);
        this.pendingHashMessages = [];
    }
    // Cleanup old tick entries
    this.desyncManager.pruneOldHashes(tick);
}
1256
+ /**
1257
+ * Send a message to all connected peers.
1258
+ */
1259
+ broadcast(message, reliable) {
1260
+ const encoded = encodeMessage(message);
1261
+ this.transport.broadcast(encoded, reliable);
1262
+ }
1263
+ /**
1264
+ * Send a message to a specific peer.
1265
+ */
1266
+ sendToPeer(peerId, message, reliable) {
1267
+ const encoded = encodeMessage(message);
1268
+ this.transport.send(peerId, encoded, reliable);
1269
+ }
1270
+ /**
1271
+ * Send a message to the host.
1272
+ */
1273
+ sendToHost(message) {
1274
+ // Find the host player
1275
+ const host = this.playerManager.findHost();
1276
+ if (host && host.id !== this.localPlayerId) {
1277
+ this.sendToPeer(host.id, message, isReliableMessage(message));
1278
+ return;
1279
+ }
1280
+ // If we can't find the host by player info, send to first connected peer
1281
+ const peers = this.transport.connectedPeers;
1282
+ if (peers.size > 0) {
1283
+ const hostPeerId = peers.values().next().value;
1284
+ this.sendToPeer(hostPeerId, message, isReliableMessage(message));
1285
+ }
1286
+ }
1287
+ }
1288
/**
 * Convenience function to create a session.
 *
 * @param options - Session construction options, forwarded verbatim to the
 *   Session constructor.
 * @returns A new Session instance.
 */
export function createSession(options) {
    return new Session(options);
}
1294
+ //# sourceMappingURL=session.js.map