hedgequantx 2.5.44 → 2.6.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,6 +1,11 @@
 /**
  * Rithmic Connection Manager
  * Handles WebSocket connection and heartbeat
+ *
+ * OPTIMIZED FOR ULTRA-LOW LATENCY:
+ * - TCP_NODELAY enabled (disable Nagle's algorithm)
+ * - Compression disabled
+ * - Skip UTF8 validation for binary
  */
 
 const WebSocket = require('ws');
@@ -15,6 +20,7 @@ class RithmicConnection extends EventEmitter {
     this.config = null;
     this.state = 'DISCONNECTED';
     this.heartbeatTimer = null;
+    this._socket = null; // Direct socket reference for fast access
   }
 
   get isConnected() {
@@ -27,6 +33,7 @@ class RithmicConnection extends EventEmitter {
 
   /**
    * Connect to Rithmic server
+   * OPTIMIZED: TCP_NODELAY, no compression, skip UTF8 validation
    */
   async connect(config) {
     this.config = config;
@@ -35,10 +42,24 @@ class RithmicConnection extends EventEmitter {
     await proto.load();
 
     return new Promise((resolve, reject) => {
-      this.ws = new WebSocket(config.uri, { rejectUnauthorized: false });
+      // OPTIMIZATION: Disable compression and UTF8 validation for speed
+      this.ws = new WebSocket(config.uri, {
+        rejectUnauthorized: false,
+        perMessageDeflate: false, // CRITICAL: Disable compression
+        skipUTF8Validation: true, // Skip validation for binary protobuf
+        maxPayload: 64 * 1024, // 64KB max (orders are small)
+      });
 
       this.ws.on('open', () => {
         this.state = 'CONNECTED';
+
+        // CRITICAL: Disable Nagle's algorithm for low latency
+        // This sends packets immediately instead of buffering
+        if (this.ws._socket) {
+          this.ws._socket.setNoDelay(true);
+          this._socket = this.ws._socket; // Cache for fast access
+        }
+
         this.emit('connected');
         resolve(true);
       });
@@ -92,6 +113,119 @@ class RithmicConnection extends EventEmitter {
     this.ws.send(buffer);
   }
 
+  /**
+   * Fast send - bypasses some ws overhead for hot path
+   * Use for time-critical order messages
+   * @param {Buffer} buffer - Pre-encoded protobuf buffer
+   */
+  fastSend(buffer) {
+    if (this.ws && this.ws.readyState === WebSocket.OPEN) {
+      this.ws.send(buffer);
+    }
+  }
+
+  /**
+   * Ultra-fast send - direct socket write with WebSocket framing
+   * MAXIMUM PERFORMANCE: Bypasses ws library overhead entirely
+   * Only use for pre-encoded binary protobuf messages
+   *
+   * @param {Buffer} payload - Pre-encoded protobuf buffer
+   * @returns {boolean} true if sent successfully
+   */
+  ultraSend(payload) {
+    // Require cached socket reference
+    if (!this._socket || !this.ws || this.ws.readyState !== WebSocket.OPEN) {
+      return false;
+    }
+
+    try {
+      // Build WebSocket frame manually for binary message
+      // This avoids all ws library overhead (callbacks, validation, etc.)
+      const frame = this._buildBinaryFrame(payload);
+
+      // Direct socket write - bypasses ws entirely
+      this._socket.write(frame);
+      return true;
+    } catch (e) {
+      // Fallback to standard send on error
+      this.ws.send(payload);
+      return true;
+    }
+  }
+
+  /**
+   * Build WebSocket binary frame manually
+   * Format: [opcode] [length] [payload]
+   * @private
+   * @param {Buffer} payload
+   * @returns {Buffer}
+   */
+  _buildBinaryFrame(payload) {
+    const len = payload.length;
+    let frame;
+
+    if (len < 126) {
+      // 2-byte header: FIN + opcode (0x82 = final binary), length
+      frame = Buffer.allocUnsafe(2 + len);
+      frame[0] = 0x82; // FIN=1, opcode=2 (binary)
+      frame[1] = len; // 7-bit length; the MASK bit is set later in _applyMask (client frames must be masked)
+      payload.copy(frame, 2);
+    } else if (len < 65536) {
+      // 4-byte header for medium messages
+      frame = Buffer.allocUnsafe(4 + len);
+      frame[0] = 0x82;
+      frame[1] = 126;
+      frame.writeUInt16BE(len, 2);
+      payload.copy(frame, 4);
+    } else {
+      // 10-byte header for large messages (unlikely for orders)
+      frame = Buffer.allocUnsafe(10 + len);
+      frame[0] = 0x82;
+      frame[1] = 127;
+      frame.writeBigUInt64BE(BigInt(len), 2);
+      payload.copy(frame, 10);
+    }
+
+    // Client frames MUST be masked per RFC 6455
+    // Apply masking key
+    return this._applyMask(frame, len < 126 ? 2 : (len < 65536 ? 4 : 10));
+  }
+
+  /**
+   * Apply WebSocket client masking
+   * @private
+   * @param {Buffer} frame - Frame with unmasked payload
+   * @param {number} headerLen - Length of header before payload
+   * @returns {Buffer} - New frame with mask applied
+   */
+  _applyMask(frame, headerLen) {
+    const payloadLen = frame.length - headerLen;
+
+    // Generate 4-byte mask key
+    const mask = Buffer.allocUnsafe(4);
+    mask[0] = (Math.random() * 256) | 0;
+    mask[1] = (Math.random() * 256) | 0;
+    mask[2] = (Math.random() * 256) | 0;
+    mask[3] = (Math.random() * 256) | 0;
+
+    // Create new frame with mask bit set and mask key inserted
+    const maskedFrame = Buffer.allocUnsafe(headerLen + 4 + payloadLen);
+
+    // Copy header, set mask bit
+    frame.copy(maskedFrame, 0, 0, headerLen);
+    maskedFrame[1] |= 0x80; // Set MASK bit
+
+    // Insert mask key after length
+    mask.copy(maskedFrame, headerLen);
+
+    // Copy and mask payload
+    for (let i = 0; i < payloadLen; i++) {
+      maskedFrame[headerLen + 4 + i] = frame[headerLen + i] ^ mask[i & 3];
+    }
+
+    return maskedFrame;
+  }
+
   /**
    * Login to system
    */
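
For readers tracing the new ultraSend() path, the following standalone sketch (not part of the package) shows the bytes this manual framing produces for a tiny payload, using a fixed mask key so the result is reproducible; the real code above draws a random key per frame.

// Illustrative worked example only. Mirrors _buildBinaryFrame + _applyMask
// for a 3-byte payload; the payload bytes and the fixed mask key are made up.
const payload = Buffer.from([0x0a, 0x0b, 0x0c]); // stand-in for an encoded protobuf
const mask = Buffer.from([0x12, 0x34, 0x56, 0x78]); // fixed key for readability

const frame = Buffer.alloc(2 + 4 + payload.length);
frame[0] = 0x82;                  // FIN=1, opcode=2 (binary)
frame[1] = 0x80 | payload.length; // MASK bit | 7-bit length (3)
mask.copy(frame, 2);              // 4-byte masking key
for (let i = 0; i < payload.length; i++) {
  frame[6 + i] = payload[i] ^ mask[i & 3]; // masked payload bytes
}
// frame = <82 83 12 34 56 78 18 3f 5a>, which is what _socket.write() sends;
// the server reverses the XOR with the same key to recover 0a 0b 0c.
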
@@ -198,6 +332,72 @@ class RithmicConnection extends EventEmitter {
       this.heartbeatTimer = null;
     }
   }
+
+  /**
+   * Warmup connection for minimum latency on first order
+   * Call after login but before trading starts
+   *
+   * OPTIMIZATIONS:
+   * - Pre-load protobuf types
+   * - Keep TCP connection "hot" with small pings
+   * - Configure socket for trading
+   */
+  async warmup() {
+    if (!this._socket) return false;
+
+    try {
+      // Ensure TCP_NODELAY is set
+      this._socket.setNoDelay(true);
+
+      // Set socket keep-alive to prevent idle disconnection
+      // Aggressive keep-alive: probe every 10 seconds
+      this._socket.setKeepAlive(true, 10000);
+
+      // Pre-allocate socket buffer space
+      if (this._socket.setRecvBufferSize) {
+        this._socket.setRecvBufferSize(65536); // 64KB receive buffer
+      }
+      if (this._socket.setSendBufferSize) {
+        this._socket.setSendBufferSize(65536); // 64KB send buffer
+      }
+
+      // Send a heartbeat to "warm up" the connection
+      this.send('RequestHeartbeat', { templateId: REQ.HEARTBEAT });
+
+      this.emit('warmedUp');
+      return true;
+    } catch (e) {
+      return false;
+    }
+  }
+
+  /**
+   * Get connection diagnostics
+   * @returns {Object}
+   */
+  getDiagnostics() {
+    const diag = {
+      state: this.state,
+      isConnected: this.isConnected,
+      hasSocket: !!this._socket,
+      socketState: null,
+    };
+
+    if (this._socket) {
+      diag.socketState = {
+        readable: this._socket.readable,
+        writable: this._socket.writable,
+        bytesRead: this._socket.bytesRead,
+        bytesWritten: this._socket.bytesWritten,
+        localAddress: this._socket.localAddress,
+        localPort: this._socket.localPort,
+        remoteAddress: this._socket.remoteAddress,
+        remotePort: this._socket.remotePort,
+      };
+    }
+
+    return diag;
+  }
 }
 
 module.exports = { RithmicConnection };
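
To illustrate how the connection-side additions above fit together, here is a hedged usage sketch. It is not from the package: the require path, URI, and encodeOrder() helper are placeholders, and the login step that the warmup() comment refers to is omitted.

// Illustrative sketch only (not in the package). Assumes the RithmicConnection
// API shown in this diff; path, config values, and encodeOrder() are hypothetical.
const { RithmicConnection } = require('./connection');

async function sendOrderFast(encodeOrder) {
  const conn = new RithmicConnection();

  // connect() now opens the ws with perMessageDeflate disabled,
  // skipUTF8Validation on, and sets TCP_NODELAY once the socket is open.
  await conn.connect({ uri: 'wss://example.invalid/rithmic' });

  // (login would happen here) then prime the socket - keep-alive, buffer
  // hints, one heartbeat round trip - before the first time-critical order.
  await conn.warmup();

  // Hot path: encode the protobuf ahead of time, then write it with the
  // manually framed ultraSend(); fall back to the ws-based path if the
  // cached socket is not available.
  const buffer = encodeOrder(); // hypothetical: returns a pre-encoded Buffer
  if (!conn.ultraSend(buffer)) {
    conn.fastSend(buffer);
  }

  console.log(conn.getDiagnostics());
}
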
@@ -1,14 +1,163 @@
 /**
  * Rithmic Message Handlers
  * Handles ORDER_PLANT and PNL_PLANT messages
+ *
+ * FAST SCALPING: Handles order fill notifications (351) for position tracking
+ *
+ * OPTIMIZED FOR LOW LATENCY:
+ * - Fast path for order notifications (351)
+ * - Minimal object creation in hot path
+ * - Template ID check before proto decode
+ * - Latency tracking for fills
  */
 
 const { proto, decodeAccountPnL, decodeInstrumentPnL } = require('./protobuf');
 const { RES, STREAM } = require('./constants');
+const { performance } = require('perf_hooks');
 
-// Debug mode
+// Debug mode - use no-op function when disabled for zero overhead
 const DEBUG = process.env.HQX_DEBUG === '1';
-const debug = (...args) => DEBUG && console.log('[Rithmic:Handler]', ...args);
+const debug = DEBUG ? (...args) => console.log('[Rithmic:Handler]', ...args) : () => {};
+
+// ==================== HIGH-RESOLUTION TIMING ====================
+// Use process.hrtime.bigint for sub-millisecond precision
+
+/**
+ * Get high-resolution timestamp in nanoseconds
+ * @returns {bigint}
+ */
+const hrNow = () => process.hrtime.bigint();
+
+/**
+ * Convert nanoseconds to milliseconds with precision
+ * @param {bigint} ns
+ * @returns {number}
+ */
+const nsToMs = (ns) => Number(ns) / 1_000_000;
+
+// ==================== LATENCY TRACKING ====================
+// Track order-to-fill latency for performance monitoring
+// OPTIMIZED: Circular buffer (no array.shift), high-resolution timing
+
+const LatencyTracker = {
+  _pending: new Map(), // orderTag -> entryTime (bigint nanoseconds)
+  _samples: null, // Pre-allocated Float64Array circular buffer
+  _maxSamples: 100,
+  _head: 0, // Next write position
+  _count: 0, // Number of valid samples
+  _initialized: false,
+
+  /**
+   * Initialize circular buffer (lazy init)
+   * @private
+   */
+  _init() {
+    if (this._initialized) return;
+    this._samples = new Float64Array(this._maxSamples);
+    this._initialized = true;
+  },
+
+  /**
+   * Record order sent time with high-resolution timestamp
+   * @param {string} orderTag
+   * @param {number} entryTimeMs - Date.now() when order was sent (for compatibility)
+   */
+  recordEntry(orderTag, entryTimeMs) {
+    // Store high-resolution time for precise measurement
+    this._pending.set(orderTag, hrNow());
+  },

+  /**
+   * Record fill received, calculate latency with sub-ms precision
+   * @param {string} orderTag
+   * @returns {number|null} Round-trip latency in ms (with decimal precision), or null if not tracked
+   */
+  recordFill(orderTag) {
+    const entryTime = this._pending.get(orderTag);
+    if (!entryTime) return null;
+
+    this._pending.delete(orderTag);
+    const latencyNs = hrNow() - entryTime;
+    const latencyMs = nsToMs(latencyNs);
+
+    // Store in circular buffer (no shift, O(1))
+    this._init();
+    this._samples[this._head] = latencyMs;
+    this._head = (this._head + 1) % this._maxSamples;
+    if (this._count < this._maxSamples) this._count++;
+
+    return latencyMs;
+  },
+
+  /**
+   * Get average latency
+   * @returns {number|null}
+   */
+  getAverage() {
+    if (this._count === 0) return null;
+    let sum = 0;
+    for (let i = 0; i < this._count; i++) {
+      sum += this._samples[i];
+    }
+    return sum / this._count;
+  },
+
+  /**
+   * Get min/max/avg stats with high precision
+   * @returns {Object}
+   */
+  getStats() {
+    if (this._count === 0) {
+      return { min: null, max: null, avg: null, p50: null, p99: null, samples: 0 };
+    }
+
+    // Get valid samples
+    const valid = [];
+    for (let i = 0; i < this._count; i++) {
+      valid.push(this._samples[i]);
+    }
+    valid.sort((a, b) => a - b);
+
+    const sum = valid.reduce((a, b) => a + b, 0);
+
+    return {
+      min: valid[0],
+      max: valid[valid.length - 1],
+      avg: sum / valid.length,
+      p50: valid[Math.floor(valid.length * 0.5)],
+      p99: valid[Math.floor(valid.length * 0.99)] || valid[valid.length - 1],
+      samples: this._count,
+    };
+  },
+
+  /**
+   * Get last N latency samples
+   * @param {number} n
+   * @returns {number[]}
+   */
+  getRecent(n = 10) {
+    if (this._count === 0) return [];
+    const result = [];
+    const start = this._count < this._maxSamples ? 0 : this._head;
+    for (let i = 0; i < Math.min(n, this._count); i++) {
+      const idx = (start + this._count - 1 - i + this._maxSamples) % this._maxSamples;
+      result.push(this._samples[idx]);
+    }
+    return result;
+  },
+
+  /**
+   * Clear all tracking data
+   */
+  clear() {
+    this._pending.clear();
+    this._head = 0;
+    this._count = 0;
+    if (this._samples) {
+      this._samples.fill(0);
+    }
+  }
+};
 
 /**
  * Create ORDER_PLANT message handler
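
As an aside on the LatencyTracker introduced above: a small reporting helper like the sketch below could surface its statistics. This is illustrative only; the interval, log format, and function name are not part of the package, and it assumes the order-sending path calls recordEntry() with the same orderTag that comes back in the fill notifications.

// Illustrative sketch only (not in the package): periodic reporting of the
// fill-latency stats collected by the LatencyTracker defined above.
function startLatencyReport(intervalMs = 10000) {
  return setInterval(() => {
    const stats = LatencyTracker.getStats();
    if (stats.samples === 0) return;
    console.log(
      `[latency] avg=${stats.avg.toFixed(2)}ms p50=${stats.p50.toFixed(2)}ms ` +
      `p99=${stats.p99.toFixed(2)}ms over ${stats.samples} fills, ` +
      `recent=${LatencyTracker.getRecent(3).map((v) => v.toFixed(1)).join('/')}`
    );
  }, intervalMs);
}
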
@@ -35,11 +184,17 @@ const createOrderHandler = (service) => {
     case RES.SHOW_ORDERS:
       handleShowOrdersResponse(service, data);
       break;
+    case RES.NEW_ORDER:
+      debug('Handling NEW_ORDER response (313)');
+      handleNewOrderResponse(service, data);
+      break;
     case STREAM.EXCHANGE_NOTIFICATION:
-      service.emit('exchangeNotification', data);
+      debug('Handling EXCHANGE_NOTIFICATION (352)');
+      handleExchangeNotification(service, data);
       break;
     case STREAM.ORDER_NOTIFICATION:
-      service.emit('orderNotification', data);
+      debug('Handling ORDER_NOTIFICATION (351)');
+      handleOrderNotification(service, data);
       break;
   }
 };
@@ -211,7 +366,188 @@ const handleInstrumentPnLUpdate = (service, data) => {
   }
 };
 
+/**
+ * Handle new order response (313) - confirms order accepted
+ */
+const handleNewOrderResponse = (service, data) => {
+  try {
+    const res = proto.decode('ResponseNewOrder', data);
+    const orderTag = res.userMsg?.[0] || null;
+    const timestamp = performance.now();
+
+    debug('New order response:', {
+      orderTag,
+      rpCode: res.rpCode,
+      basketId: res.basketId,
+      ssboe: res.ssboe,
+      usecs: res.usecs,
+    });
+
+    // Emit for position manager tracking
+    service.emit('orderAccepted', {
+      orderTag,
+      basketId: res.basketId,
+      rpCode: res.rpCode,
+      timestamp,
+    });
+  } catch (e) {
+    debug('Error decoding new order response:', e.message);
+  }
+};
+
+// ==================== PRE-ALLOCATED OBJECTS ====================
+// Reusable objects for hot path to avoid GC pressure
+
+const FillInfoPool = {
+  // Pre-allocated fill info template
+  _template: {
+    orderTag: null,
+    basketId: null,
+    orderId: null,
+    status: null,
+    symbol: null,
+    exchange: null,
+    accountId: null,
+    fillQuantity: 0,
+    totalFillQuantity: 0,
+    remainingQuantity: 0,
+    avgFillPrice: 0,
+    lastFillPrice: 0,
+    transactionType: 0,
+    orderType: 0,
+    quantity: 0,
+    ssboe: 0,
+    usecs: 0,
+    localTimestamp: 0,
+    roundTripLatencyMs: null,
+  },
+
+  /**
+   * Fill template with notification data
+   * @param {Object} notif - Decoded notification
+   * @param {number} receiveTime - Local receive timestamp
+   * @param {number|null} latency - Round-trip latency
+   * @returns {Object}
+   */
+  fill(notif, receiveTime, latency) {
+    const o = this._template;
+    o.orderTag = notif.userMsg?.[0] || null;
+    o.basketId = notif.basketId;
+    o.orderId = notif.orderId;
+    o.status = notif.status;
+    o.symbol = notif.symbol;
+    o.exchange = notif.exchange;
+    o.accountId = notif.accountId;
+    o.fillQuantity = notif.fillQuantity || 0;
+    o.totalFillQuantity = notif.totalFillQuantity || 0;
+    o.remainingQuantity = notif.remainingQuantity || 0;
+    o.avgFillPrice = parseFloat(notif.avgFillPrice || 0);
+    o.lastFillPrice = parseFloat(notif.fillPrice || 0);
+    o.transactionType = notif.transactionType;
+    o.orderType = notif.orderType;
+    o.quantity = notif.quantity;
+    o.ssboe = notif.ssboe;
+    o.usecs = notif.usecs;
+    o.localTimestamp = receiveTime;
+    o.roundTripLatencyMs = latency;
+    return o;
+  },
+
+  /**
+   * Create a copy for async operations that need to keep the data
+   * @param {Object} fillInfo
+   * @returns {Object}
+   */
+  clone(fillInfo) {
+    return { ...fillInfo };
+  }
+};
+
+/**
+ * Handle order notification (351) - CRITICAL for fill tracking
+ * This is the primary notification for order status changes including FILLS
+ *
+ * ULTRA-OPTIMIZED:
+ * - Pre-allocated fill info object (zero allocation in hot path)
+ * - Fast path for fill detection
+ * - High-resolution latency tracking
+ */
+const handleOrderNotification = (service, data) => {
+  const receiveTime = Date.now();
+
+  try {
+    const notif = proto.decode('RithmicOrderNotification', data);
+    const orderTag = notif.userMsg?.[0] || null;
+
+    // FAST PATH: Check for fill immediately
+    const fillQty = notif.fillQuantity || notif.totalFillQuantity || 0;
+    const isFill = fillQty > 0;
+
+    // Calculate round-trip latency if this is a fill we're tracking
+    let roundTripLatency = null;
+    if (isFill && orderTag) {
+      roundTripLatency = LatencyTracker.recordFill(orderTag);
+    }
+
+    debug('Order notification:', {
+      orderTag,
+      status: notif.status,
+      filledQty: fillQty,
+      avgFillPrice: notif.avgFillPrice,
+      roundTripLatency,
+    });
+
+    // OPTIMIZED: Use pre-allocated object
+    const fillInfo = FillInfoPool.fill(notif, receiveTime, roundTripLatency);
+
+    // Emit raw notification
+    service.emit('orderNotification', fillInfo);
+
+    // Emit fill event if this is a fill
+    if (isFill) {
+      debug('ORDER FILLED:', {
+        orderTag,
+        side: fillInfo.transactionType === 1 ? 'BUY' : 'SELL',
+        qty: fillQty,
+        avgPrice: fillInfo.avgFillPrice,
+        latencyMs: roundTripLatency,
+      });
+
+      // Clone for fill event (async handlers may need to keep the data)
+      service.emit('orderFilled', FillInfoPool.clone(fillInfo));
+    }
+  } catch (e) {
+    debug('Error decoding order notification:', e.message);
+  }
+};
+
+/**
+ * Handle exchange notification (352) - exchange-level order updates
+ */
+const handleExchangeNotification = (service, data) => {
+  try {
+    const notif = proto.decode('ExchangeOrderNotification', data);
+    const timestamp = performance.now();
+
+    debug('Exchange notification:', {
+      orderTag: notif.userMsg?.[0],
+      text: notif.text,
+      reportType: notif.reportType,
+    });
+
+    service.emit('exchangeNotification', {
+      orderTag: notif.userMsg?.[0] || null,
+      text: notif.text,
+      reportType: notif.reportType,
+      timestamp,
+    });
+  } catch (e) {
+    debug('Error decoding exchange notification:', e.message);
+  }
+};
+
 module.exports = {
   createOrderHandler,
-  createPnLHandler
+  createPnLHandler,
+  LatencyTracker,
 };
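
Finally, a sketch of how downstream code might consume the events these handlers emit. The `service` wiring is assumed (it is created outside this diff); only the event names and payload fields come from the code above.

// Illustrative sketch only. `service` stands for the EventEmitter that
// createOrderHandler() is attached to elsewhere in the package.
function wireOrderEvents(service) {
  // 313 - order accepted by the plant
  service.on('orderAccepted', ({ orderTag, basketId, rpCode }) => {
    console.log('accepted', orderTag, basketId, rpCode);
  });

  // 351 with a fill quantity - delivered as a cloned fill-info object,
  // so it is safe to keep across async boundaries
  service.on('orderFilled', (fill) => {
    console.log('filled', fill.orderTag, fill.totalFillQuantity,
      fill.avgFillPrice, fill.roundTripLatencyMs);
  });

  // every 351 - receives the pooled (reused) object; copy any fields you
  // need before the next notification overwrites them
  service.on('orderNotification', (n) => {
    console.log('status', n.orderTag, n.status);
  });

  // 352 - exchange-level updates
  service.on('exchangeNotification', ({ orderTag, text, reportType }) => {
    console.log('exchange', orderTag, reportType, text);
  });
}
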