react-native-nitro-net 0.5.1 → 0.5.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/lib/net.js CHANGED
@@ -81,6 +81,10 @@ function initWithConfig(config) {
81
81
  if (config.debug !== undefined) {
82
82
  setVerbose(config.debug);
83
83
  }
84
+ // Inject dispatcher for async events to avoid thread starvation/deadlocks
85
+ if (Driver.installDispatcher) {
86
+ Driver.installDispatcher();
87
+ }
84
88
  Driver.initWithConfig(config);
85
89
  }
86
90
  export class SocketAddress {
@@ -262,6 +266,7 @@ export class Socket extends Duplex {
262
266
  this.autoSelectFamilyAttemptedAddresses = [];
263
267
  this._autoSelectFamily = false;
264
268
  this._timeout = 0;
269
+ this._nativeWriteCallbacks = [];
265
270
  if (options?.socketDriver) {
266
271
  // Wrapping existing socket (from Server)
267
272
  this._driver = options.socketDriver;
@@ -269,9 +274,8 @@ export class Socket extends Duplex {
269
274
  this._setupEvents();
270
275
  // Enable noDelay by default
271
276
  this._driver.setNoDelay(true);
272
- // Resume the socket since it starts paused on server-accept
273
- this.resume();
274
- // Emit connect for server-side socket? No, it's already connected.
277
+ // For accepted server sockets, defer resume until after the server
278
+ // emits 'connection' so user handlers can attach first.
275
279
  }
276
280
  else {
277
281
  // New client socket
@@ -290,8 +294,8 @@ export class Socket extends Duplex {
290
294
  return this;
291
295
  }
292
296
  const ret = super.on(event, listener);
293
- if (event === 'data' && !this.isPaused() && this.readableFlowing !== true) {
294
- debugLog(`Socket on('data'), flowing: ${this.readableFlowing}`);
297
+ if (event === 'data' && this.readableFlowing !== true) {
298
+ debugLog(`Socket on('data'), flowing: ${this.readableFlowing}, paused: ${this.isPaused()}`);
295
299
  this.resume();
296
300
  }
297
301
  return ret;
@@ -327,8 +331,10 @@ export class Socket extends Duplex {
327
331
  if (data && data.byteLength > 0) {
328
332
  const buffer = Buffer.from(data);
329
333
  this.bytesRead += buffer.length;
330
- if (!this.push(buffer)) {
331
- this.pause();
334
+ this.push(buffer);
335
+ if (this.listenerCount('data') > 0 && this.readableFlowing !== true) {
336
+ debugLog(`Socket onEvent(DATA) restoring flowing mode for attached 'data' listeners`);
337
+ this.resume();
332
338
  }
333
339
  }
334
340
  break;
@@ -355,6 +361,11 @@ export class Socket extends Duplex {
355
361
  this._connected = false;
356
362
  this.connecting = false;
357
363
  this.push(null); // EOF
364
+ if (!this.allowHalfOpen && !this.writableEnded && !this.destroyed) {
365
+ // Match Node's default behavior: half-close the writable side
366
+ // when the peer finishes and allowHalfOpen is false.
367
+ this.end();
368
+ }
358
369
  this.emit('close', this._hadError);
359
370
  break;
360
371
  case NetSocketEvent.DRAIN:
@@ -506,22 +517,39 @@ export class Socket extends Duplex {
506
517
  return this;
507
518
  }
508
519
  end(chunk, encoding, cb) {
509
- debugLog(`Socket (localPort: ${this.localPort}) .end() called`);
510
520
  if (typeof chunk === 'function') {
511
- super.end(chunk);
521
+ cb = chunk;
522
+ chunk = null;
523
+ encoding = null;
512
524
  }
513
- else if (chunk == null) {
514
- super.end(cb);
525
+ else if (typeof encoding === 'function') {
526
+ cb = encoding;
527
+ encoding = null;
515
528
  }
516
- else {
517
- super.end(chunk, encoding, cb);
529
+ debugLog(`Socket (localPort: ${this.localPort}) .end() called`);
530
+ if (chunk != null) {
531
+ this.write(chunk, encoding);
518
532
  }
533
+ super.end(cb);
519
534
  return this;
520
535
  }
521
536
  _write(chunk, encoding, callback) {
522
537
  if (!this._driver) {
523
538
  return callback(new Error('Socket not connected'));
524
539
  }
540
+ if (!this._connected && this.connecting) {
541
+ const onConnect = () => {
542
+ this.removeListener('error', onError);
543
+ this._write(chunk, encoding, callback);
544
+ };
545
+ const onError = (err) => {
546
+ this.removeListener('connect', onConnect);
547
+ callback(err);
548
+ };
549
+ this.once('connect', onConnect);
550
+ this.once('error', onError);
551
+ return;
552
+ }
525
553
  try {
526
554
  const buffer = (chunk instanceof Buffer) ? chunk : Buffer.from(chunk, encoding);
527
555
  this.bytesWritten += buffer.length;
@@ -539,9 +567,24 @@ export class Socket extends Duplex {
539
567
  this._driver.resume();
540
568
  }
541
569
  _final(callback) {
542
- if (this._driver) {
543
- this._driver.shutdown();
570
+ if (!this._driver) {
571
+ return callback(null);
572
+ }
573
+ if (!this._connected && this.connecting) {
574
+ const onConnect = () => {
575
+ this.removeListener('error', onError);
576
+ this._final(callback);
577
+ };
578
+ const onError = () => {
579
+ this.removeListener('connect', onConnect);
580
+ callback(null); // Already destroyed/errored
581
+ };
582
+ this.once('connect', onConnect);
583
+ this.once('error', onError);
584
+ return;
544
585
  }
586
+ debugLog(`Socket (localPort: ${this.localPort}) ._final() called, shutting down driver`);
587
+ this._driver.shutdown();
545
588
  callback(null);
546
589
  }
547
590
  destroy(reason) {
@@ -724,6 +767,9 @@ export class Server extends EventEmitter {
724
767
  this._sockets.delete(socket);
725
768
  });
726
769
  this.emit('connection', socket);
770
+ // Start reading only after 'connection' handlers ran.
771
+ // This prevents dropping data when listeners are attached in the callback.
772
+ socket.resume();
727
773
  }
728
774
  }
729
775
  break;
package/package.json CHANGED
@@ -1,7 +1,7 @@
1
1
  {
2
2
  "name": "react-native-nitro-net",
3
3
  "description": "Ultra-high-performance networking to React Native by combining a memory-safe Rust core with the zero-overhead Nitro Modules JSI bridge. Provides Node.js-compatible net, tls, http(s) API.",
4
- "version": "0.5.1",
4
+ "version": "0.5.3",
5
5
  "type": "module",
6
6
  "main": "./lib/index.js",
7
7
  "module": "./lib/index.js",
@@ -18,6 +18,7 @@
18
18
  ],
19
19
  "author": "iwater <iwater@gmail.com>",
20
20
  "license": "ISC",
21
+ "homepage": "https://github.com/iwater/react-native-nitro-net",
21
22
  "repository": {
22
23
  "type": "git",
23
24
  "url": "git+https://github.com/iwater/react-native-nitro-net.git"
@@ -6,12 +6,12 @@ Pod::Spec.new do |s|
6
6
  s.name = "react-native-nitro-net"
7
7
  s.version = package["version"]
8
8
  s.summary = package["description"]
9
- s.homepage = "https://github.com/iwater/react-native-nitro-net"
9
+ s.homepage = package["homepage"]
10
10
  s.license = package["license"]
11
11
  s.authors = package["author"]
12
12
 
13
13
  s.platform = :ios, "13.0"
14
- s.source = { :git => "https://github.com/iwater/react-native-nitro-net.git", :tag => "v#{s.version}" }
14
+ s.source = { :git => package["repository"]["url"].gsub("git+", ""), :tag => "v#{s.version}" }
15
15
 
16
16
  # Module Name must match iosModuleName in nitro.json
17
17
  s.module_name = "RustCNet"
package/src/http.ts CHANGED
@@ -105,7 +105,14 @@ export class IncomingMessage extends Readable {
105
105
  }
106
106
 
107
107
  _read() {
108
- this.socket.resume();
108
+ // Server-side: socket is kept flowing by _setupHttpConnection.
109
+ // Calling socket.resume() here is the correct Node.js backpressure pattern
110
+ // but only when socket is the actual data source (client-side IncomingMessage).
111
+ // For server-side req, body bytes come via parser→push(), not socket directly.
112
+ // Still call resume() to unblock if paused by backpressure, but guard it.
113
+ if (this.socket && !(this.socket as any)._destroyed) {
114
+ this.socket.resume();
115
+ }
109
116
  }
110
117
 
111
118
  public setTimeout(msecs: number, callback?: () => void): this {
@@ -289,8 +296,16 @@ export class OutgoingMessage extends Writable {
289
296
  }
290
297
 
291
298
  end(chunk?: any, encoding?: any, callback?: any): this {
292
- debugLog(`OutgoingMessage.end() called, already ending: ${(this as any)._writableState?.ending}, chunk: ${!!chunk}`);
293
- if (chunk) {
299
+ if (typeof chunk === 'function') {
300
+ callback = chunk;
301
+ chunk = null;
302
+ encoding = null;
303
+ } else if (typeof encoding === 'function') {
304
+ callback = encoding;
305
+ encoding = null;
306
+ }
307
+
308
+ if (chunk != null) {
294
309
  this.write(chunk, encoding);
295
310
  }
296
311
  super.end(callback);
@@ -362,24 +377,31 @@ export class ServerResponse extends OutgoingMessage {
362
377
  }
363
378
 
364
379
  end(chunk?: any, encoding?: any, callback?: any): this {
380
+ if (typeof chunk === 'function') {
381
+ callback = chunk;
382
+ chunk = null;
383
+ encoding = null;
384
+ } else if (typeof encoding === 'function') {
385
+ callback = encoding;
386
+ encoding = null;
387
+ }
388
+
365
389
  if (!this.headersSent) {
366
390
  // If we have a single chunk and no headers sent yet, we can add Content-Length
367
391
  // to avoid chunked encoding for simple responses.
368
- if (chunk) {
369
- const len = typeof chunk === 'string' ? Buffer.byteLength(chunk, encoding) : chunk.length;
392
+ if (chunk != null) {
393
+ const len = typeof chunk === 'string' ? Buffer.byteLength(chunk, (encoding as string) || undefined) : chunk.length;
370
394
  this.setHeader('Content-Length', len);
371
- } else {
395
+ } else if (!this.hasHeader('Transfer-Encoding')) {
372
396
  this.setHeader('Content-Length', 0);
373
397
  }
374
398
  this._sendResponseHeaders();
375
399
  }
376
- if (typeof chunk === 'function') {
377
- super.end(chunk);
378
- } else if (chunk == null) {
379
- super.end(callback);
380
- } else {
381
- super.end(chunk, encoding, callback);
400
+
401
+ if (chunk != null) {
402
+ this.write(chunk, encoding);
382
403
  }
404
+ super.end(callback);
383
405
  return this;
384
406
  }
385
407
  }
@@ -558,16 +580,51 @@ export class Server extends EventEmitter {
558
580
  }
559
581
  }
560
582
 
561
- if (req && parsed.body && parsed.body.length > 0) {
562
- req.push(Buffer.from(parsed.body));
563
- }
564
-
565
- if (req && parsed.complete) {
566
- req.complete = true;
567
- if (parsed.trailers) {
568
- req.trailers = parsed.trailers;
583
+ // Push body/EOF into IncomingMessage.
584
+ // CRITICAL: When headers and body arrive in the same TCP packet
585
+ // (parsed.is_headers && body present), the user's 'request' handler
586
+ // has just been called synchronously above. The readable-stream
587
+ // library schedules its internal resume/flow via process.nextTick.
588
+ // If we push() synchronously here, the data lands in the buffer
589
+ // *before* the Readable enters flowing mode, and since no further
590
+ // socket data events will arrive, the flow() loop never drains it.
591
+ // Solution: always defer body/EOF push via setImmediate so the
592
+ // Readable has a chance to enter flowing mode first.
593
+ const _bodyToPush = req && parsed.body && parsed.body.length > 0
594
+ ? Buffer.from(parsed.body) : null;
595
+ const _isComplete = !!(req && parsed.complete);
596
+ const _trailers = parsed.trailers;
597
+ const _reqRef = req;
598
+
599
+ // Diagnostic: log body delivery state (requires debug mode)
600
+ debugLog(`[Server] handleParsedResult: is_headers=${parsed.is_headers}, ` +
601
+ `bodyLen=${_bodyToPush?.length ?? 0}, complete=${_isComplete}, ` +
602
+ `req.readableFlowing=${(_reqRef as any)?._readableState?.flowing}`);
603
+
604
+ if (_bodyToPush !== null || _isComplete) {
605
+ if (parsed.is_headers) {
606
+ // Same-packet case: defer to give Readable time to enter flowing mode
607
+ debugLog(`[Server] Deferring body/EOF push via setImmediate (same-packet)`);
608
+ setImmediate(() => {
609
+ if (!_reqRef) return;
610
+ debugLog(`[Server] setImmediate: pushing body=${_bodyToPush?.length ?? 0}, EOF=${_isComplete}`);
611
+ if (_bodyToPush) _reqRef.push(_bodyToPush);
612
+ if (_isComplete) {
613
+ _reqRef.complete = true;
614
+ if (_trailers) _reqRef.trailers = _trailers;
615
+ _reqRef.push(null);
616
+ }
617
+ });
618
+ } else {
619
+ // Subsequent-packet case: push immediately
620
+ debugLog(`[Server] Pushing body/EOF immediately (subsequent-packet)`);
621
+ if (_bodyToPush) _reqRef!.push(_bodyToPush);
622
+ if (_isComplete) {
623
+ _reqRef!.complete = true;
624
+ if (_trailers) _reqRef!.trailers = _trailers;
625
+ _reqRef!.push(null);
626
+ }
569
627
  }
570
- req.push(null);
571
628
  }
572
629
 
573
630
  // For Keep-Alive, try to parse remaining buffer in case of pipelining
@@ -954,6 +1011,25 @@ export class ClientRequest extends OutgoingMessage {
954
1011
  private _expectContinue: boolean = false;
955
1012
  private _continueReceived: boolean = false;
956
1013
 
1014
+ private _getChunkByteLength(chunk: any, encoding?: string | null): number {
1015
+ if (chunk == null) return 0;
1016
+ const normalizedEncoding = typeof encoding === 'string' ? encoding : undefined;
1017
+ if (typeof chunk === 'string') {
1018
+ return Buffer.byteLength(chunk, normalizedEncoding as BufferEncoding | undefined);
1019
+ }
1020
+ if (typeof chunk.length === 'number') {
1021
+ return chunk.length;
1022
+ }
1023
+ const buffer = Buffer.from(chunk, normalizedEncoding as BufferEncoding | undefined);
1024
+ return buffer.length;
1025
+ }
1026
+
1027
+ private _getPendingBodyLength(): number {
1028
+ return this._pendingWrites.reduce((total, pending) => {
1029
+ return total + this._getChunkByteLength(pending.chunk, pending.encoding);
1030
+ }, 0);
1031
+ }
1032
+
957
1033
  constructor(options: RequestOptions, callback?: (res: IncomingMessage) => void) {
958
1034
  super();
959
1035
  this._options = options;
@@ -1000,9 +1076,10 @@ export class ClientRequest extends OutgoingMessage {
1000
1076
  this.socket = socket;
1001
1077
  this._connected = true;
1002
1078
  this.emit('socket', this.socket);
1003
- // DO NOT call _sendRequest() here. Headers should only be sent once write() or end() is called.
1004
- this._flushPendingWrites();
1079
+ // IMPORTANT: attach response listeners BEFORE flushing writes.
1080
+ // If we flush first, the server may respond before we have a data listener.
1005
1081
  this._attachSocketListeners();
1082
+ this._flushPendingWrites();
1006
1083
  } else {
1007
1084
  this._connect();
1008
1085
  }
@@ -1017,15 +1094,18 @@ export class ClientRequest extends OutgoingMessage {
1017
1094
  this.emit('error', err);
1018
1095
  return;
1019
1096
  }
1020
- debugLog(`ClientRequest._connect: Socket connected! socket=${!!socket}, socket._driver=${!!(socket as any)._driver}`);
1021
- debugLog(`[HTTP] _connect: Socket connected!`);
1097
+ debugLog(`ClientRequest._connect: Socket connected!`);
1022
1098
  this.socket = socket;
1023
1099
  this._connected = true;
1024
1100
  this.emit('socket', this.socket);
1025
- debugLog(`ClientRequest._connect: Calling _sendRequest`);
1026
- this._sendRequest();
1027
- this._flushPendingWrites();
1101
+ // IMPORTANT: attach response listeners BEFORE flushing writes.
1102
+ // If we flush first, the server may respond before we have a data listener.
1028
1103
  this._attachSocketListeners();
1104
+ // _flushPendingWrites() internally calls _sendRequest() if headers not sent yet.
1105
+ // Do NOT call _sendRequest() separately here — _flushPendingWrites() needs to
1106
+ // inspect headersSent and _pendingWrites together so it can set Content-Length
1107
+ // before sending headers (to avoid chunked encoding when body is already known).
1108
+ this._flushPendingWrites();
1029
1109
  };
1030
1110
 
1031
1111
  this.socket = agent.createConnection(this._options, connectCallback);
@@ -1171,28 +1251,64 @@ export class ClientRequest extends OutgoingMessage {
1171
1251
  this.emit('close');
1172
1252
  }
1173
1253
 
1254
+ private _isFlushing = false;
1174
1255
  private _flushPendingWrites() {
1175
- if (!this.socket) return;
1176
- if (!this.headersSent) this._sendRequest();
1256
+ if (!this.socket || this._isFlushing) return;
1177
1257
 
1178
- // If we are waiting for 100-continue, don't flush yet
1179
- if (this._expectContinue && !this._continueReceived) {
1180
- return;
1181
- }
1258
+ this._isFlushing = true;
1259
+ try {
1260
+ if (!this.headersSent) {
1261
+ // KEY FIX: When all body data is already queued AND the request is ending,
1262
+ // we can calculate the exact Content-Length and avoid chunked encoding.
1263
+ //
1264
+ // Why this matters: without Content-Length, the request is sent with
1265
+ // Transfer-Encoding: chunked. The Rust HTTP parser on the server side
1266
+ // stores chunked body bytes in its internal buffer after parsing headers,
1267
+ // but calling parser.feed(empty_buffer) to drain those bytes does NOT work
1268
+ // — the drain call returns empty metadata and the body is permanently lost.
1269
+ //
1270
+ // By setting Content-Length here (when we have all the data), the body is
1271
+ // sent as raw bytes. The server parser simply reads N bytes and marks the
1272
+ // request complete — no chunked framing, no drain issues.
1273
+ if (this._ended
1274
+ && !this.hasHeader('Content-Length')
1275
+ && !this.hasHeader('Transfer-Encoding')
1276
+ && this._pendingWrites.length > 0) {
1277
+ const totalLen = this._getPendingBodyLength();
1278
+ this.setHeader('Content-Length', totalLen);
1279
+ }
1280
+ this._sendRequest();
1281
+ }
1182
1282
 
1183
- const writes = this._pendingWrites;
1184
- this._pendingWrites = [];
1185
- for (const pending of writes) {
1186
- this._write(pending.chunk, pending.encoding, pending.callback);
1187
- }
1188
- if (this._ended) {
1189
- this._finishRequest();
1283
+ // If we are waiting for 100-continue, don't flush yet
1284
+ if (this._expectContinue && !this._continueReceived) {
1285
+ return;
1286
+ }
1287
+
1288
+ // Keep draining the queue as long as it has items
1289
+ // This handles writes that might happen while we are flushing (e.g. from callbacks)
1290
+ while (this._pendingWrites.length > 0) {
1291
+ const writes = this._pendingWrites;
1292
+ this._pendingWrites = [];
1293
+ for (const pending of writes) {
1294
+ // Call super._write (OutgoingMessage._write) directly
1295
+ super._write(pending.chunk, pending.encoding, pending.callback);
1296
+ }
1297
+ }
1298
+
1299
+ if (this._ended) {
1300
+ super.end();
1301
+ }
1302
+ } finally {
1303
+ this._isFlushing = false;
1190
1304
  }
1191
1305
  }
1192
1306
 
1307
+ // Simplified _finishRequest - not needed as much if we call super.end() directly
1193
1308
  private _finishRequest() {
1194
- if (!this._ended) return;
1195
- super.end();
1309
+ if (this._connected && this._pendingWrites.length === 0) {
1310
+ super.end();
1311
+ }
1196
1312
  }
1197
1313
 
1198
1314
  private _sendRequest() {
@@ -1210,7 +1326,7 @@ export class ClientRequest extends OutgoingMessage {
1210
1326
 
1211
1327
  _write(chunk: any, encoding: string, callback: (error?: Error | null) => void) {
1212
1328
  this._hasBody = true;
1213
- if (!this._connected) {
1329
+ if (!this._connected || this._isFlushing) {
1214
1330
  this._pendingWrites.push({ chunk, encoding, callback });
1215
1331
  return;
1216
1332
  }
@@ -1220,39 +1336,47 @@ export class ClientRequest extends OutgoingMessage {
1220
1336
 
1221
1337
  write(chunk: any, encoding?: any, callback?: any): boolean {
1222
1338
  this._hasBody = true;
1223
- if (!this._connected) {
1339
+ // If not connected OR currently flushing, enqueue to preserve order
1340
+ if (!this._connected || this._isFlushing) {
1224
1341
  this._pendingWrites.push({ chunk, encoding, callback });
1225
1342
  return true;
1226
1343
  }
1227
- if (!this.headersSent) this._sendRequest();
1228
1344
  return super.write(chunk, encoding, callback);
1229
1345
  }
1230
1346
 
1231
1347
  end(chunk?: any, encoding?: any, callback?: any): this {
1232
- debugLog(`ClientRequest.end() called, connected=${this._connected}, headersSent=${this.headersSent}`);
1233
- if (chunk) {
1348
+ if (typeof chunk === 'function') {
1349
+ callback = chunk;
1350
+ chunk = null;
1351
+ encoding = null;
1352
+ } else if (typeof encoding === 'function') {
1353
+ callback = encoding;
1354
+ encoding = null;
1355
+ }
1356
+
1357
+ debugLog(`ClientRequest.end() called, connected=${this._connected}, chunk=${!!chunk}`);
1358
+
1359
+ if (chunk != null) {
1234
1360
  this._hasBody = true;
1235
1361
  if (!this.headersSent && !this.hasHeader('Content-Length')) {
1236
- const len = typeof chunk === 'string' ? Buffer.byteLength(chunk, encoding as any) : chunk.length;
1362
+ const len = this._getPendingBodyLength() + this._getChunkByteLength(chunk, encoding as string | undefined);
1237
1363
  this.setHeader('Content-Length', len);
1238
1364
  }
1365
+ // Use this.write to handle pending queue if not connected
1239
1366
  this.write(chunk, encoding);
1240
1367
  }
1368
+
1241
1369
  this._ended = true;
1242
1370
 
1243
- // If connected, we can send request and end immediately
1244
1371
  if (this._connected) {
1245
- if (!this.headersSent) {
1246
- this._sendRequest();
1247
- }
1248
- // Call super.end only when connected
1249
- super.end(callback);
1250
- } else {
1251
- // Socket not connected yet - _flushPendingWrites will handle ending
1252
- // Store callback if provided
1253
- if (callback) {
1372
+ // Only end if the queue is empty. _flushPendingWrites will handle it otherwise.
1373
+ if (this._pendingWrites.length === 0) {
1374
+ super.end(callback);
1375
+ } else if (callback) {
1254
1376
  this.once('finish', callback);
1255
1377
  }
1378
+ } else {
1379
+ if (callback) this.once('finish', callback);
1256
1380
  }
1257
1381
  return this;
1258
1382
  }