@leofcoin/peernet 0.11.20 → 0.11.23

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -287,11 +287,8 @@ const socketRequestClient = (url, protocols = 'echo-protocol', options = { retry
  });
  };

- const messageQue = {};
-
  class Peer {
  #connection
- #ready = false
  #connecting = false
  #connected = false
  #channelReady = false
@@ -304,10 +301,14 @@ class Peer {
  #remoteStreams = []
  #pendingCandidates = []
  #senderMap = new Map()
+ #messageQue = []
+ #chunksQue = {}
  #iceCompleteTimer
  #channel
  #peerId
- #chunkSize = 16384
+ #chunkSize = 16 * 1024 // 16384
+ #queRunning = false
+ #MAX_BUFFERED_AMOUNT = 16 * 1024 * 1024

  get connection() {
  return this.#connection
@@ -366,7 +367,7 @@ class Peer {
  return new Promise((resolve, reject) => {
  const splitMessage = () => {
  const chunk = message.slice(offset, offset + this.#chunkSize > size ? size : offset + this.#chunkSize);
- offset += offset + this.#chunkSize;
+ offset += this.#chunkSize;
  chunks.push(chunk);
  if (offset < size) return splitMessage()
  else resolve({chunks, size});
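Note: the change in this hunk fixes an off-by error in the chunking loop. Previously `offset += offset + this.#chunkSize` grew the offset by more than one chunk per iteration, so byte ranges were skipped; the fix advances it by exactly one chunk size (16 KiB). A minimal standalone sketch of the corrected split logic, for illustration only (splitIntoChunks is not part of the package's API):

    // Split a Uint8Array into fixed-size chunks (16 KiB, matching #chunkSize).
    const splitIntoChunks = (message, chunkSize = 16 * 1024) => {
      const size = message.length
      const chunks = []
      let offset = 0
      while (offset < size) {
        // slice() clamps the end index, so the final chunk may be shorter
        chunks.push(message.slice(offset, offset + chunkSize))
        offset += chunkSize // advance by one chunk, not by offset + chunkSize
      }
      return { chunks, size }
    }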
@@ -376,27 +377,53 @@ class Peer {
  })
  }

- async send(message, id) {
- const { chunks, size } = await this.splitMessage(message);
+ async #runQue() {
+ this.#queRunning = true;
+ if (this.#messageQue.length > 0 && this.channel.bufferedAmount + this.#messageQue[0]?.length < this.#MAX_BUFFERED_AMOUNT) {
+ const message = this.#messageQue.shift();
+
+ switch (this.channel?.readyState) {
+ case 'open':
+ await this.channel.send(message);
+ if (this.#messageQue.length > 0) return this.#runQue()
+ else this.#queRunning = false;
+ break;
+ case 'closed':
+ case 'closing':
+ this.#messageQue = [];
+ this.#queRunning = false;
+ debug('channel already closed, this usually means a bad implementation, try checking the readyState or check if the peer is connected before sending');
+ break;
+ case undefined:
+ this.#messageQue = [];
+ this.#queRunning = false;
+ debug(`trying to send before a channel is created`);
+ break;
+ }
+
+
+ } else {
+ return setTimeout(() => this.#runQue(), 50)
+ }
+ }
+
+ #trySend({ size, id, chunks }) {
  let offset = 0;
+
  for (const chunk of chunks) {
- const start = offset;
- const end = offset + chunk.length;
- const message = new TextEncoder().encode(JSON.stringify({ size, id, chunk, start, end }));
- switch (this.channel?.readyState) {
- case 'open':
- this.bw.up += message.length || message.byteLength;
- this.channel.send(message);
- break;
- case 'closed':
- case 'closing':
- debug('channel already closed, this usually means a bad implementation, try checking the readyState or check if the peer is connected before sending');
- break;
- case undefined:
- debug(`trying to send before a channel is created`);
- break;
- }
- }
+ const start = offset;
+ const end = offset + chunk.length;
+
+ const message = new TextEncoder().encode(JSON.stringify({ size, id, chunk, start, end }));
+ this.#messageQue.push(message);
+ }
+
+ if (!this.queRunning) return this.#runQue()
+ }
+
+ async send(message, id) {
+ const { chunks, size } = await this.splitMessage(message);
+ return this.#trySend({ size, id, chunks })
  }

  request(data) {
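Note: this hunk replaces the direct channel.send() loop with a per-peer message queue. `send()` splits the payload, `#trySend()` wraps each chunk in a JSON envelope ({ size, id, chunk, start, end }) and pushes the encoded bytes onto `#messageQue`, and `#runQue()` drains the queue only while the data channel's `bufferedAmount` plus the next message stays under `#MAX_BUFFERED_AMOUNT` (16 MiB), retrying after 50 ms otherwise. A minimal sketch of that backpressure pattern against a standard RTCDataChannel (the `queue`, `drain`, and `enqueue` names are illustrative, not the package's):

    const MAX_BUFFERED_AMOUNT = 16 * 1024 * 1024 // 16 MiB, mirrors #MAX_BUFFERED_AMOUNT
    const queue = []

    const drain = (channel) => {
      if (queue.length === 0) return
      if (channel.readyState !== 'open') { queue.length = 0; return } // closing/closed: drop queued messages
      // back off while the channel's internal send buffer is too full
      if (channel.bufferedAmount + queue[0].length >= MAX_BUFFERED_AMOUNT) {
        setTimeout(() => drain(channel), 50)
        return
      }
      channel.send(queue.shift())
      drain(channel)
    }

    const enqueue = (channel, encodedMessage) => {
      queue.push(encodedMessage)
      drain(channel)
    }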
@@ -482,20 +509,22 @@ class Peer {
  message = JSON.parse(new TextDecoder().decode(message.data));
  // allow sharding (multiple peers share data)
  pubsub.publish('peernet:shard', message);
- if (!messageQue[message.id]) messageQue[message.id] = [];
+ const { id } = message;

- if (message.size > messageQue[message.id].length || message.size === messageQue[message.id].length) {
+ if (!this.#chunksQue[id]) this.#chunksQue[id] = [];
+
+ if (message.size > this.#chunksQue[id].length || message.size === this.#chunksQue[id].length) {
  for (const value of Object.values(message.chunk)) {
- messageQue[message.id].push(value);
+ this.#chunksQue[id].push(value);
  }
  }

- if (message.size === messageQue[message.id].length) {
- pubsub.publish('peer:data', {id: message.id, data: new Uint8Array(Object.values(messageQue[message.id]))});
- delete messageQue[message.id];
+ if (message.size === this.#chunksQue[id].length) {
+ const data = new Uint8Array(Object.values(this.#chunksQue[id]));
+ delete this.#chunksQue[id];
+ pubsub.publish('peer:data', { id, data });
  }
  this.bw.down += message.byteLength || message.length;
-
  }

  _sendMessage(message) {
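Note: on the receive side this hunk moves chunk reassembly from the removed module-level `messageQue` into the peer-private `#chunksQue`, keyed by message `id`: chunk bytes are appended until their count reaches the advertised `size`, then the whole payload is published on `peer:data` and the entry is deleted. A minimal sketch of that reassembly, assuming envelopes shaped like the sender's { id, size, chunk } where `chunk` decodes to an indexed object of byte values (the `pending` map and `onComplete` callback are illustrative):

    const pending = {} // byte values collected so far, keyed by message id

    const onEnvelope = ({ id, size, chunk }, onComplete) => {
      if (!pending[id]) pending[id] = []
      // keep appending while the collected length has not yet reached the advertised size
      if (pending[id].length <= size) pending[id].push(...Object.values(chunk))
      if (pending[id].length === size) {
        const data = new Uint8Array(pending[id])
        delete pending[id] // free the buffer before publishing
        onComplete({ id, data })
      }
    }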
@@ -10,7 +10,7 @@ function _interopDefaultLegacy (e) { return e && typeof e === 'object' && 'defau
  var PubSub__default = /*#__PURE__*/_interopDefaultLegacy(PubSub);
  var Koa__default = /*#__PURE__*/_interopDefaultLegacy(Koa);

- var version = "0.11.19";
+ var version = "0.11.22";

  var api$1 = {
  version: ({send}) => send({client: '@peernet/api/http', version}),
@@ -285,11 +285,8 @@ const socketRequestClient = (url, protocols = 'echo-protocol', options = { retry
  });
  };

- const messageQue = {};
-
  class Peer {
  #connection
- #ready = false
  #connecting = false
  #connected = false
  #channelReady = false
@@ -302,10 +299,14 @@ class Peer {
  #remoteStreams = []
  #pendingCandidates = []
  #senderMap = new Map()
+ #messageQue = []
+ #chunksQue = {}
  #iceCompleteTimer
  #channel
  #peerId
- #chunkSize = 16384
+ #chunkSize = 16 * 1024 // 16384
+ #queRunning = false
+ #MAX_BUFFERED_AMOUNT = 16 * 1024 * 1024

  get connection() {
  return this.#connection
@@ -364,7 +365,7 @@ class Peer {
  return new Promise((resolve, reject) => {
  const splitMessage = () => {
  const chunk = message.slice(offset, offset + this.#chunkSize > size ? size : offset + this.#chunkSize);
- offset += offset + this.#chunkSize;
+ offset += this.#chunkSize;
  chunks.push(chunk);
  if (offset < size) return splitMessage()
  else resolve({chunks, size});
@@ -374,27 +375,53 @@ class Peer {
  })
  }

- async send(message, id) {
- const { chunks, size } = await this.splitMessage(message);
+ async #runQue() {
+ this.#queRunning = true;
+ if (this.#messageQue.length > 0 && this.channel.bufferedAmount + this.#messageQue[0]?.length < this.#MAX_BUFFERED_AMOUNT) {
+ const message = this.#messageQue.shift();
+
+ switch (this.channel?.readyState) {
+ case 'open':
+ await this.channel.send(message);
+ if (this.#messageQue.length > 0) return this.#runQue()
+ else this.#queRunning = false;
+ break;
+ case 'closed':
+ case 'closing':
+ this.#messageQue = [];
+ this.#queRunning = false;
+ debug('channel already closed, this usually means a bad implementation, try checking the readyState or check if the peer is connected before sending');
+ break;
+ case undefined:
+ this.#messageQue = [];
+ this.#queRunning = false;
+ debug(`trying to send before a channel is created`);
+ break;
+ }
+
+
+ } else {
+ return setTimeout(() => this.#runQue(), 50)
+ }
+ }
+
+ #trySend({ size, id, chunks }) {
  let offset = 0;
+
  for (const chunk of chunks) {
- const start = offset;
- const end = offset + chunk.length;
- const message = new TextEncoder().encode(JSON.stringify({ size, id, chunk, start, end }));
- switch (this.channel?.readyState) {
- case 'open':
- this.bw.up += message.length || message.byteLength;
- this.channel.send(message);
- break;
- case 'closed':
- case 'closing':
- debug('channel already closed, this usually means a bad implementation, try checking the readyState or check if the peer is connected before sending');
- break;
- case undefined:
- debug(`trying to send before a channel is created`);
- break;
- }
- }
+ const start = offset;
+ const end = offset + chunk.length;
+
+ const message = new TextEncoder().encode(JSON.stringify({ size, id, chunk, start, end }));
+ this.#messageQue.push(message);
+ }
+
+ if (!this.queRunning) return this.#runQue()
+ }
+
+ async send(message, id) {
+ const { chunks, size } = await this.splitMessage(message);
+ return this.#trySend({ size, id, chunks })
  }

  request(data) {
@@ -480,20 +507,22 @@ class Peer {
  message = JSON.parse(new TextDecoder().decode(message.data));
  // allow sharding (multiple peers share data)
  pubsub.publish('peernet:shard', message);
- if (!messageQue[message.id]) messageQue[message.id] = [];
+ const { id } = message;

- if (message.size > messageQue[message.id].length || message.size === messageQue[message.id].length) {
+ if (!this.#chunksQue[id]) this.#chunksQue[id] = [];
+
+ if (message.size > this.#chunksQue[id].length || message.size === this.#chunksQue[id].length) {
  for (const value of Object.values(message.chunk)) {
- messageQue[message.id].push(value);
+ this.#chunksQue[id].push(value);
  }
  }

- if (message.size === messageQue[message.id].length) {
- pubsub.publish('peer:data', {id: message.id, data: new Uint8Array(Object.values(messageQue[message.id]))});
- delete messageQue[message.id];
+ if (message.size === this.#chunksQue[id].length) {
+ const data = new Uint8Array(Object.values(this.#chunksQue[id]));
+ delete this.#chunksQue[id];
+ pubsub.publish('peer:data', { id, data });
  }
  this.bw.down += message.byteLength || message.length;
-
  }

  _sendMessage(message) {
@@ -1850,7 +1879,7 @@ class Peernet {
  protocol: 'peernet-v0.1.0', host: '127.0.0.1', port: options.port
  });
  } else {
- const http = await Promise.resolve().then(function () { return require('./http-62c5b155.js'); });
+ const http = await Promise.resolve().then(function () { return require('./http-547d8100.js'); });
  if (environment !== 'browser') http.default(options);
  }

@@ -241,11 +241,8 @@ const socketRequestClient = (url, protocols = 'echo-protocol', options = { retry
  });
  };

- const messageQue = {};
-
  class Peer {
  #connection
- #ready = false
  #connecting = false
  #connected = false
  #channelReady = false
@@ -258,10 +255,14 @@ class Peer {
  #remoteStreams = []
  #pendingCandidates = []
  #senderMap = new Map()
+ #messageQue = []
+ #chunksQue = {}
  #iceCompleteTimer
  #channel
  #peerId
- #chunkSize = 16384
+ #chunkSize = 16 * 1024 // 16384
+ #queRunning = false
+ #MAX_BUFFERED_AMOUNT = 16 * 1024 * 1024

  get connection() {
  return this.#connection
@@ -320,7 +321,7 @@ class Peer {
  return new Promise((resolve, reject) => {
  const splitMessage = () => {
  const chunk = message.slice(offset, offset + this.#chunkSize > size ? size : offset + this.#chunkSize);
- offset += offset + this.#chunkSize;
+ offset += this.#chunkSize;
  chunks.push(chunk);
  if (offset < size) return splitMessage()
  else resolve({chunks, size});
@@ -330,27 +331,53 @@ class Peer {
  })
  }

- async send(message, id) {
- const { chunks, size } = await this.splitMessage(message);
+ async #runQue() {
+ this.#queRunning = true;
+ if (this.#messageQue.length > 0 && this.channel.bufferedAmount + this.#messageQue[0]?.length < this.#MAX_BUFFERED_AMOUNT) {
+ const message = this.#messageQue.shift();
+
+ switch (this.channel?.readyState) {
+ case 'open':
+ await this.channel.send(message);
+ if (this.#messageQue.length > 0) return this.#runQue()
+ else this.#queRunning = false;
+ break;
+ case 'closed':
+ case 'closing':
+ this.#messageQue = [];
+ this.#queRunning = false;
+ debug('channel already closed, this usually means a bad implementation, try checking the readyState or check if the peer is connected before sending');
+ break;
+ case undefined:
+ this.#messageQue = [];
+ this.#queRunning = false;
+ debug(`trying to send before a channel is created`);
+ break;
+ }
+
+
+ } else {
+ return setTimeout(() => this.#runQue(), 50)
+ }
+ }
+
+ #trySend({ size, id, chunks }) {
  let offset = 0;
+
  for (const chunk of chunks) {
- const start = offset;
- const end = offset + chunk.length;
- const message = new TextEncoder().encode(JSON.stringify({ size, id, chunk, start, end }));
- switch (this.channel?.readyState) {
- case 'open':
- this.bw.up += message.length || message.byteLength;
- this.channel.send(message);
- break;
- case 'closed':
- case 'closing':
- debug('channel already closed, this usually means a bad implementation, try checking the readyState or check if the peer is connected before sending');
- break;
- case undefined:
- debug(`trying to send before a channel is created`);
- break;
- }
- }
+ const start = offset;
+ const end = offset + chunk.length;
+
+ const message = new TextEncoder().encode(JSON.stringify({ size, id, chunk, start, end }));
+ this.#messageQue.push(message);
+ }
+
+ if (!this.queRunning) return this.#runQue()
+ }
+
+ async send(message, id) {
+ const { chunks, size } = await this.splitMessage(message);
+ return this.#trySend({ size, id, chunks })
  }

  request(data) {
@@ -436,20 +463,22 @@ class Peer {
  message = JSON.parse(new TextDecoder().decode(message.data));
  // allow sharding (multiple peers share data)
  pubsub.publish('peernet:shard', message);
- if (!messageQue[message.id]) messageQue[message.id] = [];
+ const { id } = message;

- if (message.size > messageQue[message.id].length || message.size === messageQue[message.id].length) {
+ if (!this.#chunksQue[id]) this.#chunksQue[id] = [];
+
+ if (message.size > this.#chunksQue[id].length || message.size === this.#chunksQue[id].length) {
  for (const value of Object.values(message.chunk)) {
- messageQue[message.id].push(value);
+ this.#chunksQue[id].push(value);
  }
  }

- if (message.size === messageQue[message.id].length) {
- pubsub.publish('peer:data', {id: message.id, data: new Uint8Array(Object.values(messageQue[message.id]))});
- delete messageQue[message.id];
+ if (message.size === this.#chunksQue[id].length) {
+ const data = new Uint8Array(Object.values(this.#chunksQue[id]));
+ delete this.#chunksQue[id];
+ pubsub.publish('peer:data', { id, data });
  }
  this.bw.down += message.byteLength || message.length;
-
  }

  _sendMessage(message) {
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@leofcoin/peernet",
- "version": "0.11.20",
+ "version": "0.11.23",
  "description": "",
  "main": "dist/commonjs/peernet.js",
  "module": "dist/module/peernet.js",
@@ -25,7 +25,7 @@
  "dependencies": {
  "@leofcoin/generate-account": "^1.0.2",
  "@leofcoin/multi-wallet": "^2.1.2",
- "@leofcoin/peernet-swarm": "^0.1.21",
+ "@leofcoin/peernet-swarm": "^0.2.1",
  "@leofcoin/storage": "^2.3.0",
  "@vandeurenglenn/base32": "^1.1.0",
  "@vandeurenglenn/base58": "^1.1.0",