holepunch-hop 0.3.2 → 0.3.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/package.json +1 -1
- package/src/bees.js +363 -11
- package/src/drive.js +123 -15
- package/src/fileParser.js +0 -5
- package/src/index.js +107 -29
- package/src/peers.js +128 -53
- package/test/large-csv.test.js +9 -0
package/package.json
CHANGED
package/src/bees.js
CHANGED
@@ -2,8 +2,8 @@
 /**
 * Manage HyperBee key store datastore
 *
-* @class
-* @package
+* @class HyperBee
+* @package HyperBee
 * @copyright Copyright (c) 2024 James Littlejohn
 * @license http://www.gnu.org/licenses/old-licenses/gpl-3.0.html
 * @version $Id$
@@ -46,15 +46,14 @@ class HyperBee extends EventEmitter {
       valueEncoding: 'json' // same options as above
     })
     await this.dbPublicLibrary.ready()
-    beePubkeys.push({ store: 'publiclibrary', pubkey: b4a.toString(
+    beePubkeys.push({ store: 'publiclibrary', pubkey: b4a.toString(this.dbPublicLibrary.key, 'hex')})
     // allow other peer access to public library (need to check for DDOS ie over asked)
-    // join a topic
+    // join a topic for network
     const discovery = this.swarm.join(this.dbPublicLibrary.discoveryKey)
     // Only display the key once the Hyperbee has been announced to the DHT
     discovery.flushed().then(() => {
     })

-
     const core2 = this.store.get({ name: 'peerlibrary' })
     this.dbPeerLibrary = new Hyperbee(core2, {
       keyEncoding: 'utf-8', // can be set to undefined (binary), utf-8, ascii or and abstract-encoding
@@ -96,6 +95,60 @@ class HyperBee extends EventEmitter {
     await this.dbKBledger.ready()
     // this.client.replicate(this.dbKBledger.feed)
     beePubkeys.push({store:'kbledger', pubkey: b4a.toString(core5.key, 'hex')})
+    // stores of cues, media, research, markers, products/treatments
+
+    const core7 = this.store.get({ name: 'bentocues' })
+    this.dbBentocues = new Hyperbee(core7, {
+      keyEncoding: 'utf-8', // can be set to undefined (binary), utf-8, ascii or and abstract-encoding
+      valueEncoding: 'json' // same options as above
+    })
+    await this.dbBentocues.ready()
+    beePubkeys.push({store:'bentocues', pubkey: b4a.toString(core7.key, 'hex')})
+
+
+    const core8 = this.store.get({ name: 'bentodecisions' })
+    this.dbBentodecisions = new Hyperbee(core8, {
+      keyEncoding: 'utf-8', // can be set to undefined (binary), utf-8, ascii or and abstract-encoding
+      valueEncoding: 'json' // same options as above
+    })
+    await this.dbBentodecisions.ready()
+    beePubkeys.push({store:'bentodecisions', pubkey: b4a.toString(core8.key, 'hex')})
+
+
+    const core9 = this.store.get({ name: 'bentomarkers' })
+    this.dbBentomarkers = new Hyperbee(core9, {
+      keyEncoding: 'utf-8', // can be set to undefined (binary), utf-8, ascii or and abstract-encoding
+      valueEncoding: 'json' // same options as above
+    })
+    await this.dbBentomarkers.ready()
+    beePubkeys.push({store:'bentomarkers', pubkey: b4a.toString(core9.key, 'hex')})
+
+
+    const core10 = this.store.get({ name: 'research' })
+    this.dbBentoresearch = new Hyperbee(core10, {
+      keyEncoding: 'utf-8', // can be set to undefined (binary), utf-8, ascii or and abstract-encoding
+      valueEncoding: 'json' // same options as above
+    })
+    await this.dbBentoresearch.ready()
+    beePubkeys.push({store:'research', pubkey: b4a.toString(core10.key, 'hex')})
+
+
+    const core11 = this.store.get({ name: 'bentoproducts' })
+    this.dbBentoproducts = new Hyperbee(core11, {
+      keyEncoding: 'utf-8', // can be set to undefined (binary), utf-8, ascii or and abstract-encoding
+      valueEncoding: 'json' // same options as above
+    })
+    await this.dbBentoproducts.ready()
+    beePubkeys.push({store:'bentoproducts', pubkey: b4a.toString(core11.key, 'hex')})
+
+    const core12 = this.store.get({ name: 'bentomedia' })
+    this.dbBentomedia = new Hyperbee(core12, {
+      keyEncoding: 'utf-8', // can be set to undefined (binary), utf-8, ascii or and abstract-encoding
+      valueEncoding: 'json' // same options as above
+    })
+    await this.dbBentomedia.ready()
+    beePubkeys.push({store:'bentomedia', pubkey: b4a.toString(core12.key, 'hex')})
+
     this.emit('hbee-live')
     // return beePubkeys
     let startBeePubkey = {}
@@ -212,8 +265,8 @@ class HyperBee extends EventEmitter {
   *
   */
   saveSpaceHistory = async function (spaceContract) {
-    await this.dbBentospaces.put(spaceContract.space.
-    let checkSave = await this.getBentospace(spaceContract.space.
+    await this.dbBentospaces.put(spaceContract.space.cueid, spaceContract)
+    let checkSave = await this.getBentospace(spaceContract.space.cueid)
    return checkSave
  }

@@ -223,8 +276,8 @@ class HyperBee extends EventEmitter {
   *
   */
   saveBentospace = async function (spaceContract) {
-    await this.dbBentospaces.put(spaceContract.
-    let checkSave = await this.getBentospace(spaceContract.
+    await this.dbBentospaces.put(spaceContract.cueid, spaceContract)
+    let checkSave = await this.getBentospace(spaceContract.cueid)
    return checkSave
  }

@@ -238,15 +291,314 @@ class HyperBee extends EventEmitter {
     return nodeData
   }

+  /**
+  * lookup bentospaces all
+  * @method getAllBentospaces
+  *
+  */
+  getAllBentospaces = async function () {
+    const spacesHistory = await this.dbBentospaces.createReadStream()
+    let spacesData = []
+    for await (const { key, value } of spacesHistory) {
+      spacesData.push({ key, value })
+    }
+    return spacesData
+  }
+
+
   /**
   * delete nxp ref contract from peer library
   * @method deleteBentospace
   *
   */
   deleteBentospace = async function (space) {
-    const deleteStatus = await this.dbBentospaces.del(space.
+    const deleteStatus = await this.dbBentospaces.del(space.cueid)
+    let deleteInfo = {}
+    deleteInfo.spaceid = space.cueid
+    return deleteInfo
+  }
+
+  /** CUES */
+  /**
+  * save cues
+  * @method saveCues
+  *
+  */
+  saveCues = async function (cuesInfo) {
+    await this.dbBentocues.put(cuesInfo.cueid, cuesInfo.data)
+    let checkSave = await this.getCues(cuesInfo.cueid)
+    return checkSave
+  }
+
+  /**
+  * get one cue by id
+  * @method getCues
+  *
+  */
+  getCues = async function (key) {
+    const nodeData = await this.dbBentocues.get(key)
+    return nodeData
+  }
+
+  /**
+  * get all cuees
+  * @method getCuesHistory
+  *
+  */
+  getCuesHistory = async function (key) {
+    const cuesHistory = await this.dbBentocues.createReadStream()
+    let cuesData = []
+    for await (const { key, value } of cuesHistory) {
+      cuesData.push({ key, value })
+    }
+    return cuesData
+  }
+
+  /**
+  * delete nxp ref contract from peer library
+  * @method deleteBentocue
+  */
+  deleteBentocue = async function (cue) {
+    const deleteStatus = await this.dbBentocues.del(cue.cueid)
+    let deleteInfo = {}
+    deleteInfo.spaceid = cue.cueid
+    return deleteInfo
+  }
+
+  /** MEDIA */
+  /**
+  * save media
+  * @method saveMedia
+  *
+  */
+  saveMedia = async function (mediaInfo) {
+    await this.dbBentomedia.put(mediaInfo.cueid, mediaInfo.data)
+    let checkSave = await this.getMedia(mediaInfo.cueid)
+    return checkSave
+  }
+
+  /**
+  * get one cue by id
+  * @method getMedia
+  *
+  */
+  getMedia = async function (key) {
+    const nodeData = await this.dbBentomedia.get(key)
+    return nodeData
+  }
+
+  /**
+  * get all media
+  * @method getMediaHistory
+  *
+  */
+  getMediaHistory = async function (key) {
+    const cuesHistory = await this.dbBentomedia.createReadStream()
+    let cuesData = []
+    for await (const { key, value } of cuesHistory) {
+      cuesData.push({ key, value })
+    }
+    return cuesData
+  }
+
+
+  /**
+  * delete nxp ref contract from peer library
+  * @method deleteBentomedia
+  */
+  deleteBentomedia = async function (media) {
+    const deleteStatus = await this.dbBentomedia.del(media.id)
+    let deleteInfo = {}
+    deleteInfo.spaceid = media.id
+    return deleteInfo
+  }
+
+  /** PEERS */
+  /**
+  * save research
+  * @method savePeer
+  *
+  */
+  savePeer = async function (peerInfo) {
+    await this.dbPeers.put(peerInfo.publickey, peerInfo)
+    let checkSave = await this.getPeer(peerInfo.publickey)
+    return checkSave
+  }
+
+  /**
+  * get one peer by publickey
+  * @method getPeer
+  *
+  */
+  getPeer = async function (key) {
+    const nodeData = await this.dbPeers.get(key)
+    return nodeData
+  }
+
+  /**
+  * get all peers
+  * @method getPeersHistory
+  *
+  */
+  getPeersHistory = async function (key) {
+    const peerHistory = await this.dbPeers.createReadStream()
+    let peerData = []
+    for await (const { key, value } of peerHistory) {
+      peerData.push({ key, value })
+    }
+    return peerData
+  }
+
+  /**
+  * delete contract
+  * @method deletePeer
+  */
+  deletePeer = async function (pubkey) {
+    const deleteStatus = await this.dbPeers.del(pubkey)
+    let deleteInfo = {}
+    deleteInfo.publickey = pubkey
+    return deleteInfo
+  }
+
+
+  /** RESEARCH */
+  /**
+  * save research
+  * @method saveResearch
+  *
+  */
+  saveResearch = async function (cuesInfo) {
+    await this.dbBentoresearch.put(cuesInfo.cueid, cuesInfo.data)
+    let checkSave = await this.getResearch(cuesInfo.cueid)
+    return checkSave
+  }
+
+  /**
+  * get one cue by id
+  * @method getResearch
+  *
+  */
+  getResearch = async function (key) {
+    const nodeData = await this.dbBentoresearch.get(key)
+    return nodeData
+  }
+
+  /**
+  * get all research
+  * @method getResearchHistory
+  *
+  */
+  getResearchHistory = async function (key) {
+    const cuesHistory = await this.dbBentoresearch.createReadStream()
+    let cuesData = []
+    for await (const { key, value } of cuesHistory) {
+      cuesData.push({ key, value })
+    }
+    return cuesData
+  }
+
+  /**
+  * delete contract
+  * @method deleteBentoResearch
+  */
+  deleteBentoResearch = async function (cue) {
+    const deleteStatus = await this.dbBentoresearch.del(cue.id)
+    let deleteInfo = {}
+    deleteInfo.spaceid = cue.id
+    return deleteInfo
+  }
+
+  /** MARKER */
+  /**
+  * save marker
+  * @method saveMarker
+  *
+  */
+  saveMarker = async function (cuesInfo) {
+    await this.dbBentomarkers.put(cuesInfo.cueid, cuesInfo.data)
+    let checkSave = await this.getMarker(cuesInfo.cueid)
+    return checkSave
+  }
+
+  /**
+  * get one cue by id
+  * @method getMarker
+  *
+  */
+  getMarker = async function (key) {
+    const nodeData = await this.dbBentomarkers.get(key)
+    return nodeData
+  }
+
+  /**
+  * get all research
+  * @method getMarkerHistory
+  *
+  */
+  getMarkerHistory = async function (key) {
+    const cuesHistory = await this.dbBentomarkers.createReadStream()
+    let cuesData = []
+    for await (const { key, value } of cuesHistory) {
+      cuesData.push({ key, value })
+    }
+    return cuesData
+  }
+
+  /**
+  * delete contract
+  * @method deleteBentoMarker
+  */
+  deleteBentoMarker = async function (cue) {
+    const deleteStatus = await this.dbBentomarkers.del(cue.id)
+    let deleteInfo = {}
+    deleteInfo.spaceid = cue.id
+    return deleteInfo
+  }
+
+  /** Product */
+  /**
+  * save product
+  * @method saveProduct
+  *
+  */
+  saveProduct = async function (cuesInfo) {
+    await this.dbBentoproducts.put(cuesInfo.cueid, cuesInfo.data)
+    let checkSave = await this.getProduct(cuesInfo.cueid)
+    return checkSave
+  }
+
+  /**
+  * get one cue by id
+  * @method getProduct
+  *
+  */
+  getProduct = async function (key) {
+    const nodeData = await this.dbBentoproducts.get(key)
+    return nodeData
+  }
+
+  /**
+  * get all prodcut
+  * @method getProductHistory
+  *
+  */
+  getProductHistory = async function (key) {
+    const cuesHistory = await this.dbBentoproducts.createReadStream()
+    let cuesData = []
+    for await (const { key, value } of cuesHistory) {
+      cuesData.push({ key, value })
+    }
+    return cuesData
+  }
+
+  /**
+  * delete contract
+  * @method deleteBentoProduct
+  */
+  deleteBentoProduct = async function (cue) {
+    const deleteStatus = await this.dbBentoproducts.del(cue.id)
     let deleteInfo = {}
-    deleteInfo.spaceid =
+    deleteInfo.spaceid = cue.id
     return deleteInfo
   }
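The new Bento stores above all follow the same Hyperbee put / get / createReadStream pattern. A minimal usage sketch of the cue methods, assuming `beeData` is an already-initialised instance of the HyperBee class from package/src/bees.js and the cue id and payload are purely illustrative:

// Sketch only: beeData (a ready HyperBee instance) and the cue values are assumptions
async function cueRoundTrip (beeData) {
  const cuesInfo = { cueid: 'cue-0001', data: { label: 'resting heart rate' } }
  const saved = await beeData.saveCues(cuesInfo)       // put, then read back via getCues
  const one = await beeData.getCues('cue-0001')        // single-key lookup
  const all = await beeData.getCuesHistory()           // read stream over the whole store
  await beeData.deleteBentocue({ cueid: 'cue-0001' })  // del is keyed on cue.cueid
  return { saved, one, count: all.length }
}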
|
package/src/drive.js
CHANGED
@@ -86,18 +86,9 @@ class HypDrive extends EventEmitter {
   * @method listFilesFolder
   *
   */
-  listFilesFolder = function (folder) {
-    const stream = this.drive.list(
-
-    let dataDrive = []
-    stream.on('data', function(chunk) {
-      dataDrive.push(chunk)
-    })
-
-    stream.on('end', function(chunk) {
-      // console.log('stream at end')
-      // console.log(dataDrive)
-    })
+  listFilesFolder = async function (folder) {
+    const stream = await this.drive.list(folder) // [options])
+    return stream
   }

   /**
@@ -222,6 +213,109 @@ class HypDrive extends EventEmitter {
     return hyperdrivePath
   }

+
+  /**
+  * save a stream of file ie. large file
+  * @method hyperdriveStreamSave
+  *
+  */
+  hyperdriveStreamSave = async function (path, data, first) {
+    let ws
+    if (first === true) {
+      // await this.drive.del(path)
+      ws = this.drive.createWriteStream(path)
+      ws.write(data)
+    }
+    // use listener
+    this.on('stream-update', (data) => {
+      ws.write(data)
+    })
+
+    this.on('stream-complete', async (data) => {
+      ws.end()
+      ws.once('close', () =>
+        console.log('stream-close'),
+        await this.checkLargeList(path)
+      )
+    })
+
+  }
+
+  /**
+  * check read the file if save large file
+  * @method checkLargeList
+  *
+  */
+  checkLargeList = async function (path) {
+    let localthis = this
+    let folder = 'test'
+    let folderList = await this.listFilesFolder(folder)
+    let dataDrive = []
+    folderList.on('data', function(chunk) {
+      dataDrive.push(chunk)
+    })
+
+    folderList.on('end', async function() {
+      for (let file of dataDrive) {
+        if (file.key === path) {
+          await localthis.firstLineLargeCSV(path)
+        }
+      }
+    })
+  }
+
+  /**
+  * check read the file if save large file
+  * @method checkLargeSave
+  *
+  */
+  checkLargeSave = async function (path) {
+    const rs = this.drive.createReadStream(path)
+    for await (const chunk of rs) {
+      console.log('rs', chunk.toString()) // => <Buffer ..>
+    }
+  }
+
+  /**
+  * check read the file if save large file
+  * @method firstLineLargeCSV
+  *
+  */
+  firstLineLargeCSV = async function (path) {
+    let readFile = await this.readCSVfileStream(path)
+    let makeString = readFile[0].toString()
+    let csvFormat = makeString.split(/\r?\n/)
+    let headerList = csvFormat[0].split(",");
+    let largeMessage = {}
+    largeMessage.type = 'library'
+    largeMessage.action = 'PUT-stream'
+    largeMessage.task = 'PUT-stream'
+    largeMessage.save = true
+    largeMessage.data = {}
+    largeMessage.data.path = path
+    largeMessage.data.columns = headerList
+    this.emit('largefile-save', largeMessage)
+  }
+
+  /**
+  * stream save update
+  * @method streamSavedata
+  *
+  */
+  streamSavedata = async function (path, data) {
+    this.emit('stream-update', data)
+  }
+
+  /**
+  * stream save complete
+  * @method streamSaveComplete
+  *
+  */
+  streamSaveComplete = async function (data) {
+    this.emit('stream-update', data)
+    this.emit('stream-complete', data)
+  }
+
   /**
   * read file nav to folder
   * @method hyperdriveReadfile
@@ -300,7 +394,6 @@ class HypDrive extends EventEmitter {
   *
   */
   SQLiteQuery = async function (dataInfo) {
-    console.log('HP--DRIVE--sqlite')
     let timestampCol = ''
     // is the sqliite database sill accive?
     // const stream = this.liveDataAPI.DriveFiles.listFilesFolder('sqlite/')
@@ -324,7 +417,6 @@ class HypDrive extends EventEmitter {
       blindData.label = extractLabel
       return blindData
     } else {
-      console.log('no data for that query')
       let blindData = {}
       blindData.data = []
       blindData.label = []
@@ -362,7 +454,7 @@ class HypDrive extends EventEmitter {
     const rs = this.drive.createReadStream(fpath) // 'text/csv/testshed11530500.csv') // '/blob.txt')
     return new Promise((resolve, reject) => {
       const results = []
-      //this.drive.createReadStream(fpath)
+      // this.drive.createReadStream(fpath)
       rs.pipe(csv({ headers: headerSet.headerset, separator: headerSet.delimiter, skipLines: headerSet.dataline }))
         .on('data', (data) => results.push(data))
         .on('end', () => {
@@ -371,6 +463,22 @@ class HypDrive extends EventEmitter {
     })
   }

+  /**
+  * stream out line by line
+  * @method readCSVfileStream
+  *
+  */
+  readCSVfileStream = async function (fpath) {
+    const rs = this.drive.createReadStream(fpath, { start: 0, end: 120 })
+    return new Promise((resolve, reject) => {
+      let results = []
+      rs.on('data', (data) => results.push(data.toString()))
+      rs.on('end', () => {
+        resolve(results)
+      })
+    })
+  }
+
   /**
   * replicate a hyperdrive
   * @method hyperdriveReplicate
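The streaming additions are event-driven: hyperdriveStreamSave opens the write stream with the first chunk, streamSavedata pushes later chunks through the internal 'stream-update' event, and streamSaveComplete writes the final chunk and ends the stream, after which firstLineLargeCSV emits 'largefile-save' carrying the CSV header columns. A rough sketch of that sequence, assuming `driveFiles` is a ready HypDrive instance, `chunks` is an array of at least three Buffers, and the path is illustrative (checkLargeList currently only scans the hard-coded 'test' folder):

// Sketch only: driveFiles (HypDrive instance), chunks (Buffer[]) and the path are assumptions
async function saveLargeCsv (driveFiles, chunks) {
  driveFiles.on('largefile-save', (msg) => {
    // msg.data.columns carries the header row parsed by firstLineLargeCSV
    console.log('csv columns:', msg.data.columns)
  })
  const [first, ...rest] = chunks
  const last = rest.pop()
  await driveFiles.hyperdriveStreamSave('test/large.csv', first, true) // opens the write stream
  for (const chunk of rest) {
    await driveFiles.streamSavedata('test/large.csv', chunk)           // emits 'stream-update'
  }
  await driveFiles.streamSaveComplete(last)                            // final chunk, then 'stream-complete'
}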
package/src/fileParser.js
CHANGED
@@ -143,7 +143,6 @@ FileParser.prototype.webCSVparse = function (fData) {
 *
 */
 FileParser.prototype.TEMPwebJSONparse = function (fjData) {
-  console.log(fjData)
   let extractLabel = []
   let extractCol = []
   for (let df of fjData.content) {
@@ -315,20 +314,16 @@ FileParser.prototype.convertJSON = function (o, headerSet, results, source, newF
   const datacolumn = o.data[0].info.datename
   const flowList = []
   for (const rs of results) {
-    // console.log(rs)
     let timeLength = 0
     // what length is date number? Need to make ms time standard to convert
     if (rs[datacolumn].length === 10) {
-      // console.log('not ms time add 000')
       timeLength = rs[datacolumn] * 1000
     } else {
      // console.log('assume ms time ')
      timeLength = rs[datacolumn]
    }
    const dateFormat = new Date(timeLength)
-    // console.log(dateFormat)
    const msDate = dateFormat.getTime()
-    // console.log(msDate)
    rs[datacolumn] = msDate / 1000
    flowList.push(rs)
  }
package/src/index.js
CHANGED
@@ -48,7 +48,7 @@ class HolepunchWorker extends EventEmitter {
     this.store = new Corestore(os.homedir() + '/.hop-storage')
     this.swarm = new Hyperswarm()
     // make replication possible
-
+    this.swarm.on('connection', conn => this.store.replicate(conn))
     goodbye(() => this.swarm.destroy())
     this.BeeData = new BeeWorker(this.store, this.swarm)
     this.DriveFiles = new DriveWorker(this.store, this.swarm)
@@ -76,9 +76,17 @@ class HolepunchWorker extends EventEmitter {
     this.wsocket = ws
     this.BeeData.setWebsocket(ws)
     this.DriveFiles.setWebsocket(ws)
-    this.activateHypercores()
+    // this.activateHypercores()
   }

+  /**
+  * active hypercores auth verified
+  * @method startStores
+  *
+  */
+  startStores = function (ws) {
+    this.activateHypercores()
+  }

   /**
   * listen for outputs from workers
@@ -89,46 +97,62 @@ class HolepunchWorker extends EventEmitter {
     this.Peers.on('peer-network', (data) => {
       this.wsocket.send(JSON.stringify(data))
     })
-    // peer connection active
+    // peer connection active for first time
     this.Peers.on('peer-connect', (data) => {
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+      //this.warmPeerPrepare(data)
+    })
+    // share connection failed
+    this.Peers.on('peer-share-fail', (data) => {
+      let peerFail = {}
+      peerFail.type = 'account'
+      peerFail.action = 'peer-share-fail'
+      peerFail.data = { publickey: data }
+      this.wsocket.send(JSON.stringify(peerFail))
+    })
+    // save peer topic
+    this.Peers.on('peer-topic-save', async (data) => {
+      await this.emit('peer-topic-save', data)
+    })
+    // peer reconnection topic ie. able to reconnect again
+    this.Peers.on('peer-reconnect-topic', (data) => {
+      this.emit('peer-reconnect-topic', data)
     })
     // data for beebee
     this.Peers.on('beebee-data', (data) => {
       this.emit('peer-topeer', data)
     })
+    // cue space share
+    this.Peers.on('cuespace-notification', (data) => {
+      this.emit('peer-cuespace', data)
+    })
     // public library notification
     this.Peers.on('publiclibrarynotification', (data) => {
       this.BeeData.replicatePubliclibrary(data)
     })
+    // beebee notifications public
     this.BeeData.on('publibbeebee-notification', (data) => {
       this.emit('beebee-publib-notification', data)
     })
     // new warm incoming peer
-    this.Peers.on('connect-warm', (data) => {
+    this.Peers.on('connect-warm-first', (data) => {
+      let peerInfo = this.Peers.peerHolder[data]
+      if (peerInfo === undefined) {
+        // receiving peer
+        peerInfo = { name: 'peernew'}
+      }
       let peerId = {}
-      peerId.name =
+      peerId.name = peerInfo.name
       peerId.publickey = data
-      peerId.
+      peerId.longterm = true
+      peerId.settopic = false
+      peerId.topic = ''
+      peerId.live = false
       this.warmPeers.push(peerId)
-      this.emit('peer-incoming', peerId)
+      this.emit('peer-incoming-save', peerId)
+    })
+    // drive listener
+    this.DriveFiles.on('largefile-save', (data) => {
+      this.emit('drive-save-large', data)
     })
   }

@@ -146,16 +170,22 @@ class HolepunchWorker extends EventEmitter {
         peerMatch = true
       }
     }
-
-
+    if (message.task === 'peer-share-invite') {
+      // keep track of role, reciving or extended invite
+      this.Peers.setRole(message.data.publickey)
       if (peerMatch === true) {
         this.Peers.peerAlreadyJoinSetData(message.data)
-        this.Peers.writeTonetwork(message.data.publickey)
+        // this.Peers.writeTonetwork(message.data.publickey)
+        this.warmPeerPrepare(message.data.publickey, true)
       } else {
         this.warmPeers.push(message.data)
+        this.Peers.peerAlreadyJoinSetData(message.data)
         this.Peers.peerJoin(message.data)
       }
-    } else if (message.task === 'peer-
+    } else if (message.task === 'peer-share-topic') {
+      // existing peers reconnecting via topic
+      this.Peers.topicConnect(message.data)
+    } else if (message.task === 'public-n1-experiment') {
       if (peerMatch === true) {
         this.Peers.peerAlreadyJoinSetData(message.data)
         this.Peers.writeToPublicLibrary(message.data.publickey)
@@ -166,6 +196,16 @@ class HolepunchWorker extends EventEmitter {
         this.Peers.peerAlreadyJoinSetData(message.data)
         this.Peers.writeToPublicLibrary(message.data.publickey)
       }
+    } else if (message.task = 'cue-space') {
+      if (peerMatch === true) {
+        this.Peers.peerAlreadyJoinSetData(message.data)
+        this.Peers.writeToCueSpace(message.data.publickey)
+      } else {
+        this.warmPeers.push(message.data)
+        this.Peers.peerJoin(message.data)
+        // this.Peers.peerAlreadyJoinSetData(message.data)
+        // this.Peers.writeToCueSpace(message.data.publickey)
+      }
     } else if (message.task === 'peer-write') {
       this.emit('peer-write', message.data)
     } else if (message.task === 'topic') {
@@ -174,6 +214,44 @@ class HolepunchWorker extends EventEmitter {
     }
   }

+
+  /**
+  * prepare data to send to a warm peer
+  * @method warmPeerPrepare
+  */
+  warmPeerPrepare = function (data, existing) {
+    // two checks, if topic send to other peer
+    if (existing !== true) {
+      let peerRole = this.Peers.peersRole[data]
+      if (peerRole === undefined) {
+        this.Peers.writeTonetworkTopic(data)
+      } else {
+        console.log(' this peer set the topic')
+      }
+    }
+    // if data within coming then process that
+    let peerDataExist = this.Peers.peerHolder[data]
+    if (peerDataExist === undefined) {
+    } else {
+      // what type of data being shared?
+      // check for data along with new peer?
+      if (peerDataExist.data !== undefined) {
+        if (peerDataExist.data.type === 'private-chart') {
+          this.Peers.writeTonetworkData(data, peerDataExist.data)
+        } else if (peerDataExist.data.type === 'private-cue-space') {
+          this.Peers.writeToCueSpace(this.Peers.peerHolder[peerFirstID].publickey)
+        } else if (peerDataExist.data.type === 'public-n1-experiment') {
+          this.Peers.writeTonetworkData(data, peerDataExist.data)
+        } else if (peerDataExist.data.type === 'public-library') {
+          this.Peers.writeToPublicLibrary(data)
+        } else if (peerDataExist.data.type === 'text-message') {
+          // simpole text message
+          this.Peers.writeTonetwork(data)
+        }
+      }
+    }
+  }
+
   /**
   * corestore test example
   * @method testCorestore
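Taken together, the index.js changes defer hypercore activation until startStores is called and fan the new peer/drive events out to the websocket. A minimal start-up sketch, assuming the default export takes no constructor arguments (as in the new test), that the websocket-wiring method shown in the hunk is named setWebsocket, and that the package main resolves to src/index.js — all assumptions beyond what the diff shows:

// Sketch only: the import specifier, the setWebsocket name and the auth step are assumptions
import HolepunchData from 'holepunch-hop'

function startAfterAuth (ws) {
  const holepunch = new HolepunchData()
  holepunch.setWebsocket(ws)   // wires the workers to the socket; no longer activates the cores
  // ...once the caller has verified the session...
  holepunch.startStores(ws)    // runs activateHypercores()
  holepunch.on('drive-save-large', (msg) => ws.send(JSON.stringify(msg)))
  return holepunch
}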
package/src/peers.js
CHANGED
@@ -9,9 +9,6 @@
 * @version $Id$
 */
 import EventEmitter from 'events'
-// import DHT from '@hyperswarm/dht'
-import goodbye from 'graceful-goodbye'
-import b4a from 'b4a'


 class NetworkPeers extends EventEmitter {
@@ -24,6 +21,7 @@ class NetworkPeers extends EventEmitter {
     this.drive = {}
     this.peerHolder = {}
     this.peerConnect = {}
+    this.peersRole = {}
   }

   /**
@@ -32,6 +30,17 @@ class NetworkPeers extends EventEmitter {
   *
   */
   networkKeys = function () {
+    // console.log('swarm on start')
+    // console.log(this.swarm._discovery) // .toString('hex'))
+
+    /*this.swarm._discovery.forEach((value, key) => {
+      console.log('key')
+      console.log(key)
+      console.log('-----------swarm discovery IN-------------------')
+      console.log(Object.keys(value))
+      console.log(value.topic)
+      console.log(value.topic.toString('hex'))
+    }) */
     let peerNxKeys = {}
     peerNxKeys.publickey = this.swarm.keyPair.publicKey.toString('hex')
     let networkMessage = {}
@@ -48,15 +57,14 @@ class NetworkPeers extends EventEmitter {
   * @method listenNetwork
   *
   */
-  listenNetwork = function
+  listenNetwork = function () {
     this.swarm.on('connection', (conn, info) => {
-      //
-      this.store.replicate(conn)
-      // listener to write message to peers or network partial or broadcast
+      // save peer connection instance for ongoing communication
       let publicKeylive = info.publicKey.toString('hex')
-      this.emit('connect-warm', publicKeylive)
       this.peerConnect[publicKeylive] = conn
-      this.emit('
+      this.emit('connect-warm-first', publicKeylive)
+      // listen for replication NEED UPTATED LOGIC
+      this.store.replicate(conn)
       // process network message
       conn.on('data', data =>
         // assess data
@@ -66,6 +74,15 @@ class NetworkPeers extends EventEmitter {
     })
   }

+  /**
+  * set role in peer to peer relationship, invte or receive?
+  * @method setRole
+  *
+  */
+  setRole = function (pubKey) {
+    let setRole = { send: 'prime' , invite: pubKey}
+    this.peersRole[pubKey] = setRole
+  }
   /**
   *
   * @method assessData data and act
@@ -75,17 +92,21 @@ class NetworkPeers extends EventEmitter {
     if (Buffer.isBuffer(data)) {
       try {
         let dataShareIn = JSON.parse(data.toString())
-        if (dataShareIn.type === 'chart') {
+        if (dataShareIn.type === 'private-chart') {
           this.emit('beebee-data', dataShareIn)
           // need to look at NXP, modules and within for reference contracts.
           // Need to replicate public library for contracts (repliate hyberbee)
           // Need to ask for data source e.g. file (replicate hyberdrive)
           // Lastly put together SafeFlowECS query to produce chart
+        } else if (dataShareIn.type === 'private-cue-space') {
+          this.emit('cuespace-notification', dataShareIn)
         } else if (dataShareIn.type === 'public-library') {
           this.emit('publiclibrarynotification', dataShareIn)
         } else if (dataShareIn.type === 'peer') {
+        } else if (dataShareIn.type === 'topic-reconnect') {
+          // peer has share a topic for future reconnect
+          this.emit('peer-reconnect-topic', dataShareIn)
         }
-        console.log(a)
       } catch (e) {
         return console.error('ignore err')
       }
@@ -97,22 +118,40 @@ class NetworkPeers extends EventEmitter {
   * @method writeTonetwork
   *
   */
-  writeTonetwork = function (
+  writeTonetwork = function (data, messType) {
     // check this peer has asked for chart data
-    let
-
-    if (connectTrue === true && chartTrue === true) {
-      let chartData = this.peerHolder[publickey]
-      let dataShare = {}
-      dataShare.hop = chartData.hop
-      dataShare.data = chartData.data
-      dataShare.type = 'chart'
-      this.peerConnect[publickey].write(JSON.stringify(dataShare))
-    } else {
-      console.log('non chart write')
-    }
+    let dataSend = data
+    this.peerConnect[data].write(JSON.stringify(dataSend))
   }

+  /**
+  * write message to network
+  * @method writeTonetworkTopic
+  *
+  */
+  writeTonetworkTopic = function (publickey) {
+    let topicGeneration = 'kprel135811'
+    // need to save the topic initiator of warm peer save relationship
+    this.emit('peer-topic-save', { peerkey: publickey, topic: topicGeneration })
+    // send to other peer topic to allow reconnection in future
+    let topicShare = {}
+    topicShare.type = 'topic-reconnect'
+    topicShare.publickey = this.swarm.keyPair.publicKey.toString('hex')
+    topicShare.data = topicGeneration
+    // inform peer that topic has been created
+    this.peerConnect[publickey].write(JSON.stringify(topicShare))
+  }
+
+  /**
+  * write message to network
+  * @method writeTonetworkData
+  *
+  */
+  writeTonetworkData = function (publickey, dataShare) {
+    this.peerConnect[publickey].write(JSON.stringify(dataShare))
+  }
+
+
   /**
   * write message connect public library
   * @method writeToPublicLibrary
@@ -133,14 +172,32 @@ class NetworkPeers extends EventEmitter {
     }
   }

+  /**
+  * write message connect peers space
+  * @method writeToCueSpace
+  *
+  */
+  writeToCueSpace = function (publickey) {
+    // check this peer has asked for space data
+    let connectTrue = publickey in this.peerConnect
+    let spaceTrue = publickey in this.peerHolder
+    if (connectTrue === true && spaceTrue === true) {
+      let libraryData = this.peerHolder[publickey]
+      this.peerConnect[publickey].write(JSON.stringify(libraryData))
+    } else {
+      console.log('no cuespace to write ie share with a peer')
+    }
+  }


   /**
-  * join peer to peer private (server)
+  * join peer to peer direct private (server)
   * @method peerJoin
   *
   */
   peerJoin = function (peerContext) {
+    // set timeer to inform if not connection can be established
+    this. checkTimerConnection(peerContext.publickey)
     this.peerHolder[peerContext.publickey] = peerContext
     const noisePublicKey = Buffer.from(peerContext.publickey, 'hex') // must be 32 bytes
     if (noisePublicKey.length === 32) {
@@ -148,6 +205,37 @@ class NetworkPeers extends EventEmitter {
     }
   }

+  /**
+  * give 2 seconds for connection to establish
+  * @method checkTimerConnection
+  *
+  */
+  checkTimerConnection (key) {
+    // if peerconnect not set the inform beebee not connection accepted try again
+    let localthis = this
+    // setTimeout(checkPeerState(localthis, key), 2000)
+    setTimeout(() => checkPeerState(localthis, key), 6000)
+
+    function checkPeerState (localthis, publicKeylive) {
+      if (localthis.peerConnect[publicKeylive] === undefined) {
+        // failed peer connection
+        localthis.emit('peer-share-fail', publicKeylive)
+      } else {
+        // connnection established
+      }
+    }
+  }
+
+  /**
+  * leave a direct peer connection
+  * @method peerLeave
+  *
+  */
+  peerLeave = function (peerLeaveKey) {
+    this.peerHolder[peerLeaveKey] = {}
+    this.swarm.leavePeer(peerLeaveKey)
+  }
+
   /**
   * already joined but keep track context data
   * @method peerAlreadyJoinSetData
@@ -155,6 +243,7 @@ class NetworkPeers extends EventEmitter {
   */
   peerAlreadyJoinSetData = function (peerContext) {
     this.peerHolder[peerContext.publickey] = peerContext
+    return true
   }

@@ -168,48 +257,34 @@ class NetworkPeers extends EventEmitter {
   }

   /**
-  *
-  * @method
+  * out message topics as a client
+  * @method topicConnect
   *
   */
-
+  topicConnect = async function (topic) {
     const noisePublicKey = Buffer.alloc(32).fill(topic) // A topic must be 32 bytes
-    const peerConnect = this.swarm.join(noisePublicKey, { server:
+    const peerConnect = this.swarm.join(noisePublicKey, { server: true, client: false })
     await peerConnect.flushed() // Waits for the topic to be fully announced on the DHT
   }

   /**
-  *
-  * @method
+  * out message topics as a client
+  * @method topicListen
   *
   */
-
-
+  topicListen = async function (topic) {
+    const noisePublicKey = Buffer.alloc(32).fill(topic) // A topic must be 32 bytes
+    const peerConnect = this.swarm.join(noisePublicKey, { server: false, client: true })
+    await peerConnect.flushed() // Waits for the topic to be fully announced on the DHT
   }

   /**
-  *
-  * @method
+  * leave topic
+  * @method leaveTopic
   *
   */
-
-
-
-  // This keypair is your peer identifier in the DHT
-  const keyPair = DHT.keyPair()
-
-  const server = dht.createServer(conn => {
-    console.log('got connection!')
-    process.stdin.pipe(conn).pipe(process.stdout)
-  })
-
-  server.listen(keyPair).then(() => {
-    console.log('listening on:', b4a.toString(keyPair.publicKey, 'hex'))
-  })
-
-  // Unnannounce the public key before exiting the process
-  // (This is not a requirement, but it helps avoid DHT pollution)
-  goodbye(() => server.close()) */
+  leaveTopic = async function (topic) {
+    await this.swarm.leave(topic)
   }

 }
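The peers.js changes add a topic-based reconnection path: writeTonetworkTopic generates a topic, emits 'peer-topic-save' locally and sends a 'topic-reconnect' message to the other peer; afterwards either side can rejoin via topicConnect (server role) or topicListen (client role) and drop the rendezvous with leaveTopic. A rough sketch, assuming `peers` is a constructed NetworkPeers instance with a live swarm and `topic` is the previously shared string:

// Sketch only: peers (NetworkPeers instance) and topic are assumptions
async function reconnectViaTopic (peers, topic) {
  peers.on('peer-topic-save', ({ peerkey, topic: savedTopic }) => {
    // persist savedTopic against peerkey so it can be reused after a restart
  })
  await peers.topicConnect(topic)  // original inviter announces the topic (server: true)
  await peers.topicListen(topic)   // the receiving peer looks it up as a client (client: true)
  await peers.leaveTopic(topic)    // stop announcing / looking up when finished
}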
package/test/large-csv.test.js
ADDED
@@ -0,0 +1,9 @@
+import assert from 'assert'
+import HolepunchData from '../src/index.js'
+// need helpers prepare input to HP via (mock) HOP
+
+describe('upload a large csv in chunks', function () {
+  it('read in file', async function () {
+    let dataAPI = new HolepunchData()
+  })
+})