synapse 2.154.1__py311-none-any.whl → 2.156.0__py311-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of synapse might be problematic.
- synapse/cmds/cortex.py +2 -14
- synapse/common.py +13 -36
- synapse/cortex.py +15 -508
- synapse/lib/ast.py +215 -22
- synapse/lib/cell.py +35 -8
- synapse/lib/certdir.py +11 -0
- synapse/lib/cmdr.py +0 -5
- synapse/lib/gis.py +2 -2
- synapse/lib/httpapi.py +14 -43
- synapse/lib/layer.py +64 -201
- synapse/lib/lmdbslab.py +11 -0
- synapse/lib/node.py +1 -3
- synapse/lib/parser.py +10 -0
- synapse/lib/slabseqn.py +2 -1
- synapse/lib/snap.py +121 -21
- synapse/lib/spooled.py +9 -0
- synapse/lib/storm.lark +23 -6
- synapse/lib/storm.py +16 -339
- synapse/lib/storm_format.py +5 -0
- synapse/lib/stormhttp.py +10 -1
- synapse/lib/stormlib/gen.py +1 -2
- synapse/lib/stormlib/gis.py +41 -0
- synapse/lib/stormlib/graph.py +2 -1
- synapse/lib/stormlib/stats.py +21 -2
- synapse/lib/stormlib/storm.py +16 -1
- synapse/lib/stormtypes.py +244 -16
- synapse/lib/types.py +16 -2
- synapse/lib/version.py +2 -2
- synapse/lib/view.py +118 -25
- synapse/models/base.py +2 -2
- synapse/models/inet.py +60 -30
- synapse/models/infotech.py +130 -8
- synapse/models/orgs.py +3 -0
- synapse/models/proj.py +3 -0
- synapse/models/risk.py +24 -6
- synapse/models/syn.py +0 -38
- synapse/tests/test_cmds_cortex.py +1 -1
- synapse/tests/test_cortex.py +70 -338
- synapse/tests/test_lib_agenda.py +19 -54
- synapse/tests/test_lib_aha.py +97 -0
- synapse/tests/test_lib_ast.py +596 -0
- synapse/tests/test_lib_grammar.py +30 -10
- synapse/tests/test_lib_httpapi.py +33 -49
- synapse/tests/test_lib_layer.py +19 -234
- synapse/tests/test_lib_lmdbslab.py +22 -0
- synapse/tests/test_lib_snap.py +9 -0
- synapse/tests/test_lib_spooled.py +4 -0
- synapse/tests/test_lib_storm.py +16 -309
- synapse/tests/test_lib_stormlib_gis.py +21 -0
- synapse/tests/test_lib_stormlib_stats.py +107 -20
- synapse/tests/test_lib_stormlib_storm.py +25 -0
- synapse/tests/test_lib_stormtypes.py +253 -8
- synapse/tests/test_lib_types.py +40 -0
- synapse/tests/test_lib_view.py +6 -13
- synapse/tests/test_model_base.py +1 -1
- synapse/tests/test_model_inet.py +15 -0
- synapse/tests/test_model_infotech.py +110 -0
- synapse/tests/test_model_orgs.py +10 -0
- synapse/tests/test_model_person.py +0 -3
- synapse/tests/test_model_proj.py +2 -1
- synapse/tests/test_model_risk.py +24 -0
- synapse/tests/test_model_syn.py +20 -34
- synapse/tests/test_tools_csvtool.py +2 -1
- synapse/tests/test_tools_feed.py +4 -30
- synapse/tools/csvtool.py +2 -1
- {synapse-2.154.1.dist-info → synapse-2.156.0.dist-info}/METADATA +9 -9
- {synapse-2.154.1.dist-info → synapse-2.156.0.dist-info}/RECORD +70 -72
- {synapse-2.154.1.dist-info → synapse-2.156.0.dist-info}/WHEEL +1 -1
- synapse/cmds/cron.py +0 -726
- synapse/cmds/trigger.py +0 -319
- synapse/tests/test_cmds_cron.py +0 -453
- synapse/tests/test_cmds_trigger.py +0 -176
- {synapse-2.154.1.dist-info → synapse-2.156.0.dist-info}/LICENSE +0 -0
- {synapse-2.154.1.dist-info → synapse-2.156.0.dist-info}/top_level.txt +0 -0
synapse/lib/ast.py
CHANGED
@@ -330,6 +330,7 @@ class SubGraph:
             'degrees': 1,

             'edges': True,
+            'edgelimit': 3000,
             'filterinput': True,
             'yieldfiltered': False,

@@ -377,6 +378,7 @@ class SubGraph:
         self.rules.setdefault('edges', True)
         self.rules.setdefault('degrees', 1)
         self.rules.setdefault('maxsize', 100000)
+        self.rules.setdefault('edgelimit', 3000)

         self.rules.setdefault('filterinput', True)
         self.rules.setdefault('yieldfiltered', False)
@@ -408,7 +410,7 @@ class SubGraph:
         self.omits[node.buid] = False
         return False

-    async def pivots(self, runt, node, path):
+    async def pivots(self, runt, node, path, existing):

         if self.rules.get('refs'):

@@ -420,6 +422,13 @@ class SubGraph:

                 yield (pivonode, path.fork(pivonode), {'type': 'prop', 'prop': propname})

+        for iden in existing:
+            buid = s_common.uhex(iden)
+            othr = await node.snap.getNodeByBuid(buid)
+            for propname, ndef in othr.getNodeRefs():
+                if ndef == node.ndef:
+                    yield (othr, path, {'type': 'prop', 'prop': propname, 'reverse': True})
+
         for pivq in self.rules.get('pivots'):
             indx = 0
             async for node, path in node.storm(runt, pivq):
@@ -442,10 +451,25 @@ class SubGraph:
                 yield (node, path, {'type': 'rules', 'scope': scope, 'index': indx})
                 indx += 1

+    async def _edgefallback(self, runt, results, node):
+        async for buid01 in results:
+            await asyncio.sleep(0)
+
+            iden01 = s_common.ehex(buid01)
+            async for verb in node.iterEdgeVerbs(buid01):
+                await asyncio.sleep(0)
+                yield (iden01, {'type': 'edge', 'verb': verb})
+
+            # for existing nodes, we need to add n2 -> n1 edges in reverse
+            async for verb in runt.snap.iterEdgeVerbs(buid01, node.buid):
+                await asyncio.sleep(0)
+                yield (iden01, {'type': 'edge', 'verb': verb, 'reverse': True})
+
     async def run(self, runt, genr):

         # NOTE: this function must agressively yield the ioloop

+        edgelimit = self.rules.get('edgelimit')
         doedges = self.rules.get('edges')
         degrees = self.rules.get('degrees')
         maxsize = self.rules.get('maxsize')
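The new 'edgelimit' rule above bounds how many lightweight edges the graph projection will gather per node (default 3000). Below is a minimal sketch of passing graph projection rules, including this cap, through Storm query opts. It assumes the 'graph' Storm option accepts a rules dict like the one handled by SubGraph, and the telepath URL, credentials, and query are placeholders for a reachable Cortex:

import asyncio

import synapse.telepath as s_telepath

async def main():
    # placeholder URL and credentials for an assumed running Cortex
    async with await s_telepath.openurl('tcp://user:pass@127.0.0.1:27492/cortex') as core:
        rules = {
            'degrees': 2,        # pivot out up to two degrees
            'edges': True,       # include lightweight edges in path metadata
            'edgelimit': 3000,   # cap on edges gathered per node (new default shown above)
        }
        opts = {'graph': rules}
        async for mesg in core.storm('inet:fqdn=vertex.link', opts=opts):
            if mesg[0] == 'node':
                print(mesg[1][0])  # the node's (form, valu) ndef

if __name__ == '__main__':
    asyncio.run(main())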
@@ -465,9 +489,57 @@ class SubGraph:
             results = await stack.enter_async_context(await s_spooled.Set.anit(dirn=core.dirn, cell=core))
             revpivs = await stack.enter_async_context(await s_spooled.Dict.anit(dirn=core.dirn, cell=core))

+            revedge = await stack.enter_async_context(await s_spooled.Dict.anit(dirn=core.dirn, cell=core))
+            edgecounts = await stack.enter_async_context(await s_spooled.Dict.anit(dirn=core.dirn, cell=core))
+            n1delayed = await stack.enter_async_context(await s_spooled.Set.anit(dirn=core.dirn, cell=core))
+            n2delayed = await stack.enter_async_context(await s_spooled.Set.anit(dirn=core.dirn, cell=core))
+
             # load the existing graph as already done
             [await results.add(s_common.uhex(b)) for b in existing]

+            if doedges:
+                for b in existing:
+                    ecnt = 0
+                    cache = collections.defaultdict(list)
+                    async for verb, n2iden in runt.snap.iterNodeEdgesN1(s_common.uhex(b)):
+                        await asyncio.sleep(0)
+
+                        if s_common.uhex(n2iden) in results:
+                            continue
+
+                        ecnt += 1
+                        if ecnt > edgelimit:
+                            break
+
+                        cache[n2iden].append(verb)
+
+                    if ecnt > edgelimit:
+                        # don't let it into the cache.
+                        # We've hit a potential death star and need to deal with it specially
+                        await n1delayed.add(b)
+                        continue
+
+                    for n2iden, verbs in cache.items():
+                        await asyncio.sleep(0)
+                        if n2delayed.has(n2iden):
+                            continue
+
+                        if not revedge.has(n2iden):
+                            await revedge.set(n2iden, {})
+
+                        re = revedge.get(n2iden)
+                        if b not in re:
+                            re[b] = []
+
+                        count = edgecounts.get(n2iden, defv=0) + len(verbs)
+                        if count > edgelimit:
+                            await n2delayed.add(n2iden)
+                            revedge.pop(n2iden)
+                        else:
+                            await edgecounts.set(n2iden, count)
+                            re[b] += verbs
+                            await revedge.set(n2iden, re)
+
             async def todogenr():

                 async for node, path in genr:
@@ -482,7 +554,8 @@ class SubGraph:

                 await asyncio.sleep(0)

-                if node.buid in done:
+                buid = node.buid
+                if buid in done:
                     continue

                 count += 1
@@ -491,8 +564,8 @@ class SubGraph:
                     await runt.snap.warn(f'Graph projection hit max size {maxsize}. Truncating results.')
                     break

-                await done.add(node.buid)
-                intodo.discard(node.buid)
+                await done.add(buid)
+                intodo.discard(buid)

                 omitted = False
                 if dist > 0 or filterinput:
@@ -504,8 +577,9 @@ class SubGraph:
                 # we must traverse the pivots for the node *regardless* of degrees
                 # due to needing to tie any leaf nodes to nodes that were already yielded

-                edges = list(revpivs.get(node.buid, defv=()))
-                async for pivn, pivp, pinfo in self.pivots(runt, node, path):
+                nodeiden = node.iden()
+                edges = list(revpivs.get(buid, defv=()))
+                async for pivn, pivp, pinfo in self.pivots(runt, node, path, existing):

                     await asyncio.sleep(0)

@@ -514,7 +588,7 @@ class SubGraph:
                     else:
                         pinfo['reverse'] = True
                         pivedges = revpivs.get(pivn.buid, defv=())
-                        await revpivs.set(pivn.buid, pivedges + ((node.iden(), pinfo),))
+                        await revpivs.set(pivn.buid, pivedges + ((nodeiden, pinfo),))

                     # we dont pivot from omitted nodes
                     if omitted:
@@ -528,28 +602,82 @@ class SubGraph:
                     if pivn.buid in intodo:
                         continue

+                    # no need to pivot to existing nodes
+                    if pivn.iden() in existing:
+                        continue
+
                     # do we have room to go another degree out?
                     if degrees is None or dist < degrees:
                         todo.append((pivn, pivp, dist + 1))
                         await intodo.add(pivn.buid)

                 if doedges:
-
-
-
+                    ecnt = 0
+                    cache = collections.defaultdict(list)
+                    await results.add(buid)
+                    # Try to lift and cache the potential edges for a node so that if we end up
+                    # seeing n2 later, we won't have to go back and check for it
+                    async for verb, n2iden in runt.snap.iterNodeEdgesN1(buid):
                         await asyncio.sleep(0)
-
-
-
-
-
-
-
-
+                        if ecnt > edgelimit:
+                            break
+
+                        ecnt += 1
+                        cache[n2iden].append(verb)
+
+                    if ecnt > edgelimit:
+                        # The current node in the pipeline has too many edges from it, so it's
+                        # less prohibitive to just check against the graph
+                        await n1delayed.add(nodeiden)
+                        async for e in self._edgefallback(runt, results, node):
+                            edges.append(e)
+                    else:
+                        for n2iden, verbs in cache.items():
                             await asyncio.sleep(0)
-                            edges.append((iden01, {'type': 'edge', 'verb': verb, 'reverse': True}))

-
+                            if n2delayed.has(n2iden):
+                                continue
+
+                            if not revedge.has(n2iden):
+                                await revedge.set(n2iden, {})
+
+                            re = revedge.get(n2iden)
+                            if nodeiden not in re:
+                                re[nodeiden] = []
+
+                            count = edgecounts.get(n2iden, defv=0) + len(verbs)
+                            if count > edgelimit:
+                                await n2delayed.add(n2iden)
+                                revedge.pop(n2iden)
+                            else:
+                                await edgecounts.set(n2iden, count)
+                                re[nodeiden] += verbs
+                                await revedge.set(n2iden, re)
+
+                    if revedge.has(nodeiden):
+                        for n2iden, verbs in revedge.get(nodeiden).items():
+                            for verb in verbs:
+                                await asyncio.sleep(0)
+                                edges.append((n2iden, {'type': 'edge', 'verb': verb, 'reverse': True}))
+
+                    if n2delayed.has(nodeiden):
+                        async for buid01 in results:
+                            async for verb in runt.snap.iterEdgeVerbs(buid01, buid):
+                                await asyncio.sleep(0)
+                                edges.append((s_common.ehex(buid01), {'type': 'edge', 'verb': verb, 'reverse': True}))
+                        for n2iden, verbs in cache.items():
+                            if s_common.uhex(n2iden) not in results:
+                                continue
+
+                            for v in verbs:
+                                await asyncio.sleep(0)
+                                edges.append((n2iden, {'type': 'edge', 'verb': v}))
+
+                    async for n1iden in n1delayed:
+                        n1buid = s_common.uhex(n1iden)
+                        async for verb in runt.snap.iterEdgeVerbs(n1buid, buid):
+                            await asyncio.sleep(0)
+                            edges.append((n1iden, {'type': 'edge', 'verb': verb, 'reverse': True}))

                 path.metadata['edges'] = edges
                 yield node, path
@@ -568,6 +696,9 @@ class SubQuery(Oper):
         if len(kids):
             self.text = kids[0].getAstText()

+    def isRuntSafe(self, runt):
+        return True
+
     async def run(self, runt, genr):

         subq = self.kids[0]
@@ -676,6 +807,38 @@ class InitBlock(AstNode):
         async for innr in subq.run(runt, s_common.agen()):
             yield innr

+class EmptyBlock(AstNode):
+    '''
+    An AST node that only runs if there are not inbound nodes in the pipeline. It is
+    capable of yielding nodes into the pipeline.
+
+    Example:
+
+        Using an empty block::
+
+            empty {
+                // the pipeline is empty so this block will execute
+            }
+
+            [foo:bar=*]
+            empty {
+                // there is a node in the pipeline so this block will not run
+            }
+    '''
+    async def run(self, runt, genr):
+
+        subq = self.kids[0]
+        self.reqRuntSafe(runt, 'Empty block query must be runtsafe')
+
+        empty = True
+        async for item in genr:
+            empty = False
+            yield item
+
+        if empty:
+            async for subn in subq.run(runt, s_common.agen()):
+                yield subn
+
 class FiniBlock(AstNode):
     '''
     An AST node that runs only once after all nodes have been consumed.
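The EmptyBlock node added above runs its subquery only when nothing arrived from upstream. Below is a self-contained Python sketch of that same pattern, with illustrative names rather than synapse APIs:

import asyncio

# Pass every upstream item through; only if nothing arrived, run the fallback
# generator. This mirrors the EmptyBlock.run() logic in the hunk above.
async def empty_block(genr, fallback):
    empty = True
    async for item in genr:
        empty = False
        yield item

    if empty:
        async for item in fallback():
            yield item

async def nothing():
    # an async generator that yields no items
    return
    yield

async def fallback():
    yield 'made by the empty block'

async def main():
    async for item in empty_block(nothing(), fallback):
        print(item)  # prints only the fallback item

asyncio.run(main())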
@@ -728,7 +891,7 @@ class TryCatch(AstNode):
                 async for subi in block.run(runt, agen):
                     yield subi

-        if count == 0
+        if count == 0:
             try:
                 async for item in self.kids[0].run(runt, genr):
                     yield item
@@ -1738,6 +1901,9 @@ class N1WalkNPivo(PivotOut):

         async for node, path in genr:

+            if self.isjoin:
+                yield node, path
+
             async for item in self.getPivsOut(runt, node, path):
                 yield item

@@ -1857,6 +2023,9 @@ class N2WalkNPivo(PivotIn):

         async for node, path in genr:

+            if self.isjoin:
+                yield node, path
+
             async for item in self.getPivsIn(runt, node, path):
                 yield item

@@ -1900,7 +2069,8 @@ class PivotInFrom(PivotOper):
             yield node, path

             if not isinstance(node.form.type, s_types.Edge):
-
+                mesg = f'Pivot in from a specific form cannot be used with nodes of type {node.form.type.name}'
+                raise self.addExcInfo(s_exc.StormRuntimeError(mesg=mesg, name=node.form.type.name))

             # dont bother traversing edges to the wrong form
             if node.get('n1:form') != form.name:
@@ -2762,6 +2932,9 @@ class RelPropCond(Cond):
         if not isinstance(xval, s_node.Node):
             xval = await s_stormtypes.tostor(xval)

+        if xval is None:
+            return False
+
         ctor = prop.type.getCmprCtor(cmpr)
         if ctor is None:
             raise self.kids[1].addExcInfo(s_exc.NoSuchCmpr(cmpr=cmpr, name=prop.type.name))
@@ -2881,6 +3054,8 @@ class PropValue(Value):
         return False

     async def getPropAndValu(self, runt, path):
+        if not path:
+            return None, None

         name = await self.kids[0].compute(runt, path)

@@ -2894,6 +3069,10 @@ class PropValue(Value):
                     name=name, form=path.node.form.name))

             valu = path.node.get(name)
+            if isinstance(valu, (dict, list, tuple)):
+                # these get special cased because changing them affects the node
+                # while it's in the pipeline but the modification doesn't get stored
+                valu = s_msgpack.deepcopy(valu)
             return prop, valu

         # handle implicit pivot properties
@@ -2915,6 +3094,10 @@ class PropValue(Value):
                     name=name, form=node.form.name))

                 if i >= imax:
+                    if isinstance(valu, (dict, list, tuple)):
+                        # these get special cased because changing them affects the node
+                        # while it's in the pipeline but the modification doesn't get stored
+                        valu = s_msgpack.deepcopy(valu)
                     return prop, valu

                 form = runt.model.forms.get(prop.type.name)
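The deepcopy guard added to PropValue.getPropAndValu() above exists because returning a shared dict/list/tuple would let pipeline code mutate the node's cached property value without any edit being stored. A small stand-alone illustration of the aliasing problem it prevents, using the stdlib copy module rather than s_msgpack.deepcopy:

import copy

# node_props stands in for a node's cached property values.
node_props = {'names': ['foo', 'bar']}

aliased = node_props['names']          # shared reference, no copy
aliased.append('baz')                  # silently mutates node_props as well
assert node_props['names'] == ['foo', 'bar', 'baz']

safe = copy.deepcopy(node_props['names'])  # what the deepcopy above guards against
safe.append('quux')
assert node_props['names'] == ['foo', 'bar', 'baz']  # cached value unchanged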
@@ -3766,6 +3949,13 @@ class EditUnivDel(Edit):

 class N1Walk(Oper):

+    def __init__(self, astinfo, kids=(), isjoin=False):
+        Oper.__init__(self, astinfo, kids=kids)
+        self.isjoin = isjoin
+
+    def repr(self):
+        return f'{self.__class__.__name__}: {self.kids}, isjoin={self.isjoin}'
+
     async def walkNodeEdges(self, runt, node, verb=None):
         async for _, iden in node.iterEdgesN1(verb=verb):
             buid = s_common.uhex(iden)
@@ -3824,6 +4014,9 @@ class N1Walk(Oper):

         async for node, path in genr:

+            if self.isjoin:
+                yield node, path
+
             verbs = await self.kids[0].compute(runt, path)
             verbs = await s_stormtypes.toprim(verbs)

synapse/lib/cell.py
CHANGED
@@ -2771,6 +2771,8 @@ class Cell(s_nexus.Pusher, s_telepath.Aware):

         self.addHttpApi('/robots.txt', s_httpapi.RobotHandler, {'cell': self})
         self.addHttpApi('/api/v1/login', s_httpapi.LoginV1, {'cell': self})
+        self.addHttpApi('/api/v1/logout', s_httpapi.LogoutV1, {'cell': self})
+
         self.addHttpApi('/api/v1/active', s_httpapi.ActiveV1, {'cell': self})
         self.addHttpApi('/api/v1/healthcheck', s_httpapi.HealthCheckV1, {'cell': self})

@@ -3418,20 +3420,45 @@ class Cell(s_nexus.Pusher, s_telepath.Aware):
            ahaname = provconf.get('aha:name')
            ahanetw = provconf.get('aha:network')

-
-
+            _crt = certdir.getCaCertPath(ahanetw)
+            if _crt:
+                logger.debug(f'Removing existing CA crt: {_crt}')
+                os.unlink(_crt)
+            certdir.saveCaCertByts(await prov.getCaCert())

            await self._bootProvConf(provconf)

            hostname = f'{ahaname}.{ahanetw}'
-
-
-
+            _crt = certdir.getHostCertPath(hostname)
+            if _crt:
+                logger.debug(f'Removing existing host crt {_crt}')
+                os.unlink(_crt)
+            _kp = certdir.getHostKeyPath(hostname)
+            if _kp:
+                logger.debug(f'Removing existing host key {_kp}')
+                os.unlink(_kp)
+            _csr = certdir.getHostCsrPath(hostname)
+            if _csr:
+                logger.debug(f'Removing existing host csr {_csr}')
+                os.unlink(_csr)
+            hostcsr = certdir.genHostCsr(hostname)
+            certdir.saveHostCertByts(await prov.signHostCsr(hostcsr))

            userfull = f'{ahauser}@{ahanetw}'
-
-
-
+            _crt = certdir.getUserCertPath(userfull)
+            if _crt:
+                logger.debug(f'Removing existing user crt {_crt}')
+                os.unlink(_crt)
+            _kp = certdir.getUserKeyPath(userfull)
+            if _kp:
+                logger.debug(f'Removing existing user key {_kp}')
+                os.unlink(_kp)
+            _csr = certdir.getUserCsrPath(userfull)
+            if _csr:
+                logger.debug(f'Removing existing user csr {_csr}')
+                os.unlink(_csr)
+            usercsr = certdir.genUserCsr(userfull)
+            certdir.saveUserCertByts(await prov.signUserCsr(usercsr))

            with s_common.genfile(self.dirn, 'prov.done') as fd:
                fd.write(providen.encode())
synapse/lib/certdir.py
CHANGED
@@ -944,6 +944,17 @@ class CertDir:
             if os.path.isfile(path):
                 return path

+    def getUserCsrPath(self, name):
+        for cdir in self.certdirs:
+            path = s_common.genpath(cdir, 'users', '%s.csr' % name)
+            if os.path.isfile(path):
+                return path
+
+    def getHostCsrPath(self, name):
+        for cdir in self.certdirs:
+            path = s_common.genpath(cdir, 'hosts', '%s.csr' % name)
+            if os.path.isfile(path):
+                return path
     def importFile(self, path, mode, outp=None):
         '''
         Imports certs and keys into the Synapse cert directory
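getUserCsrPath() and getHostCsrPath() mirror the existing cert/key path helpers: they return the first matching '<name>.csr' file under users/ or hosts/ in the configured cert directories, or None. A hypothetical usage sketch; the directory path and names are placeholders, and constructing CertDir directly with a path is an assumption here:

import synapse.lib.certdir as s_certdir

# placeholder cert directory; in a deployed cell this lives under the cell dirn
cdir = s_certdir.CertDir(path='/tmp/certs')

usercsr = cdir.getUserCsrPath('visi@vertex.link')
hostcsr = cdir.getHostCsrPath('cortex.vertex.link')

if usercsr is not None:
    print(f'pending user CSR: {usercsr}')
if hostcsr is not None:
    print(f'pending host CSR: {hostcsr}')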
synapse/lib/cmdr.py
CHANGED
@@ -1,10 +1,8 @@
 import synapse.lib.cli as s_cli

 import synapse.cmds.boss as s_cmds_boss
-import synapse.cmds.cron as s_cmds_cron
 import synapse.cmds.hive as s_cmds_hive
 import synapse.cmds.cortex as s_cmds_cortex
-import synapse.cmds.trigger as s_cmds_trigger

 cmdsbycell = {
     'cell': (
@@ -14,14 +12,11 @@ cmdsbycell = {
     ),

     'cortex': (
-        s_cmds_cron.At,
-        s_cmds_cron.Cron,
         s_cmds_cortex.Log,
         s_cmds_boss.PsCmd,
         s_cmds_boss.KillCmd,
         s_cmds_hive.HiveCmd,
         s_cmds_cortex.StormCmd,
-        s_cmds_trigger.Trigger,
     ),
 }

synapse/lib/gis.py
CHANGED
@@ -1,7 +1,7 @@
 import math

 '''
-Synapse module with helpers for earth based
+Synapse module with helpers for earth based geospatial calculations.
 '''

 # base earth geo distances will be in mm
@@ -66,7 +66,7 @@ def haversine(px, py, r=r_mm):

 def bbox(lat, lon, dist):
     '''
-    Calculate a min/max bounding box for the circle defined by
+    Calculate a min/max bounding box for the circle defined by lat/lon/dist.

     Args:
         lat (float): The latitude in degrees
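For reference, a short usage sketch of the two helpers whose docstrings are touched above. It assumes haversine() takes two (lat, lon) tuples in degrees and that distances in this module are expressed in millimeters, per the module comment; the bbox() return order shown is also an assumption:

import synapse.lib.gis as s_gis

sfo = (37.6213, -122.3790)
jfk = (40.6413, -73.7781)

dist_mm = s_gis.haversine(sfo, jfk)
print(f'{dist_mm / 1e6:.1f} km')  # roughly 4150 km great-circle distance

# bounding box for a 50 km radius circle around SFO
# (latmin, latmax, lonmin, lonmax) return order assumed here
latmin, latmax, lonmin, lonmax = s_gis.bbox(37.6213, -122.3790, 50 * 1e6)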
synapse/lib/httpapi.py
CHANGED
@@ -518,7 +518,7 @@ class StormV1(StormHandler):
         if opts is None:
             return

-        opts.setdefault('editformat', '
+        opts.setdefault('editformat', 'nodeedits')

         async for mesg in self.getCore().storm(query, opts=opts):
             self.write(json.dumps(mesg))
@@ -612,48 +612,6 @@ class ReqValidStormV1(StormHandler):
         else:
             return self.sendRestRetn(valid)

-class WatchSockV1(WebSocket):
-    '''
-    A web-socket based API endpoint for distributing cortex tag events.
-
-    Deprecated.
-    '''
-    async def onWatchMesg(self, byts):
-        # Note: This API handler is intended to be used on a heavy Cortex object.
-        try:
-
-            wdef = json.loads(byts)
-            iden = wdef.get('view', self.cell.view.iden)
-
-            perm = ('watch', 'view', iden)
-            await self._reqUserAllow(perm)
-
-            async with self.cell.watcher(wdef) as watcher:
-
-                await self.xmit('init')
-
-                async for mesg in watcher:
-                    await self.xmit(mesg[0], **mesg[1])
-
-                # pragma: no cover
-                # (this would only happen on slow-consumer)
-                await self.xmit('fini')
-
-        except s_exc.SynErr as e:
-
-            text = e.get('mesg', str(e))
-            await self.xmit('errx', code=e.__class__.__name__, mesg=text)
-
-        except asyncio.CancelledError:  # pragma: no cover TODO: remove once >= py 3.8 only
-            raise
-
-        except Exception as e:
-            await self.xmit('errx', code=e.__class__.__name__, mesg=str(e))
-
-    async def on_message(self, byts):
-        s_common.deprdate('/api/v1/watch HTTP API', s_common._splicedepr)
-        self.cell.schedCoro(self.onWatchMesg(byts))
-
 class BeholdSockV1(WebSocket):

     async def onInitMessage(self, byts):
@@ -724,6 +682,19 @@ class LoginV1(Handler):

         return self.sendRestRetn(await authcell.getUserDef(iden))

+class LogoutV1(Handler):
+
+    async def get(self):
+        sess = await self.sess(gen=False)
+        if sess is not None:
+            self.web_useriden = sess.info.get('user')
+            self.web_username = sess.info.get('username', '<no username>')
+            await self.getAuthCell().delHttpSess(sess.iden)
+
+        self.clear_cookie('sess')
+
+        self.sendRestRetn(True)
+
 class AuthUsersV1(Handler):

     async def get(self):