synapse 2.192.0__py311-none-any.whl → 2.194.0__py311-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of synapse might be problematic.
- synapse/common.py +15 -0
- synapse/cortex.py +19 -25
- synapse/datamodel.py +6 -3
- synapse/exc.py +6 -1
- synapse/lib/agenda.py +17 -6
- synapse/lib/ast.py +242 -97
- synapse/lib/auth.py +1 -0
- synapse/lib/cell.py +31 -85
- synapse/lib/cli.py +20 -11
- synapse/lib/parser.py +5 -1
- synapse/lib/snap.py +44 -15
- synapse/lib/storm.lark +16 -1
- synapse/lib/storm.py +40 -21
- synapse/lib/storm_format.py +1 -0
- synapse/lib/stormctrl.py +88 -6
- synapse/lib/stormlib/cache.py +6 -2
- synapse/lib/stormlib/json.py +5 -2
- synapse/lib/stormlib/scrape.py +1 -1
- synapse/lib/stormlib/stix.py +8 -8
- synapse/lib/stormtypes.py +32 -5
- synapse/lib/version.py +2 -2
- synapse/lib/view.py +20 -3
- synapse/models/geopol.py +1 -0
- synapse/models/geospace.py +1 -0
- synapse/models/inet.py +20 -1
- synapse/models/infotech.py +24 -6
- synapse/models/orgs.py +7 -2
- synapse/models/person.py +15 -4
- synapse/models/risk.py +19 -2
- synapse/models/telco.py +10 -3
- synapse/tests/test_axon.py +6 -6
- synapse/tests/test_cortex.py +133 -14
- synapse/tests/test_exc.py +4 -0
- synapse/tests/test_lib_agenda.py +282 -2
- synapse/tests/test_lib_aha.py +13 -6
- synapse/tests/test_lib_ast.py +301 -10
- synapse/tests/test_lib_auth.py +6 -7
- synapse/tests/test_lib_cell.py +71 -1
- synapse/tests/test_lib_grammar.py +14 -0
- synapse/tests/test_lib_layer.py +1 -1
- synapse/tests/test_lib_lmdbslab.py +3 -3
- synapse/tests/test_lib_storm.py +273 -55
- synapse/tests/test_lib_stormctrl.py +65 -0
- synapse/tests/test_lib_stormhttp.py +5 -5
- synapse/tests/test_lib_stormlib_auth.py +5 -5
- synapse/tests/test_lib_stormlib_cache.py +38 -6
- synapse/tests/test_lib_stormlib_json.py +20 -0
- synapse/tests/test_lib_stormlib_modelext.py +3 -3
- synapse/tests/test_lib_stormlib_scrape.py +6 -6
- synapse/tests/test_lib_stormlib_spooled.py +1 -1
- synapse/tests/test_lib_stormlib_xml.py +5 -5
- synapse/tests/test_lib_stormtypes.py +54 -57
- synapse/tests/test_lib_view.py +1 -1
- synapse/tests/test_model_base.py +1 -2
- synapse/tests/test_model_geopol.py +4 -0
- synapse/tests/test_model_geospace.py +6 -0
- synapse/tests/test_model_inet.py +43 -5
- synapse/tests/test_model_infotech.py +10 -1
- synapse/tests/test_model_orgs.py +17 -2
- synapse/tests/test_model_person.py +23 -1
- synapse/tests/test_model_risk.py +13 -0
- synapse/tests/test_tools_healthcheck.py +4 -4
- synapse/tests/test_tools_storm.py +95 -0
- synapse/tests/test_utils.py +17 -18
- synapse/tests/test_utils_getrefs.py +1 -1
- synapse/tests/utils.py +0 -35
- synapse/tools/changelog.py +6 -4
- synapse/tools/storm.py +1 -1
- synapse/utils/getrefs.py +14 -3
- synapse/vendor/cpython/lib/http/__init__.py +0 -0
- synapse/vendor/cpython/lib/http/cookies.py +59 -0
- synapse/vendor/cpython/lib/test/test_http_cookies.py +49 -0
- {synapse-2.192.0.dist-info → synapse-2.194.0.dist-info}/METADATA +6 -6
- {synapse-2.192.0.dist-info → synapse-2.194.0.dist-info}/RECORD +77 -73
- {synapse-2.192.0.dist-info → synapse-2.194.0.dist-info}/WHEEL +1 -1
- {synapse-2.192.0.dist-info → synapse-2.194.0.dist-info}/LICENSE +0 -0
- {synapse-2.192.0.dist-info → synapse-2.194.0.dist-info}/top_level.txt +0 -0
synapse/lib/auth.py
CHANGED
@@ -1545,6 +1545,7 @@ class User(Ruler):
         # Prevent empty string or non-string values
         if passwd is None:
             shadow = None
+            enforce_policy = False
         elif passwd and isinstance(passwd, str):
             shadow = await s_passwd.getShadowV2(passwd=passwd)
         else:

synapse/lib/cell.py
CHANGED
@@ -2623,18 +2623,12 @@ class Cell(s_nexus.Pusher, s_telepath.Aware):
         walkpath(self.backdirn)
         return backups
 
-    async def iterBackupArchive(self, name, user):
-
-        success = False
-        loglevel = logging.WARNING
-
-        path = self._reqBackDirn(name)
-        cellguid = os.path.join(path, 'cell.guid')
-        if not os.path.isfile(cellguid):
-            mesg = 'Specified backup path has no cell.guid file.'
-            raise s_exc.BadArg(mesg=mesg, arg='path', valu=path)
-
+    async def _streamBackupArchive(self, path, user, name):
         link = s_scope.get('link')
+        if link is None:
+            mesg = 'Link not found in scope. This API must be called via a CellApi.'
+            raise s_exc.SynErr(mesg=mesg)
+
         linkinfo = await link.getSpawnInfo()
         linkinfo['logconf'] = await self._getSpawnLogConf()
 
@@ -2642,42 +2636,42 @@ class Cell(s_nexus.Pusher, s_telepath.Aware):
 
         ctx = multiprocessing.get_context('spawn')
 
-        proc = None
-        mesg = 'Streaming complete'
-
         def getproc():
             proc = ctx.Process(target=_iterBackupProc, args=(path, linkinfo))
             proc.start()
             return proc
 
+        mesg = 'Streaming complete'
+        proc = await s_coro.executor(getproc)
+        cancelled = False
         try:
-            proc = await s_coro.executor(getproc)
-
             await s_coro.executor(proc.join)
+            self.backlastuploaddt = datetime.datetime.now()
+            logger.debug(f'Backup streaming completed successfully for {name}')
 
-        except (asyncio.CancelledError, Exception) as e:
+        except asyncio.CancelledError:
+            logger.warning('Backup streaming was cancelled.')
+            cancelled = True
+            raise
 
-            # We want to log all exceptions here, an asyncio.CancelledError
-            # could be the result of a remote link terminating due to the
-            # backup stream being completed, prior to this function
-            # finishing.
+        except Exception as e:
             logger.exception('Error during backup streaming.')
-
-            if proc:
-                proc.terminate()
-
             mesg = repr(e)
             raise
 
-        else:
-            success = True
-            loglevel = logging.DEBUG
-            self.backlastuploaddt = datetime.datetime.now()
-
         finally:
-            phrase = 'successfully' if success else 'with failure'
-            logger.log(loglevel, f'iterBackupArchive completed {phrase} for {name}')
-            raise s_exc.DmonSpawn(mesg=mesg)
+            proc.terminate()
+
+            if not cancelled:
+                raise s_exc.DmonSpawn(mesg=mesg)
+
+    async def iterBackupArchive(self, name, user):
+        path = self._reqBackDirn(name)
+        cellguid = os.path.join(path, 'cell.guid')
+        if not os.path.isfile(cellguid):
+            mesg = 'Specified backup path has no cell.guid file.'
+            raise s_exc.BadArg(mesg=mesg, arg='path', valu=path)
+
+        await self._streamBackupArchive(path, user, name)
 
     async def iterNewBackupArchive(self, user, name=None, remove=False):
 
@@ -2688,9 +2682,6 @@ class Cell(s_nexus.Pusher, s_telepath.Aware):
             if remove:
                 self.backupstreaming = True
 
-            success = False
-            loglevel = logging.WARNING
-
             if name is None:
                 name = time.strftime('%Y%m%d%H%M%S', datetime.datetime.now().timetuple())
 
@@ -2699,10 +2690,6 @@ class Cell(s_nexus.Pusher, s_telepath.Aware):
                 mesg = 'Backup with name already exists'
                 raise s_exc.BadArg(mesg=mesg)
 
-            link = s_scope.get('link')
-            linkinfo = await link.getSpawnInfo()
-            linkinfo['logconf'] = await self._getSpawnLogConf()
-
             try:
                 await self.runBackup(name)
             except Exception:
@@ -2712,54 +2699,13 @@ class Cell(s_nexus.Pusher, s_telepath.Aware):
                     logger.debug(f'Removed {path}')
                 raise
 
-            await self.
-
-            ctx = multiprocessing.get_context('spawn')
-
-            proc = None
-            mesg = 'Streaming complete'
-
-            def getproc():
-                proc = ctx.Process(target=_iterBackupProc, args=(path, linkinfo))
-                proc.start()
-                return proc
-
-            try:
-                proc = await s_coro.executor(getproc)
-
-                await s_coro.executor(proc.join)
-
-            except (asyncio.CancelledError, Exception) as e:
-
-                # We want to log all exceptions here, an asyncio.CancelledError
-                # could be the result of a remote link terminating due to the
-                # backup stream being completed, prior to this function
-                # finishing.
-                logger.exception('Error during backup streaming.')
-
-                if proc:
-                    proc.terminate()
-
-                mesg = repr(e)
-                raise
-
-            else:
-                success = True
-                loglevel = logging.DEBUG
-                self.backlastuploaddt = datetime.datetime.now()
-
-            finally:
-                if remove:
-                    logger.debug(f'Removing {path}')
-                    await s_coro.executor(shutil.rmtree, path, ignore_errors=True)
-                    logger.debug(f'Removed {path}')
-
-                phrase = 'successfully' if success else 'with failure'
-                logger.log(loglevel, f'iterNewBackupArchive completed {phrase} for {name}')
-                raise s_exc.DmonSpawn(mesg=mesg)
+            await self._streamBackupArchive(path, user, name)
 
         finally:
             if remove:
+                logger.debug(f'Removing {path}')
+                await s_coro.executor(shutil.rmtree, path, ignore_errors=True)
+                logger.debug(f'Removed {path}')
                 self.backupstreaming = False
 
     async def isUserAllowed(self, iden, perm, gateiden=None, default=False):

synapse/lib/cli.py
CHANGED
@@ -281,18 +281,26 @@ class Cli(s_base.Base):
 
         await self.fini()
 
-    async def addSignalHandlers(self):
+    async def addSignalHandlers(self): # pragma: no cover
         '''
         Register SIGINT signal handler with the ioloop to cancel the currently running cmdloop task.
+        Removes the handler when the cli is fini'd.
         '''
-
         def sigint():
-            self.printf('<ctrl-c>')
             if self.cmdtask is not None:
                 self.cmdtask.cancel()
 
         self.loop.add_signal_handler(signal.SIGINT, sigint)
 
+        def onfini():
+            # N.B. This is reaches into some loop / handle internals but
+            # prevents us from removing a handler that overwrote our own.
+            hndl = self.loop._signal_handlers.get(signal.SIGINT, None)  # type: asyncio.Handle
+            if hndl is not None and hndl._callback is sigint:
+                self.loop.remove_signal_handler(signal.SIGINT)
+
+        self.onfini(onfini)
+
     def get(self, name, defval=None):
         return self.locs.get(name, defval)
 
@@ -324,8 +332,12 @@
         if text is None:
             text = self.cmdprompt
 
-        with patch_stdout():
-            retn = await self.sess.prompt_async(text,
+        with patch_stdout(): # pragma: no cover
+            retn = await self.sess.prompt_async(text,
+                                                vi_mode=self.vi_mode,
+                                                enable_open_in_editor=True,
+                                                handle_sigint=False  # We handle sigint in the loop
+                                                )
         return retn
 
     def printf(self, mesg, addnl=True, color=None):
@@ -390,7 +402,7 @@
                 self.cmdtask = self.schedCoro(coro)
                 await self.cmdtask
 
-            except KeyboardInterrupt:
+            except (KeyboardInterrupt, asyncio.CancelledError):
 
                 if self.isfini:
                     return
@@ -408,11 +420,8 @@
                 if self.cmdtask is not None:
                     self.cmdtask.cancel()
                     try:
-                        self.cmdtask.
-                    except asyncio.CancelledError:
-                        # Wait a beat to let any remaining nodes to print out before we print the prompt
-                        await asyncio.sleep(1)
-                    except Exception:
+                        await asyncio.wait_for(self.cmdtask, timeout=0.1)
+                    except (asyncio.CancelledError, asyncio.TimeoutError):
                         pass
 
     async def runCmdLine(self, line):

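The onfini() hook added above only unregisters the SIGINT handler when its own callback is still the one installed, so it will not clobber a handler that some other component registered later. A minimal standalone sketch of that pattern on a stock asyncio loop (illustrative only, Unix-specific, and it peeks at the same private `_signal_handlers` / `_callback` internals the diff relies on):

```python
import asyncio
import signal

async def main():
    loop = asyncio.get_running_loop()

    def sigint():
        # Cancel or notify whatever work is in flight.
        print('<ctrl-c>')

    loop.add_signal_handler(signal.SIGINT, sigint)

    def onfini():
        # Only remove the handler if it is still ours; something else may
        # have registered its own SIGINT handler in the meantime.
        hndl = loop._signal_handlers.get(signal.SIGINT)
        if hndl is not None and hndl._callback is sigint:
            loop.remove_signal_handler(signal.SIGINT)

    try:
        await asyncio.sleep(5)   # press ctrl-c here to see the handler fire
    finally:
        onfini()

asyncio.run(main())
```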
synapse/lib/parser.py
CHANGED
@@ -95,6 +95,7 @@ terminalEnglishMap = {
     'TRYSETPLUS': '?+=',
     'TRYSETMINUS': '?-=',
     'UNIVNAME': 'universal property',
+    'UNSET': 'unset',
     'EXPRUNIVNAME': 'universal property',
     'VARTOKN': 'variable',
     'EXPRVARTOKN': 'variable',
@@ -507,7 +508,7 @@ class Parser:
             origexc = e.orig_exc
             if not isinstance(origexc, s_exc.SynErr):
                 raise e.orig_exc # pragma: no cover
-            origexc.
+            origexc.set('text', self.text)
             return s_exc.BadSyntax(**origexc.errinfo)
 
         elif isinstance(e, lark.exceptions.UnexpectedCharacters): # pragma: no cover
@@ -642,6 +643,8 @@ ruleClassMap = {
     'andexpr': s_ast.AndCond,
     'baresubquery': s_ast.SubQuery,
     'catchblock': s_ast.CatchBlock,
+    'condsetoper': s_ast.CondSetOper,
+    'condtrysetoper': lambda astinfo, kids: s_ast.CondSetOper(astinfo, kids, errok=True),
     'condsubq': s_ast.SubqCond,
     'dollarexpr': s_ast.DollarExpr,
     'edgeaddn1': s_ast.EditEdgeAdd,
@@ -657,6 +660,7 @@ ruleClassMap = {
     'formname': s_ast.FormName,
     'editpropdel': lambda astinfo, kids: s_ast.EditPropDel(astinfo, kids[1:]),
     'editpropset': s_ast.EditPropSet,
+    'editcondpropset': s_ast.EditCondPropSet,
     'edittagadd': s_ast.EditTagAdd,
     'edittagdel': lambda astinfo, kids: s_ast.EditTagDel(astinfo, kids[1:]),
     'edittagpropset': s_ast.EditTagPropSet,

CHANGED
|
@@ -362,10 +362,10 @@ class ProtoNode:
|
|
|
362
362
|
try:
|
|
363
363
|
valu, norminfo = prop.type.norm(valu)
|
|
364
364
|
except s_exc.BadTypeValu as e:
|
|
365
|
-
oldm = e.
|
|
366
|
-
e.
|
|
367
|
-
|
|
368
|
-
|
|
365
|
+
oldm = e.get('mesg')
|
|
366
|
+
e.update({'prop': prop.name,
|
|
367
|
+
'form': prop.form.name,
|
|
368
|
+
'mesg': f'Bad prop value {prop.full}={valu!r} : {oldm}'})
|
|
369
369
|
if self.ctx.snap.strict:
|
|
370
370
|
raise e
|
|
371
371
|
await self.ctx.snap.warn(e)
|
|
@@ -493,7 +493,7 @@ class SnapEditor:
|
|
|
493
493
|
try:
|
|
494
494
|
valu, norminfo = form.type.norm(valu)
|
|
495
495
|
except s_exc.BadTypeValu as e:
|
|
496
|
-
e.
|
|
496
|
+
e.set('form', form.name)
|
|
497
497
|
if self.snap.strict: raise e
|
|
498
498
|
await self.snap.warn(f'addNode() BadTypeValu {form.name}={valu} {e}')
|
|
499
499
|
return None
|
|
@@ -1404,25 +1404,54 @@ class Snap(s_base.Base):
|
|
|
1404
1404
|
|
|
1405
1405
|
trycast = vals.pop('$try', False)
|
|
1406
1406
|
addprops = vals.pop('$props', None)
|
|
1407
|
-
if addprops is not None:
|
|
1408
|
-
props.update(addprops)
|
|
1409
1407
|
|
|
1410
|
-
|
|
1411
|
-
|
|
1408
|
+
if not vals:
|
|
1409
|
+
mesg = f'No values provided for form {form.full}'
|
|
1410
|
+
raise s_exc.BadTypeValu(mesg=mesg)
|
|
1411
|
+
|
|
1412
|
+
for name, valu in list(props.items()):
|
|
1413
|
+
try:
|
|
1412
1414
|
props[name] = form.reqProp(name).type.norm(valu)
|
|
1415
|
+
except s_exc.BadTypeValu as e:
|
|
1416
|
+
mesg = e.get('mesg')
|
|
1417
|
+
e.update({
|
|
1418
|
+
'prop': name,
|
|
1419
|
+
'form': form.name,
|
|
1420
|
+
'mesg': f'Bad value for prop {form.name}:{name}: {mesg}',
|
|
1421
|
+
})
|
|
1422
|
+
raise e
|
|
1413
1423
|
|
|
1414
|
-
|
|
1424
|
+
if addprops is not None:
|
|
1425
|
+
for name, valu in addprops.items():
|
|
1426
|
+
try:
|
|
1427
|
+
props[name] = form.reqProp(name).type.norm(valu)
|
|
1428
|
+
except s_exc.BadTypeValu as e:
|
|
1429
|
+
mesg = e.get("mesg")
|
|
1430
|
+
if not trycast:
|
|
1431
|
+
e.update({
|
|
1432
|
+
'prop': name,
|
|
1433
|
+
'form': form.name,
|
|
1434
|
+
'mesg': f'Bad value for prop {form.name}:{name}: {mesg}'
|
|
1435
|
+
})
|
|
1436
|
+
raise e
|
|
1437
|
+
await self.warn(f'Skipping bad value for prop {form.name}:{name}: {mesg}')
|
|
1438
|
+
|
|
1439
|
+
for name, valu in vals.items():
|
|
1415
1440
|
|
|
1441
|
+
try:
|
|
1416
1442
|
prop = form.reqProp(name)
|
|
1417
1443
|
norm, norminfo = prop.type.norm(valu)
|
|
1418
1444
|
|
|
1419
1445
|
norms[name] = (prop, norm, norminfo)
|
|
1420
1446
|
proplist.append((name, norm))
|
|
1421
|
-
|
|
1422
|
-
|
|
1423
|
-
|
|
1424
|
-
|
|
1425
|
-
|
|
1447
|
+
except s_exc.BadTypeValu as e:
|
|
1448
|
+
mesg = e.get('mesg')
|
|
1449
|
+
e.update({
|
|
1450
|
+
'prop': name,
|
|
1451
|
+
'form': form.name,
|
|
1452
|
+
'mesg': f'Bad value for prop {form.name}:{name}: {mesg}',
|
|
1453
|
+
})
|
|
1454
|
+
raise e
|
|
1426
1455
|
|
|
1427
1456
|
proplist.sort()
|
|
1428
1457
|
|
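The snap.py changes above enrich BadTypeValu errors with the offending form and prop by calling the SynErr-style get()/update() helpers before re-raising. A small illustrative sketch of that pattern, assuming synapse 2.194.0 is installed and importable (the form and prop values here are made up):

```python
from synapse import exc as s_exc

try:
    raise s_exc.BadTypeValu(mesg='invalid literal', valu='asdf')
except s_exc.BadTypeValu as e:
    oldm = e.get('mesg')
    # Attach context and rewrite the message, as the new snap.py code does.
    e.update({'prop': 'asn', 'form': 'inet:ipv4',
              'mesg': f'Bad value for prop inet:ipv4:asn: {oldm}'})
    print(e)  # the rendered message now includes the form/prop context
```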
synapse/lib/storm.lark
CHANGED
@@ -39,7 +39,7 @@ _editblock: "[" _editoper* "]"
 
 // A single edit operation
 _editoper: editnodeadd
-            | editpropset | editunivset | edittagpropset | edittagadd
+            | editpropset | editunivset | edittagpropset | edittagadd | editcondpropset
             | editpropdel | editunivdel | edittagpropdel | edittagdel
             | editparens | edgeaddn1 | edgedeln1 | edgeaddn2 | edgedeln2
 
@@ -49,11 +49,13 @@ edittagadd: "+" [SETTAGOPER] tagname [(EQSPACE | EQNOSPACE) _valu]
 editunivdel: EXPRMINUS univprop
 edittagdel: EXPRMINUS tagname
 editpropset: relprop (EQSPACE | EQNOSPACE | MODSET | TRYSET | TRYSETPLUS | TRYSETMINUS) _valu
+editcondpropset: relprop condsetoper _valu
 editpropdel: EXPRMINUS relprop
 editunivset: univprop (EQSPACE | EQNOSPACE | MODSET | TRYSET | TRYSETPLUS | TRYSETMINUS) _valu
 editnodeadd: formname (EQSPACE | EQNOSPACE | MODSET | TRYSET | TRYSETPLUS | TRYSETMINUS) _valu
 edittagpropset: "+" tagprop (EQSPACE | EQNOSPACE | MODSET | TRYSET | TRYSETPLUS | TRYSETMINUS) _valu
 edittagpropdel: EXPRMINUS tagprop
+
 EQSPACE: /((?<=\s)=|=(?=\s))/
 MODSET.4: "+=" | "-="
 TRYSETPLUS.1: "?+="
@@ -61,6 +63,19 @@ TRYSETMINUS.1: "?-="
 TRYSET.1: "?="
 SETTAGOPER: "?"
 
+condsetoper: ("*" UNSET | _DEREF "$" _condvarvaluatom) "="
+            | ("*" UNSET | _DEREF "$" _condvarvaluatom) "?=" -> condtrysetoper
+UNSET: "unset"
+_condvarvaluatom: condvarvalue | condvarderef | condfunccall
+condvarvalue: VARTOKN -> varvalue
+
+!condvarderef: _condvarvaluatom "." (VARTOKN | "$" VARTOKN | _condderefexpr) -> varderef
+_condderefexpr: "$"? conddollarexpr
+conddollarexpr: "(" expror ")" -> dollarexpr
+
+condfunccall: _condvarvaluatom _condcallargs -> funccall
+_condcallargs: _LPARNOSPACE [(_valu | VARTOKN | (VARTOKN | NONQUOTEWORD) (EQSPACE | EQNOSPACE) _valu) ("," (_valu | VARTOKN | (VARTOKN | NONQUOTEWORD) (EQSPACE | EQNOSPACE) _valu))*] ","? ")"
+
 // The set of non-edit non-commands in storm
 
 _oper: stormfunc | initblock | emptyblock | finiblock | trycatch | subquery | _formpivot | formjoin

synapse/lib/storm.py
CHANGED
@@ -984,7 +984,9 @@ stormcmds = (
        $ssl = $lib.true
        if $cmdopts.ssl_noverify { $ssl = $lib.false }
 
-        $resp = $lib.inet.http.get($cmdopts.url, ssl_verify=$ssl)
+        $headers = ({'X-Synapse-Version': $lib.str.join('.', $lib.version.synapse())})
+
+        $resp = $lib.inet.http.get($cmdopts.url, ssl_verify=$ssl, headers=$headers)
 
        if ($resp.code != 200) {
            $lib.warn("pkg.load got HTTP code: {code} for URL: {url}", code=$resp.code, url=$cmdopts.url)
@@ -1603,7 +1605,7 @@
        function fetchnodes(url, ssl) {
            $resp = $lib.inet.http.get($url, ssl_verify=$ssl)
            if ($resp.code = 200) {
-                $nodes =
+                $nodes = ()
                for $valu in $resp.msgpack() {
                    $nodes.append($valu)
                }
@@ -3552,7 +3554,7 @@ class HelpCmd(Cmd):
                await runt.printf(line)
 
        else: # pragma: no cover
-            raise s_exc.StormRuntimeError(
+            raise s_exc.StormRuntimeError(mesg=f'Unknown bound method {func}')
 
    async def _handleStormLibMethod(self, func, runt: Runtime, verbose: bool =False):
        # Storm library methods must be derived from a library definition.
@@ -3583,7 +3585,7 @@
                await runt.printf(line)
 
        else: # pragma: no cover
-            raise s_exc.StormRuntimeError(
+            raise s_exc.StormRuntimeError(mesg=f'Unknown runtime lib method {func} {cls} {fname}')
 
 class DiffCmd(Cmd):
    '''
@@ -5344,6 +5346,12 @@ class ParallelCmd(Cmd):
        inet:ipv4#foo | parallel { $place = $lib.import(foobar).lookup(:latlong) [ :place=$place ] }
 
    NOTE: Storm variables set within the parallel query pipelines do not interact.
+
+    NOTE: If there are inbound nodes to the parallel command, parallel pipelines will be created as each node
+          is processed, up to the number specified by --size. If the number of nodes in the pipeline is less
+          than the value specified by --size, additional pipelines with no inbound node will not be created.
+          If there are no inbound nodes to the parallel command, the number of pipelines specified by --size
+          will always be created.
    '''
    name = 'parallel'
    readonly = True
@@ -5400,19 +5408,33 @@
        inq = asyncio.Queue(maxsize=size)
        outq = asyncio.Queue(maxsize=size)
 
-
-
-
-
-
-
-
-
-
-
-
-
-
+        tsks = 0
+        try:
+            while tsks < size:
+                await inq.put(await genr.__anext__())
+                base.schedCoro(self.pipeline(runt, query, inq, outq))
+                tsks += 1
+        except StopAsyncIteration:
+            [await inq.put(None) for i in range(tsks)]
+
+        # If a full set of tasks were created, keep pumping nodes into the queue
+        if tsks == size:
+            async def pump():
+                try:
+                    async for pumpitem in genr:
+                        await inq.put(pumpitem)
+                    [await inq.put(None) for i in range(size)]
+                except Exception as e:
+                    await outq.put(e)
+
+            base.schedCoro(pump())
+
+        # If no tasks were created, make a full set
+        elif tsks == 0:
+            tsks = size
+            for i in range(size):
+                base.schedCoro(self.pipeline(runt, query, inq, outq))
+            [await inq.put(None) for i in range(tsks)]
 
        exited = 0
        while True:
@@ -5423,7 +5445,7 @@
 
            if item is None:
                exited += 1
-                if exited == size:
+                if exited == tsks:
                    return
                continue
 
@@ -5566,9 +5588,6 @@ class TeeCmd(Cmd):
 
            await outq.put(None)
 
-        except asyncio.CancelledError: # pragma: no cover
-            raise
-
        except Exception as e:
            await outq.put(e)
 
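The docstring NOTE and the new queue-priming code above describe the same behavior: one pipeline is started per inbound node up to --size, a pump task keeps feeding the queue only once a full set of pipelines exists, and a full set is created up front only when there are no inbound nodes at all. A self-contained asyncio sketch of that pattern (hypothetical worker/run_parallel names, not the synapse implementation):

```python
import asyncio

async def worker(inq, outq):
    # Consume items until a None sentinel arrives, then report exit.
    while (item := await inq.get()) is not None:
        await outq.put(item * 2)
    await outq.put(None)

async def run_parallel(items, size=4):
    inq = asyncio.Queue(maxsize=size)
    outq = asyncio.Queue(maxsize=size)
    genr = iter(items)

    # Start one worker per inbound item, up to `size` workers.
    tsks = 0
    try:
        while tsks < size:
            await inq.put(next(genr))
            asyncio.create_task(worker(inq, outq))
            tsks += 1
    except StopIteration:
        # Fewer items than `size`: only the workers already created get sentinels.
        for _ in range(tsks):
            await inq.put(None)

    if tsks == size:
        # A full set of workers exists; keep pumping the remaining items.
        async def pump():
            for item in genr:
                await inq.put(item)
            for _ in range(size):
                await inq.put(None)
        asyncio.create_task(pump())
    elif tsks == 0:
        # No inbound items at all: still create a full set of workers.
        tsks = size
        for _ in range(size):
            asyncio.create_task(worker(inq, outq))
            await inq.put(None)

    # Collect results until every worker has reported its exit sentinel.
    results, exited = [], 0
    while exited < tsks:
        item = await outq.get()
        if item is None:
            exited += 1
        else:
            results.append(item)
    return results

print(asyncio.run(run_parallel(range(10), size=4)))
```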
synapse/lib/storm_format.py
CHANGED
synapse/lib/stormctrl.py
CHANGED
@@ -1,9 +1,91 @@
 class StormCtrlFlow(Exception):
+    '''
+    Base class all StormCtrlFlow exceptions derive from.
+    '''
+    def __init__(self):
+        raise NotImplementedError
+
+class _SynErrMixin(Exception):
+    '''
+    An exception mixin to give some control flow classes functionality like SynErr.
+    '''
+    def __init__(self, *args, **info):
+        self.errinfo = info
+        Exception.__init__(self, self._getExcMsg())
+
+    def _getExcMsg(self):
+        props = sorted(self.errinfo.items())
+        displ = ' '.join(['%s=%r' % (p, v) for (p, v) in props])
+        return '%s: %s' % (self.__class__.__name__, displ)
+
+    def _setExcMesg(self):
+        '''Should be called when self.errinfo is modified.'''
+        self.args = (self._getExcMsg(),)
+
+    def __setstate__(self, state):
+        '''Pickle support.'''
+        super(StormCtrlFlow, self).__setstate__(state)
+        self._setExcMesg()
+
+    def items(self):
+        return {k: v for k, v in self.errinfo.items()}
+
+    def get(self, name, defv=None):
+        '''
+        Return a value from the errinfo dict.
+
+        Example:
+
+            try:
+                foothing()
+            except SynErr as e:
+                blah = e.get('blah')
+
+        '''
+        return self.errinfo.get(name, defv)
+
+    def set(self, name, valu):
+        '''
+        Set a value in the errinfo dict.
+        '''
+        self.errinfo[name] = valu
+        self._setExcMesg()
+
+    def setdefault(self, name, valu):
+        '''
+        Set a value in errinfo dict if it is not already set.
+        '''
+        if name in self.errinfo:
+            return
+        self.errinfo[name] = valu
+        self._setExcMesg()
+
+    def update(self, items: dict):
+        '''Update multiple items in the errinfo dict at once.'''
+        self.errinfo.update(items)
+        self._setExcMesg()
+
+class StormLoopCtrl(_SynErrMixin):
+    # Control flow statements for WHILE and FOR loop control
+    statement = ''
+
+class StormGenrCtrl(_SynErrMixin):
+    # Control flow statements for GENERATOR control
+    statement = ''
+
+class StormStop(StormGenrCtrl, StormCtrlFlow):
+    statement = 'stop'
+
+class StormBreak(StormLoopCtrl, StormCtrlFlow):
+    statement = 'break'
+
+class StormContinue(StormLoopCtrl, StormCtrlFlow):
+    statement = 'continue'
+
+class StormExit(_SynErrMixin, StormCtrlFlow): pass
+
+# StormReturn is kept thin since it is commonly used and just
+# needs to be the container for moving an item up a frame.
+class StormReturn(StormCtrlFlow):
     def __init__(self, item=None):
         self.item = item
-
-class StormExit(StormCtrlFlow): pass
-class StormStop(StormCtrlFlow): pass
-class StormBreak(StormCtrlFlow): pass
-class StormReturn(StormCtrlFlow): pass
-class StormContinue(StormCtrlFlow): pass

synapse/lib/stormlib/cache.py
CHANGED
@@ -172,8 +172,12 @@ class FixedCache(s_stormtypes.StormType):
                await asyncio.sleep(0)
        except s_stormctrl.StormReturn as e:
            return await s_stormtypes.toprim(e.item)
-        except s_stormctrl.StormCtrlFlow:
-
+        except s_stormctrl.StormCtrlFlow as e:
+            name = e.__class__.__name__
+            if hasattr(e, 'statement'):
+                name = e.statement
+            exc = s_exc.StormRuntimeError(mesg=f'Storm control flow "{name}" not allowed in cache callbacks.')
+            raise exc from None
 
    async def _reqKey(self, key):
        if s_stormtypes.ismutable(key):

synapse/lib/stormlib/json.py
CHANGED
@@ -93,6 +93,7 @@ class JsonLib(s_stormtypes.Lib):
        'type': {'type': 'function', '_funcname': '_jsonSave',
                 'args': (
                     {'name': 'item', 'type': 'any', 'desc': 'The item to be serialized as a JSON string.', },
+                     {'name': 'indent', 'type': 'int', 'desc': 'Specify a number of spaces to indent with.', 'default': None},
                 ),
                 'returns': {'type': 'str', 'desc': 'The JSON serialized object.', }}},
        {'name': 'schema', 'desc': 'Get a JS schema validation object.',
@@ -115,10 +116,12 @@
    }
 
    @s_stormtypes.stormfunc(readonly=True)
-    async def _jsonSave(self, item):
+    async def _jsonSave(self, item, indent=None):
+        indent = await s_stormtypes.toint(indent, noneok=True)
+
        try:
            item = await s_stormtypes.toprim(item)
-            return json.dumps(item)
+            return json.dumps(item, indent=indent)
        except Exception as e:
            mesg = f'Argument is not JSON compatible: {item}'
            raise s_exc.MustBeJsonSafe(mesg=mesg)