synapse 2.192.0__py311-none-any.whl → 2.193.0__py311-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of synapse might be problematic.

Files changed (37)
  1. synapse/common.py +15 -0
  2. synapse/cortex.py +16 -18
  3. synapse/exc.py +6 -1
  4. synapse/lib/agenda.py +0 -2
  5. synapse/lib/ast.py +25 -11
  6. synapse/lib/cell.py +31 -85
  7. synapse/lib/cli.py +20 -11
  8. synapse/lib/parser.py +1 -1
  9. synapse/lib/snap.py +4 -4
  10. synapse/lib/storm.py +34 -17
  11. synapse/lib/stormlib/json.py +5 -2
  12. synapse/lib/stormtypes.py +19 -0
  13. synapse/lib/version.py +2 -2
  14. synapse/models/inet.py +17 -1
  15. synapse/models/infotech.py +14 -4
  16. synapse/models/risk.py +16 -2
  17. synapse/tests/test_cortex.py +3 -3
  18. synapse/tests/test_exc.py +3 -0
  19. synapse/tests/test_lib_agenda.py +157 -1
  20. synapse/tests/test_lib_ast.py +43 -1
  21. synapse/tests/test_lib_cell.py +71 -1
  22. synapse/tests/test_lib_storm.py +72 -30
  23. synapse/tests/test_lib_stormlib_json.py +20 -0
  24. synapse/tests/test_lib_stormlib_scrape.py +2 -2
  25. synapse/tests/test_model_inet.py +40 -5
  26. synapse/tests/test_model_risk.py +2 -0
  27. synapse/tests/test_tools_storm.py +95 -0
  28. synapse/tests/test_utils_getrefs.py +1 -1
  29. synapse/utils/getrefs.py +14 -3
  30. synapse/vendor/cpython/lib/http/__init__.py +0 -0
  31. synapse/vendor/cpython/lib/http/cookies.py +59 -0
  32. synapse/vendor/cpython/lib/test/test_http_cookies.py +49 -0
  33. {synapse-2.192.0.dist-info → synapse-2.193.0.dist-info}/METADATA +2 -2
  34. {synapse-2.192.0.dist-info → synapse-2.193.0.dist-info}/RECORD +37 -34
  35. {synapse-2.192.0.dist-info → synapse-2.193.0.dist-info}/WHEEL +1 -1
  36. {synapse-2.192.0.dist-info → synapse-2.193.0.dist-info}/LICENSE +0 -0
  37. {synapse-2.192.0.dist-info → synapse-2.193.0.dist-info}/top_level.txt +0 -0
synapse/common.py CHANGED
@@ -29,6 +29,8 @@ import traceback
 import contextlib
 import collections
 
+import http.cookies
+
 import yaml
 import regex
 
@@ -38,6 +40,8 @@ import synapse.lib.msgpack as s_msgpack
 import synapse.lib.structlog as s_structlog
 
 import synapse.vendor.cpython.lib.ipaddress as ipaddress
+import synapse.vendor.cpython.lib.http.cookies as v_cookies
+
 
 try:
     from yaml import CSafeLoader as Loader
@@ -1218,6 +1222,17 @@ def trimText(text: str, n: int = 256, placeholder: str = '...') -> str:
     assert n > plen
     return f'{text[:mlen]}{placeholder}'
 
+def _patch_http_cookies():
+    '''
+    Patch stdlib http.cookies._unquote from the 3.11.10 implementation if
+    the interpreter we are using is not patched for CVE-2024-7592.
+    '''
+    if not hasattr(http.cookies, '_QuotePatt'):
+        return
+    http.cookies._unquote = v_cookies._unquote
+
+_patch_http_cookies()
+
 # TODO: Switch back to using asyncio.wait_for when we are using py 3.12+
 # This is a workaround for a race where asyncio.wait_for can end up
 # ignoring cancellation https://github.com/python/cpython/issues/86296
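
Note: the guard above patches only interpreters that still ship the pre-fix regex attribute. A minimal standalone sketch of that check (illustrative only, not taken from the package):

import http.cookies

# Interpreters that predate the CVE-2024-7592 fix still expose the regex-based
# http.cookies._QuotePatt attribute, so its presence is the signal that the
# vendored 3.11.10 _unquote needs to be swapped in.
def needs_cookie_patch():
    return hasattr(http.cookies, '_QuotePatt')

print('needs vendored _unquote:', needs_cookie_patch())
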
synapse/cortex.py CHANGED
@@ -2240,7 +2240,7 @@ class Cortex(s_oauth.OAuthMixin, s_cell.Cell): # type: ignore
 
         '''
         name = cdef.get('name')
-        await self._setStormCmd(cdef)
+        self._setStormCmd(cdef)
         self.cmddefs.set(name, cdef)
 
     async def _reqStormCmd(self, cdef):
@@ -2483,7 +2483,7 @@ class Cortex(s_oauth.OAuthMixin, s_cell.Cell): # type: ignore
         async for sodes in self._mergeSodes(layers, genrs, cmprkey_indx, filtercmpr, reverse=reverse):
             yield sodes
 
-    async def _setStormCmd(self, cdef):
+    def _setStormCmd(self, cdef):
         '''
         Note:
             No change control or persistence
@@ -2543,13 +2543,9 @@ class Cortex(s_oauth.OAuthMixin, s_cell.Cell): # type: ignore
         name = cdef.get('name')
         self.stormcmds[name] = ctor
 
-        await self.fire('core:cmd:change', cmd=name, act='add')
-
-    async def _popStormCmd(self, name):
+    def _popStormCmd(self, name):
         self.stormcmds.pop(name, None)
 
-        await self.fire('core:cmd:change', cmd=name, act='del')
-
     async def delStormCmd(self, name):
         '''
         Remove a previously set pure storm command.
@@ -2575,8 +2571,6 @@ class Cortex(s_oauth.OAuthMixin, s_cell.Cell): # type: ignore
         self.cmddefs.pop(name)
         self.stormcmds.pop(name, None)
 
-        await self.fire('core:cmd:change', cmd=name, act='del')
-
     async def addStormPkg(self, pkgdef, verify=False):
         '''
         Add the given storm package to the cortex.
@@ -2630,11 +2624,11 @@ class Cortex(s_oauth.OAuthMixin, s_cell.Cell): # type: ignore
         olddef = self.pkgdefs.get(name, None)
         if olddef is not None:
             if s_hashitem.hashitem(pkgdef) != s_hashitem.hashitem(olddef):
-                await self._dropStormPkg(olddef)
+                self._dropStormPkg(olddef)
             else:
                 return
 
-        await self.loadStormPkg(pkgdef)
+        self.loadStormPkg(pkgdef)
         self.pkgdefs.set(name, pkgdef)
 
         self._clearPermDefs()
@@ -2664,7 +2658,7 @@ class Cortex(s_oauth.OAuthMixin, s_cell.Cell): # type: ignore
         if pkgdef is None:
             return
 
-        await self._dropStormPkg(pkgdef)
+        self._dropStormPkg(pkgdef)
 
         self._clearPermDefs()
 
@@ -2713,7 +2707,7 @@ class Cortex(s_oauth.OAuthMixin, s_cell.Cell): # type: ignore
     async def _tryLoadStormPkg(self, pkgdef):
         try:
             await self._normStormPkg(pkgdef, validstorm=False)
-            await self.loadStormPkg(pkgdef)
+            self.loadStormPkg(pkgdef)
 
         except asyncio.CancelledError: # pragma: no cover TODO: remove once >= py 3.8 only
             raise
@@ -2881,7 +2875,9 @@ class Cortex(s_oauth.OAuthMixin, s_cell.Cell): # type: ignore
         for configvar in pkgdef.get('configvars', ()):
            self._reqStormPkgVarType(pkgname, configvar.get('type'))
 
-    async def loadStormPkg(self, pkgdef):
+    # N.B. This function is intentionally not async in order to prevent possible user race conditions for code
+    # executing outside of the nexus lock.
+    def loadStormPkg(self, pkgdef):
         '''
         Load a storm package into the storm library for this cortex.
 
@@ -2911,7 +2907,7 @@ class Cortex(s_oauth.OAuthMixin, s_cell.Cell): # type: ignore
         self.stormmods = stormmods
 
         for cdef in cmds:
-            await self._setStormCmd(cdef)
+            self._setStormCmd(cdef)
 
         for gdef in pkgdef.get('graphs', ()):
             gdef = copy.deepcopy(gdef)
@@ -2937,7 +2933,9 @@ class Cortex(s_oauth.OAuthMixin, s_cell.Cell): # type: ignore
                 await self.fire('core:pkg:onload:complete', pkg=name)
             self.schedCoro(_onload())
 
-    async def _dropStormPkg(self, pkgdef):
+    # N.B. This function is intentionally not async in order to prevent possible user race conditions for code
+    # executing outside of the nexus lock.
+    def _dropStormPkg(self, pkgdef):
         '''
         Reverse the process of loadStormPkg()
         '''
@@ -2948,7 +2946,7 @@ class Cortex(s_oauth.OAuthMixin, s_cell.Cell): # type: ignore
 
         for cdef in pkgdef.get('commands', ()):
             name = cdef.get('name')
-            await self._popStormCmd(name)
+            self._popStormCmd(name)
 
         pkgname = pkgdef.get('name')
 
@@ -4435,7 +4433,7 @@ class Cortex(s_oauth.OAuthMixin, s_cell.Cell): # type: ignore
 
     async def _trySetStormCmd(self, name, cdef):
         try:
-            await self._setStormCmd(cdef)
+            self._setStormCmd(cdef)
         except (asyncio.CancelledError, Exception):
             logger.exception(f'Storm command load failed: {name}')
 
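Note: the loaders above were made synchronous so that no await point can interleave another task while command and package state is being mutated outside the nexus lock. A toy asyncio illustration of that difference (not synapse code):

import asyncio

state = []

async def async_update(name):
    state.append(f'{name}:start')
    await asyncio.sleep(0)          # suspension point: other tasks may run here
    state.append(f'{name}:end')

def sync_update(name):
    state.append(f'{name}:start')   # no await, so no other task can interleave
    state.append(f'{name}:end')

async def main():
    await asyncio.gather(async_update('a'), async_update('b'))
    print(state)   # ['a:start', 'b:start', 'a:end', 'b:end'] - interleaved
    state.clear()
    sync_update('a')
    sync_update('b')
    print(state)   # ['a:start', 'a:end', 'b:start', 'b:end'] - each call completes atomically

asyncio.run(main())
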
synapse/exc.py CHANGED
@@ -56,9 +56,14 @@ class SynErr(Exception):
         self.errinfo[name] = valu
         self._setExcMesg()
 
+    def update(self, items: dict):
+        '''Update multiple items in the errinfo dict at once.'''
+        self.errinfo.update(**items)
+        self._setExcMesg()
+
 class StormRaise(SynErr):
     '''
-    This represents a user provided exception inside of a Storm runtime. It requires a errname key.
+    This represents a user provided exception raised in the Storm runtime. It requires a errname key.
     '''
     def __init__(self, *args, **info):
         SynErr.__init__(self, *args, **info)
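
Note: the new SynErr.update() helper is used later in this diff by synapse/lib/snap.py to set several errinfo keys with a single message rebuild. A minimal usage sketch (example values are made up; assumes synapse >= 2.193.0 is installed):

import synapse.exc as s_exc

exc = s_exc.BadTypeValu(mesg='bad value')
# One call updates several errinfo keys and re-renders the exception message once,
# rather than calling exc.set() once per key.
exc.update({'prop': 'name', 'form': 'inet:ipv4'})
print(exc.errinfo)
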
synapse/lib/agenda.py CHANGED
@@ -5,9 +5,7 @@ import asyncio
 import logging
 import calendar
 import datetime
-import functools
 import itertools
-import collections
 from datetime import timezone as tz
 from collections.abc import Iterable, Mapping
 
synapse/lib/ast.py CHANGED
@@ -60,7 +60,7 @@ class AstNode:
 
     def addExcInfo(self, exc):
         if 'highlight' not in exc.errinfo:
-            exc.errinfo['highlight'] = self.getPosInfo()
+            exc.set('highlight', self.getPosInfo())
         return exc
 
     def repr(self):
@@ -3579,10 +3579,23 @@ class FuncCall(Value):
         kwargs = {k: v for (k, v) in await self.kids[2].compute(runt, path)}
 
         with s_scope.enter({'runt': runt}):
-            retn = func(*argv, **kwargs)
-            if s_coro.iscoro(retn):
-                return await retn
-            return retn
+            try:
+                retn = func(*argv, **kwargs)
+                if s_coro.iscoro(retn):
+                    return await retn
+                return retn
+
+            except TypeError as e:
+                mesg = str(e)
+                if (funcpath := getattr(func, '_storm_funcpath', None)) is not None:
+                    mesg = f"{funcpath}(){mesg.split(')', 1)[1]}"
+
+                raise self.addExcInfo(s_exc.StormRuntimeError(mesg=mesg))
+
+            except s_exc.SynErr as e:
+                if getattr(func, '_storm_runtime_lib_func', None) is not None:
+                    e.errinfo.pop('highlight', None)
+                raise self.addExcInfo(e)
 
 class DollarExpr(Value):
     '''
@@ -4891,8 +4904,9 @@ class Function(AstNode):
 
         @s_stormtypes.stormfunc(readonly=True)
         async def realfunc(*args, **kwargs):
-            return await self.callfunc(runt, argdefs, args, kwargs)
+            return await self.callfunc(runt, argdefs, args, kwargs, realfunc._storm_funcpath)
 
+        realfunc._storm_funcpath = self.name
         await runt.setVar(self.name, realfunc)
 
         count = 0
@@ -4914,7 +4928,7 @@ class Function(AstNode):
             # var scope validation occurs in the sub-runtime
             pass
 
-    async def callfunc(self, runt, argdefs, args, kwargs):
+    async def callfunc(self, runt, argdefs, args, kwargs, funcpath):
         '''
         Execute a function call using the given runtime.
 
@@ -4925,7 +4939,7 @@ class Function(AstNode):
 
         argcount = len(args) + len(kwargs)
         if argcount > len(argdefs):
-            mesg = f'{self.name}() takes {len(argdefs)} arguments but {argcount} were provided'
+            mesg = f'{funcpath}() takes {len(argdefs)} arguments but {argcount} were provided'
             raise self.kids[1].addExcInfo(s_exc.StormRuntimeError(mesg=mesg))
 
         # Fill in the positional arguments
@@ -4939,7 +4953,7 @@ class Function(AstNode):
             valu = kwargs.pop(name, s_common.novalu)
             if valu is s_common.novalu:
                 if defv is s_common.novalu:
-                    mesg = f'{self.name}() missing required argument {name}'
+                    mesg = f'{funcpath}() missing required argument {name}'
                    raise self.kids[1].addExcInfo(s_exc.StormRuntimeError(mesg=mesg))
                 valu = defv
 
@@ -4950,11 +4964,11 @@ class Function(AstNode):
             # used a kwarg not defined.
             kwkeys = list(kwargs.keys())
             if kwkeys[0] in posnames:
-                mesg = f'{self.name}() got multiple values for parameter {kwkeys[0]}'
+                mesg = f'{funcpath}() got multiple values for parameter {kwkeys[0]}'
                 raise self.kids[1].addExcInfo(s_exc.StormRuntimeError(mesg=mesg))
 
             plural = 's' if len(kwargs) > 1 else ''
-            mesg = f'{self.name}() got unexpected keyword argument{plural}: {",".join(kwkeys)}'
+            mesg = f'{funcpath}() got unexpected keyword argument{plural}: {",".join(kwkeys)}'
             raise self.kids[1].addExcInfo(s_exc.StormRuntimeError(mesg=mesg))
 
         assert len(mergargs) == len(argdefs)
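
Note: the _storm_funcpath attribute lets FuncCall re-prefix a native TypeError message with the Storm-visible function path. A standalone sketch of that rewrite (the function name here is a made-up example):

# A native message such as "foo() takes 2 positional arguments but 3 were given"
# keeps everything after the first ')' and gets the Storm function path prepended.
def rewrite(mesg, funcpath):
    return f"{funcpath}(){mesg.split(')', 1)[1]}"

print(rewrite('foo() takes 2 positional arguments but 3 were given', 'myfunc'))
# -> myfunc() takes 2 positional arguments but 3 were given
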
synapse/lib/cell.py CHANGED
@@ -2623,18 +2623,12 @@ class Cell(s_nexus.Pusher, s_telepath.Aware):
         walkpath(self.backdirn)
         return backups
 
-    async def iterBackupArchive(self, name, user):
-
-        success = False
-        loglevel = logging.WARNING
-
-        path = self._reqBackDirn(name)
-        cellguid = os.path.join(path, 'cell.guid')
-        if not os.path.isfile(cellguid):
-            mesg = 'Specified backup path has no cell.guid file.'
-            raise s_exc.BadArg(mesg=mesg, arg='path', valu=path)
-
+    async def _streamBackupArchive(self, path, user, name):
         link = s_scope.get('link')
+        if link is None:
+            mesg = 'Link not found in scope. This API must be called via a CellApi.'
+            raise s_exc.SynErr(mesg=mesg)
+
         linkinfo = await link.getSpawnInfo()
         linkinfo['logconf'] = await self._getSpawnLogConf()
 
@@ -2642,42 +2636,42 @@ class Cell(s_nexus.Pusher, s_telepath.Aware):
 
         ctx = multiprocessing.get_context('spawn')
 
-        proc = None
-        mesg = 'Streaming complete'
-
         def getproc():
             proc = ctx.Process(target=_iterBackupProc, args=(path, linkinfo))
             proc.start()
             return proc
 
+        mesg = 'Streaming complete'
+        proc = await s_coro.executor(getproc)
+        cancelled = False
         try:
-            proc = await s_coro.executor(getproc)
-
             await s_coro.executor(proc.join)
+            self.backlastuploaddt = datetime.datetime.now()
+            logger.debug(f'Backup streaming completed successfully for {name}')
 
-        except (asyncio.CancelledError, Exception) as e:
+        except asyncio.CancelledError:
+            logger.warning('Backup streaming was cancelled.')
+            cancelled = True
+            raise
 
-            # We want to log all exceptions here, an asyncio.CancelledError
-            # could be the result of a remote link terminating due to the
-            # backup stream being completed, prior to this function
-            # finishing.
+        except Exception as e:
             logger.exception('Error during backup streaming.')
-
-            if proc:
-                proc.terminate()
-
             mesg = repr(e)
             raise
 
-        else:
-            success = True
-            loglevel = logging.DEBUG
-            self.backlastuploaddt = datetime.datetime.now()
-
         finally:
-            phrase = 'successfully' if success else 'with failure'
-            logger.log(loglevel, f'iterBackupArchive completed {phrase} for {name}')
-            raise s_exc.DmonSpawn(mesg=mesg)
+            proc.terminate()
+
+        if not cancelled:
+            raise s_exc.DmonSpawn(mesg=mesg)
+
+    async def iterBackupArchive(self, name, user):
+        path = self._reqBackDirn(name)
+        cellguid = os.path.join(path, 'cell.guid')
+        if not os.path.isfile(cellguid):
+            mesg = 'Specified backup path has no cell.guid file.'
+            raise s_exc.BadArg(mesg=mesg, arg='path', valu=path)
+        await self._streamBackupArchive(path, user, name)
 
     async def iterNewBackupArchive(self, user, name=None, remove=False):
 
@@ -2688,9 +2682,6 @@ class Cell(s_nexus.Pusher, s_telepath.Aware):
             if remove:
                 self.backupstreaming = True
 
-            success = False
-            loglevel = logging.WARNING
-
             if name is None:
                 name = time.strftime('%Y%m%d%H%M%S', datetime.datetime.now().timetuple())
 
@@ -2699,10 +2690,6 @@ class Cell(s_nexus.Pusher, s_telepath.Aware):
                 mesg = 'Backup with name already exists'
                 raise s_exc.BadArg(mesg=mesg)
 
-            link = s_scope.get('link')
-            linkinfo = await link.getSpawnInfo()
-            linkinfo['logconf'] = await self._getSpawnLogConf()
-
             try:
                 await self.runBackup(name)
             except Exception:
@@ -2712,54 +2699,13 @@ class Cell(s_nexus.Pusher, s_telepath.Aware):
                     logger.debug(f'Removed {path}')
                 raise
 
-            await self.boss.promote('backup:stream', user=user, info={'name': name})
-
-            ctx = multiprocessing.get_context('spawn')
-
-            proc = None
-            mesg = 'Streaming complete'
-
-            def getproc():
-                proc = ctx.Process(target=_iterBackupProc, args=(path, linkinfo))
-                proc.start()
-                return proc
-
-            try:
-                proc = await s_coro.executor(getproc)
-
-                await s_coro.executor(proc.join)
-
-            except (asyncio.CancelledError, Exception) as e:
-
-                # We want to log all exceptions here, an asyncio.CancelledError
-                # could be the result of a remote link terminating due to the
-                # backup stream being completed, prior to this function
-                # finishing.
-                logger.exception('Error during backup streaming.')
-
-                if proc:
-                    proc.terminate()
-
-                mesg = repr(e)
-                raise
-
-            else:
-                success = True
-                loglevel = logging.DEBUG
-                self.backlastuploaddt = datetime.datetime.now()
-
-            finally:
-                if remove:
-                    logger.debug(f'Removing {path}')
-                    await s_coro.executor(shutil.rmtree, path, ignore_errors=True)
-                    logger.debug(f'Removed {path}')
-
-                phrase = 'successfully' if success else 'with failure'
-                logger.log(loglevel, f'iterNewBackupArchive completed {phrase} for {name}')
-                raise s_exc.DmonSpawn(mesg=mesg)
+            await self._streamBackupArchive(path, user, name)
 
         finally:
             if remove:
+                logger.debug(f'Removing {path}')
+                await s_coro.executor(shutil.rmtree, path, ignore_errors=True)
+                logger.debug(f'Removed {path}')
                 self.backupstreaming = False
 
     async def isUserAllowed(self, iden, perm, gateiden=None, default=False):
synapse/lib/cli.py CHANGED
@@ -281,18 +281,26 @@ class Cli(s_base.Base):
 
         await self.fini()
 
-    async def addSignalHandlers(self):
+    async def addSignalHandlers(self): # pragma: no cover
         '''
         Register SIGINT signal handler with the ioloop to cancel the currently running cmdloop task.
+        Removes the handler when the cli is fini'd.
         '''
-
         def sigint():
-            self.printf('<ctrl-c>')
             if self.cmdtask is not None:
                 self.cmdtask.cancel()
 
         self.loop.add_signal_handler(signal.SIGINT, sigint)
 
+        def onfini():
+            # N.B. This is reaches into some loop / handle internals but
+            # prevents us from removing a handler that overwrote our own.
+            hndl = self.loop._signal_handlers.get(signal.SIGINT, None)  # type: asyncio.Handle
+            if hndl is not None and hndl._callback is sigint:
+                self.loop.remove_signal_handler(signal.SIGINT)
+
+        self.onfini(onfini)
+
     def get(self, name, defval=None):
         return self.locs.get(name, defval)
 
@@ -324,8 +332,12 @@ class Cli(s_base.Base):
         if text is None:
             text = self.cmdprompt
 
-        with patch_stdout():
-            retn = await self.sess.prompt_async(text, vi_mode=self.vi_mode, enable_open_in_editor=True)
+        with patch_stdout(): # pragma: no cover
+            retn = await self.sess.prompt_async(text,
+                                                vi_mode=self.vi_mode,
+                                                enable_open_in_editor=True,
+                                                handle_sigint=False  # We handle sigint in the loop
+                                                )
         return retn
 
     def printf(self, mesg, addnl=True, color=None):
@@ -390,7 +402,7 @@ class Cli(s_base.Base):
             self.cmdtask = self.schedCoro(coro)
             await self.cmdtask
 
-        except KeyboardInterrupt:
+        except (KeyboardInterrupt, asyncio.CancelledError):
 
             if self.isfini:
                 return
@@ -408,11 +420,8 @@ class Cli(s_base.Base):
             if self.cmdtask is not None:
                 self.cmdtask.cancel()
                 try:
-                    self.cmdtask.result()
-                except asyncio.CancelledError:
-                    # Wait a beat to let any remaining nodes to print out before we print the prompt
-                    await asyncio.sleep(1)
-                except Exception:
+                    await asyncio.wait_for(self.cmdtask, timeout=0.1)
+                except (asyncio.CancelledError, asyncio.TimeoutError):
                     pass
 
     async def runCmdLine(self, line):
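
Note: the onfini() hook removes the SIGINT handler only if it is still the one the cli registered. A standalone restating of that check (assumes a Unix event loop, since it peeks at asyncio's private _signal_handlers mapping, just as the code above does):

import signal

def remove_sigint_if_ours(loop, ours):
    # asyncio stores registered signal handlers as Handle objects; only remove the
    # SIGINT entry when its callback is still the function we installed earlier.
    hndl = loop._signal_handlers.get(signal.SIGINT, None)
    if hndl is not None and hndl._callback is ours:
        loop.remove_signal_handler(signal.SIGINT)
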
synapse/lib/parser.py CHANGED
@@ -507,7 +507,7 @@ class Parser:
             origexc = e.orig_exc
             if not isinstance(origexc, s_exc.SynErr):
                 raise e.orig_exc # pragma: no cover
-            origexc.errinfo['text'] = self.text
+            origexc.set('text', self.text)
             return s_exc.BadSyntax(**origexc.errinfo)
 
         elif isinstance(e, lark.exceptions.UnexpectedCharacters): # pragma: no cover
synapse/lib/snap.py CHANGED
@@ -363,9 +363,9 @@ class ProtoNode:
             valu, norminfo = prop.type.norm(valu)
         except s_exc.BadTypeValu as e:
             oldm = e.errinfo.get('mesg')
-            e.errinfo['prop'] = prop.name
-            e.errinfo['form'] = prop.form.name
-            e.errinfo['mesg'] = f'Bad prop value {prop.full}={valu!r} : {oldm}'
+            e.update({'prop': prop.name,
+                      'form': prop.form.name,
+                      'mesg': f'Bad prop value {prop.full}={valu!r} : {oldm}'})
             if self.ctx.snap.strict:
                 raise e
             await self.ctx.snap.warn(e)
@@ -493,7 +493,7 @@ class SnapEditor:
         try:
             valu, norminfo = form.type.norm(valu)
         except s_exc.BadTypeValu as e:
-            e.errinfo['form'] = form.name
+            e.set('form', form.name)
             if self.snap.strict: raise e
             await self.snap.warn(f'addNode() BadTypeValu {form.name}={valu} {e}')
             return None
synapse/lib/storm.py CHANGED
@@ -5344,6 +5344,12 @@ class ParallelCmd(Cmd):
     inet:ipv4#foo | parallel { $place = $lib.import(foobar).lookup(:latlong) [ :place=$place ] }
 
     NOTE: Storm variables set within the parallel query pipelines do not interact.
+
+    NOTE: If there are inbound nodes to the parallel command, parallel pipelines will be created as each node
+          is processed, up to the number specified by --size. If the number of nodes in the pipeline is less
+          than the value specified by --size, additional pipelines with no inbound node will not be created.
+          If there are no inbound nodes to the parallel command, the number of pipelines specified by --size
+          will always be created.
     '''
     name = 'parallel'
     readonly = True
@@ -5400,19 +5406,33 @@ class ParallelCmd(Cmd):
         inq = asyncio.Queue(maxsize=size)
         outq = asyncio.Queue(maxsize=size)
 
-        async def pump():
-            try:
-                async for pumpitem in genr:
-                    await inq.put(pumpitem)
-                [await inq.put(None) for i in range(size)]
-            except asyncio.CancelledError: # pragma: no cover
-                raise
-            except Exception as e:
-                await outq.put(e)
-
-        base.schedCoro(pump())
-        for i in range(size):
-            base.schedCoro(self.pipeline(runt, query, inq, outq))
+        tsks = 0
+        try:
+            while tsks < size:
+                await inq.put(await genr.__anext__())
+                base.schedCoro(self.pipeline(runt, query, inq, outq))
+                tsks += 1
+        except StopAsyncIteration:
+            [await inq.put(None) for i in range(tsks)]
+
+        # If a full set of tasks were created, keep pumping nodes into the queue
+        if tsks == size:
+            async def pump():
+                try:
+                    async for pumpitem in genr:
+                        await inq.put(pumpitem)
+                    [await inq.put(None) for i in range(size)]
+                except Exception as e:
+                    await outq.put(e)
+
+            base.schedCoro(pump())
+
+        # If no tasks were created, make a full set
+        elif tsks == 0:
+            tsks = size
+            for i in range(size):
+                base.schedCoro(self.pipeline(runt, query, inq, outq))
+            [await inq.put(None) for i in range(tsks)]
 
         exited = 0
         while True:
@@ -5423,7 +5443,7 @@ class ParallelCmd(Cmd):
 
             if item is None:
                 exited += 1
-                if exited == size:
+                if exited == tsks:
                     return
                 continue
 
@@ -5566,9 +5586,6 @@ class TeeCmd(Cmd):
 
                 await outq.put(None)
 
-            except asyncio.CancelledError: # pragma: no cover
-                raise
-
             except Exception as e:
                 await outq.put(e)
 
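Note: the parallel command now starts pipelines lazily, one per inbound node up to --size, instead of always starting a full set. A standalone asyncio sketch of that startup pattern (not synapse code; the no-inbound-nodes branch is omitted for brevity):

import asyncio

async def worker(inq, outq):
    # Stand-in for one parallel pipeline: consume until a None sentinel arrives.
    while True:
        item = await inq.get()
        if item is None:
            await outq.put(None)
            return
        await outq.put(item * 2)

async def main(items, size=4):
    inq = asyncio.Queue(maxsize=size)
    outq = asyncio.Queue(maxsize=size)

    async def agenr():
        for item in items:
            yield item
    genr = agenr()

    tasks = []
    tsks = 0
    try:
        # Start one worker per inbound item, capped at size.
        while tsks < size:
            await inq.put(await genr.__anext__())
            tasks.append(asyncio.create_task(worker(inq, outq)))
            tsks += 1
    except StopAsyncIteration:
        # Fewer inbound items than size: only signal the workers that were started.
        for _ in range(tsks):
            await inq.put(None)

    if tsks == size:
        # A full set of workers exists, so keep pumping the remaining items.
        async def pump():
            async for item in genr:
                await inq.put(item)
            for _ in range(size):
                await inq.put(None)
        tasks.append(asyncio.create_task(pump()))

    exited = 0
    while exited < tsks:
        item = await outq.get()
        if item is None:
            exited += 1
            continue
        print('result:', item)

asyncio.run(main([1, 2], size=4))
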
synapse/lib/stormlib/json.py CHANGED
@@ -93,6 +93,7 @@ class JsonLib(s_stormtypes.Lib):
         'type': {'type': 'function', '_funcname': '_jsonSave',
                  'args': (
                      {'name': 'item', 'type': 'any', 'desc': 'The item to be serialized as a JSON string.', },
+                     {'name': 'indent', 'type': 'int', 'desc': 'Specify a number of spaces to indent with.', 'default': None},
                  ),
                  'returns': {'type': 'str', 'desc': 'The JSON serialized object.', }}},
        {'name': 'schema', 'desc': 'Get a JS schema validation object.',
@@ -115,10 +116,12 @@ class JsonLib(s_stormtypes.Lib):
     }
 
     @s_stormtypes.stormfunc(readonly=True)
-    async def _jsonSave(self, item):
+    async def _jsonSave(self, item, indent=None):
+        indent = await s_stormtypes.toint(indent, noneok=True)
+
         try:
             item = await s_stormtypes.toprim(item)
-            return json.dumps(item)
+            return json.dumps(item, indent=indent)
         except Exception as e:
             mesg = f'Argument is not JSON compatible: {item}'
             raise s_exc.MustBeJsonSafe(mesg=mesg)
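
Note: the new indent argument is handed straight to the stdlib encoder, so the output matches what json.dumps produces:

import json

# Equivalent of what $lib.json.save(..., indent=2) now does under the hood.
print(json.dumps({'foo': 'bar', 'baz': [1, 2]}, indent=2))
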