synapse 2.180.1__py311-none-any.whl → 2.182.0__py311-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.

Files changed (90)
  1. synapse/assets/__init__.py +35 -0
  2. synapse/assets/storm/migrations/model-0.2.28.storm +355 -0
  3. synapse/common.py +2 -1
  4. synapse/cortex.py +49 -35
  5. synapse/cryotank.py +1 -1
  6. synapse/datamodel.py +30 -0
  7. synapse/lib/ast.py +12 -7
  8. synapse/lib/auth.py +17 -0
  9. synapse/lib/cell.py +7 -9
  10. synapse/lib/chop.py +0 -1
  11. synapse/lib/drive.py +8 -8
  12. synapse/lib/layer.py +55 -13
  13. synapse/lib/lmdbslab.py +26 -5
  14. synapse/lib/modelrev.py +28 -1
  15. synapse/lib/modules.py +1 -0
  16. synapse/lib/nexus.py +1 -1
  17. synapse/lib/node.py +5 -0
  18. synapse/lib/parser.py +23 -16
  19. synapse/lib/scrape.py +1 -1
  20. synapse/lib/slabseqn.py +2 -2
  21. synapse/lib/snap.py +129 -0
  22. synapse/lib/storm.lark +16 -2
  23. synapse/lib/storm.py +20 -3
  24. synapse/lib/storm_format.py +1 -0
  25. synapse/lib/stormhttp.py +34 -1
  26. synapse/lib/stormlib/auth.py +5 -3
  27. synapse/lib/stormlib/cortex.py +5 -2
  28. synapse/lib/stormlib/easyperm.py +2 -2
  29. synapse/lib/stormlib/ipv6.py +2 -2
  30. synapse/lib/stormlib/model.py +114 -12
  31. synapse/lib/stormlib/project.py +1 -1
  32. synapse/lib/stormtypes.py +81 -7
  33. synapse/lib/types.py +7 -0
  34. synapse/lib/version.py +2 -2
  35. synapse/lib/view.py +47 -0
  36. synapse/models/inet.py +10 -3
  37. synapse/models/infotech.py +2 -1
  38. synapse/models/language.py +4 -0
  39. synapse/models/math.py +50 -0
  40. synapse/models/orgs.py +8 -0
  41. synapse/models/risk.py +9 -0
  42. synapse/tests/files/stormcov/pragma-nocov.storm +18 -0
  43. synapse/tests/test_assets.py +25 -0
  44. synapse/tests/test_cortex.py +129 -0
  45. synapse/tests/test_datamodel.py +6 -0
  46. synapse/tests/test_lib_cell.py +12 -0
  47. synapse/tests/test_lib_grammar.py +7 -1
  48. synapse/tests/test_lib_layer.py +35 -0
  49. synapse/tests/test_lib_lmdbslab.py +11 -9
  50. synapse/tests/test_lib_modelrev.py +655 -1
  51. synapse/tests/test_lib_slabseqn.py +5 -4
  52. synapse/tests/test_lib_snap.py +4 -0
  53. synapse/tests/test_lib_storm.py +110 -1
  54. synapse/tests/test_lib_stormhttp.py +99 -1
  55. synapse/tests/test_lib_stormlib_auth.py +15 -0
  56. synapse/tests/test_lib_stormlib_cortex.py +21 -4
  57. synapse/tests/test_lib_stormlib_iters.py +8 -5
  58. synapse/tests/test_lib_stormlib_model.py +45 -6
  59. synapse/tests/test_lib_stormtypes.py +158 -2
  60. synapse/tests/test_lib_types.py +6 -0
  61. synapse/tests/test_model_inet.py +10 -0
  62. synapse/tests/test_model_language.py +4 -0
  63. synapse/tests/test_model_math.py +22 -0
  64. synapse/tests/test_model_orgs.py +6 -2
  65. synapse/tests/test_model_risk.py +4 -0
  66. synapse/tests/test_tools_storm.py +1 -1
  67. synapse/tests/test_utils_stormcov.py +5 -0
  68. synapse/tests/utils.py +18 -5
  69. synapse/utils/stormcov/plugin.py +31 -1
  70. synapse/vendor/cpython/LICENSE +279 -0
  71. synapse/vendor/cpython/__init__.py +0 -0
  72. synapse/vendor/cpython/lib/__init__.py +0 -0
  73. synapse/vendor/cpython/lib/email/__init__.py +0 -0
  74. synapse/vendor/cpython/lib/email/_parseaddr.py +560 -0
  75. synapse/vendor/cpython/lib/email/utils.py +505 -0
  76. synapse/vendor/cpython/lib/ipaddress.py +2366 -0
  77. synapse/vendor/cpython/lib/test/__init__.py +0 -0
  78. synapse/vendor/cpython/lib/test/support/__init__.py +114 -0
  79. synapse/vendor/cpython/lib/test/test_email/__init__.py +0 -0
  80. synapse/vendor/cpython/lib/test/test_email/test_email.py +480 -0
  81. synapse/vendor/cpython/lib/test/test_email/test_utils.py +167 -0
  82. synapse/vendor/cpython/lib/test/test_ipaddress.py +2672 -0
  83. synapse/vendor/utils.py +4 -3
  84. {synapse-2.180.1.dist-info → synapse-2.182.0.dist-info}/METADATA +3 -3
  85. {synapse-2.180.1.dist-info → synapse-2.182.0.dist-info}/RECORD +88 -71
  86. {synapse-2.180.1.dist-info → synapse-2.182.0.dist-info}/WHEEL +1 -1
  87. synapse/lib/jupyter.py +0 -505
  88. synapse/tests/test_lib_jupyter.py +0 -224
  89. {synapse-2.180.1.dist-info → synapse-2.182.0.dist-info}/LICENSE +0 -0
  90. {synapse-2.180.1.dist-info → synapse-2.182.0.dist-info}/top_level.txt +0 -0
synapse/lib/ast.py CHANGED
@@ -1369,14 +1369,14 @@ class SwitchCase(Oper):
         self.defcase = None

         for cent in self.kids[1:]:
+            *vals, subq = cent.kids

-            # if they only have one kid, it's a default case.
-            if len(cent.kids) == 1:
-                self.defcase = cent.kids[0]
+            if cent.defcase:
+                self.defcase = subq
                 continue

-            valu = cent.kids[0].value()
-            self.cases[valu] = cent.kids[1]
+            for valu in vals:
+                self.cases[valu.value()] = subq

     async def run(self, runt, genr):
         count = 0
@@ -1410,9 +1410,10 @@ class SwitchCase(Oper):
             async for item in subq.inline(runt, s_common.agen()):
                 yield item

-
 class CaseEntry(AstNode):
-    pass
+    def __init__(self, astinfo, kids=(), defcase=False):
+        AstNode.__init__(self, astinfo, kids=kids)
+        self.defcase = defcase

 class LiftOper(Oper):

@@ -4899,6 +4900,7 @@ class Function(AstNode):
             subr.funcscope = True

             try:
+                await asyncio.sleep(0)
                 async for item in subr.execute():
                     await asyncio.sleep(0)

@@ -4913,10 +4915,13 @@ class Function(AstNode):
             subr.funcscope = True
             try:
                 if self.hasemit:
+                    await asyncio.sleep(0)
                     async with contextlib.aclosing(await subr.emitter()) as agen:
                         async for item in agen:
                             yield item
+                            await asyncio.sleep(0)
                 else:
+                    await asyncio.sleep(0)
                     async with contextlib.aclosing(subr.execute()) as agen:
                         async for node, path in agen:
                             yield node, path
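
Note: the await asyncio.sleep(0) calls added above are cooperative yield points; a Storm function or emitter that produces many items without otherwise awaiting anything would monopolize the event loop. A generic sketch of the pattern (plain asyncio, not Synapse-specific; names are illustrative):

    import asyncio

    async def producer(items):
        for item in items:
            # Yield control to the event loop on every iteration so a tight
            # loop cannot starve other tasks sharing the same loop.
            await asyncio.sleep(0)
            yield item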
synapse/lib/auth.py CHANGED
@@ -579,11 +579,28 @@ class Auth(s_nexus.Pusher):
         await self.feedBeholder('user:add', user.pack())

     async def addRole(self, name, iden=None):
+        '''
+        Add a Role to the Auth system.
+
+        Args:
+            name (str): The name of the role.
+            iden (str): A optional iden to use as the role iden.
+
+        Returns:
+            Role: A Role.
+        '''
         if self.roleidenbynamecache.get(name) is not None:
             raise s_exc.DupRoleName(mesg=f'Duplicate role name, {name=} already exists.', name=name)

         if iden is None:
             iden = s_common.guid()
+        else:
+            if not s_common.isguid(iden):
+                raise s_exc.BadArg(name='iden', arg=iden, mesg=f'Argument {iden} it not a valid iden.')
+
+            if self.rolebyidencache.get(iden) is not None:
+                raise s_exc.DupIden(name=name, iden=iden,
+                                    mesg=f'Role already exists for {iden=}.')

         await self._push('role:add', iden, name)

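Note: together with the cell.py changes below, addRole() can now be given a caller-chosen guid, which is useful when mirroring a role across services. A minimal usage sketch, assuming an existing Cell (or CellApi proxy) reference; the helper name and flow are illustrative, not part of this release:

    import synapse.exc as s_exc
    import synapse.common as s_common

    async def ensure_role(cell, name, iden=None):
        # Pass a pre-generated guid so the role has the same iden on every cell,
        # or leave iden as None and let the cell generate one.
        if iden is not None and not s_common.isguid(iden):
            raise s_exc.BadArg(mesg=f'{iden} is not a valid iden.')
        try:
            return await cell.addRole(name, iden=iden)
        except s_exc.DupIden:
            # A role with this iden already exists; nothing to create.
            return None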
synapse/lib/cell.py CHANGED
@@ -443,8 +443,8 @@ class CellApi(s_base.Base):
         return await self.cell.delUser(iden)

     @adminapi(log=True)
-    async def addRole(self, name):
-        return await self.cell.addRole(name)
+    async def addRole(self, name, iden=None):
+        return await self.cell.addRole(name, iden=iden)

     @adminapi(log=True)
     async def delRole(self, iden):
@@ -1637,12 +1637,10 @@ class Cell(s_nexus.Pusher, s_telepath.Aware):
                        f'{disk.free / disk.total * 100:.2f}%), setting Cell to read-only.'
                 logger.error(mesg)

-            elif nexsroot.readonly:
-
-                await nexsroot.delWriteHold(diskspace)
+            elif nexsroot.readonly and await nexsroot.delWriteHold(diskspace):

                 mesg = f'Free space on {self.dirn} above minimum threshold (currently ' \
-                       f'{disk.free / disk.total * 100:.2f}%), re-enabling writes.'
+                       f'{disk.free / disk.total * 100:.2f}%), removing free space write hold.'
                 logger.error(mesg)

             await self._checkspace.timewait(timeout=self.FREE_SPACE_CHECK_FREQ)
@@ -1851,7 +1849,7 @@ class Cell(s_nexus.Pusher, s_telepath.Aware):

     @s_nexus.Pusher.onPushAuto('drive:data:set')
     async def setDriveData(self, iden, versinfo, data):
-        return self.drive.setItemData(iden, versinfo, data)
+        return await self.drive.setItemData(iden, versinfo, data)

     async def delDriveData(self, iden, vers=None):
         if vers is None:
@@ -2883,8 +2881,8 @@ class Cell(s_nexus.Pusher, s_telepath.Aware):
         logger.info(f'Deleted user={name}',
                     extra=await self.getLogExtra(target_user=iden, target_username=name, status='DELETE'))

-    async def addRole(self, name):
-        role = await self.auth.addRole(name)
+    async def addRole(self, name, iden=None):
+        role = await self.auth.addRole(name, iden=iden)
         logger.info(f'Added role={name}',
                     extra=await self.getLogExtra(target_role=role.iden, target_rolename=role.name, status='CREATE'))
         return role.pack()
synapse/lib/chop.py CHANGED
@@ -1,5 +1,4 @@
 import binascii
-import ipaddress

 import regex

synapse/lib/drive.py CHANGED
@@ -147,7 +147,7 @@ class Drive(s_base.Base):
             rows.append((LKEY_INFO + oldb, s_msgpack.en(oldpinfo)))

         self.slab.delete(LKEY_DIRN + oldb + oldname.encode(), db=self.dbname)
-        self.slab.putmulti(rows, db=self.dbname)
+        await self.slab.putmulti(rows, db=self.dbname)

         pathinfo.append(info)
         return pathinfo
@@ -167,7 +167,7 @@
         if byts is not None:
             return s_msgpack.un(byts)

-    def _addStepInfo(self, parbidn, parinfo, info):
+    async def _addStepInfo(self, parbidn, parinfo, info):

         newbidn = s_common.uhex(info.get('iden'))

@@ -190,7 +190,7 @@
             typekey = LKEY_INFO_BYTYPE + typename.encode() + b'\x00' + newbidn
             rows.append((typekey, b'\x01'))

-        self.slab.putmulti(rows, db=self.dbname)
+        await self.slab.putmulti(rows, db=self.dbname)

     def setItemPerm(self, iden, perm):
         return self._setItemPerm(s_common.uhex(iden), perm)
@@ -290,7 +290,7 @@
             mesg = f'A drive entry with ID {iden} already exists.'
             raise s_exc.DupIden(mesg=mesg)

-        self._addStepInfo(parbidn, parinfo, info)
+        await self._addStepInfo(parbidn, parinfo, info)

         pathinfo.append(info)
         return pathinfo
@@ -384,10 +384,10 @@

             yield info

-    def setItemData(self, iden, versinfo, data):
-        return self._setItemData(s_common.uhex(iden), versinfo, data)
+    async def setItemData(self, iden, versinfo, data):
+        return await self._setItemData(s_common.uhex(iden), versinfo, data)

-    def _setItemData(self, bidn, versinfo, data):
+    async def _setItemData(self, bidn, versinfo, data):

         info = self._reqItemInfo(bidn)

@@ -419,7 +419,7 @@
         info.update(versinfo)
         rows.append((LKEY_INFO + bidn, s_msgpack.en(info)))

-        self.slab.putmulti(rows, db=self.dbname)
+        await self.slab.putmulti(rows, db=self.dbname)

         return info, versinfo

synapse/lib/layer.py CHANGED
@@ -62,7 +62,6 @@ import shutil
 import struct
 import asyncio
 import logging
-import ipaddress
 import contextlib
 import collections

@@ -87,6 +86,8 @@ import synapse.lib.slabseqn as s_slabseqn

 from synapse.lib.msgpack import deepcopy

+ipaddress = s_common.ipaddress
+
 logger = logging.getLogger(__name__)

 import synapse.lib.msgpack as s_msgpack
@@ -2156,7 +2157,7 @@ class Layer(s_nexus.Pusher):

             if len(tostor) >= 10000:
                 logger.warning(f'...syncing 10k nodes @{count}')
-                self.layrslab.putmulti(tostor, db=self.bybuidv3)
+                await self.layrslab.putmulti(tostor, db=self.bybuidv3)
                 tostor.clear()

             lastbuid = buid
@@ -2197,7 +2198,7 @@
             count += 1
             tostor.append((lastbuid, s_msgpack.en(sode)))
         if tostor:
-            self.layrslab.putmulti(tostor, db=self.bybuidv3)
+            await self.layrslab.putmulti(tostor, db=self.bybuidv3)

         logger.warning('...removing old bybuid index')
         self.layrslab.dropdb('bybuid')
@@ -2623,8 +2624,8 @@

         logger.warning(f'Adding n1+n2 index to edges in layer {self.iden}')

-        def commit():
-            self.layrslab.putmulti(putkeys, db=self.edgesn1n2)
+        async def commit():
+            await self.layrslab.putmulti(putkeys, db=self.edgesn1n2)
             putkeys.clear()

         putkeys = []
@@ -2635,10 +2636,10 @@

             putkeys.append((n1buid + n2buid, venc))
             if len(putkeys) > MIGR_COMMIT_SIZE:
-                commit()
+                await commit()

         if len(putkeys):
-            commit()
+            await commit()

         self.meta.set('version', 10)
         self.layrvers = 10
@@ -2649,8 +2650,8 @@

         logger.warning(f'Adding byform index to layer {self.iden}')

-        def commit():
-            self.layrslab.putmulti(putkeys, db=self.byform)
+        async def commit():
+            await self.layrslab.putmulti(putkeys, db=self.byform)
             putkeys.clear()

         putkeys = []
@@ -2662,10 +2663,10 @@
             putkeys.append((abrv, buid))

             if len(putkeys) > MIGR_COMMIT_SIZE:
-                commit()
+                await commit()

         if putkeys:
-            commit()
+            await commit()

         self.meta.set('version', 11)
         self.layrvers = 11
@@ -2899,7 +2900,7 @@
             self.buidcache[buid] = sode
             kvlist.append((buid, s_msgpack.en(sode)))

-        self.layrslab.putmulti(kvlist, db=self.bybuidv3)
+        self.layrslab._putmulti(kvlist, db=self.bybuidv3)
         self.dirty.clear()

     def getStorNodeCount(self):
@@ -3050,6 +3051,47 @@

         return count

+    async def iterPropValues(self, formname, propname, stortype):
+        try:
+            abrv = self.getPropAbrv(formname, propname)
+        except s_exc.NoSuchAbrv:
+            return
+
+        if stortype & 0x8000:
+            stortype = STOR_TYPE_MSGP
+
+        stor = self.stortypes[stortype]
+        abrvlen = len(abrv)
+
+        async for lkey in s_coro.pause(self.layrslab.scanKeysByPref(abrv, db=self.byprop, nodup=True)):
+
+            indx = lkey[abrvlen:]
+            valu = stor.decodeIndx(indx)
+            if valu is not s_common.novalu:
+                yield indx, valu
+                continue
+
+            buid = self.layrslab.get(lkey, db=self.byprop)
+            if buid is not None:
+                sode = self._getStorNode(buid)
+                if sode is not None:
+                    if propname is None:
+                        valt = sode.get('valu')
+                    else:
+                        valt = sode['props'].get(propname)
+
+                    if valt is not None:
+                        yield indx, valt[0]
+
+    async def iterPropIndxBuids(self, formname, propname, indx):
+        try:
+            abrv = self.getPropAbrv(formname, propname)
+        except s_exc.NoSuchAbrv:
+            return
+
+        async for _, buid in s_coro.pause(self.layrslab.scanByDups(abrv + indx, db=self.byprop)):
+            yield buid
+
     async def liftByTag(self, tag, form=None, reverse=False):

         try:
@@ -3769,7 +3811,7 @@
             kvpairs.append((tp_abrv + indx, buid))
             kvpairs.append((ftp_abrv + indx, buid))

-        self.layrslab.putmulti(kvpairs, db=self.bytagprop)
+        await self.layrslab.putmulti(kvpairs, db=self.bytagprop)

         return (
             (EDIT_TAGPROP_SET, (tag, prop, valu, oldv, stortype), ()),
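
Note: the two new Layer helpers are async generators over the byprop index: iterPropValues() yields (index bytes, value) pairs for the distinct values of a form property, and iterPropIndxBuids() resolves the buids stored under a single index entry. A rough sketch of how a caller might combine them; the function and variable names are illustrative, not from this release:

    async def count_prop_values(layr, formname, propname, stortype):
        # Tally how many nodes in one layer carry each distinct value of
        # <formname>:<propname>, using only the new helpers shown above.
        counts = {}
        async for indx, valu in layr.iterPropValues(formname, propname, stortype):
            total = 0
            async for buid in layr.iterPropIndxBuids(formname, propname, indx):
                total += 1
            counts[valu] = counts.get(valu, 0) + total
        return counts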
synapse/lib/lmdbslab.py CHANGED
@@ -417,7 +417,7 @@ class HotKeyVal(s_base.Base):
         if not tups:
             return

-        self.slab.putmulti(tups, db=self.db)
+        self.slab._putmulti(tups, db=self.db)
         self.dirty.clear()

     def pack(self):
@@ -1651,7 +1651,28 @@ class Slab(s_base.Base):
         except lmdb.MapFullError:
             return self._handle_mapfull()

-    def putmulti(self, kvpairs, dupdata=False, append=False, db=None):
+    async def putmulti(self, kvpairs, dupdata=False, append=False, db=None):
+
+        # Use a fast path when we have a small amount of data to prevent creating new
+        # list objects when we don't have to.
+        if isinstance(kvpairs, (list, tuple)) and len(kvpairs) <= self.max_xactops_len:
+            ret = self._putmulti(kvpairs, dupdata=dupdata, append=append, db=db)
+            await asyncio.sleep(0)
+            return ret
+
+        # Otherwise, we chunk the large data or a generator into lists no greater than
+        # max_xactops_len and allow the slab the opportunity to commit between chunks.
+        # This helps to avoid a situation where a large generator or list of kvpairs
+        # could cause a greedy commit operation from happening.
+        consumed, added = 0, 0
+        for chunk in s_common.chunks(kvpairs, self.max_xactops_len):
+            rc, ra = self._putmulti(chunk, dupdata=dupdata, append=append, db=db)
+            consumed = consumed + rc
+            added = added + ra
+            await asyncio.sleep(0)
+        return (consumed, added)
+
+    def _putmulti(self, kvpairs: list, dupdata: bool =False, append: bool =False, db: str =None):
         '''
         Returns:
             Tuple of number of items consumed, number of items added
@@ -1669,7 +1690,7 @@
                 self.dirty = True

                 if not self.recovering:
-                    self._logXactOper(self.putmulti, kvpairs, dupdata=dupdata, append=append, db=db)
+                    self._logXactOper(self._putmulti, kvpairs, dupdata=dupdata, append=append, db=db)

                 with self.xact.cursor(db=realdb) as curs:
                     return curs.putmulti(kvpairs, dupdata=dupdata, append=append)
@@ -1677,7 +1698,7 @@
         except lmdb.MapFullError:
             return self._handle_mapfull()

-    def copydb(self, sourcedbname, destslab, destdbname=None, progresscb=None):
+    async def copydb(self, sourcedbname, destslab, destdbname=None, progresscb=None):
         '''
         Copy an entire database in this slab to a new database in potentially another slab.

@@ -1707,7 +1728,7 @@
         rowcount = 0

         for chunk in s_common.chunks(self.scanByFull(db=sourcedbname), COPY_CHUNKSIZE):
-            ccount, acount = destslab.putmulti(chunk, dupdata=True, append=True, db=destdbname)
+            ccount, acount = await destslab.putmulti(chunk, dupdata=True, append=True, db=destdbname)
             if ccount != len(chunk) or acount != len(chunk):
                 raise s_exc.BadCoreStore(mesg='Unexpected number of values written') # pragma: no cover

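Note: Slab.putmulti() is now a coroutine (SlabSeqn.save(), Drive, and the Layer migrations that call it were updated to await it): small lists take a fast path, while large lists or generators are written in max_xactops_len sized chunks with a yield to the event loop between chunks. A minimal caller sketch; the wrapper is illustrative and assumes an already-open Slab:

    async def bulk_store(slab, rows, db=None):
        # rows may be a list, tuple, or generator of (key, value) byte pairs.
        # Small inputs are written in one pass; larger inputs are committed in
        # chunks, awaiting between chunks so other tasks can run.
        consumed, added = await slab.putmulti(rows, dupdata=True, db=db)
        return consumed, added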
synapse/lib/modelrev.py CHANGED
@@ -2,13 +2,14 @@ import regex
 import logging

 import synapse.exc as s_exc
+import synapse.assets as s_assets
 import synapse.common as s_common

 import synapse.lib.layer as s_layer

 logger = logging.getLogger(__name__)

-maxvers = (0, 2, 27)
+maxvers = (0, 2, 30)

 class ModelRev:

@@ -41,6 +42,9 @@ class ModelRev:
             ((0, 2, 25), self.revModel_0_2_25),
             ((0, 2, 26), self.revModel_0_2_26),
             ((0, 2, 27), self.revModel_0_2_27),
+            # Model revision 0.2.28 skipped
+            ((0, 2, 29), self.revModel_0_2_29),
+            ((0, 2, 30), self.revModel_0_2_30),
         )

     async def _uniqSortArray(self, todoprops, layers):
@@ -787,6 +791,29 @@ class ModelRev:
     async def revModel_0_2_27(self, layers):
         await self._normPropValu(layers, 'it:dev:repo:commit:id')

+    async def revModel_0_2_28(self, layers):
+
+        opts = {'vars': {
+            'layridens': [layr.iden for layr in layers],
+        }}
+
+        text = s_assets.getStorm('migrations', 'model-0.2.28.storm')
+        await self.runStorm(text, opts=opts)
+
+    async def revModel_0_2_29(self, layers):
+        await self._propToForm(layers, 'ou:industry:type', 'ou:industry:type:taxonomy')
+
+    async def revModel_0_2_30(self, layers):
+        await self._normFormSubs(layers, 'inet:ipv4', cmprvalu='192.0.0.0/24')
+        await self._normFormSubs(layers, 'inet:ipv6', cmprvalu='64:ff9b:1::/48')
+        await self._normFormSubs(layers, 'inet:ipv6', cmprvalu='2002::/16')
+        await self._normFormSubs(layers, 'inet:ipv6', cmprvalu='2001:1::1/128')
+        await self._normFormSubs(layers, 'inet:ipv6', cmprvalu='2001:1::2/128')
+        await self._normFormSubs(layers, 'inet:ipv6', cmprvalu='2001:3::/32')
+        await self._normFormSubs(layers, 'inet:ipv6', cmprvalu='2001:4:112::/48')
+        await self._normFormSubs(layers, 'inet:ipv6', cmprvalu='2001:20::/28')
+        await self._normFormSubs(layers, 'inet:ipv6', cmprvalu='2001:30::/28')
+
     async def runStorm(self, text, opts=None):
         '''
         Run storm code in a schedcoro and log the output messages.
synapse/lib/modules.py CHANGED
@@ -7,6 +7,7 @@ coremods = (
     'synapse.models.syn.SynModule',
     'synapse.models.auth.AuthModule',
     'synapse.models.base.BaseModule',
+    'synapse.models.math.MathModule',
     'synapse.models.risk.RiskModule',
     'synapse.models.person.PsModule',
     'synapse.models.files.FileModule',
synapse/lib/nexus.py CHANGED
@@ -204,7 +204,7 @@ class NexsRoot(s_base.Base):
             olddb = self.nexsslab.initdb('nexs:indx')
             self.nexsslab.dropdb(olddb)
             db = newslab.initdb('nexs:indx')
-            newslab.copydb('nexs:indx', self.nexsslab, destdbname='nexs:indx')
+            await newslab.copydb('nexs:indx', self.nexsslab, destdbname='nexs:indx')
             newslab.dropdb(db)

         self.nexshot.set('version', 2)
synapse/lib/node.py CHANGED
@@ -664,6 +664,11 @@ class Node:
         for name in self.props.keys():
             edits.extend(await self._getPropDelEdits(name, init=True))

+        # Only remove nodedata if we're in a layer that doesn't have the full node
+        if self.snap.wlyr.iden != self.bylayer['ndef']:
+            async for name in self.iterDataKeys():
+                edits.append((s_layer.EDIT_NODEDATA_DEL, (name, None), ()))
+
         edits.append(
             (s_layer.EDIT_NODE_DEL, (formvalu, self.form.type.stortype), ()),
         )
synapse/lib/parser.py CHANGED
@@ -69,6 +69,7 @@ terminalEnglishMap = {
     'LISTTOKN': 'unquoted list value',
     'LPAR': '(',
     'LSQB': '[',
+    'MCASEBARE': 'case multi-value',
     'MODSET': '+= or -=',
     'NONQUOTEWORD': 'unquoted value',
     'NOT': 'not',
@@ -427,29 +428,35 @@ class AstConverter(lark.Transformer):
         return s_ast.VarDeref(astinfo, kids=(kids[0], newkid))

     @lark.v_args(meta=True)
-    def switchcase(self, meta, kids):
-
+    def caseentry(self, meta, kids):
+        assert kids and len(kids) >= 2
         astinfo = self.metaToAstInfo(meta)
+        newkids = self._convert_children(kids)

-        newkids = []
+        defcase = False

-        it = iter(kids)
+        if len(kids) == 2 and kids[0].type == 'DEFAULTCASE':
+            defcase = True
+            # Strip off the "Const: *" node
+            newkids = [newkids[1]]

-        varvalu = next(it)
-        newkids.append(varvalu)
+        return s_ast.CaseEntry(astinfo, kids=newkids, defcase=defcase)

-        for casekid, sqkid in zip(it, it):
-            subquery = self._convert_child(sqkid)
-            if casekid.type == 'DEFAULTCASE':
-                caseinfo = self.metaToAstInfo(casekid)
-                caseentry = s_ast.CaseEntry(caseinfo, kids=[subquery])
-            else:
-                casekid = self._convert_child(casekid)
-                caseentry = s_ast.CaseEntry(casekid.astinfo, kids=[casekid, subquery])
+    @lark.v_args(meta=True)
+    def switchcase(self, meta, kids):
+        kids = self._convert_children(kids)
+
+        astinfo = self.metaToAstInfo(meta)
+
+        # Check that we only have one default case
+        defcase = [k for k in kids[1:] if k.defcase]

-            newkids.append(caseentry)
+        deflen = len(defcase)
+        if deflen > 1:
+            mesg = f'Switch statements cannot have more than one default case. Found {deflen}.'
+            raise self.raiseBadSyntax(mesg, astinfo)

-        return s_ast.SwitchCase(astinfo, newkids)
+        return s_ast.SwitchCase(astinfo, kids)

     @lark.v_args(meta=True)
     def liftreverse(self, meta, kids):
synapse/lib/scrape.py CHANGED
@@ -7,7 +7,6 @@ import collections

 import idna
 import regex
-import ipaddress
 import unicodedata

 import synapse.exc as s_exc
@@ -21,6 +20,7 @@ import synapse.lib.msgpack as s_msgpack

 import synapse.lib.crypto.coin as s_coin

+ipaddress = s_common.ipaddress

 logger = logging.getLogger(__name__)

synapse/lib/slabseqn.py CHANGED
@@ -108,7 +108,7 @@ class SlabSeqn:
     def stat(self):
         return self.slab.stat(db=self.db)

-    def save(self, items):
+    async def save(self, items):
         '''
         Save a series of items to a sequence.

@@ -136,7 +136,7 @@

             rows.append((lkey, byts))

-        retn = self.slab.putmulti(rows, append=True, db=self.db)
+        retn = await self.slab.putmulti(rows, append=True, db=self.db)
         took = s_common.mononow() - abstick

         assert retn, "Not adding the largest indices"