synapse 2.192.0__py311-none-any.whl → 2.193.0__py311-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of synapse might be problematic.

Files changed (37)
  1. synapse/common.py +15 -0
  2. synapse/cortex.py +16 -18
  3. synapse/exc.py +6 -1
  4. synapse/lib/agenda.py +0 -2
  5. synapse/lib/ast.py +25 -11
  6. synapse/lib/cell.py +31 -85
  7. synapse/lib/cli.py +20 -11
  8. synapse/lib/parser.py +1 -1
  9. synapse/lib/snap.py +4 -4
  10. synapse/lib/storm.py +34 -17
  11. synapse/lib/stormlib/json.py +5 -2
  12. synapse/lib/stormtypes.py +19 -0
  13. synapse/lib/version.py +2 -2
  14. synapse/models/inet.py +17 -1
  15. synapse/models/infotech.py +14 -4
  16. synapse/models/risk.py +16 -2
  17. synapse/tests/test_cortex.py +3 -3
  18. synapse/tests/test_exc.py +3 -0
  19. synapse/tests/test_lib_agenda.py +157 -1
  20. synapse/tests/test_lib_ast.py +43 -1
  21. synapse/tests/test_lib_cell.py +71 -1
  22. synapse/tests/test_lib_storm.py +72 -30
  23. synapse/tests/test_lib_stormlib_json.py +20 -0
  24. synapse/tests/test_lib_stormlib_scrape.py +2 -2
  25. synapse/tests/test_model_inet.py +40 -5
  26. synapse/tests/test_model_risk.py +2 -0
  27. synapse/tests/test_tools_storm.py +95 -0
  28. synapse/tests/test_utils_getrefs.py +1 -1
  29. synapse/utils/getrefs.py +14 -3
  30. synapse/vendor/cpython/lib/http/__init__.py +0 -0
  31. synapse/vendor/cpython/lib/http/cookies.py +59 -0
  32. synapse/vendor/cpython/lib/test/test_http_cookies.py +49 -0
  33. {synapse-2.192.0.dist-info → synapse-2.193.0.dist-info}/METADATA +2 -2
  34. {synapse-2.192.0.dist-info → synapse-2.193.0.dist-info}/RECORD +37 -34
  35. {synapse-2.192.0.dist-info → synapse-2.193.0.dist-info}/WHEEL +1 -1
  36. {synapse-2.192.0.dist-info → synapse-2.193.0.dist-info}/LICENSE +0 -0
  37. {synapse-2.192.0.dist-info → synapse-2.193.0.dist-info}/top_level.txt +0 -0
synapse/lib/stormtypes.py CHANGED
@@ -201,11 +201,29 @@ class StormTypesRegistry:
             raise Exception('no key!')
         self.addStormLib(path, ctor)

+        for info in ctor._storm_locals:
+            rtype = info.get('type')
+            if isinstance(rtype, dict) and rtype.get('type') == 'function':
+                if (fname := rtype.get('_funcname')) == '_storm_query':
+                    continue
+
+                if (func := getattr(ctor, fname, None)) is not None:
+                    funcpath = '.'.join(('lib',) + ctor._storm_lib_path + (info['name'],))
+                    func._storm_funcpath = f"${funcpath}"
+
         return ctor

     def registerType(self, ctor):
         '''Decorator to register a StormPrim'''
         self.addStormType(ctor.__name__, ctor)
+
+        for info in ctor._storm_locals:
+            rtype = info.get('type')
+            if isinstance(rtype, dict) and rtype.get('type') == 'function':
+                fname = rtype.get('_funcname')
+                if (func := getattr(ctor, fname, None)) is not None:
+                    func._storm_funcpath = f"{ctor._storm_typename}.{info['name']}"
+
         return ctor

     def iterLibs(self):
@@ -628,6 +646,7 @@ class Lib(StormType):
             if callable(v) and v.__name__ == 'realfunc':
                 v._storm_runtime_lib = self
                 v._storm_runtime_lib_func = k
+                v._storm_funcpath = f'${".".join(("lib",) + self.name + (k,))}'

             self.locls[k] = v

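
Taken together, these hunks stamp every registered Storm library function and type method with a `_storm_funcpath` attribute naming the path callers use to reach it (for library functions, a `$lib....` path), which the error-message changes elsewhere in this release can then surface. Below is a minimal, self-contained sketch of the same tagging pattern; `MiniRegistry` and `DemoLib` are illustrative stand-ins, not synapse APIs:

    # Illustrative sketch only: a tiny registry that tags callables with the
    # path used to reach them from Storm, mirroring the _storm_funcpath idea.
    class MiniRegistry:

        def registerLib(self, ctor):
            # Walk the declared locals and stamp each function with its $lib path.
            for info in ctor._storm_locals:
                rtype = info.get('type')
                if isinstance(rtype, dict) and rtype.get('type') == 'function':
                    func = getattr(ctor, rtype.get('_funcname'), None)
                    if func is not None:
                        funcpath = '.'.join(('lib',) + ctor._storm_lib_path + (info['name'],))
                        func._storm_funcpath = f'${funcpath}'
            return ctor

    class DemoLib:
        _storm_lib_path = ('demo',)
        _storm_locals = (
            {'name': 'echo', 'type': {'type': 'function', '_funcname': 'echo'}},
        )

        def echo(self, text):
            return text

    MiniRegistry().registerLib(DemoLib)
    print(DemoLib.echo._storm_funcpath)  # -> $lib.demo.echo
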
synapse/lib/version.py CHANGED
@@ -223,6 +223,6 @@ def reqVersion(valu, reqver,
 ##############################################################################
 # The following are touched during the release process by bumpversion.
 # Do not modify these directly.
-version = (2, 192, 0)
+version = (2, 193, 0)
 verstring = '.'.join([str(x) for x in version])
-commit = '8a442a534d27fff4c6189922c4b0ea85d332f18e'
+commit = 'ad17bf2e740ba11453b7827ce70c140f969a1430'
synapse/models/inet.py CHANGED
@@ -1442,7 +1442,11 @@ class InetModule(s_module.CoreModule):
                    'doc': 'A channel within a web service or instance such as slack or discord.'
                }),

-                ('inet:web:hashtag', ('str', {'lower': True, 'regex': r'^#\w[\w·]*(?<!·)$'}), {
+                ('inet:web:hashtag', ('str', {'lower': True, 'strip': True, 'regex': r'^#[^\p{Z}#]+$'}), {
+                    # regex explanation:
+                    # - starts with pound
+                    # - one or more non-whitespace/non-pound character
+                    # The minimum hashtag is a pound with a single non-whitespace character
                    'doc': 'A hashtag used in a web post.',
                }),

@@ -1728,6 +1732,9 @@ class InetModule(s_module.CoreModule):
                'template': {'service:base': 'object'},
                'props': (

+                    ('url', ('inet:url', {}), {
+                        'doc': 'The primary URL associated with the {service:base}.'}),
+
                    ('status', ('inet:service:object:status', {}), {
                        'doc': 'The status of the {service:base}.'}),

@@ -1811,6 +1818,9 @@

            ('inet:email:message', {}, (

+                ('id', ('str', {'strip': True}), {
+                    'doc': 'The ID parsed from the "message-id" header.'}),
+
                ('to', ('inet:email', {}), {
                    'doc': 'The email address of the recipient.'}),

@@ -2171,6 +2181,9 @@
                ('dst:ssh:key', ('crypto:key', {}), {
                    'doc': 'The key sent by the server as part of an SSH session setup.'}),

+                ('capture:host', ('it:host', {}), {
+                    'doc': 'The host which captured the flow.'}),
+
                ('raw', ('data', {}), {
                    'doc': 'A raw record used to create the flow which may contain additional protocol details.'}),
            )),
@@ -2194,6 +2207,9 @@
                ('host', ('it:host', {}), {
                    'doc': 'The host that used the network egress.'}),

+                ('host:iface', ('inet:iface', {}), {
+                    'doc': 'The interface which the host used to connect out via the egress.'}),
+
                ('account', ('inet:service:account', {}), {
                    'doc': 'The service account which used the client address to egress.'}),

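
The `inet:web:hashtag` change above replaces the old `\w`-based pattern with `^#[^\p{Z}#]+$`, which accepts any run of characters after the pound sign as long as it contains no Unicode separator (space) characters and no additional `#`. A quick sketch of the new behavior, assuming the third-party `regex` module (the stdlib `re` module does not support `\p{Z}` property classes):

    import regex  # third-party 'regex' package; stdlib re lacks \p{Z}

    HASHTAG = regex.compile(r'^#[^\p{Z}#]+$')

    assert HASHTAG.match('#synapse')            # plain ASCII still matches
    assert HASHTAG.match('#データ')              # non-ASCII characters are allowed
    assert HASHTAG.match('#c2-infra')           # punctuation other than '#' is allowed
    assert HASHTAG.match('#foo bar') is None    # separator characters are rejected
    assert HASHTAG.match('#foo#bar') is None    # a second pound sign is rejected
    assert HASHTAG.match('#') is None           # at least one character must follow the pound
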
synapse/models/infotech.py CHANGED
@@ -655,6 +655,7 @@ class ItModule(s_module.CoreModule):

                ('it:host', ('guid', {}), {
                    'interfaces': ('inet:service:object',),
+                    'template': {'service:base': 'host'},
                    'doc': 'A GUID that represents a host or system.'}),

                ('it:log:event:type:taxonomy', ('taxonomy', {}), {
@@ -784,6 +785,7 @@
                }),
                ('it:dev:repo', ('guid', {}), {
                    'interfaces': ('inet:service:object',),
+                    'template': {'service:base': 'repository'},
                    'doc': 'A version control system instance.',
                }),
                ('it:dev:repo:remote', ('guid', {}), {
@@ -791,10 +793,12 @@
                }),
                ('it:dev:repo:branch', ('guid', {}), {
                    'interfaces': ('inet:service:object',),
+                    'template': {'service:base': 'repository branch'},
                    'doc': 'A branch in a version control system instance.',
                }),
                ('it:dev:repo:commit', ('guid', {}), {
                    'interfaces': ('inet:service:object',),
+                    'template': {'service:base': 'repository commit'},
                    'doc': 'A commit to a repository.',
                }),
                ('it:dev:repo:diff', ('guid', {}), {
@@ -802,18 +806,22 @@
                }),
                ('it:dev:repo:issue:label', ('guid', {}), {
                    'interfaces': ('inet:service:object',),
+                    'template': {'service:base': 'repository issue label'},
                    'doc': 'A label applied to a repository issue.',
                }),
                ('it:dev:repo:issue', ('guid', {}), {
                    'interfaces': ('inet:service:object',),
+                    'template': {'service:base': 'repository issue'},
                    'doc': 'An issue raised in a repository.',
                }),
                ('it:dev:repo:issue:comment', ('guid', {}), {
                    'interfaces': ('inet:service:object',),
+                    'template': {'service:base': 'repository issue comment'},
                    'doc': 'A comment on an issue in a repository.',
                }),
                ('it:dev:repo:diff:comment', ('guid', {}), {
                    'interfaces': ('inet:service:object',),
+                    'template': {'service:base': 'repository diff comment'},
                    'doc': 'A comment on a diff in a repository.',
                }),
                ('it:prod:soft', ('guid', {}), {
@@ -963,12 +971,12 @@
                }),
                ('it:exec:pipe', ('guid', {}), {
                    'interfaces': ('it:host:activity',),
-                    'doc': 'A named pipe created by a process at runtime.',
-                }),
+                    'doc': 'A named pipe created by a process at runtime.'}),
+
                ('it:exec:url', ('guid', {}), {
                    'interfaces': ('it:host:activity',),
-                    'doc': 'An instance of a host requesting a URL.',
-                }),
+                    'doc': 'An instance of a host requesting a URL using any protocol scheme.'}),
+
                ('it:exec:bind', ('guid', {}), {
                    'interfaces': ('it:host:activity',),
                    'doc': 'An instance of a host binding a listening port.',
@@ -1046,6 +1054,7 @@

                ('it:host:tenancy', ('guid', {}), {
                    'interfaces': ('inet:service:object',),
+                    'template': {'service:base': 'host tenancy'},
                    'doc': 'A time window where a host was a tenant run by another host.'}),

                ('it:software:image:type:taxonomy', ('taxonomy', {}), {
@@ -1054,6 +1063,7 @@

                ('it:software:image', ('guid', {}), {
                    'interfaces': ('inet:service:object',),
+                    'template': {'service:base': 'software image'},
                    'doc': 'The base image used to create a container or OS.'}),

                ('it:storage:mount', ('guid', {}), {
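
The repeated `'template'` additions above give each form a per-form noun for the `{service:base}` placeholder used by the shared `inet:service:object` property docs (visible in the inet.py hunks, e.g. 'The status of the {service:base}.'). A rough sketch of the substitution is shown below; the `fill()` helper is hypothetical and only illustrates the idea, since the real rendering happens inside the synapse model machinery:

    # Hypothetical helper for illustration; synapse performs this substitution
    # internally when building property docs from interface templates.
    def fill(doc, template):
        for name, valu in template.items():
            doc = doc.replace('{%s}' % (name,), valu)
        return doc

    DOC = 'The status of the {service:base}.'

    print(fill(DOC, {'service:base': 'object'}))            # interface default
    print(fill(DOC, {'service:base': 'repository issue'}))  # it:dev:repo:issue
    print(fill(DOC, {'service:base': 'host tenancy'}))      # it:host:tenancy
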
synapse/models/risk.py CHANGED
@@ -242,9 +242,18 @@ class RiskModule(s_module.CoreModule):
                (('risk:mitigation', 'uses', 'inet:service:rule'), {
                    'doc': 'The mitigation uses the service rule.'}),

+                (('risk:mitigation', 'uses', 'it:prod:softver'), {
+                    'doc': 'The mitigation uses the software version.'}),
+
+                (('risk:mitigation', 'uses', 'it:prod:hardware'), {
+                    'doc': 'The mitigation uses the hardware.'}),
+
                (('risk:leak', 'leaked', None), {
                    'doc': 'The leak included the disclosure of the target node.'}),

+                (('risk:leak', 'enabled', 'risk:leak'), {
+                    'doc': 'The source leak enabled the target leak to occur.'}),
+
                (('risk:extortion', 'leveraged', None), {
                    'doc': 'The extortion event was based on attacker access to the target node.'}),

@@ -407,10 +416,12 @@
                    'doc': 'A description of the mitigation approach for the vulnerability.'}),

                ('software', ('it:prod:softver', {}), {
-                    'doc': 'A software version which implements a fix for the vulnerability.'}),
+                    'deprecated': True,
+                    'doc': 'Deprecated. Please use risk:mitigation -(uses)> it:prod:softver.'}),

                ('hardware', ('it:prod:hardware', {}), {
-                    'doc': 'A hardware version which implements a fix for the vulnerability.'}),
+                    'deprecated': True,
+                    'doc': 'Deprecated. Please use risk:mitigation -(uses)> it:prod:hardware.'}),

                ('reporter', ('ou:org', {}), {
                    'doc': 'The organization reporting on the mitigation.'}),
@@ -1034,6 +1045,9 @@
                ('leaker', ('ps:contact', {}), {
                    'doc': 'The identity which leaked the information.'}),

+                ('recipient', ('ps:contact', {}), {
+                    'doc': 'The identity which received the leaked information.'}),
+
                ('type', ('risk:leak:type:taxonomy', {}), {
                    'doc': 'A type taxonomy for the leak.'}),

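
For data moving off the now-deprecated `risk:mitigation:software` and `risk:mitigation:hardware` properties, the replacement modeling is a lightweight `uses` edge from the mitigation to the software or hardware node. A hedged sketch of what that looks like when driving Storm from Python, the way the tests in this diff do; the names and values are placeholders for illustration:

    # Hedged sketch: Storm queries run through a Cortex as in the test code above.
    async def migrate_mitigation_links(core):

        # Create a mitigation and a software version node.
        await core.nodes('[ risk:mitigation=* :name="patch the thing" ]')
        await core.nodes('[ it:prod:softver=* :vers="1.2.3" ]')

        # New modeling: link the mitigation to the fix with a 'uses' edge.
        await core.nodes('risk:mitigation:name="patch the thing" [ +(uses)> { it:prod:softver:vers="1.2.3" } ]')

        # Pivot across the edge instead of lifting the deprecated :software property.
        return await core.nodes('risk:mitigation:name="patch the thing" -(uses)> it:prod:softver')
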
synapse/tests/test_cortex.py CHANGED
@@ -484,7 +484,7 @@ class CortexTest(s_t_utils.SynTest):
            self.len(0, mods)
            self.len(0, core.modsbyiface.get('lookup'))

-            await core.loadStormPkg(pkgdef)
+            core.loadStormPkg(pkgdef)

            mods = await core.getStormIfaces('lookup')
            self.len(1, mods)
@@ -513,7 +513,7 @@
            vals = [r async for r in core.view.callStormIface('boom', todo)]
            self.eq((), vals)

-            await core._dropStormPkg(pkgdef)
+            core._dropStormPkg(pkgdef)
            self.none(core.modsbyiface.get('lookup'))

            mods = await core.getStormIfaces('lookup')
@@ -558,7 +558,7 @@
            nodes = await core.nodes('foo@bar.com foo@bar.com', opts={'mode': 'lookup'})
            self.eq(['inet:email', 'inet:email'], [n.ndef[0] for n in nodes])

-            await core.loadStormPkg(pkgdef)
+            core.loadStormPkg(pkgdef)
            self.len(1, await core.getStormIfaces('search'))

            todo = s_common.todo('search', ('foo@bar.com',))
synapse/tests/test_exc.py CHANGED
@@ -27,6 +27,9 @@ class ExcTest(s_t_utils.SynTest):
        e.setdefault('defv', 2)
        self.eq("SynErr: defv=1 foo='words' hehe=1234 mesg='words'", str(e))

+        e.update({'foo': 'newwords', 'bar': 'baz'})
+        self.eq("SynErr: bar='baz' defv=1 foo='newwords' hehe=1234 mesg='words'", str(e))
+
        self.eq(e.errname, 'SynErr')

        e2 = s_exc.BadTypeValu(mesg='haha')
synapse/tests/test_lib_agenda.py CHANGED
@@ -1,3 +1,4 @@
+import time
 import asyncio
 import hashlib
 import datetime
@@ -361,7 +362,9 @@ class AgendaTest(s_t_utils.SynTest):

        appt = await agenda.get(guid)
        self.eq(appt.isrunning, False)
-        self.eq(appt.lastresult, "raised exception StormRaise: errname='OmgWtfBbq' mesg='boom'")
+        self.isin("raised exception StormRaise: errname='OmgWtfBbq'", appt.lastresult)
+        self.isin("highlight={'hash': '6736b8252d9413221a9b693b2b19cf53'", appt.lastresult)
+        self.isin("mesg='boom'", appt.lastresult)

        # Test setting the global enable/disable flag
        await agenda.delete(guid)
@@ -824,6 +827,159 @@
                data = stream.read()
                self.isin("_Appt.edits() Invalid attribute received: invalid = 'newp'", data)

+    async def test_agenda_promotions(self):
+        # Adjust this knob for the number of cron jobs you want to test. Below
+        # are some average run times from my dev box
+        # 100 -> ~15s
+        # 250 -> ~18s
+        # 500 -> ~22s
+        # 5000 -> ~88s
+        NUMJOBS = 100
+
+        async with self.getTestAha() as aha:
+
+            conf00 = {
+                'aha:provision': await aha.addAhaSvcProv('00.cortex')
+            }
+
+            async with self.getTestCore(conf=conf00) as core00:
+                self.false(core00.conf.get('mirror'))
+
+                msgs = await core00.stormlist('[it:dev:str=foo]')
+                self.stormHasNoWarnErr(msgs)
+
+                # Forward wind agenda to two minutes past the hour so we don't hit any weird timing windows
+                tick = core00.agenda._getNowTick()
+                now = time.gmtime(int(tick))
+                diff = (60 - now.tm_min) * 60
+                core00.agenda._addTickOff(diff + 120)
+
+                # Add NUMJOBS cron jobs that starts every hour
+                q = '''
+                for $ii in $lib.range($numjobs) {
+                    cron.add --name `CRON{$ii}` --hour +1 { $lib.time.sleep(90) }
+                }
+                '''
+                opts = {'vars': {'numjobs': NUMJOBS}}
+                await core00.callStorm(q, opts=opts)
+
+                prov01 = {'mirror': '00.cortex'}
+                conf01 = {
+                    'aha:provision': await aha.addAhaSvcProv('01.cortex', provinfo=prov01),
+                }
+
+                async with self.getTestCore(conf=conf01) as core01:
+                    # Advance the ticks so the cronjob starts sooner
+                    core00.agenda._addTickOff(3600)
+
+                    # Sync agenda ticks
+                    diff = core00.agenda._getNowTick() - core01.agenda._getNowTick()
+                    core01.agenda._addTickOff(diff)
+
+                    mesgs = []
+                    async for mesg in core00.behold():
+                        mesgs.append(mesg)
+                        if len(mesgs) >= NUMJOBS:
+                            break
+
+                    for mesg in mesgs:
+                        self.eq(mesg['event'], 'cron:start')
+
+                    # Inspect crons and tasks
+                    crons00 = await core00.callStorm('return($lib.cron.list())')
+                    self.len(NUMJOBS, crons00)
+                    # isrunning is synced via nexus so it should be true for both cortexes
+                    for cron in crons00:
+                        self.true(cron.get('isrunning'))
+
+                    cronidens = [k['iden'] for k in crons00]
+
+                    await core01.sync()
+
+                    crons01 = await core01.callStorm('return($lib.cron.list())')
+                    self.len(NUMJOBS, crons01)
+                    # isrunning is synced via nexus so it should be true for both cortexes
+                    for cron in crons01:
+                        self.true(cron.get('isrunning'))
+
+                    tasks00 = await core00.callStorm('return($lib.ps.list())')
+                    # 101 tasks: one for the main task and NUMJOBS for the cronjob instances
+                    self.len(NUMJOBS + 1, tasks00)
+                    self.eq(tasks00[0]['info']['query'], '[it:dev:str=foo]')
+                    for idx, task in enumerate(tasks00):
+                        if idx == 0:
+                            continue
+
+                        self.isin(task['info']['iden'], cronidens)
+                        self.eq(task['info']['query'], '$lib.time.sleep(90)')
+
+                    # No tasks running on the follower
+                    tasks01 = await core01.callStorm('return($lib.ps.list())')
+                    self.len(0, tasks01)
+
+                    with self.getLoggerStream('synapse.lib.agenda', mesg='name=CRON99') as stream:
+                        # Promote and inspect cortex status
+                        await core01.promote(graceful=True)
+                        self.false(core00.isactive)
+                        self.true(core01.isactive)
+
+                    stream.seek(0)
+                    data = stream.read()
+                    for ii in range(NUMJOBS):
+                        self.isin(f' name=CRON{ii} with result "cancelled" took ', data)
+
+                    # Sync the (now) follower so the isrunning status gets updated to false on both cortexes
+                    await core00.sync()
+
+                    crons00 = await core00.callStorm('return($lib.cron.list())')
+                    self.len(NUMJOBS, crons00)
+                    for cron in crons00:
+                        self.false(cron.get('isrunning'))
+
+                    crons01 = await core01.callStorm('return($lib.cron.list())')
+                    self.len(NUMJOBS, crons01)
+                    for cron in crons01:
+                        self.false(cron.get('isrunning'))
+
+                    # Bump the ticks on core01 so the cron jobs start
+                    core01.agenda._addTickOff(3600)
+
+                    mesgs = []
+                    async for mesg in core01.behold():
+                        mesgs.append(mesg)
+                        if len(mesgs) >= NUMJOBS:
+                            break
+
+                    for mesg in mesgs:
+                        self.eq(mesg['event'], 'cron:start')
+
+                    # Sync the follower to get the latest isrunning status
+                    await core00.sync()
+
+                    crons00 = await core00.callStorm('return($lib.cron.list())')
+                    self.len(NUMJOBS, crons00)
+                    # Cronjobs are running so true on both cortexes
+                    for cron in crons00:
+                        self.true(cron.get('isrunning'))
+
+                    crons01 = await core01.callStorm('return($lib.cron.list())')
+                    self.len(NUMJOBS, crons01)
+                    # Cronjobs are running so true on both cortexes
+                    for cron in crons01:
+                        self.true(cron.get('isrunning'))
+
+                    tasks00 = await core00.callStorm('return($lib.ps.list())')
+                    # This task is the main task from before promotion
+                    self.len(1, tasks00)
+                    self.eq(tasks00[0]['info']['query'], '[it:dev:str=foo]')
+
+                    tasks01 = await core01.callStorm('return($lib.ps.list())')
+                    # The cronjob instances are the only tasks
+                    self.len(NUMJOBS, tasks01)
+                    for task in tasks01:
+                        self.isin(task['info']['iden'], cronidens)
+                        self.eq(task['info']['query'], '$lib.time.sleep(90)')
+
    async def test_cron_kill(self):
        async with self.getTestCore() as core:

synapse/tests/test_lib_ast.py CHANGED
@@ -131,7 +131,7 @@ class AstTest(s_test.SynTest):
            self.stormIsInWarn('Storm search interface is not enabled!', msgs)

        async with self.getTestCore() as core:
-            await core.loadStormPkg({
+            core.loadStormPkg({
                'name': 'testsearch',
                'modules': [
                    {'name': 'testsearch', 'interfaces': ['search'], 'storm': '''
@@ -3119,6 +3119,48 @@
            off, end = errm[1][1]['highlight']['offsets']
            self.eq('newp', text[off:end])

+            visi = (await core.addUser('visi'))['iden']
+            text = '$users=$lib.auth.users.list() $lib.print($users.0.profile)'
+            msgs = await core.stormlist(text, opts={'user': visi})
+            errm = [m for m in msgs if m[0] == 'err'][0]
+            off, end = errm[1][1]['highlight']['offsets']
+            self.eq('lib.print($users.0.profile)', text[off:end])
+
+            text = '$lib.len(foo, bar)'
+            msgs = await core.stormlist(text)
+            errm = [m for m in msgs if m[0] == 'err'][0]
+            off, end = errm[1][1]['highlight']['offsets']
+            self.eq('lib.len(foo, bar)', text[off:end])
+            self.stormIsInErr('$lib.len()', msgs)
+
+            text = '$foo=$lib.pkg.get $foo()'
+            msgs = await core.stormlist(text)
+            errm = [m for m in msgs if m[0] == 'err'][0]
+            off, end = errm[1][1]['highlight']['offsets']
+            self.eq('foo()', text[off:end])
+            self.stormIsInErr('$lib.pkg.get()', msgs)
+
+            text = '$obj = $lib.pipe.gen(${ $obj.put() }) $obj.put(foo, bar, baz)'
+            msgs = await core.stormlist(text)
+            errm = [m for m in msgs if m[0] == 'err'][0]
+            off, end = errm[1][1]['highlight']['offsets']
+            self.eq('obj.put(foo, bar, baz)', text[off:end])
+            self.stormIsInErr('pipe.put()', msgs)
+
+            text = '$lib.gen.campaign(foo, bar, baz)'
+            msgs = await core.stormlist(text)
+            errm = [m for m in msgs if m[0] == 'err'][0]
+            off, end = errm[1][1]['highlight']['offsets']
+            self.eq('lib.gen.campaign(foo, bar, baz)', text[off:end])
+            self.stormIsInErr('$lib.gen.campaign()', msgs)
+
+            text = '$gen = $lib.gen.campaign $gen(foo, bar, baz)'
+            msgs = await core.stormlist(text)
+            errm = [m for m in msgs if m[0] == 'err'][0]
+            off, end = errm[1][1]['highlight']['offsets']
+            self.eq('gen(foo, bar, baz)', text[off:end])
+            self.stormIsInErr('$lib.gen.campaign()', msgs)
+
    async def test_ast_bulkedges(self):

        async with self.getTestCore() as core:
synapse/tests/test_lib_cell.py CHANGED
@@ -1433,6 +1433,11 @@ class CellTest(s_t_utils.SynTest):
            with mock.patch('os.stat', diffdev):
                await self.asyncraises(s_exc.LowSpace, proxy.runBackup())

+            user = await core.auth.getUserByName('root')
+            with self.raises(s_exc.SynErr) as cm:
+                await core.iterNewBackupArchive(user)
+            self.isin('This API must be called via a CellApi', cm.exception.get('mesg'))
+
            async def err(*args, **kwargs):
                raise RuntimeError('boom')

@@ -2298,11 +2303,13 @@
            # Backup the mirror (core01) which points to the core00
            async with await axon00.upload() as upfd:
                async with core01.getLocalProxy() as prox:
+                    tot_chunks = 0
                    async for chunk in prox.iterNewBackupArchive():
                        await upfd.write(chunk)
+                        tot_chunks += len(chunk)

                size, sha256 = await upfd.save()
-                await asyncio.sleep(0)
+                self.eq(size, tot_chunks)

            furl = f'{url}{s_common.ehex(sha256)}'
            purl = await aha.addAhaSvcProv('00.mynewcortex')
@@ -3276,3 +3283,66 @@
        with self.raises(s_exc.BadState) as cm:
            await cell00.promote(graceful=True)
        self.isin('02.cell is not the current leader', cm.exception.get('mesg'))
+
+    async def test_stream_backup_exception(self):
+
+        with self.getTestDir() as dirn:
+            backdirn = os.path.join(dirn, 'backups')
+            coredirn = os.path.join(dirn, 'cortex')
+
+            conf = {'backup:dir': backdirn}
+            s_common.yamlsave(conf, coredirn, 'cell.yaml')
+
+            async with self.getTestCore(dirn=coredirn) as core:
+                async with core.getLocalProxy() as proxy:
+
+                    await proxy.runBackup(name='bkup')
+
+                    mock_proc = mock.Mock()
+                    mock_proc.join = mock.Mock()
+
+                    async def mock_executor(func, *args, **kwargs):
+                        if isinstance(func, mock.Mock) and func is mock_proc.join:
+                            raise Exception('boom')
+                        return mock_proc
+
+                    with mock.patch('synapse.lib.cell.s_coro.executor', mock_executor):
+                        with self.getAsyncLoggerStream('synapse.lib.cell', 'Error during backup streaming') as stream:
+                            with self.raises(Exception) as cm:
+                                async for _ in proxy.iterBackupArchive('bkup'):
+                                    pass
+                            self.true(await stream.wait(timeout=6))
+
+    async def test_iter_new_backup_archive(self):
+
+        with self.getTestDir() as dirn:
+            backdirn = os.path.join(dirn, 'backups')
+            coredirn = os.path.join(dirn, 'cortex')
+
+            conf = {'backup:dir': backdirn}
+            s_common.yamlsave(conf, coredirn, 'cell.yaml')
+
+            async with self.getTestCore(dirn=coredirn) as core:
+                async with core.getLocalProxy() as proxy:
+
+                    async def mock_runBackup(*args, **kwargs):
+                        raise Exception('backup failed')
+
+                    with mock.patch.object(s_cell.Cell, 'runBackup', mock_runBackup):
+                        with self.getAsyncLoggerStream('synapse.lib.cell', 'Removing') as stream:
+                            with self.raises(s_exc.SynErr) as cm:
+                                async for _ in proxy.iterNewBackupArchive('failedbackup', remove=True):
+                                    pass
+
+                            self.isin('backup failed', str(cm.exception))
+                            self.true(await stream.wait(timeout=6))
+
+                    path = os.path.join(backdirn, 'failedbackup')
+                    self.false(os.path.exists(path))
+
+                    self.false(core.backupstreaming)
+
+                    core.backupstreaming = True
+                    with self.raises(s_exc.BackupAlreadyRunning):
+                        async for _ in proxy.iterNewBackupArchive('newbackup', remove=True):
+                            pass