synapse 2.191.0__py311-none-any.whl → 2.193.0__py311-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of synapse might be problematic.
- synapse/axon.py +54 -23
- synapse/common.py +15 -0
- synapse/cortex.py +18 -20
- synapse/exc.py +6 -1
- synapse/lib/agenda.py +0 -2
- synapse/lib/ast.py +30 -12
- synapse/lib/cell.py +79 -85
- synapse/lib/cli.py +20 -11
- synapse/lib/nexus.py +2 -1
- synapse/lib/parser.py +1 -1
- synapse/lib/snap.py +4 -4
- synapse/lib/storm.py +34 -17
- synapse/lib/stormhttp.py +32 -35
- synapse/lib/stormlib/json.py +5 -2
- synapse/lib/stormtypes.py +121 -20
- synapse/lib/version.py +2 -2
- synapse/models/inet.py +17 -1
- synapse/models/infotech.py +14 -4
- synapse/models/risk.py +16 -2
- synapse/tests/test_axon.py +10 -0
- synapse/tests/test_cortex.py +55 -3
- synapse/tests/test_exc.py +3 -0
- synapse/tests/test_lib_agenda.py +157 -1
- synapse/tests/test_lib_ast.py +49 -1
- synapse/tests/test_lib_cell.py +106 -1
- synapse/tests/test_lib_httpapi.py +9 -2
- synapse/tests/test_lib_storm.py +72 -30
- synapse/tests/test_lib_stormhttp.py +57 -12
- synapse/tests/test_lib_stormlib_json.py +20 -0
- synapse/tests/test_lib_stormlib_scrape.py +2 -2
- synapse/tests/test_model_inet.py +40 -5
- synapse/tests/test_model_risk.py +2 -0
- synapse/tests/test_servers_univ.py +0 -12
- synapse/tests/test_tools_apikey.py +227 -0
- synapse/tests/test_tools_storm.py +95 -0
- synapse/tests/test_utils_getrefs.py +1 -1
- synapse/tools/apikey.py +93 -0
- synapse/utils/getrefs.py +14 -3
- synapse/vendor/cpython/lib/http/__init__.py +0 -0
- synapse/vendor/cpython/lib/http/cookies.py +59 -0
- synapse/vendor/cpython/lib/test/test_http_cookies.py +49 -0
- {synapse-2.191.0.dist-info → synapse-2.193.0.dist-info}/METADATA +2 -2
- {synapse-2.191.0.dist-info → synapse-2.193.0.dist-info}/RECORD +46 -41
- {synapse-2.191.0.dist-info → synapse-2.193.0.dist-info}/WHEEL +1 -1
- {synapse-2.191.0.dist-info → synapse-2.193.0.dist-info}/LICENSE +0 -0
- {synapse-2.191.0.dist-info → synapse-2.193.0.dist-info}/top_level.txt +0 -0
synapse/tests/test_lib_agenda.py
CHANGED
@@ -1,3 +1,4 @@
+import time
 import asyncio
 import hashlib
 import datetime
@@ -361,7 +362,9 @@ class AgendaTest(s_t_utils.SynTest):

         appt = await agenda.get(guid)
         self.eq(appt.isrunning, False)
-        self.
+        self.isin("raised exception StormRaise: errname='OmgWtfBbq'", appt.lastresult)
+        self.isin("highlight={'hash': '6736b8252d9413221a9b693b2b19cf53'", appt.lastresult)
+        self.isin("mesg='boom'", appt.lastresult)

         # Test setting the global enable/disable flag
         await agenda.delete(guid)
@@ -824,6 +827,159 @@ class AgendaTest(s_t_utils.SynTest):
             data = stream.read()
             self.isin("_Appt.edits() Invalid attribute received: invalid = 'newp'", data)

+    async def test_agenda_promotions(self):
+        # Adjust this knob for the number of cron jobs you want to test. Below
+        # are some average run times from my dev box
+        # 100 -> ~15s
+        # 250 -> ~18s
+        # 500 -> ~22s
+        # 5000 -> ~88s
+        NUMJOBS = 100
+
+        async with self.getTestAha() as aha:
+
+            conf00 = {
+                'aha:provision': await aha.addAhaSvcProv('00.cortex')
+            }
+
+            async with self.getTestCore(conf=conf00) as core00:
+                self.false(core00.conf.get('mirror'))
+
+                msgs = await core00.stormlist('[it:dev:str=foo]')
+                self.stormHasNoWarnErr(msgs)
+
+                # Forward wind agenda to two minutes past the hour so we don't hit any weird timing windows
+                tick = core00.agenda._getNowTick()
+                now = time.gmtime(int(tick))
+                diff = (60 - now.tm_min) * 60
+                core00.agenda._addTickOff(diff + 120)
+
+                # Add NUMJOBS cron jobs that starts every hour
+                q = '''
+                for $ii in $lib.range($numjobs) {
+                    cron.add --name `CRON{$ii}` --hour +1 { $lib.time.sleep(90) }
+                }
+                '''
+                opts = {'vars': {'numjobs': NUMJOBS}}
+                await core00.callStorm(q, opts=opts)
+
+                prov01 = {'mirror': '00.cortex'}
+                conf01 = {
+                    'aha:provision': await aha.addAhaSvcProv('01.cortex', provinfo=prov01),
+                }
+
+                async with self.getTestCore(conf=conf01) as core01:
+                    # Advance the ticks so the cronjob starts sooner
+                    core00.agenda._addTickOff(3600)
+
+                    # Sync agenda ticks
+                    diff = core00.agenda._getNowTick() - core01.agenda._getNowTick()
+                    core01.agenda._addTickOff(diff)
+
+                    mesgs = []
+                    async for mesg in core00.behold():
+                        mesgs.append(mesg)
+                        if len(mesgs) >= NUMJOBS:
+                            break
+
+                    for mesg in mesgs:
+                        self.eq(mesg['event'], 'cron:start')
+
+                    # Inspect crons and tasks
+                    crons00 = await core00.callStorm('return($lib.cron.list())')
+                    self.len(NUMJOBS, crons00)
+                    # isrunning is synced via nexus so it should be true for both cortexes
+                    for cron in crons00:
+                        self.true(cron.get('isrunning'))
+
+                    cronidens = [k['iden'] for k in crons00]
+
+                    await core01.sync()
+
+                    crons01 = await core01.callStorm('return($lib.cron.list())')
+                    self.len(NUMJOBS, crons01)
+                    # isrunning is synced via nexus so it should be true for both cortexes
+                    for cron in crons01:
+                        self.true(cron.get('isrunning'))
+
+                    tasks00 = await core00.callStorm('return($lib.ps.list())')
+                    # 101 tasks: one for the main task and NUMJOBS for the cronjob instances
+                    self.len(NUMJOBS + 1, tasks00)
+                    self.eq(tasks00[0]['info']['query'], '[it:dev:str=foo]')
+                    for idx, task in enumerate(tasks00):
+                        if idx == 0:
+                            continue
+
+                        self.isin(task['info']['iden'], cronidens)
+                        self.eq(task['info']['query'], '$lib.time.sleep(90)')
+
+                    # No tasks running on the follower
+                    tasks01 = await core01.callStorm('return($lib.ps.list())')
+                    self.len(0, tasks01)
+
+                    with self.getLoggerStream('synapse.lib.agenda', mesg='name=CRON99') as stream:
+                        # Promote and inspect cortex status
+                        await core01.promote(graceful=True)
+                        self.false(core00.isactive)
+                        self.true(core01.isactive)
+
+                    stream.seek(0)
+                    data = stream.read()
+                    for ii in range(NUMJOBS):
+                        self.isin(f' name=CRON{ii} with result "cancelled" took ', data)
+
+                    # Sync the (now) follower so the isrunning status gets updated to false on both cortexes
+                    await core00.sync()
+
+                    crons00 = await core00.callStorm('return($lib.cron.list())')
+                    self.len(NUMJOBS, crons00)
+                    for cron in crons00:
+                        self.false(cron.get('isrunning'))
+
+                    crons01 = await core01.callStorm('return($lib.cron.list())')
+                    self.len(NUMJOBS, crons01)
+                    for cron in crons01:
+                        self.false(cron.get('isrunning'))
+
+                    # Bump the ticks on core01 so the cron jobs start
+                    core01.agenda._addTickOff(3600)
+
+                    mesgs = []
+                    async for mesg in core01.behold():
+                        mesgs.append(mesg)
+                        if len(mesgs) >= NUMJOBS:
+                            break
+
+                    for mesg in mesgs:
+                        self.eq(mesg['event'], 'cron:start')
+
+                    # Sync the follower to get the latest isrunning status
+                    await core00.sync()
+
+                    crons00 = await core00.callStorm('return($lib.cron.list())')
+                    self.len(NUMJOBS, crons00)
+                    # Cronjobs are running so true on both cortexes
+                    for cron in crons00:
+                        self.true(cron.get('isrunning'))
+
+                    crons01 = await core01.callStorm('return($lib.cron.list())')
+                    self.len(NUMJOBS, crons01)
+                    # Cronjobs are running so true on both cortexes
+                    for cron in crons01:
+                        self.true(cron.get('isrunning'))
+
+                    tasks00 = await core00.callStorm('return($lib.ps.list())')
+                    # This task is the main task from before promotion
+                    self.len(1, tasks00)
+                    self.eq(tasks00[0]['info']['query'], '[it:dev:str=foo]')
+
+                    tasks01 = await core01.callStorm('return($lib.ps.list())')
+                    # The cronjob instances are the only tasks
+                    self.len(NUMJOBS, tasks01)
+                    for task in tasks01:
+                        self.isin(task['info']['iden'], cronidens)
+                        self.eq(task['info']['query'], '$lib.time.sleep(90)')
+
     async def test_cron_kill(self):
         async with self.getTestCore() as core:

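The promotions test above keeps the mirror's scheduler clock comparable with the leader's by measuring the difference between their current ticks and applying it as an offset before promotion. A minimal sketch of that alignment idea, using a stand-in clock class rather than the real Agenda (only the _getNowTick/_addTickOff names come from the diff; the class and variable names here are illustrative):

import time

class FakeAgendaClock:
    # Illustrative stand-in for the tick bookkeeping the test drives via
    # _getNowTick() / _addTickOff(); not the real synapse Agenda class.
    def __init__(self):
        self.offs = 0.0

    def _getNowTick(self):
        # wall-clock time plus any accumulated artificial offset
        return time.time() + self.offs

    def _addTickOff(self, offs):
        # push this clock forward (or back) by offs seconds
        self.offs += offs

leader = FakeAgendaClock()
mirror = FakeAgendaClock()

# leader jumps ahead one hour so hourly cron jobs fire sooner
leader._addTickOff(3600)

# align the mirror with the leader, as the test does before promotion
diff = leader._getNowTick() - mirror._getNowTick()
mirror._addTickOff(diff)

assert abs(leader._getNowTick() - mirror._getNowTick()) < 1.0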
synapse/tests/test_lib_ast.py
CHANGED
@@ -131,7 +131,7 @@ class AstTest(s_test.SynTest):
             self.stormIsInWarn('Storm search interface is not enabled!', msgs)

         async with self.getTestCore() as core:
-
+            core.loadStormPkg({
                 'name': 'testsearch',
                 'modules': [
                     {'name': 'testsearch', 'interfaces': ['search'], 'storm': '''
@@ -3113,6 +3113,54 @@ class AstTest(s_test.SynTest):
             off, end = errm[1][1]['highlight']['offsets']
             self.eq('haha', text[off:end])

+            text = '$lib.newp'
+            msgs = await core.stormlist(text)
+            errm = [m for m in msgs if m[0] == 'err'][0]
+            off, end = errm[1][1]['highlight']['offsets']
+            self.eq('newp', text[off:end])
+
+            visi = (await core.addUser('visi'))['iden']
+            text = '$users=$lib.auth.users.list() $lib.print($users.0.profile)'
+            msgs = await core.stormlist(text, opts={'user': visi})
+            errm = [m for m in msgs if m[0] == 'err'][0]
+            off, end = errm[1][1]['highlight']['offsets']
+            self.eq('lib.print($users.0.profile)', text[off:end])
+
+            text = '$lib.len(foo, bar)'
+            msgs = await core.stormlist(text)
+            errm = [m for m in msgs if m[0] == 'err'][0]
+            off, end = errm[1][1]['highlight']['offsets']
+            self.eq('lib.len(foo, bar)', text[off:end])
+            self.stormIsInErr('$lib.len()', msgs)
+
+            text = '$foo=$lib.pkg.get $foo()'
+            msgs = await core.stormlist(text)
+            errm = [m for m in msgs if m[0] == 'err'][0]
+            off, end = errm[1][1]['highlight']['offsets']
+            self.eq('foo()', text[off:end])
+            self.stormIsInErr('$lib.pkg.get()', msgs)
+
+            text = '$obj = $lib.pipe.gen(${ $obj.put() }) $obj.put(foo, bar, baz)'
+            msgs = await core.stormlist(text)
+            errm = [m for m in msgs if m[0] == 'err'][0]
+            off, end = errm[1][1]['highlight']['offsets']
+            self.eq('obj.put(foo, bar, baz)', text[off:end])
+            self.stormIsInErr('pipe.put()', msgs)
+
+            text = '$lib.gen.campaign(foo, bar, baz)'
+            msgs = await core.stormlist(text)
+            errm = [m for m in msgs if m[0] == 'err'][0]
+            off, end = errm[1][1]['highlight']['offsets']
+            self.eq('lib.gen.campaign(foo, bar, baz)', text[off:end])
+            self.stormIsInErr('$lib.gen.campaign()', msgs)
+
+            text = '$gen = $lib.gen.campaign $gen(foo, bar, baz)'
+            msgs = await core.stormlist(text)
+            errm = [m for m in msgs if m[0] == 'err'][0]
+            off, end = errm[1][1]['highlight']['offsets']
+            self.eq('gen(foo, bar, baz)', text[off:end])
+            self.stormIsInErr('$lib.gen.campaign()', msgs)
+
     async def test_ast_bulkedges(self):

         async with self.getTestCore() as core:
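The new assertions above read the highlight offsets that Storm error messages now carry for failing $lib calls and check them against the original query text. A small hypothetical helper showing how a client could use the same offsets to underline the offending span; the ('err', (name, info)) message shape and info['highlight']['offsets'] come from the test, while the helper itself and the hand-built sample message below are made up for illustration:

def underline_error(text, msgs):
    # Return the query text with a caret line under the span reported by
    # the first error message's highlight offsets, or None if there is none.
    for mesg in msgs:
        if mesg[0] != 'err':
            continue
        info = mesg[1][1]
        highlight = info.get('highlight')
        if highlight is None:
            return None
        off, end = highlight['offsets']
        return text + '\n' + ' ' * off + '^' * (end - off)
    return None

# Example with a hand-built message; for '$lib.newp' the test expects the
# offsets to cover the 'newp' attribute name.
msgs = [('err', ('NoSuchName', {'highlight': {'offsets': (5, 9)}}))]
print(underline_error('$lib.newp', msgs))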
synapse/tests/test_lib_cell.py
CHANGED
@@ -125,6 +125,10 @@ class EchoAuthApi(s_cell.CellApi):

 class EchoAuth(s_cell.Cell):
     cellapi = EchoAuthApi
+    # non-default commit / version / verstring
+    COMMIT = 'mycommit'
+    VERSION = (1, 2, 3)
+    VERSTRING = '1.2.3'

     async def answer(self):
         return 42
@@ -815,6 +819,37 @@ class CellTest(s_t_utils.SynTest):
             https = netw.get('https')
             self.eq(https, http_info)

+        # Mirrors & ready flags
+        async with self.getTestAha() as aha:  # type: s_aha.AhaCell
+
+            with self.getTestDir() as dirn:
+                cdr0 = s_common.genpath(dirn, 'cell00')
+                cdr1 = s_common.genpath(dirn, 'cell01')
+                cell00 = await aha.enter_context(self.addSvcToAha(aha, '00.cell', EchoAuth,
+                                                                  dirn=cdr0))  # type: EchoAuth
+                # Ensure we have a nexus transaction
+                await cell00.sync()
+                cell01 = await aha.enter_context(self.addSvcToAha(aha, '01.cell', EchoAuth,
+                                                                  dirn=cdr1,
+                                                                  provinfo={'mirror': 'cell'}))  # type: EchoAuth
+
+                self.true(await asyncio.wait_for(cell01.nexsroot.ready.wait(), timeout=12))
+                await cell01.sync()
+
+                cnfo0 = await cell00.getCellInfo()
+                cnfo1 = await cell01.getCellInfo()
+                self.true(cnfo0['cell']['ready'])
+                self.false(cnfo0['cell']['uplink'])
+                self.none(cnfo0['cell']['mirror'])
+                self.eq(cnfo0['cell']['version'], (1, 2, 3))
+
+                self.true(cnfo1['cell']['ready'])
+                self.true(cnfo1['cell']['uplink'])
+                self.eq(cnfo1['cell']['mirror'], 'aha://root@cell...')
+                self.eq(cnfo1['cell']['version'], (1, 2, 3))
+
+                self.eq(cnfo0['cell']['nexsindx'], cnfo1['cell']['nexsindx'])
+
     async def test_cell_dyncall(self):

         with self.getTestDir() as dirn:
@@ -1398,6 +1433,11 @@ class CellTest(s_t_utils.SynTest):
             with mock.patch('os.stat', diffdev):
                 await self.asyncraises(s_exc.LowSpace, proxy.runBackup())

+            user = await core.auth.getUserByName('root')
+            with self.raises(s_exc.SynErr) as cm:
+                await core.iterNewBackupArchive(user)
+            self.isin('This API must be called via a CellApi', cm.exception.get('mesg'))
+
             async def err(*args, **kwargs):
                 raise RuntimeError('boom')

@@ -2263,11 +2303,13 @@ class CellTest(s_t_utils.SynTest):
             # Backup the mirror (core01) which points to the core00
             async with await axon00.upload() as upfd:
                 async with core01.getLocalProxy() as prox:
+                    tot_chunks = 0
                     async for chunk in prox.iterNewBackupArchive():
                         await upfd.write(chunk)
+                        tot_chunks += len(chunk)

                 size, sha256 = await upfd.save()
-
+                self.eq(size, tot_chunks)

             furl = f'{url}{s_common.ehex(sha256)}'
             purl = await aha.addAhaSvcProv('00.mynewcortex')
@@ -3241,3 +3283,66 @@ class CellTest(s_t_utils.SynTest):
             with self.raises(s_exc.BadState) as cm:
                 await cell00.promote(graceful=True)
             self.isin('02.cell is not the current leader', cm.exception.get('mesg'))
+
+    async def test_stream_backup_exception(self):
+
+        with self.getTestDir() as dirn:
+            backdirn = os.path.join(dirn, 'backups')
+            coredirn = os.path.join(dirn, 'cortex')
+
+            conf = {'backup:dir': backdirn}
+            s_common.yamlsave(conf, coredirn, 'cell.yaml')
+
+            async with self.getTestCore(dirn=coredirn) as core:
+                async with core.getLocalProxy() as proxy:
+
+                    await proxy.runBackup(name='bkup')
+
+                    mock_proc = mock.Mock()
+                    mock_proc.join = mock.Mock()
+
+                    async def mock_executor(func, *args, **kwargs):
+                        if isinstance(func, mock.Mock) and func is mock_proc.join:
+                            raise Exception('boom')
+                        return mock_proc
+
+                    with mock.patch('synapse.lib.cell.s_coro.executor', mock_executor):
+                        with self.getAsyncLoggerStream('synapse.lib.cell', 'Error during backup streaming') as stream:
+                            with self.raises(Exception) as cm:
+                                async for _ in proxy.iterBackupArchive('bkup'):
+                                    pass
+                            self.true(await stream.wait(timeout=6))
+
+    async def test_iter_new_backup_archive(self):
+
+        with self.getTestDir() as dirn:
+            backdirn = os.path.join(dirn, 'backups')
+            coredirn = os.path.join(dirn, 'cortex')
+
+            conf = {'backup:dir': backdirn}
+            s_common.yamlsave(conf, coredirn, 'cell.yaml')
+
+            async with self.getTestCore(dirn=coredirn) as core:
+                async with core.getLocalProxy() as proxy:
+
+                    async def mock_runBackup(*args, **kwargs):
+                        raise Exception('backup failed')
+
+                    with mock.patch.object(s_cell.Cell, 'runBackup', mock_runBackup):
+                        with self.getAsyncLoggerStream('synapse.lib.cell', 'Removing') as stream:
+                            with self.raises(s_exc.SynErr) as cm:
+                                async for _ in proxy.iterNewBackupArchive('failedbackup', remove=True):
+                                    pass
+
+                            self.isin('backup failed', str(cm.exception))
+                            self.true(await stream.wait(timeout=6))
+
+                    path = os.path.join(backdirn, 'failedbackup')
+                    self.false(os.path.exists(path))
+
+                    self.false(core.backupstreaming)
+
+                    core.backupstreaming = True
+                    with self.raises(s_exc.BackupAlreadyRunning):
+                        async for _ in proxy.iterNewBackupArchive('newbackup', remove=True):
+                            pass
synapse/tests/test_lib_httpapi.py
CHANGED
@@ -1744,8 +1744,12 @@ class HttpApiTest(s_tests.SynTest):

         with self.getStructuredAsyncLoggerStream(logname, 'api/v1/auth/adduser') as stream:

+            headers = {
+                'X-Forwarded-For': '1.2.3.4',
+                'User-Agent': 'test_request_logging',
+            }
             async with sess.post(f'https://root:root@localhost:{port}/api/v1/auth/adduser',
-                                 json=info, headers=
+                                 json=info, headers=headers) as resp:
                 item = await resp.json()
                 self.nn(item.get('result').get('iden'))
                 visiiden = item['result']['iden']
@@ -1756,6 +1760,8 @@
             self.eq(mesg.get('uri'), '/api/v1/auth/adduser')
             self.eq(mesg.get('username'), 'root')
             self.eq(mesg.get('user'), core.auth.rootuser.iden)
+            self.isin('headers', mesg)
+            self.eq(mesg['headers'].get('user-agent'), 'test_request_logging')
             self.isin('remoteip', mesg)
             self.isin('(root)', mesg.get('message'))
             self.isin('200 POST /api/v1/auth/adduser', mesg.get('message'))
@@ -1763,12 +1769,13 @@

         # No auth provided
         with self.getStructuredAsyncLoggerStream(logname, 'api/v1/active') as stream:
-            async with sess.get(f'https://root:root@localhost:{port}/api/v1/active') as resp:
+            async with sess.get(f'https://root:root@localhost:{port}/api/v1/active', skip_auto_headers=['User-Agent']) as resp:
                 self.eq(resp.status, 200)
             self.true(await stream.wait(6))

             mesg = get_mesg(stream)
             self.eq(mesg.get('uri'), '/api/v1/active')
+            self.notin('headers', mesg)
             self.notin('username', mesg)
             self.notin('user', mesg)
             self.isin('remoteip', mesg)
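The test_lib_cell.py changes above show getCellInfo() now reporting 'ready', 'uplink', 'mirror', 'version' and 'nexsindx' under the 'cell' key, and the test compares nexsindx across a leader and its mirror. A rough sketch of how a caller might use those same fields to decide whether a mirror has caught up; the helper name and the monitoring use case are assumptions, and only the getCellInfo() keys are taken from the test:

async def mirror_caught_up(leader, mirror):
    # Compare the cell info reported by a leader and a mirror; True when the
    # mirror is ready, has an uplink, and sits at the same nexus index.
    lead = (await leader.getCellInfo())['cell']
    mirr = (await mirror.getCellInfo())['cell']

    if not (mirr.get('ready') and mirr.get('uplink')):
        return False

    # identical nexus indexes mean the mirror has applied every change
    return lead.get('nexsindx') == mirr.get('nexsindx')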
synapse/tests/test_lib_storm.py
CHANGED
@@ -4,6 +4,7 @@ import asyncio
 import datetime
 import itertools
 import urllib.parse as u_parse
+import unittest.mock as mock

 import synapse.exc as s_exc
 import synapse.common as s_common
@@ -646,32 +647,6 @@ class StormTest(s_t_utils.SynTest):
             self.none(task['info'].get('opts'))
             self.eq(core.view.iden, task['info'].get('view'))

-            # test the parallel command
-            nodes = await core.nodes('parallel --size 4 { [ ou:org=* ] }')
-            self.len(4, nodes)
-
-            # check that subquery validation happens
-            with self.raises(s_exc.NoSuchVar):
-                await core.nodes('parallel --size 4 { [ ou:org=$foo ] }')
-
-            # check that an exception on inbound percolates correctly
-            with self.raises(s_exc.BadTypeValu):
-                await core.nodes('[ ou:org=* ou:org=foo ] | parallel { [:name=bar] }')
-
-            # check that an exception in the parallel pipeline percolates correctly
-            with self.raises(s_exc.BadTypeValu):
-                await core.nodes('parallel { [ou:org=foo] }')
-
-            nodes = await core.nodes('ou:org | parallel {[ :name=foo ]}')
-            self.true(all([n.get('name') == 'foo' for n in nodes]))
-
-            # Runtsafety test
-            q = '[ inet:fqdn=www.vertex.link ] $q=:domain | parallel $q'
-            await self.asyncraises(s_exc.StormRuntimeError, core.nodes(q))
-
-            nodes = await core.nodes('ou:org | parallel ${ $foo=bar [ :name=$foo ]}')
-            self.true(all([n.get('name') == 'bar' for n in nodes]))
-
             # test $lib.exit() and the StormExit handlers
             msgs = [m async for m in core.view.storm('$lib.exit()')]
             self.eq(msgs[-1][0], 'fini')
@@ -789,10 +764,10 @@ class StormTest(s_t_utils.SynTest):
                 },
             )
         }
-
+        core.loadStormPkg(emptypkg)
         await core.addStormPkg(strverpkg)

-
+        core.loadStormPkg(pkg0)

         await core.nodes('$lib.import(foo.baz)', opts=opts)
         await core.nodes('$lib.import(foo.baz, reqvers="==0.0.1")', opts=opts)
@@ -3437,6 +3412,73 @@ class StormTest(s_t_utils.SynTest):
             q = '[ inet:fqdn=www.vertex.link ] $q=:domain | tee $q'
             await self.asyncraises(s_exc.StormRuntimeError, core.nodes(q))

+    async def test_storm_parallel(self):
+
+        async with self.getTestCore() as core:
+
+            nodes = await core.nodes('parallel --size 4 { [ ou:org=* ] }')
+            self.len(4, nodes)
+
+            # check that subquery validation happens
+            with self.raises(s_exc.NoSuchVar):
+                await core.nodes('parallel --size 4 { [ ou:org=$foo ] }')
+
+            # check that an exception on inbound percolates correctly
+            with self.raises(s_exc.BadTypeValu):
+                await core.nodes('[ ou:org=(foo,) ou:org=foo ] | parallel { [:name=bar] }')
+
+            with self.raises(s_exc.BadTypeValu):
+                await core.nodes('[ ou:org=(foo,) ou:org=foo ] | parallel --size 1 { [:name=bar] }')
+
+            # check that an exception in the parallel pipeline percolates correctly
+            with self.raises(s_exc.BadTypeValu):
+                await core.nodes('parallel { [ou:org=foo] }')
+
+            nodes = await core.nodes('ou:org | parallel {[ :name=foo ]}')
+            self.true(all([n.get('name') == 'foo' for n in nodes]))
+
+            # Runtsafety test
+            q = '[ inet:fqdn=www.vertex.link ] $q=:domain | parallel $q'
+            await self.asyncraises(s_exc.StormRuntimeError, core.nodes(q))
+
+            nodes = await core.nodes('ou:org | parallel ${ $foo=bar [ :name=$foo ]}')
+            self.true(all([n.get('name') == 'bar' for n in nodes]))
+
+            orig = s_storm.ParallelCmd.pipeline
+            tsks = {'cnt': 0}
+
+            async def pipecnt(self, runt, query, inq, outq):
+                tsks['cnt'] += 1
+                await orig(self, runt, query, inq, outq)
+
+            with mock.patch('synapse.lib.storm.ParallelCmd.pipeline', pipecnt):
+
+                nodes = await core.nodes('ou:org parallel --size 4 {[ :name=bar ]}')
+                self.len(5, nodes)
+                self.true(all([n.get('name') == 'bar' for n in nodes]))
+                self.eq(4, tsks['cnt'])
+
+                tsks['cnt'] = 0
+
+                nodes = await core.nodes('ou:org parallel --size 5 {[ :name=bar ]}')
+                self.len(5, nodes)
+                self.true(all([n.get('name') == 'bar' for n in nodes]))
+                self.eq(5, tsks['cnt'])
+
+                tsks['cnt'] = 0
+
+                # --size greater than number of nodes only creates a pipeline for each node
+                nodes = await core.nodes('ou:org parallel --size 10 {[ :name=foo ]}')
+                self.len(5, nodes)
+                self.true(all([n.get('name') == 'foo' for n in nodes]))
+                self.eq(5, tsks['cnt'])
+
+                tsks['cnt'] = 0
+
+                nodes = await core.nodes('parallel --size 4 {[ ou:org=* ]}')
+                self.len(4, nodes)
+                self.eq(4, tsks['cnt'])
+
     async def test_storm_yieldvalu(self):

         async with self.getTestCore() as core:
@@ -3882,7 +3924,7 @@ class StormTest(s_t_utils.SynTest):
                 )},
             ),
         }
-
+        core.loadStormPkg(pdef)
         msgs = await core.stormlist('woot --help')
         helptext = '\n'.join([m[1].get('mesg') for m in msgs if m[0] == 'print'])
         self.isin('Inputs:\n\n hehe:haha\n hoho:lol - We know whats up', helptext)
@@ -4656,7 +4698,7 @@ class StormTest(s_t_utils.SynTest):
     async def test_storm_cmdscope(self):

         async with self.getTestCore() as core:
-
+            core.loadStormPkg({
                 'name': 'testpkg',
                 'version': '0.0.1',
                 'commands': (