synapse 2.173.1-py311-none-any.whl → 2.174.0-py311-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release. This version of synapse might be problematic.
- synapse/axon.py +1 -1
- synapse/common.py +19 -5
- synapse/cortex.py +46 -10
- synapse/lib/agenda.py +6 -0
- synapse/lib/ast.py +1 -1
- synapse/lib/lmdbslab.py +11 -1
- synapse/lib/modelrev.py +17 -1
- synapse/lib/modules.py +1 -0
- synapse/lib/msgpack.py +25 -3
- synapse/lib/nexus.py +26 -22
- synapse/lib/schemas.py +31 -0
- synapse/lib/stormsvc.py +30 -11
- synapse/lib/stormtypes.py +23 -9
- synapse/lib/trigger.py +0 -4
- synapse/lib/version.py +2 -2
- synapse/lib/view.py +2 -0
- synapse/models/crypto.py +22 -0
- synapse/models/economic.py +23 -2
- synapse/models/entity.py +16 -0
- synapse/models/files.py +4 -1
- synapse/models/geopol.py +3 -0
- synapse/models/orgs.py +3 -4
- synapse/tests/test_cortex.py +13 -0
- synapse/tests/test_lib_agenda.py +129 -1
- synapse/tests/test_lib_ast.py +21 -0
- synapse/tests/test_lib_grammar.py +4 -0
- synapse/tests/test_lib_httpapi.py +1 -0
- synapse/tests/test_lib_lmdbslab.py +16 -1
- synapse/tests/test_lib_modelrev.py +57 -0
- synapse/tests/test_lib_msgpack.py +58 -8
- synapse/tests/test_lib_nexus.py +44 -1
- synapse/tests/test_lib_storm.py +7 -7
- synapse/tests/test_lib_stormsvc.py +128 -51
- synapse/tests/test_lib_stormtypes.py +43 -4
- synapse/tests/test_lib_trigger.py +23 -4
- synapse/tests/test_model_crypto.py +6 -0
- synapse/tests/test_model_economic.py +14 -1
- synapse/tests/test_model_geopol.py +3 -0
- synapse/tools/changelog.py +236 -0
- {synapse-2.173.1.dist-info → synapse-2.174.0.dist-info}/METADATA +1 -1
- {synapse-2.173.1.dist-info → synapse-2.174.0.dist-info}/RECORD +44 -42
- {synapse-2.173.1.dist-info → synapse-2.174.0.dist-info}/WHEEL +1 -1
- {synapse-2.173.1.dist-info → synapse-2.174.0.dist-info}/LICENSE +0 -0
- {synapse-2.173.1.dist-info → synapse-2.174.0.dist-info}/top_level.txt +0 -0
synapse/tests/test_lib_storm.py
CHANGED
@@ -2283,9 +2283,9 @@ class StormTest(s_t_utils.SynTest):
             msgs = await core.stormlist(f'pkg.load --ssl-noverify https://127.0.0.1:{port}/api/v1/pkgtest/notok')
             self.stormIsInWarn('pkg.load got JSON error: FooBar', msgs)

-
-
-            waiter = core.waiter(
+            # onload will on fire once. all other pkg.load events will effectively bounce
+            # because the pkg hasn't changed so no loading occurs
+            waiter = core.waiter(1, 'core:pkg:onload:complete')

             with self.getAsyncLoggerStream('synapse.cortex') as stream:
                 msgs = await core.stormlist(f'pkg.load --ssl-noverify https://127.0.0.1:{port}/api/v1/pkgtest/yep')
@@ -2301,10 +2301,10 @@ class StormTest(s_t_utils.SynTest):
             self.isin("No var with name: newp", buf)
             self.len(1, await core.nodes(f'ps:contact={cont}'))

-            evnts = await waiter.wait(timeout=
-            exp = [
-
-
+            evnts = await waiter.wait(timeout=4)
+            exp = [
+                ('core:pkg:onload:complete', {'pkg': 'testload'})
+            ]
             self.eq(exp, evnts)

     async def test_storm_tree(self):
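The updated test arms a bounded waiter for the new core:pkg:onload:complete event before loading the package again. A minimal hedged sketch of that wait pattern, using only calls visible in the diff above (the helper name and URL parameter are illustrative, not from the package):

    # Sketch only, not package source: the event-wait pattern from the updated test above.
    # Assumes a synapse test Cortex such as SynTest.getTestCore() yields.
    async def load_pkg_and_wait(core, url, timeout=4):
        waiter = core.waiter(1, 'core:pkg:onload:complete')     # expect one onload completion
        await core.stormlist(f'pkg.load --ssl-noverify {url}')  # trigger the package load
        return await waiter.wait(timeout=timeout)               # None if the event never fires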
synapse/tests/test_lib_stormsvc.py
CHANGED

@@ -2,7 +2,6 @@ import asyncio
 import contextlib
 import synapse.exc as s_exc
 import synapse.common as s_common
-import synapse.cortex as s_cortex

 import synapse.tests.utils as s_test

@@ -796,56 +795,134 @@ class StormSvcTest(s_test.SynTest):

         with self.getTestDir() as dirn:
             async with self.getTestCore(dirn=dirn) as core:
-                with
-                … (49 more removed lines are not rendered in this diff view)
+                async with core.beholder() as wind:
+                    with self.getTestDir() as svcd:
+                        async with await ChangingService.anit(svcd) as chng:
+                            chng.dmon.share('chng', chng)
+
+                            root = await chng.auth.getUserByName('root')
+                            await root.setPasswd('root')
+
+                            info = await chng.dmon.listen('tcp://127.0.0.1:0/')
+                            host, port = info
+
+                            curl = f'tcp://root:root@127.0.0.1:{port}/chng'
+
+                            await core.nodes(f'service.add chng {curl}')
+                            await core.nodes('$lib.service.wait(chng)')
+
+                            self.nn(core.getStormCmd('oldcmd'))
+                            self.nn(core.getStormCmd('old.bar'))
+                            self.nn(core.getStormCmd('old.baz'))
+                            self.none(core.getStormCmd('new.baz'))
+                            self.none(core.getStormCmd('runtecho'))
+                            self.none(core.getStormCmd('newcmd'))
+                            self.isin('old', core.stormpkgs)
+                            self.isin('old.bar', core.stormmods)
+                            self.isin('old.baz', core.stormmods)
+                            pkg = await core.getStormPkg('old')
+                            self.eq(pkg.get('version'), '0.0.1')
+
+                            waiter = core.waiter(1, 'stormsvc:client:unready')
+
+                        self.true(await waiter.wait(10))
+                        async with await ChangingService.anit(svcd, {'updated': True}) as chng:
+                            chng.dmon.share('chng', chng)
+                            await chng.dmon.listen(f'tcp://127.0.0.1:{port}/')
+
+                            await core.nodes('$lib.service.wait(chng)')
+
+                            self.nn(core.getStormCmd('newcmd'))
+                            self.nn(core.getStormCmd('new.baz'))
+                            self.nn(core.getStormCmd('old.bar'))
+                            self.nn(core.getStormCmd('runtecho'))
+                            self.none(core.getStormCmd('oldcmd'))
+                            self.none(core.getStormCmd('old.baz'))
+                            self.isin('old', core.stormpkgs)
+                            self.isin('new', core.stormpkgs)
+                            self.isin('echo', core.stormmods)
+                            self.isin('old.bar', core.stormmods)
+                            self.isin('new.baz', core.stormmods)
+                            self.notin('old.baz', core.stormmods)
+                            pkg = await core.getStormPkg('old')
+                            self.eq(pkg.get('version'), '0.1.0')
+
+                        async with await ChangingService.anit(svcd, {'updated': False}) as chng:
+                            chng.dmon.share('chng', chng)
+                            await chng.dmon.listen(f'tcp://127.0.0.1:{port}/')
+
+                            await core.nodes('$lib.service.wait(chng)')
+                            self.nn(core.getStormCmd('oldcmd'))
+                            self.nn(core.getStormCmd('old.bar'))
+                            self.nn(core.getStormCmd('old.baz'))
+                            self.none(core.getStormCmd('new.baz'))
+                            self.none(core.getStormCmd('runtecho'))
+                            self.none(core.getStormCmd('newcmd'))
+                            self.isin('old', core.stormpkgs)
+                            self.isin('old.bar', core.stormmods)
+                            self.isin('old.baz', core.stormmods)
+
+                            self.none(await core.getStormPkg('new'))
+
+                            pkg = await core.getStormPkg('old')
+                            self.eq(pkg.get('version'), '0.0.1')
+
+                            svcs = await core.callStorm('return($lib.service.list())')
+                            self.len(1, svcs)
+
+                        async with await ChangingService.anit(svcd, {'updated': True}) as chng:
+                            chng.dmon.share('chng', chng)
+                            await chng.dmon.listen(f'tcp://127.0.0.1:{port}/')
+
+                            await core.nodes('$lib.service.wait(chng)')
+
+                        async with await ChangingService.anit(svcd, {'updated': True}) as chng:
+                            chng.dmon.share('chng', chng)
+                            await chng.dmon.listen(f'tcp://127.0.0.1:{port}/')
+
+                            await core.nodes('$lib.service.wait(chng)')
+
+                        events = []
+                        async for m in wind:
+                            events.append(m)
+
+                        self.len(16, events)
+
+                        # updated = false
+                        self.eq('svc:set', events[-9]['event'])
+                        self.eq('chng', events[-9]['info']['name'])
+                        self.eq((0, 0, 1), events[-9]['info']['version'])
+
+                        self.eq('pkg:del', events[-8]['event'])
+                        self.eq('old', events[-8]['info']['name'])
+
+                        self.eq('pkg:add', events[-7]['event'])
+                        self.eq('old', events[-7]['info']['name'])
+                        self.eq('0.0.1', events[-7]['info']['version'])
+
+                        self.eq('pkg:del', events[-6]['event'])
+                        self.eq('new', events[-6]['info']['name'])
+
+                        # updated = true
+                        self.eq('svc:set', events[-5]['event'])
+                        self.eq('chng', events[-5]['info']['name'])
+                        self.eq((0, 0, 1), events[-5]['info']['version'])
+
+                        self.eq('pkg:del', events[-4]['event'])
+                        self.eq('old', events[-4]['info']['name'])
+
+                        self.eq('pkg:add', events[-3]['event'])
+                        self.eq('old', events[-3]['info']['name'])
+                        self.eq('0.1.0', events[-3]['info']['version'])
+
+                        self.eq('pkg:add', events[-2]['event'])
+                        self.eq('new', events[-2]['info']['name'])
+
+                        # we get the set to let us know things are back, not no adds since the pkgs are the same
+                        # so this is the last
+                        self.eq('svc:set', events[-1]['event'])
+                        self.eq('chng', events[-1]['info']['name'])
+                        self.eq((0, 0, 1), events[-1]['info']['version'])

         # This test verifies that storm commands loaded from a previously connected service are still available,
         # even if the service is not available now
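The rewritten service test reads every package and service lifecycle event back off the Cortex beholder window and asserts on the 'event' and 'info' fields. A hedged sketch of that collection pattern, limited to the calls that appear in the diff above (the helper name is illustrative):

    # Sketch only, not package source: gather beholder messages emitted while work() runs.
    # Each message is a dict carrying at least 'event' (e.g. 'pkg:add', 'pkg:del', 'svc:set') and 'info'.
    async def collect_events(core, work):
        async with core.beholder() as wind:
            await work()                          # e.g. swap a storm service implementation
            return [mesg async for mesg in wind]  # drain the window, as the test does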
synapse/tests/test_lib_stormtypes.py
CHANGED

@@ -322,6 +322,42 @@ class StormTypesTest(s_test.SynTest):
             msgs = await core.stormlist('$lib.debug = (1) hehe.haha')
             self.stormIsInPrint('hehe.haha', msgs)

+    async def test_storm_doubleadd_pkg(self):
+        async with self.getTestCore() as core:
+            async with core.beholder() as wind:
+                pkg = {
+                    'name': 'hehe',
+                    'version': '1.1.1',
+                    'modules': [
+                        {'name': 'hehe', 'storm': 'function getDebug() { return($lib.debug) }'},
+                    ],
+                    'commands': [
+                        {'name': 'hehe.haha', 'storm': 'if $lib.debug { $lib.print(hehe.haha) }'},
+                    ],
+                }
+
+                # all but the first of these should bounce
+                for i in range(5):
+                    await core.addStormPkg(pkg)
+
+                pkg['version'] = '1.2.3'
+
+                # all but the first of these should bounce
+                for i in range(5):
+                    await core.addStormPkg(pkg)
+
+                events = []
+                async for m in wind:
+                    events.append(m)
+                self.len(2, events)
+                self.eq('pkg:add', events[0]['event'])
+                self.eq('hehe', events[0]['info']['name'])
+                self.eq('1.1.1', events[0]['info']['version'])
+
+                self.eq('pkg:add', events[1]['event'])
+                self.eq('hehe', events[1]['info']['name'])
+                self.eq('1.2.3', events[1]['info']['version'])
+
     async def test_storm_private(self):
         async with self.getTestCore() as core:
             await core.addStormPkg({
@@ -4642,6 +4678,11 @@ class StormTypesTest(s_test.SynTest):

             opts = {'vars': {'iden': iden0}}

+            # for coverage...
+            self.false(await core.killCronTask('newp'))
+            self.false(await core._killCronTask('newp'))
+            self.false(await core.callStorm(f'return($lib.cron.get({iden0}).kill())'))
+
             cdef = await core.callStorm('return($lib.cron.get($iden).pack())', opts=opts)
             self.eq('mydoc', cdef.get('doc'))
             self.eq('myname', cdef.get('name'))
@@ -4793,12 +4834,10 @@ class StormTypesTest(s_test.SynTest):
             self.stormIsInErr('does not match', mesgs)

             # Make sure the old one didn't run and the new query ran
+            nextlayroffs = await layr.getEditOffs() + 1
             unixtime += 60
-            await
+            await layr.waitEditOffs(nextlayroffs, timeout=5)
             self.eq(1, await prox.count('meta:note:type=m1'))
-            # UNG WTF
-            await asyncio.sleep(0)
-            await asyncio.sleep(0)
             self.eq(1, await prox.count('meta:note:type=m2'))

             # Delete the job
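The cron test above replaces the bare asyncio.sleep(0) spins with a deterministic wait on the layer's edit offset. A hedged sketch of that pattern, built only from the layer calls shown in the diff (getEditOffs / waitEditOffs; the helper name is illustrative):

    # Sketch only, not package source: wait for the next write to land in a layer.
    async def run_and_wait_for_edit(layr, make_edit, timeout=5):
        nextoffs = await layr.getEditOffs() + 1  # offset the layer reaches after one more edit
        await make_edit()                        # e.g. advance the fake clock so the cron job fires
        await layr.waitEditOffs(nextoffs, timeout=timeout)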
synapse/tests/test_lib_trigger.py
CHANGED

@@ -3,9 +3,6 @@ import json
 import synapse.exc as s_exc
 import synapse.common as s_common

-from synapse.common import aspin
-
-import synapse.cortex as s_cortex
 import synapse.telepath as s_telepath
 import synapse.tests.utils as s_t_utils
 import synapse.tools.backup as s_tools_backup
@@ -294,6 +291,18 @@ class TrigTest(s_t_utils.SynTest):
             with self.raises(s_exc.SchemaViolation):
                 await view.addTrigger({'cond': 'tag:add', 'storm': '[ +#count test:str=$tag ]', 'tag': 'foo&baz'})

+            # View iden mismatch
+            trigiden = s_common.guid()
+            viewiden = s_common.guid()
+            tdef = {'iden': trigiden, 'cond': 'node:add', 'storm': 'test:int=4', 'form': 'test:int', 'view': viewiden}
+            await view.addTrigger(tdef)
+            trigger = await view.getTrigger(trigiden)
+            self.eq(trigger.get('view'), view.iden)
+            with self.raises(s_exc.BadArg) as exc:
+                await view.setTriggerInfo(trigiden, 'view', viewiden)
+            self.eq(exc.exception.get('mesg'), 'Invalid key name provided: view')
+            await view.delTrigger(trigiden)
+
             # Trigger list
             triglist = await view.listTriggers()
             self.len(12, triglist)
@@ -551,6 +560,10 @@ class TrigTest(s_t_utils.SynTest):

             derp = await core.auth.addUser('derp')

+            # This is so we can later update the trigger in a view other than the one which it was created
+            viewiden = await core.callStorm('$view = $lib.view.get().fork() return($view.iden)')
+            inview = {'view': viewiden}
+
             tdef = {'cond': 'node:add', 'form': 'inet:ipv4', 'storm': '[ +#foo ]'}
             opts = {'vars': {'tdef': tdef}}

@@ -562,7 +575,7 @@ class TrigTest(s_t_utils.SynTest):
             self.nn(nodes[0].getTag('foo'))

             opts = {'vars': {'iden': trig.get('iden'), 'derp': derp.iden}}
-            await core.callStorm('$lib.trigger.get($iden).set(user, $derp)', opts=opts)
+            await core.callStorm('$lib.trigger.get($iden).set(user, $derp)', opts=opts | inview)

             nodes = await core.nodes('[ inet:ipv4=8.8.8.8 ]')
             self.len(1, nodes)
@@ -885,3 +898,9 @@ class TrigTest(s_t_utils.SynTest):

         await core.nodes('for $trig in $lib.trigger.list() { $lib.trigger.del($trig.iden) }')
         self.len(0, await core.nodes('syn:trigger'))
+
+    async def test_trigger_viewiden_migration(self):
+        async with self.getRegrCore('trigger-viewiden-migration') as core:
+            for view in core.views.values():
+                for _, trigger in view.triggers.list():
+                    self.eq(trigger.tdef.get('view'), view.iden)
synapse/tests/test_model_crypto.py
CHANGED

@@ -49,7 +49,9 @@ class CryptoModelTest(s_t_utils.SynTest):
                     :algorithm=aes256
                     :mode=CBC
                     :iv=41414141
+                    :iv:text=AAAA
                     :private=00000000
+                    :private:text=hehe
                     :private:md5=$md5
                     :private:sha1=$sha1
                     :private:sha256=$sha256
@@ -57,6 +59,7 @@ class CryptoModelTest(s_t_utils.SynTest):
                     :public:md5=$md5
                     :public:sha1=$sha1
                     :public:sha256=$sha256
+                    :public:text=haha
                     :seed:passwd=s3cret
                     :seed:algorithm=pbkdf2 ]
                 }]
@@ -72,6 +75,9 @@ class CryptoModelTest(s_t_utils.SynTest):
                 +:mode=cbc
                 +:iv=41414141
             '''))
+            self.len(1, await core.nodes('it:dev:str=AAAA -> crypto:key'))
+            self.len(1, await core.nodes('it:dev:str=hehe -> crypto:key'))
+            self.len(1, await core.nodes('it:dev:str=haha -> crypto:key'))
             self.len(1, await core.nodes('inet:passwd=s3cret -> crypto:key -> crypto:currency:address'))

             self.len(2, await core.nodes('crypto:key -> hash:md5'))
synapse/tests/test_model_economic.py
CHANGED

@@ -129,8 +129,21 @@ class EconTest(s_utils.SynTest):

                 :time=20180202
                 :purchase={perc.ndef[1]}
+
+                :place=*
+                :place:loc=us.ny.brooklyn
+                :place:name=myhouse
+                :place:address="123 main street, brooklyn, ny, 11223"
+                :place:latlong=(90,80)
             ]'''
-            await core.nodes(text)
+            nodes = await core.nodes(text)
+
+            self.eq('myhouse', nodes[0].get('place:name'))
+            self.eq((90, 80), nodes[0].get('place:latlong'))
+            self.eq('us.ny.brooklyn', nodes[0].get('place:loc'))
+            self.eq('123 main street, brooklyn, ny, 11223', nodes[0].get('place:address'))
+
+            self.len(1, await core.nodes('econ:acct:payment -> geo:place'))

             self.len(1, await core.nodes('econ:acct:payment +:time@=(2017,2019) +{-> econ:pay:card +:name="bob smith"}'))

synapse/tests/test_model_geopol.py
CHANGED

@@ -14,6 +14,7 @@ class GeoPolModelTest(s_t_utils.SynTest):
                 :iso2=vi
                 :iso3=vis
                 :isonum=31337
+                :currencies=(usd, vcoins, PESOS, USD)
             ]
             ''')
             self.len(1, nodes)
@@ -24,7 +25,9 @@ class GeoPolModelTest(s_t_utils.SynTest):
             self.eq('vi', nodes[0].get('iso2'))
             self.eq('vis', nodes[0].get('iso3'))
             self.eq(31337, nodes[0].get('isonum'))
+            self.eq(('pesos', 'usd', 'vcoins'), nodes[0].get('currencies'))
             self.len(2, await core.nodes('pol:country -> geo:name'))
+            self.len(3, await core.nodes('pol:country -> econ:currency'))

             nodes = await core.nodes('''
             [ pol:vitals=*
synapse/tools/changelog.py
ADDED

@@ -0,0 +1,236 @@
+import os
+import sys
+import pprint
+import asyncio
+import argparse
+import datetime
+import textwrap
+import traceback
+import subprocess
+import collections
+
+import regex
+
+import synapse.common as s_common
+
+import synapse.lib.output as s_output
+import synapse.lib.schemas as s_schemas
+
+defstruct = (
+    ('type', None),
+    ('desc', ''),
+    ('prs', ()),
+)
+
+SKIP_FILES = (
+    '.gitkeep',
+)
+
+version_regex = r'^v[0-9]\.[0-9]+\.[0-9]+((a|b|rc)[0-9]*)?$'
+def gen(opts: argparse.Namespace,
+        outp: s_output.OutPut):
+    if opts.verbose:
+        outp.printf(f'{opts=}')
+
+    name = opts.name
+    if name is None:
+        name = f'{s_common.guid()}.yaml'
+    fp = s_common.genpath(opts.cdir, name)
+
+    data = dict(defstruct)
+    data['type'] = opts.type
+    data['desc'] = opts.desc
+
+    if opts.migration_desc:
+        data['migration_desc'] = opts.migration_desc
+    if opts.pr:
+        data['prs'] = [opts.pr]
+
+    if opts.verbose:
+        outp.printf('Validating data against schema')
+
+    s_schemas._reqChanglogSchema(data)
+
+    if opts.verbose:
+        outp.printf('Saving the following information:')
+        outp.printf(s_common.yamldump(data).decode())
+
+    s_common.yamlsave(data, fp)
+
+    outp.printf(f'Saved changelog entry to {fp=}')
+
+    if opts.add:
+        if opts.verbose:
+            outp.printf('Adding file to git staging')
+        argv = ['git', 'add', fp]
+        ret = subprocess.run(argv, capture_output=True)
+        if opts.verbose:
+            outp.printf(f'stddout={ret.stdout}')
+            outp.printf(f'stderr={ret.stderr}')
+        ret.check_returncode()
+
+    return 0
+
+def format(opts: argparse.Namespace,
+           outp: s_output.OutPut):
+    if opts.verbose:
+        outp.printf(f'{opts=}')
+
+    if not regex.match(version_regex, opts.version):
+        outp.printf(f'Failed to match {opts.version} vs {version_regex}')
+        return 1
+
+    entries = collections.defaultdict(list)
+
+    files_processed = []  # Eventually for removing files from git.
+
+    for fn in os.listdir(opts.cdir):
+        if fn in SKIP_FILES:
+            continue
+        fp = s_common.genpath(opts.cdir, fn)
+        if opts.verbose:
+            outp.printf(f'Reading: {fp=}')
+        try:
+            data = s_common.yamlload(fp)
+        except Exception as e:
+            outp.printf(f'Error parsing yaml from {fp=}: {e}')
+            continue
+
+        if opts.verbose:
+            outp.printf('Got the following data:')
+            outp.printf(pprint.pformat(data))
+
+        files_processed.append(fp)
+
+        s_schemas._reqChanglogSchema(data)
+
+        if opts.prs_from_git:
+            outp.printf('--prs-from-git not yet implemented.')
+            return 1
+
+        if opts.enforce_prs and not data.get('prs'):
+            outp.printf(f'Entry is missing PR numbers: {fp=}')
+            return 1
+        data.setdefault('prs', [])
+
+        if opts.verbose:
+            outp.printf(f'Got data from {fp=}')
+
+        data.get('prs').sort()  # sort the PRs inplace
+        entries[data.get('type')].append(data)
+
+    if not entries:
+        outp.printf(f'No files passed validation from {opts.dir}')
+        return 1
+
+    if 'model' in entries:
+        outp.printf('Model specific entries are not yet implemented.')
+        return 1
+
+    date = opts.date
+    if date is None:
+        date = datetime.datetime.utcnow().strftime('%Y-%m-%d')
+    header = f'{opts.version} - {date}'
+    text = f'{header}\n{"=" * len(header)}\n'
+
+    for key, header in s_schemas._changelogTypes.items():
+        dataz = entries.get(key)
+        if dataz:
+            text = text + f'\n{header}\n{"-" * len(header)}'
+            dataz.sort(key=lambda x: x.get('prs'))
+            for data in dataz:
+                desc = data.get('desc')
+                for line in textwrap.wrap(desc, initial_indent='- ', subsequent_indent=' ', width=opts.width):
+                    text = f'{text}\n{line}'
+                if not opts.hide_prs:
+                    for pr in data.get('prs'):
+                        text = f'{text}\n (`#{pr} <https://github.com/vertexproject/synapse/pull/{pr}>`_)'
+                if key == 'migration':
+                    text = text + '\n- See :ref:`datamigration` for more information about automatic migrations.'
+            text = text + '\n'
+
+    if opts.rm:
+        if opts.verbose:
+            outp.printf('Staging file removals in git')
+        for fp in files_processed:
+            argv = ['git', 'rm', fp]
+            ret = subprocess.run(argv, capture_output=True)
+            if opts.verbose:
+                outp.printf(f'stddout={ret.stdout}')
+                outp.printf(f'stderr={ret.stderr}')
+            ret.check_returncode()
+
+    outp.printf(text)
+
+    return 0
+
+async def main(argv, outp=None):
+    if outp is None:
+        outp = s_output.OutPut()
+
+    pars = makeargparser()
+
+    opts = pars.parse_args(argv)
+    if opts.git_dir_check:
+        if not os.path.exists(os.path.join(os.getcwd(), '.git')):
+            outp.print('Current working directury must be the root of the repository.')
+            return 1
+    try:
+        return opts.func(opts, outp)
+    except Exception as e:
+        outp.printf(f'Error running {opts.func}: {traceback.format_exc()}')
+        return 1
+
+def makeargparser():
+    desc = '''Command line tool to manage changelog entries.
+    This tool and any data formats associated with it may change at any time.
+    '''
+    pars = argparse.ArgumentParser('synapse.tools.changelog', description=desc)
+
+    subpars = pars.add_subparsers(required=True,
+                                  title='subcommands',
+                                  dest='cmd', )
+    gen_pars = subpars.add_parser('gen', help='Generate a new changelog entry.')
+    gen_pars.set_defaults(func=gen)
+    gen_pars.add_argument('-t', '--type', required=True, choices=list(s_schemas._changelogTypes.keys()),
+                          help='The changelog type.')
+    gen_pars.add_argument('desc', type=str,
+                          help='The description to populate the initial changelog entry with.', )
+    gen_pars.add_argument('-p', '--pr', type=int, default=False,
+                          help='PR number associated with the changelog entry.')
+    gen_pars.add_argument('-a', '--add', default=False, action='store_true',
+                          help='Add the newly created file to the current git staging area.')
+    # Hidden name override. Mainly for testing.
+    gen_pars.add_argument('-n', '--name', default=None, type=str,
+                          help=argparse.SUPPRESS)
+
+    format_pars = subpars.add_parser('format', help='Format existing files into a RST block.')
+    format_pars.set_defaults(func=format)
+    mux_prs = format_pars.add_mutually_exclusive_group()
+    mux_prs.add_argument('--hide-prs', default=False, action='store_true',
+                         help='Hide PR entries.')
+    mux_prs.add_argument('--enforce-prs', default=False, action='store_true',
+                         help='Enforce PRs list to be populated with at least one number.', )
+    format_pars.add_argument('--prs-from-git', default=False, action='store_true',
+                             help='Attempt to populate any PR numbers from a given files commit history.')
+    format_pars.add_argument('-w', '--width', help='Maximum column width to wrap descriptions at.',
+                             default=79, type=int)
+    format_pars.add_argument('--version', required=True, action='store', type=str,
+                             help='Version number')
+    format_pars.add_argument('-d', '--date', action='store', type=str,
+                             help='Date to use with the changelog entry')
+    format_pars.add_argument('-r', '--rm', default=False, action='store_true',
+                             help='Stage the changelog files as deleted files in git.')
+
+    for p in (gen_pars, format_pars):
+        p.add_argument('-v', '--verbose', default=False, action='store_true',
+                       help='Enable verbose output')
+        p.add_argument('--cdir', default='./changes', action='store',
+                       help='Directory of changelog files.')
+        p.add_argument('--disable-git-dir-check', dest='git_dir_check', default=True, action='store_false',
+                       help=argparse.SUPPRESS)
+
+    return pars
+
+if __name__ == '__main__':  # pragma: no cover
+    sys.exit(asyncio.run(main(sys.argv[1:], s_output.stdout)))
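The new changelog tool wires its gen and format subcommands through argparse and an async main(). A hedged usage sketch based only on the argument definitions above; the entry type, description, PR number, and version string are placeholders (valid -t choices come from s_schemas._changelogTypes, which is not shown in this diff):

    # Sketch only, not package documentation: driving the tool programmatically.
    import asyncio
    import synapse.tools.changelog as s_changelog

    # Write a ./changes/<guid>.yaml entry and stage it in git (run from the repo root).
    asyncio.run(s_changelog.main(['gen', '-t', 'bug', 'Example description.', '-p', '1234', '--add']))

    # Render all pending entries into an RST block for the release notes, staging the files as deleted.
    asyncio.run(s_changelog.main(['format', '--version', 'v2.174.0', '--rm']))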