synapse 2.165.0-py311-none-any.whl → 2.167.0-py311-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of synapse has been flagged as possibly problematic.
- synapse/axon.py +4 -10
- synapse/cmds/cortex.py +1 -6
- synapse/common.py +6 -0
- synapse/cortex.py +104 -57
- synapse/datamodel.py +32 -0
- synapse/exc.py +1 -0
- synapse/lib/agenda.py +81 -51
- synapse/lib/aha.py +2 -0
- synapse/lib/ast.py +21 -23
- synapse/lib/base.py +11 -10
- synapse/lib/cell.py +24 -24
- synapse/lib/hive.py +11 -0
- synapse/lib/httpapi.py +1 -0
- synapse/lib/nexus.py +3 -2
- synapse/lib/node.py +4 -2
- synapse/lib/schemas.py +3 -1
- synapse/lib/snap.py +50 -0
- synapse/lib/storm.py +19 -17
- synapse/lib/stormlib/aha.py +370 -17
- synapse/lib/stormlib/auth.py +11 -4
- synapse/lib/stormlib/cache.py +202 -0
- synapse/lib/stormlib/cortex.py +69 -7
- synapse/lib/stormlib/macro.py +11 -18
- synapse/lib/stormlib/spooled.py +109 -0
- synapse/lib/stormlib/stix.py +1 -1
- synapse/lib/stormtypes.py +61 -17
- synapse/lib/trigger.py +10 -12
- synapse/lib/types.py +3 -1
- synapse/lib/version.py +2 -2
- synapse/lib/view.py +16 -3
- synapse/models/base.py +8 -0
- synapse/models/files.py +3 -0
- synapse/models/inet.py +74 -2
- synapse/models/orgs.py +52 -8
- synapse/models/person.py +30 -11
- synapse/models/risk.py +44 -3
- synapse/telepath.py +115 -32
- synapse/tests/files/stormpkg/dotstorm/dotstorm.yaml +3 -0
- synapse/tests/test_cortex.py +79 -8
- synapse/tests/test_datamodel.py +22 -0
- synapse/tests/test_lib_agenda.py +8 -1
- synapse/tests/test_lib_aha.py +19 -6
- synapse/tests/test_lib_cell.py +6 -2
- synapse/tests/test_lib_grammar.py +62 -64
- synapse/tests/test_lib_httpapi.py +1 -1
- synapse/tests/test_lib_rstorm.py +4 -4
- synapse/tests/test_lib_storm.py +98 -7
- synapse/tests/test_lib_stormlib_aha.py +196 -0
- synapse/tests/test_lib_stormlib_cache.py +272 -0
- synapse/tests/test_lib_stormlib_compression.py +12 -12
- synapse/tests/test_lib_stormlib_cortex.py +71 -0
- synapse/tests/test_lib_stormlib_macro.py +94 -0
- synapse/tests/test_lib_stormlib_spooled.py +190 -0
- synapse/tests/test_lib_stormtypes.py +71 -37
- synapse/tests/test_lib_view.py +50 -3
- synapse/tests/test_model_files.py +3 -0
- synapse/tests/test_model_inet.py +67 -0
- synapse/tests/test_model_risk.py +6 -0
- synapse/tests/test_telepath.py +30 -7
- synapse/tests/test_tools_genpkg.py +26 -0
- synapse/tests/test_tools_hiveload.py +1 -0
- synapse/tests/test_tools_hivesave.py +1 -0
- synapse/tests/test_tools_modrole.py +81 -0
- synapse/tests/test_tools_moduser.py +105 -0
- synapse/tests/utils.py +22 -3
- synapse/tools/autodoc.py +1 -1
- synapse/tools/hive/load.py +3 -0
- synapse/tools/hive/save.py +3 -0
- synapse/tools/modrole.py +59 -7
- synapse/tools/moduser.py +78 -10
- {synapse-2.165.0.dist-info → synapse-2.167.0.dist-info}/METADATA +3 -3
- {synapse-2.165.0.dist-info → synapse-2.167.0.dist-info}/RECORD +75 -72
- synapse/lib/provenance.py +0 -111
- synapse/tests/test_lib_provenance.py +0 -37
- {synapse-2.165.0.dist-info → synapse-2.167.0.dist-info}/LICENSE +0 -0
- {synapse-2.165.0.dist-info → synapse-2.167.0.dist-info}/WHEEL +0 -0
- {synapse-2.165.0.dist-info → synapse-2.167.0.dist-info}/top_level.txt +0 -0
synapse/models/risk.py
CHANGED
@@ -68,6 +68,14 @@ class RiskModule(s_module.CoreModule):

                ('risk:threat', ('guid', {}), {
                    'doc': 'A threat cluster or subgraph of threat activity, as reported by a specific organization.',
+                   'display': {
+                       'columns': (
+                           {'type': 'prop', 'opts': {'name': 'org:name'}},
+                           {'type': 'prop', 'opts': {'name': 'org:names'}},
+                           {'type': 'prop', 'opts': {'name': 'reporter:name'}},
+                           {'type': 'prop', 'opts': {'name': 'tag'}},
+                       ),
+                   },
                }),
                ('risk:attack', ('guid', {}), {
                    'doc': 'An instance of an actor attacking a target.',

@@ -81,9 +89,22 @@ class RiskModule(s_module.CoreModule):
                }),
                ('risk:compromise', ('guid', {}), {
                    'doc': 'An instance of a compromise and its aggregate impact.',
+                   'display': {
+                       'columns': (
+                           {'type': 'prop', 'opts': {'name': 'name'}},
+                           {'type': 'prop', 'opts': {'name': 'reporter:name'}},
+                       ),
+                   },
                }),
                ('risk:mitigation', ('guid', {}), {
                    'doc': 'A mitigation for a specific risk:vuln.',
+                   'display': {
+                       'columns': (
+                           {'type': 'prop', 'opts': {'name': 'name'}},
+                           {'type': 'prop', 'opts': {'name': 'reporter:name'}},
+                           {'type': 'prop', 'opts': {'name': 'tag'}},
+                       ),
+                   },
                }),
                ('risk:attacktype', ('taxonomy', {}), {
                    'doc': 'A taxonomy of attack types.',

@@ -104,6 +125,14 @@ class RiskModule(s_module.CoreModule):
                }),
                ('risk:tool:software', ('guid', {}), {
                    'doc': 'A software tool used in threat activity, as reported by a specific organization.',
+                   'display': {
+                       'columns': (
+                           {'type': 'prop', 'opts': {'name': 'soft:name'}},
+                           {'type': 'prop', 'opts': {'name': 'soft:names'}},
+                           {'type': 'prop', 'opts': {'name': 'reporter:name'}},
+                           {'type': 'prop', 'opts': {'name': 'tag'}},
+                       ),
+                   },
                }),

                ('risk:alert:verdict:taxonomy', ('taxonomy', {}), {

@@ -128,19 +157,22 @@ class RiskModule(s_module.CoreModule):
                ('risk:extortion:type:taxonomy', ('taxonomy', {}), {
                    'interfaces': ('meta:taxonomy',),
                    'doc': 'A taxonomy of extortion event types.'}),
+
                ('risk:technique:masquerade', ('guid', {}), {
                    'doc': 'Represents the assessment that a node is designed to resemble another in order to mislead.'}),
            ),
            'edges': (
                # some explicit examples...
                (('risk:attack', 'uses', 'ou:technique'), {
-                   'doc': 'The
+                   'doc': 'The attacker used the technique in the attack.'}),
                (('risk:threat', 'uses', 'ou:technique'), {
                    'doc': 'The threat cluster uses the technique.'}),
                (('risk:tool:software', 'uses', 'ou:technique'), {
                    'doc': 'The tool uses the technique.'}),
                (('risk:compromise', 'uses', 'ou:technique'), {
-                   'doc': 'The
+                   'doc': 'The attacker used the technique in the compromise.'}),
+               (('risk:extortion', 'uses', 'ou:technique'), {
+                   'doc': 'The attacker used the technique to extort the victim.'}),

                (('risk:attack', 'uses', 'risk:vuln'), {
                    'doc': 'The attack used the vulnerability.'}),

@@ -936,12 +968,18 @@ class RiskModule(s_module.CoreModule):
                    ('compromise', ('risk:compromise', {}), {
                        'doc': 'The compromise which allowed the leaker access to the information.'}),

+                   ('extortion', ('risk:extortion', {}), {
+                       'doc': 'The extortion event which used the threat of the leak as leverage.'}),
+
                    ('public', ('bool', {}), {
                        'doc': 'Set to true if the leaked information was made publicly available.'}),

                    ('public:url', ('inet:url', {}), {
                        'doc': 'The URL where the leaked information was made publicly available.'}),

+                   ('size:bytes', ('int', {'min': 0}), {
+                       'doc': 'The approximate uncompressed size of the total data leaked.'}),
+
                )),

                ('risk:extortion:type:taxonomy', {}, ()),

@@ -963,6 +1001,9 @@ class RiskModule(s_module.CoreModule):
                    ('demanded', ('time', {}), {
                        'doc': 'The time that the attacker made their demands.'}),

+                   ('deadline', ('time', {}), {
+                       'doc': 'The time that the demand must be met.'}),
+
                    ('goal', ('ou:goal', {}), {
                        'doc': 'The goal of the attacker in extorting the victim.'}),

@@ -976,7 +1017,7 @@ class RiskModule(s_module.CoreModule):
                        'doc': 'The extortion target identity.'}),

                    ('success', ('bool', {}), {
-                       'doc':
+                       'doc': "Set to true if the victim met the attacker's demands."}),

                    ('enacted', ('bool', {}), {
                        'doc': 'Set to true if attacker carried out the threat.'}),
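The 'display' metadata added to risk:threat, risk:compromise, risk:mitigation, and risk:tool:software above has the same shape in every hunk: a 'columns' tuple of property references, presumably used by tooling to choose default display columns. A consolidated sketch of that shape (the column selection here is illustrative, taken from the risk:threat hunk):

    ('risk:threat', ('guid', {}), {
        'doc': 'A threat cluster or subgraph of threat activity, as reported by a specific organization.',
        'display': {
            'columns': (
                # each column references a property defined on the form
                {'type': 'prop', 'opts': {'name': 'org:name'}},
                {'type': 'prop', 'opts': {'name': 'reporter:name'}},
            ),
        },
    }),

Per the new test_datamodel_basics test further down, a column with an unknown 'type', or a 'prop' column naming a property the form does not define, causes addForm() to raise s_exc.BadFormDef.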
synapse/telepath.py
CHANGED
@@ -135,25 +135,24 @@ def mergeAhaInfo(info0, info1):

    return info0

-async def open(url,
+async def open(url, onlink=None):
    '''
-   Open a new telepath
-   '''
-   # backward compatible support for a list of URLs or urlinfo dicts...
-   if isinstance(url, (tuple, list)): # pragma: no cover
-       return await Client.anit(url)
-
-   urlinfo = chopurl(url)
+   Open a new telepath ClientV2 object based on the given URL.

-
-
-
+   Args:
+       url (str): The URL to connect to.
+       onlink: An optional async callback function to run when connections are made.

-
-
-
+   Notes:
+       The onlink callback function has the call signature ``(proxy, urlinfo)``.
+       The proxy is the Telepath Proxy object.
+       The urlinfo is the parsed URL information used to create the proxy object.
+       The urlinfo structure may change between versions of Synapse.

-
+   Returns:
+       ClientV2: A ClientV2 object.
+   '''
+   return await ClientV2.anit(url, onlink=onlink)

async def _getAhaSvc(urlinfo, timeout=None):

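Taken together with the new docstring, a minimal usage sketch of the reworked open() follows; the URL, the callback body, and the printed fields are illustrative, while client.proxy() and getCellInfo() appear in the updated tests later in this diff:

    import synapse.telepath as s_telepath

    async def onlink(proxy, urlinfo):
        # invoked each time the client establishes a connection; urlinfo is the
        # parsed URL info and its structure may change between Synapse versions.
        print('connected to', urlinfo.get('host'))

    async def main():
        client = await s_telepath.open('aha://cortex...', onlink=onlink)
        try:
            # ClientV2 is meant to be used by awaiting a proxy, not by calling
            # telepath APIs directly on the client object.
            proxy = await client.proxy(timeout=10)
            print(await proxy.getCellInfo())
        finally:
            await client.fini()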
@@ -501,6 +500,7 @@ class GenrMethod(Method):
class Pipeline(s_base.Base):

    async def __anit__(self, proxy, genr, name=None):
+       s_common.deprecated('Telepath.Pipeline', curv='2.167.0')

        await s_base.Base.__anit__(self)

@@ -832,7 +832,7 @@ class Proxy(s_base.Base):

        mesg = await link.rx()
        if mesg is None:
-           raise s_exc.LinkShutDown(mesg=
+           raise s_exc.LinkShutDown(mesg='Remote peer disconnected')

        if mesg[0] != 't2:yield': # pragma: no cover
            info = 'Telepath protocol violation: unexpected message received'

@@ -869,6 +869,8 @@ class Proxy(s_base.Base):
        if self.sess is not None:
            return await self.taskv2(todo, name=name)

+       s_common.deprecated('Telepath task with no session', curv='2.166.0')
+
        task = Task()

        mesg = ('task:init', {

@@ -980,21 +982,39 @@ class Proxy(s_base.Base):
            setattr(self, name, meth)
        return meth

-class
+class ClientV2(s_base.Base):
    '''
    A telepath client which:
    * connects to multiple services
    * distributes API calls across them
    * receives topology updates from AHA
+
+   NOTE: This must co-exist with Client until we eliminate uses that
+         attempt to call telepath APIs directly from the Client rather
+         than awaiting a proxy()
    '''
-   async def __anit__(self,
+   async def __anit__(self, urlinfo, onlink=None):
+
        await s_base.Base.__anit__(self)
+
+       # some ugly stuff in order to be backward compatible...
+       if not isinstance(urlinfo, (list, tuple)):
+           urlinfo = (urlinfo,)
+
+       urlinfo = [chopurl(u) for u in urlinfo]
+
+       self.aha = None
+
        self.clients = {}
        self.proxies = set()

-       self.
-
-       self.
+       self.poolname = None
+
+       self.onlink = onlink
+
+       self.booturls = urlinfo
+       self.bootdeque = collections.deque()
+       self.bootdeque.extend(self.booturls)

        self.ready = asyncio.Event()
        self.deque = collections.deque()

@@ -1003,7 +1023,6 @@ class Pool(s_base.Base):
            'svc:add': self._onPoolSvcAdd,
            'svc:del': self._onPoolSvcDel,
        }
-       self.schedCoro(self._toposync())

        async def fini():
            await self._shutDownPool()

@@ -1012,8 +1031,63 @@ class Pool(s_base.Base):

        self.onfini(fini)

+       self.schedCoro(self._initBootProxy())
+
+   def getNextBootUrl(self):
+       if not self.bootdeque:
+           self.bootdeque.extend(self.booturls)
+       return self.bootdeque.popleft()
+
+   async def _initBootProxy(self):
+
+       lastlog = 0.0
+       while not self.isfini:
+
+           urlinfo = self.getNextBootUrl()
+
+           try:
+
+               if urlinfo.get('scheme') == 'aha':
+
+                   self.aha, svcinfo = await _getAhaSvc(urlinfo)
+
+                   # if the service is a pool, enter pool mode and fire
+                   # the topography sync task to manage pool members.
+                   services = svcinfo.get('services')
+                   if services is not None:
+                       # we are an AHA pool!
+                       if self.poolname is None:
+                           self.poolname = svcinfo.get('name')
+                       self.schedCoro(self._toposync())
+                       return
+
+               # regular telepath client behavior
+               proxy = await openinfo(urlinfo)
+               await self._onPoolLink(proxy, urlinfo)
+
+               async def reconnect():
+                   if not self.isfini:
+                       self.schedCoro(self._initBootProxy())
+
+               proxy.onfini(reconnect)
+               return
+
+           except Exception as e:
+
+               now = time.monotonic()
+               if now > lastlog + 60.0: # don't logspam the disconnect message more than 1/min
+                   url = s_urlhelp.sanitizeUrl(zipurl(urlinfo))
+                   logger.exception(f'telepath clientv2 ({url}) encountered an error: {e}')
+                   lastlog = now
+
+               retrysleep = float(urlinfo.get('retrysleep', 0.2))
+               await self.waitfini(timeout=retrysleep)
+
+   async def waitready(self, timeout=None):
+       await s_common.wait_for(self.ready.wait(), timeout=timeout)
+
    def size(self):
-       return len(self.
+       return len(self.proxies)

    async def _onPoolSvcAdd(self, mesg):
        svcname = mesg[1].get('name')

@@ -1022,7 +1096,7 @@ class Pool(s_base.Base):
            await oldc.fini()

        urlinfo = {'scheme': 'aha', 'host': svcname, 'path': ''}
-       self.clients[svcname] = await
+       self.clients[svcname] = await ClientV2.anit(urlinfo, onlink=self._onPoolLink)
        await self.fire('svc:add', **mesg[1])

    async def _onPoolSvcDel(self, mesg):

@@ -1033,10 +1107,11 @@ class Pool(s_base.Base):
        self.deque.clear()
        await self.fire('svc:del', **mesg[1])

-   async def _onPoolLink(self, proxy):
+   async def _onPoolLink(self, proxy, urlinfo):

        async def onfini():
-           self.proxies
+           if proxy in self.proxies:
+               self.proxies.remove(proxy)
            if proxy in self.deque:
                self.deque.remove(proxy)
            if not len(self.proxies):

@@ -1046,13 +1121,23 @@ class Pool(s_base.Base):
        self.proxies.add(proxy)
        self.ready.set()

+       if self.onlink is not None:
+           try:
+               await self.onlink(proxy, urlinfo)
+           except Exception as e:
+               logger.exception(f'onlink: {self.onlink}')
+
    async def _shutDownPool(self):
        # when we reconnect to our AHA service, we need to dump the current
        # topology state and gather it again.
-       for client in self.clients.values():
+       for client in list(self.clients.values()):
            await client.fini()

+       for proxy in list(self.proxies):
+           await proxy.fini()
+
        self.deque.clear()
+       self.ready.clear()
        self.clients.clear()
        self.proxies.clear()

@@ -1065,16 +1150,14 @@ class Pool(s_base.Base):

        while not self.isfini:

-           poolname = self.ahasvc.get('name')
-
            try:
                ahaproxy = await self.aha.proxy()

                await reset()

-               async for mesg in ahaproxy.iterPoolTopo(poolname):
+               async for mesg in ahaproxy.iterPoolTopo(self.poolname):
                    hand = self.mesghands.get(mesg[0])
-                   if hand is None:
+                   if hand is None: # pragma: no cover
                        logger.warning(f'Unknown AHA pool topography message: {mesg}')
                        continue

@@ -1092,7 +1175,7 @@ class Pool(s_base.Base):

        await self.ready.wait()

-       if self.isfini:
+       if self.isfini: # pragma: no cover
            raise s_exc.IsFini()

        if not self.deque:
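When the boot URL resolves to an AHA service pool, ClientV2 records the pool name and schedules _toposync() to track pool members; otherwise it acts as a regular reconnecting client. A small sketch of pool-mode usage, assuming an AHA pool named pool00 already exists (this mirrors the updated test_lib_aha test later in this diff):

    import synapse.telepath as s_telepath

    async def usepool():
        pool = await s_telepath.open('aha://pool00...')

        # waitready() blocks until at least one pool member has connected;
        # size() reports the number of live member proxies.
        await pool.waitready(timeout=12)
        print('pool members:', pool.size())

        proxy = await pool.proxy(timeout=12)
        print(await proxy.getCellInfo())

        # fini() tears down the member clients and their proxies as well.
        await pool.fini()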
synapse/tests/test_cortex.py
CHANGED
@@ -4564,10 +4564,9 @@ class CortexBasicTest(s_t_utils.SynTest):
            await core1.addFeedData('syn.nodes', data)
            self.len(1, await core1.nodes('test:int=8 -#test.12345'))

-           # This tag does match regex
            data = [(('test:int', 8), {'tags': {'test.1234': (None, None)}})]
            await core1.addFeedData('syn.nodes', data)
-           self.len(0, await core1.nodes('test:int=8 -#
+           self.len(0, await core1.nodes('test:int=8 -#newtag.1234'))

            core1.view.layers[0].readonly = True
            await self.asyncraises(s_exc.IsReadOnly, core1.addFeedData('syn.nodes', data))

@@ -6338,7 +6337,7 @@ class CortexBasicTest(s_t_utils.SynTest):
        async def action():
            await asyncio.sleep(0.1)
            await core.callStorm('return($lib.view.get().fork())')
-           await core.callStorm('return($lib.cron.add(query="{
+           await core.callStorm('return($lib.cron.add(query="{meta:note=*}", hourly=30).pack())')
            tdef = {'cond': 'node:add', 'storm': '[test:str="foobar"]', 'form': 'test:int'}
            opts = {'vars': {'tdef': tdef}}
            trig = await core.callStorm('return($lib.trigger.add($tdef))', opts=opts)

@@ -7008,7 +7007,7 @@ class CortexBasicTest(s_t_utils.SynTest):
            # clear out the #cno.cve tags and test prune behavior.
            await core.nodes('#cno.cve [ -#cno.cve ]')

-           await core.nodes('[ inet:ipv4=1.2.3.4 +#cno.cve.2021.12345.foo +#cno.cve.2021.55555 ]')
+           await core.nodes('[ inet:ipv4=1.2.3.4 +#cno.cve.2021.12345.foo +#cno.cve.2021.55555.bar ]')

            await core.nodes('$lib.model.tags.set(cno.cve, prune, (2))')

@@ -7016,6 +7015,10 @@ class CortexBasicTest(s_t_utils.SynTest):
            nodes = await core.nodes('[ inet:ipv4=1.2.3.4 -#cno.cve.2021.55555 ]')
            self.sorteq(('cno', 'cno.cve', 'cno.cve.2021', 'cno.cve.2021.12345', 'cno.cve.2021.12345.foo'), [t[0] for t in nodes[0].getTags()])

+           # double delete shouldn't prune
+           nodes = await core.nodes('[ inet:ipv4=1.2.3.4 -#cno.cve.2021.55555 ]')
+           self.sorteq(('cno', 'cno.cve', 'cno.cve.2021', 'cno.cve.2021.12345', 'cno.cve.2021.12345.foo'), [t[0] for t in nodes[0].getTags()])
+
            # test that the pruning behavior stops at the correct level
            nodes = await core.nodes('[ inet:ipv4=1.2.3.4 -#cno.cve.2021.12345.foo ]')
            self.sorteq(('cno', 'cno.cve', 'cno.cve.2021', 'cno.cve.2021.12345'), [t[0] for t in nodes[0].getTags()])

@@ -7809,6 +7812,9 @@ class CortexBasicTest(s_t_utils.SynTest):
            self.stormHasNoWarnErr(msgs)
            self.stormIsInPrint('AHA service (01.core...) added to service pool (pool00.loop.vertex.link)', msgs)

+           msgs = await core00.stormlist('cortex.storm.pool.set newp')
+           self.stormIsInErr(':// not found in [newp]', msgs)
+
            msgs = await core00.stormlist('cortex.storm.pool.set --connection-timeout 1 --sync-timeout 1 aha://pool00...')
            self.stormHasNoWarnErr(msgs)
            self.stormIsInPrint('Storm pool configuration set.', msgs)

@@ -7817,6 +7823,8 @@ class CortexBasicTest(s_t_utils.SynTest):

            core00 = await base.enter_context(self.getTestCore(dirn=dirn00))

+           await core00.stormpool.waitready(timeout=12)
+
            with self.getLoggerStream('synapse') as stream:
                msgs = await alist(core00.storm('inet:asn=0'))
                self.len(1, [m for m in msgs if m[0] == 'node'])

@@ -7906,28 +7914,28 @@ class CortexBasicTest(s_t_utils.SynTest):

                stream.seek(0)
                data = stream.read()
-               self.isin('
+               self.isin('Storm query mirror pool is empty, running query locally.', data)

            with self.getLoggerStream('synapse') as stream:
                self.true(await core00.callStorm('inet:asn=0 return($lib.true)'))

                stream.seek(0)
                data = stream.read()
-               self.isin('
+               self.isin('Storm query mirror pool is empty, running query locally.', data)

            with self.getLoggerStream('synapse') as stream:
                self.len(1, await alist(core00.exportStorm('inet:asn=0')))

                stream.seek(0)
                data = stream.read()
-               self.isin('
+               self.isin('Storm query mirror pool is empty, running query locally.', data)

            with self.getLoggerStream('synapse') as stream:
                self.eq(1, await core00.count('inet:asn=0'))

                stream.seek(0)
                data = stream.read()
-               self.isin('
+               self.isin('Storm query mirror pool is empty, running query locally.', data)

            core01 = await base.enter_context(self.getTestCore(dirn=dirn01))
            await core01.promote(graceful=True)

@@ -7984,3 +7992,66 @@ class CortexBasicTest(s_t_utils.SynTest):

        msgs = await alist(core01.storm('inet:asn=0', opts={'mirror': False}))
        self.len(1, [m for m in msgs if m[0] == 'node'])
+
+   async def test_cortex_authgate(self):
+       # TODO - Remove this in 3.0.0
+       with self.getTestDir() as dirn:
+
+           async with self.getTestCore(dirn=dirn) as core:  # type: s_cortex.Cortex
+
+               unfo = await core.addUser('lowuser')
+               lowuser = unfo.get('iden')
+
+               msgs = await core.stormlist('auth.user.addrule lowuser --gate cortex node')
+               self.stormIsInWarn('Adding rule on the "cortex" authgate. This authgate is not used', msgs)
+               msgs = await core.stormlist('auth.role.addrule all --gate cortex hehe')
+               self.stormIsInWarn('Adding rule on the "cortex" authgate. This authgate is not used', msgs)
+
+               aslow = {'user': lowuser}
+
+               # The cortex authgate does nothing
+               with self.raises(s_exc.AuthDeny) as cm:
+                   await core.nodes('[test:str=hello]', opts=aslow)
+
+           with self.getAsyncLoggerStream('synapse.cortex') as stream:
+               async with self.getTestCore(dirn=dirn) as core:  # type: s_cortex.Cortex
+                   # The cortex authgate still does nothing
+                   with self.raises(s_exc.AuthDeny) as cm:
+                       await core.nodes('[test:str=hello]', opts=aslow)
+           stream.seek(0)
+           buf = stream.read()
+           self.isin('(lowuser) has a rule on the "cortex" authgate', buf)
+           self.isin('(all) has a rule on the "cortex" authgate', buf)
+
+   async def test_cortex_check_nexus_init(self):
+       # This test is a simple safety net for making sure no nexus events
+       # happen before the nexus subsystem is initialized (initNexusSubsystem).
+       # It's possible for code which calls nexus APIs to run but not do
+       # anything which wouldn't be caught here. I don't think there's a good
+       # way to check for that condition though.
+
+       class Cortex(s_cortex.Cortex):
+           async def initServiceStorage(self):
+               self._test_pre_service_storage_index = await self.nexsroot.index()
+               ret = await super().initServiceStorage()
+               self._test_post_service_storage_index = await self.nexsroot.index()
+               return ret
+
+           async def initNexusSubsystem(self):
+               self._test_pre_nexus_index = await self.nexsroot.index()
+               ret = await super().initNexusSubsystem()
+               self._test_post_nexus_index = await self.nexsroot.index()
+               return ret
+
+       conf = {
+           'layer:lmdb:map_async': True,
+           'nexslog:en': True,
+           'layers:logedits': True,
+       }
+
+       with self.getTestDir() as dirn:
+           async with await Cortex.anit(dirn, conf=conf) as core:
+               offs = core._test_pre_service_storage_index
+               self.eq(core._test_post_service_storage_index, offs)
+               self.eq(core._test_pre_nexus_index, offs)
+               self.ge(core._test_post_nexus_index, core._test_pre_nexus_index)
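The storm-pool portion of this test drives the new mirror-pool configuration entirely through Storm commands. A minimal sketch of the same calls outside the test harness, assuming core00 is a Cortex with the stormlist() helper and that the AHA pool name is illustrative:

    # an argument without a telepath '://' scheme is rejected...
    msgs = await core00.stormlist('cortex.storm.pool.set newp')

    # ...while an AHA pool URL (with optional timeouts) configures the pool.
    msgs = await core00.stormlist(
        'cortex.storm.pool.set --connection-timeout 1 --sync-timeout 1 aha://pool00...')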
synapse/tests/test_datamodel.py
CHANGED
@@ -41,6 +41,28 @@ class DeprecatedModel(s_module.CoreModule):

class DataModelTest(s_t_utils.SynTest):

+   async def test_datamodel_basics(self):
+       async with self.getTestCore() as core:
+           core.model.addType('woot:one', 'guid', {}, {
+               'display': {
+                   'columns': (
+                       {'type': 'newp', 'opts': {}},
+                   ),
+               },
+           })
+           with self.raises(s_exc.BadFormDef):
+               core.model.addForm('woot:one', {}, ())
+
+           core.model.addType('woot:two', 'guid', {}, {
+               'display': {
+                   'columns': (
+                       {'type': 'prop', 'opts': {'name': 'hehe'}},
+                   ),
+               },
+           })
+           with self.raises(s_exc.BadFormDef):
+               core.model.addForm('woot:two', {}, ())
+
    async def test_datamodel_formname(self):
        modl = s_datamodel.Model()
        mods = (
synapse/tests/test_lib_agenda.py
CHANGED
@@ -361,10 +361,13 @@ class AgendaTest(s_t_utils.SynTest):

            # schedule a query to run every Wednesday and Friday at 10:15am
            cdef = {'creator': visi.iden, 'iden': s_common.guid(), 'storm': '$lib.queue.gen(visi).put(bar)',
+                   'pool': True,
                    'reqs': {s_tu.HOUR: 10, s_tu.MINUTE: 15},
                    'incunit': s_agenda.TimeUnit.DAYOFWEEK,
                    'incvals': (2, 4)}
            adef = await agenda.add(cdef)
+
+           self.true(adef['pool'])
            guid = adef.get('iden')

            self.len(1, agenda.apptheap)

@@ -634,7 +637,7 @@ class AgendaTest(s_t_utils.SynTest):
            nodes = await core.nodes('test:int=97', opts={'view': newview})
            self.len(0, nodes)

-   async def
+   async def test_agenda_edit(self):

        async with self.getTestCore() as core:

@@ -645,8 +648,12 @@ class AgendaTest(s_t_utils.SynTest):
            self.stormHasNoWarnErr(msgs)

            cdef = await core.callStorm('for $cron in $lib.cron.list() { return($cron) }')
+           self.false(cdef['pool'])
            self.eq(cdef['creator'], core.auth.rootuser.iden)

+           cdef = await core.callStorm('for $cron in $lib.cron.list() { $cron.set(pool, (true)) return($cron) }')
+           self.true(cdef['pool'])
+
            opts = {'vars': {'lowuser': lowuser}}
            cdef = await core.callStorm('for $cron in $lib.cron.list() { return($cron.set(creator, $lowuser)) }',
                                        opts=opts)
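The agenda changes add a boolean pool flag to cron definitions, defaulting to false. A minimal sketch of flipping it from Storm, assuming an existing Cortex object named core as in the test above; the flag appears to mark the job as eligible to run via the Storm mirror pool:

    # enable the new per-cron pool flag and confirm it round-trips
    query = 'for $cron in $lib.cron.list() { $cron.set(pool, (true)) return($cron) }'
    cdef = await core.callStorm(query)
    assert cdef['pool']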
synapse/tests/test_lib_aha.py
CHANGED
@@ -6,6 +6,7 @@ from unittest import mock
import synapse.exc as s_exc
import synapse.axon as s_axon
import synapse.common as s_common
+import synapse.cortex as s_cortex
import synapse.telepath as s_telepath

import synapse.lib.aha as s_aha

@@ -186,8 +187,8 @@ class AhaTest(s_test.SynTest):
            self.nn(await proxy.getCellIden())

        with self.raises(s_exc.BadArg):
-           await cryo.ahaclient.
-           await
+           _proxy = await cryo.ahaclient.proxy(timeout=2)
+           await _proxy.modAhaSvcInfo('cryo.mynet', {'newp': 'newp'})

        async with await s_telepath.openurl('aha://root:secret@0.cryo.mynet') as proxy:
            self.nn(await proxy.getCellIden())

@@ -1146,8 +1147,6 @@ class AhaTest(s_test.SynTest):

        async with self.getTestAhaProv() as aha:

-           import synapse.cortex as s_cortex
-
            async with await s_base.Base.anit() as base:

                with self.getTestDir() as dirn:

@@ -1169,13 +1168,24 @@ class AhaTest(s_test.SynTest):
                    self.stormHasNoWarnErr(msgs)
                    self.stormIsInPrint('Created AHA service pool: pool00.loop.vertex.link', msgs)

-
-
+                   # Pool has no members....
+                   pool = await s_telepath.open('aha://pool00...')
+                   self.eq(0, pool.size())
+                   waiter = pool.waiter(0, 'svc:add')

                    msgs = await core00.stormlist('aha.pool.svc.add pool00... 00...')
                    self.stormHasNoWarnErr(msgs)
                    self.stormIsInPrint('AHA service (00...) added to service pool (pool00.loop.vertex.link)', msgs)

+                   self.len(1, await waiter.wait(timeout=12))
+                   prox = await pool.proxy(timeout=12)
+                   info = await prox.getCellInfo()
+                   self.eq('00', info.get('cell').get('aha').get('name'))
+                   self.eq(1, pool.size())
+                   await pool.fini()
+                   self.eq(0, pool.size())
+                   self.true(prox.isfini)
+
                    poolinfo = await aha.getAhaPool('pool00...')
                    self.len(1, poolinfo['services'])

@@ -1184,6 +1194,9 @@ class AhaTest(s_test.SynTest):
                    self.stormIsInPrint(' 00.loop.vertex.link', msgs)
                    self.stormIsInPrint('1 pools', msgs)

+                   msgs = await core00.stormlist('$lib.print($lib.aha.pool.get(pool00.loop.vertex.link))')
+                   self.stormIsInPrint('aha:pool: pool00.loop.vertex.link', msgs)
+
                    async with await s_telepath.open('aha://pool00...') as pool:

                        replay = s_common.envbool('SYNDEV_NEXUS_REPLAY')