synapse 2.171.0__py311-none-any.whl → 2.172.0__py311-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- synapse/common.py +20 -0
- synapse/cortex.py +86 -4
- synapse/lib/agenda.py +13 -7
- synapse/lib/ast.py +9 -8
- synapse/lib/cache.py +2 -2
- synapse/lib/cell.py +5 -0
- synapse/lib/coro.py +12 -0
- synapse/lib/layer.py +124 -84
- synapse/lib/lmdbslab.py +17 -10
- synapse/lib/node.py +1 -1
- synapse/lib/slabseqn.py +11 -5
- synapse/lib/storm.py +7 -71
- synapse/lib/stormhttp.py +1 -1
- synapse/lib/stormlib/auth.py +19 -0
- synapse/lib/stormlib/cell.py +42 -4
- synapse/lib/stormlib/compression.py +6 -6
- synapse/lib/stormlib/env.py +50 -0
- synapse/lib/stormlib/gen.py +1 -1
- synapse/lib/stormlib/model.py +1 -1
- synapse/lib/stormtypes.py +35 -11
- synapse/lib/types.py +6 -6
- synapse/lib/version.py +2 -2
- synapse/lib/view.py +6 -12
- synapse/models/base.py +13 -0
- synapse/models/biz.py +14 -0
- synapse/models/economic.py +3 -0
- synapse/models/inet.py +474 -4
- synapse/models/infotech.py +163 -22
- synapse/models/orgs.py +17 -0
- synapse/models/risk.py +15 -1
- synapse/models/transport.py +1 -1
- synapse/tests/test_common.py +15 -0
- synapse/tests/test_lib_ast.py +2 -1
- synapse/tests/test_lib_layer.py +168 -59
- synapse/tests/test_lib_lmdbslab.py +13 -0
- synapse/tests/test_lib_stormlib_auth.py +22 -0
- synapse/tests/test_lib_stormlib_cell.py +47 -0
- synapse/tests/test_lib_stormlib_env.py +25 -0
- synapse/tests/test_lib_view.py +9 -9
- synapse/tests/test_model_base.py +5 -3
- synapse/tests/test_model_economic.py +4 -0
- synapse/tests/test_model_inet.py +405 -1
- synapse/tests/test_model_infotech.py +135 -3
- synapse/tests/test_model_orgs.py +6 -0
- synapse/tests/test_model_risk.py +8 -0
- synapse/tests/test_tools_storm.py +46 -8
- synapse/tools/storm.py +14 -6
- {synapse-2.171.0.dist-info → synapse-2.172.0.dist-info}/METADATA +1 -1
- {synapse-2.171.0.dist-info → synapse-2.172.0.dist-info}/RECORD +52 -50
- {synapse-2.171.0.dist-info → synapse-2.172.0.dist-info}/WHEEL +1 -1
- {synapse-2.171.0.dist-info → synapse-2.172.0.dist-info}/LICENSE +0 -0
- {synapse-2.171.0.dist-info → synapse-2.172.0.dist-info}/top_level.txt +0 -0
synapse/lib/slabseqn.py
CHANGED
@@ -169,7 +169,7 @@ class SlabSeqn:

         return s_common.int64un(byts) + 1

-    def iter(self, offs):
+    def iter(self, offs, reverse=False):
         '''
         Iterate over items in a sequence from a given offset.

@@ -180,10 +180,16 @@ class SlabSeqn:
             (indx, valu): The index and valu of the item.
         '''
         startkey = s_common.int64en(offs)
-        for lkey, lval in self.slab.scanByRange(startkey, db=self.db):
-            offs = s_common.int64un(lkey)
-            valu = s_msgpack.un(lval)
-            yield offs, valu
+        if reverse:
+            for lkey, lval in self.slab.scanByRangeBack(startkey, db=self.db):
+                offs = s_common.int64un(lkey)
+                valu = s_msgpack.un(lval)
+                yield offs, valu
+        else:
+            for lkey, lval in self.slab.scanByRange(startkey, db=self.db):
+                offs = s_common.int64un(lkey)
+                valu = s_msgpack.un(lval)
+                yield offs, valu

     async def aiter(self, offs, wait=False, timeout=None):
         '''
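SlabSeqn.iter() gains a reverse keyword which swaps the forward scanByRange() for scanByRangeBack(), walking the LMDB keyspace from the start key toward older offsets. A minimal usage sketch (illustrative only, not part of the diff); passing the maximum 64-bit offset starts the walk at the tail of the sequence, the same trick the Layer.edits() change later in this diff uses:

# seqn is assumed to be an existing synapse.lib.slabseqn.SlabSeqn instance.
# Starting at the max 64-bit key means "begin at the newest entry".
for offs, valu in seqn.iter(0xffffffffffffffff, reverse=True):
    print(f'newest entry: {offs} -> {valu}')
    break
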
synapse/lib/storm.py
CHANGED
@@ -2195,92 +2195,28 @@ class Runtime(s_base.Base):
         return self.user.allowed(perms, gateiden=gateiden, default=default)

     def allowedReason(self, perms, gateiden=None, default=None):
-        '''
-        Similar to allowed, but always prefer the default value specified by the caller.
-        Default values are still pulled from permdefs if there is a match there; but still prefer caller default.
-        This results in a ternary response that can be used to know if a rule had a positive/negative or no match.
-        The matching reason metadata is also returned.
-        '''
         if self.asroot:
             return self._admin_reason

-
-        permdef = self.snap.core.getPermDef(perms)
-        if permdef:
-            default = permdef.get('default', default)
-
-        return self.user.getAllowedReason(perms, gateiden=gateiden, default=default)
+        return self.snap.core._propAllowedReason(self.user, perms, gateiden=gateiden, default=default)

     def confirmPropSet(self, prop, layriden=None):
+        if self.asroot:
+            return

         if layriden is None:
             layriden = self.snap.wlyr.iden

-        meta0 = self.allowedReason(prop.setperms[0], gateiden=layriden)
-
-        if meta0.isadmin:
-            return
-
-        allowed0 = meta0.value
-
-        meta1 = self.allowedReason(prop.setperms[1], gateiden=layriden)
-        allowed1 = meta1.value
-
-        if allowed0:
-            if allowed1:
-                return
-            elif allowed1 is False:
-                # This is a allow-with-precedence case.
-                # Inspect meta to determine if the rule a0 is more specific than rule a1
-                if len(meta0.rule) >= len(meta1.rule):
-                    return
-                self.user.raisePermDeny(prop.setperms[0], gateiden=layriden)
-            return
-
-        if allowed1:
-            if allowed0 is None:
-                return
-            # allowed0 here is False. This is a deny-with-precedence case.
-            # Inspect meta to determine if the rule a1 is more specific than rule a0
-            if len(meta1.rule) > len(meta0.rule):
-                return
-
-        self.user.raisePermDeny(prop.setperms[0], gateiden=layriden)
+        return self.snap.core.confirmPropSet(self.user, prop, layriden=layriden)

     def confirmPropDel(self, prop, layriden=None):
+        if self.asroot:
+            return

         if layriden is None:
             layriden = self.snap.wlyr.iden

-        meta0 = self.allowedReason(prop.delperms[0], gateiden=layriden)
-
-        if meta0.isadmin:
-            return
-
-        allowed0 = meta0.value
-        meta1 = self.allowedReason(prop.delperms[1], gateiden=layriden)
-        allowed1 = meta1.value
-
-        if allowed0:
-            if allowed1:
-                return
-            elif allowed1 is False:
-                # This is a allow-with-precedence case.
-                # Inspect meta to determine if the rule a0 is more specific than rule a1
-                if len(meta0.rule) >= len(meta1.rule):
-                    return
-                self.user.raisePermDeny(prop.delperms[0], gateiden=layriden)
-            return
-
-        if allowed1:
-            if allowed0 is None:
-                return
-            # allowed0 here is False. This is a deny-with-precedence case.
-            # Inspect meta to determine if the rule a1 is more specific than rule a0
-            if len(meta1.rule) > len(meta0.rule):
-                return
-
-        self.user.raisePermDeny(prop.delperms[0], gateiden=layriden)
+        return self.snap.core.confirmPropDel(self.user, prop, layriden=layriden)

     def confirmEasyPerm(self, item, perm, mesg=None):
         if not self.asroot:
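Runtime.confirmPropSet()/confirmPropDel() no longer carry the rule-precedence logic themselves; aside from the asroot short-circuit they now defer to the equivalent Cortex methods. The removed branching encoded a "more specific rule wins" tiebreak between the two permissions of a property. A standalone sketch of that decision, using illustrative names rather than synapse APIs:

def resolve(allowed0, rule0, allowed1, rule1):
    # Sketch of the precedence retired from Runtime.confirmPropSet():
    # allowedN is True/False/None (explicit allow, explicit deny, no match)
    # and ruleN is the matching rule tuple. Returns True to allow.
    if allowed0:
        if allowed1 is False:
            # allow-with-precedence: the allow stands only if its rule is
            # at least as specific as the denying rule.
            return len(rule0) >= len(rule1)
        return True
    if allowed1:
        if allowed0 is None:
            return True
        # deny-with-precedence: the allow must be strictly more specific
        # than the deny to override it.
        return len(rule1) > len(rule0)
    return False
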
synapse/lib/stormhttp.py
CHANGED
@@ -549,7 +549,7 @@ class HttpResp(s_stormtypes.Prim):
             return json.loads(valu.decode(encoding, errors))

         except UnicodeDecodeError as e:
-            raise s_exc.StormRuntimeError(mesg=f'{e}: {repr(valu)}') from None
+            raise s_exc.StormRuntimeError(mesg=f'{e}: {s_common.trimText(repr(valu))}') from None

         except json.JSONDecodeError as e:
             mesg = f'Unable to decode HTTP response as json: {e.args[0]}'
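This is the first of many call sites in this release that wrap repr() output in the new s_common.trimText() helper before embedding it in an exception message, so a very large payload cannot balloon the error text. trimText() itself lives in synapse/common.py (+20 in this diff) and is not shown here; a rough standalone approximation of the idea, with an assumed limit:

def trim_text(text, n=256):
    # Approximation only: the real s_common.trimText() signature and cutoff
    # may differ; this just caps the string and marks the truncation.
    if len(text) <= n:
        return text
    return text[:n - 3] + '...'

mesg = f'boom: {trim_text(repr(b"A" * 1_000_000))}'  # stays ~256 chars
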
synapse/lib/stormlib/auth.py
CHANGED
@@ -959,6 +959,18 @@ class User(s_stormtypes.Prim):
              {'name': 'locked', 'type': 'boolean', 'desc': 'True to lock the user, false to unlock them.', },
          ),
          'returns': {'type': 'null', }}},
+        {'name': 'setArchived', 'desc': '''
+            Set the archived status for a user.
+
+            Notes:
+                Setting a user as "archived" will also lock the user.
+                Removing a users "archived" status will not unlock the user.
+            ''',
+         'type': {'type': 'function', '_funcname': '_methUserSetArchived',
+                  'args': (
+                      {'name': 'archived', 'type': 'boolean', 'desc': 'True to archive the user, false to unarchive them.', },
+                  ),
+                  'returns': {'type': 'null', }}},
         {'name': 'setPasswd', 'desc': 'Set the Users password.',
          'type': {'type': 'function', '_funcname': '_methUserSetPasswd',
                   'args': (
@@ -1113,6 +1125,7 @@ class User(s_stormtypes.Prim):
             'setEmail': self._methUserSetEmail,
             'setLocked': self._methUserSetLocked,
             'setPasswd': self._methUserSetPasswd,
+            'setArchived': self._methUserSetArchived,
             'getAllowedReason': self._methGetAllowedReason,
             'genApiKey': self._methGenApiKey,
             'getApiKey': self._methGetApiKey,
@@ -1286,6 +1299,10 @@ class User(s_stormtypes.Prim):
         self.runt.confirm(('auth', 'user', 'set', 'locked'))
         await self.runt.snap.core.setUserLocked(self.valu, await s_stormtypes.tobool(locked))

+    async def _methUserSetArchived(self, archived):
+        self.runt.confirm(('auth', 'user', 'set', 'archived'))
+        await self.runt.snap.core.setUserArchived(self.valu, await s_stormtypes.tobool(archived))
+
     async def _methGenApiKey(self, name, duration=None):
         name = await s_stormtypes.tostr(name)
         duration = await s_stormtypes.toint(duration, noneok=True)
@@ -1703,6 +1720,8 @@ class LibUsers(s_stormtypes.Lib):
          'desc': 'Controls changing a user\'s email address.'},
         {'perm': ('auth', 'user', 'set', 'locked'), 'gate': 'cortex',
          'desc': 'Controls locking/unlocking a user account.'},
+        {'perm': ('auth', 'user', 'set', 'archived'), 'gate': 'cortex',
+         'desc': 'Controls archiving/unarchiving a user account.'},
         {'perm': ('auth', 'user', 'set', 'passwd'), 'gate': 'cortex',
          'desc': 'Controls changing a user password.'},
         {'perm': ('auth', 'user', 'set', 'rules'), 'gate': 'cortex',
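The setArchived() docstring spells out an asymmetry worth noting: archiving implies locking, but un-archiving does not unlock. A tiny sketch of the documented behaviour (not the actual Auth implementation):

class UserState:
    # Illustrative stand-in for the documented archive/lock coupling.
    def __init__(self):
        self.locked = False
        self.archived = False

    def set_archived(self, archived):
        self.archived = archived
        if archived:
            self.locked = True   # archiving always locks...
        # ...but clearing archived intentionally leaves locked as-is.
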
synapse/lib/stormlib/cell.py
CHANGED
@@ -2,11 +2,16 @@ import asyncio
 import logging

 import synapse.exc as s_exc
-import synapse.lib.
+import synapse.lib.autodoc as s_autodoc
 import synapse.lib.stormtypes as s_stormtypes

 logger = logging.getLogger(__name__)

+def prepHotfixDesc(txt):
+    lines = txt.split('\n')
+    lines = s_autodoc.scrubLines(lines)
+    lines = s_autodoc.ljuster(lines)
+    return lines

 storm_missing_autoadds = '''
 $absoluteOrder = $lib.view.list(deporder=$lib.true)
@@ -64,6 +69,17 @@ for $view in $views {
 }
 '''

+storm_migrate_riskhasvuln = '''
+for $view in $lib.view.list(deporder=$lib.true) {
+    view.exec $view.iden {
+        $layer = $lib.layer.get()
+        for ($buid, $sode) in $layer.getStorNodesByForm(risk:hasvuln) {
+            yield $buid
+            $lib.model.migration.s.riskHasVulnToVulnerable($node)
+        }
+    }
+}
+'''

 hotfixes = (
     ((1, 0, 0), {
@@ -78,6 +94,20 @@ hotfixes = (
         'desc': 'Populate it:sec:cpe:v2_2 properties from existing CPE where the property is not set.',
         'query': storm_missing_cpe22,
     }),
+    ((4, 0, 0), {
+        'desc': '''
+        Create risk:vulnerable nodes from existing risk:hasvuln nodes.
+
+        This hotfix should only be applied after all logic that would create
+        risk:hasvuln nodes has been updated. The hotfix uses the
+        $lib.model.migration.s.riskHasVulnToVulnerable() function,
+        which can be used directly for testing.
+
+        Tags, tag properties, edges, and node data will all be copied
+        to the risk:vulnerable nodes.
+        ''',
+        'query': storm_migrate_riskhasvuln,
+    }),
 )
 runtime_fixes_key = 'cortex:runtime:stormfixes'

@@ -174,7 +204,9 @@ class CellLib(s_stormtypes.Lib):
         assert desc is not None
         assert vars is not None

-
+        title = prepHotfixDesc(desc)[0]
+        await self.runt.printf(f'Applying hotfix {vers} for [{title}]')
+
         try:
             query = await self.runt.getStormQuery(text)
             async with self.runt.getSubRuntime(query, opts={'vars': vars}) as runt:
@@ -206,8 +238,14 @@ class CellLib(s_stormtypes.Lib):
                 continue

             dowork = True
-
-
+
+            desclines = prepHotfixDesc(info.get('desc'))
+            await self.runt.printf(f'Would apply fix {vers} for [{desclines[0]}]')
+            if len(desclines) > 1:
+                for line in desclines[1:]:
+                    await self.runt.printf(f' {line}' if line else '')
+            else:
+                await self.runt.printf('')

         return dowork
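prepHotfixDesc() exists so the multi-line hotfix descriptions above can be reduced to a one-line title for the 'Applying hotfix ...' / 'Would apply fix ...' messages. s_autodoc.scrubLines()/ljuster() are not part of this diff; a rough standard-library approximation of the effect:

import textwrap

def prep_desc(txt):
    # Approximation of prepHotfixDesc(): dedent the triple-quoted description,
    # drop surrounding blank lines, and return the individual lines.
    return textwrap.dedent(txt).strip('\n').split('\n')

desc = '''
    Create risk:vulnerable nodes from existing risk:hasvuln nodes.

    Tags, tag properties, edges, and node data will all be copied.
'''
title = prep_desc(desc)[0]
# title == 'Create risk:vulnerable nodes from existing risk:hasvuln nodes.'
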
synapse/lib/stormlib/compression.py
CHANGED

@@ -53,7 +53,7 @@ class Bzip2Lib(s_stormtypes.Lib):
         try:
             return bz2.compress(valu)
         except Exception as e:
-            mesg = f'Error during bzip2 compression - {str(e)}: {repr(valu)}'
+            mesg = f'Error during bzip2 compression - {str(e)}: {s_common.trimText(repr(valu))}'
             raise s_exc.StormRuntimeError(mesg=mesg) from None

     async def un(self, valu):
@@ -61,7 +61,7 @@ class Bzip2Lib(s_stormtypes.Lib):
         try:
             return bz2.decompress(valu)
         except Exception as e:
-            mesg = f'Error during bzip2 decompression - {str(e)}: {repr(valu)}'
+            mesg = f'Error during bzip2 decompression - {str(e)}: {s_common.trimText(repr(valu))}'
             raise s_exc.StormRuntimeError(mesg=mesg) from None

 @s_stormtypes.registry.registerLib
@@ -110,7 +110,7 @@ class GzipLib(s_stormtypes.Lib):
         try:
             return gzip.compress(valu)
         except Exception as e:
-            mesg = f'Error during gzip compression - {str(e)}: {repr(valu)}'
+            mesg = f'Error during gzip compression - {str(e)}: {s_common.trimText(repr(valu))}'
             raise s_exc.StormRuntimeError(mesg=mesg) from None

     async def un(self, valu):
@@ -118,7 +118,7 @@ class GzipLib(s_stormtypes.Lib):
         try:
             return gzip.decompress(valu)
         except Exception as e:
-            mesg = f'Error during gzip decompression - {str(e)}: {repr(valu)}'
+            mesg = f'Error during gzip decompression - {str(e)}: {s_common.trimText(repr(valu))}'
             raise s_exc.StormRuntimeError(mesg=mesg) from None

 @s_stormtypes.registry.registerLib
@@ -167,7 +167,7 @@ class ZlibLib(s_stormtypes.Lib):
         try:
             return zlib.compress(valu)
         except Exception as e:
-            mesg = f'Error during zlib compression - {str(e)}: {repr(valu)}'
+            mesg = f'Error during zlib compression - {str(e)}: {s_common.trimText(repr(valu))}'
             raise s_exc.StormRuntimeError(mesg=mesg) from None

     async def un(self, valu):
@@ -175,5 +175,5 @@ class ZlibLib(s_stormtypes.Lib):
         try:
             return zlib.decompress(valu)
         except Exception as e:
-            mesg = f'Error during zlib decompression - {str(e)}: {repr(valu)}'
+            mesg = f'Error during zlib decompression - {str(e)}: {s_common.trimText(repr(valu))}'
             raise s_exc.StormRuntimeError(mesg=mesg) from None
synapse/lib/stormlib/env.py
ADDED

@@ -0,0 +1,50 @@
+import os
+
+import synapse.exc as s_exc
+import synapse.common as s_common
+import synapse.lib.stormtypes as s_stormtypes
+
+@s_stormtypes.registry.registerLib
+class LibEnv(s_stormtypes.Lib):
+    '''
+    A Storm Library for accessing environment vars.
+    '''
+    _storm_locals = (
+        {'name': 'get', 'desc': '''
+            Retrieve an environment variable.
+
+            Notes:
+                Environment variables must begin with ``SYN_STORM_ENV_`` in
+                order to be accessed by this API.
+            ''',
+         'type': {
+             'type': 'function', '_funcname': '_libEnvGet',
+             'args': (
+                 {'name': 'name', 'type': 'str', 'desc': 'The name of the environment variable.', },
+                 {'name': 'default', 'type': 'obj', 'default': None,
+                  'desc': 'The value to return if the environment variable is not set.', },
+             ),
+             'returns': {'type': 'str', 'desc': 'The environment variable string.'},
+         },
+        },
+    )
+    _storm_lib_path = ('env',)
+
+    def getObjLocals(self):
+        return {
+            'get': self._libEnvGet,
+        }
+
+    @s_stormtypes.stormfunc(readonly=True)
+    async def _libEnvGet(self, name, default=None):
+
+        self.runt.reqAdmin(mesg='$lib.env.get() requires admin privileges.')
+
+        name = await s_stormtypes.tostr(name)
+        default = await s_stormtypes.toprim(default)
+
+        if not name.startswith('SYN_STORM_ENV_'):
+            mesg = f'Environment variable must start with SYN_STORM_ENV_ : {name}'
+            raise s_exc.BadArg(mesg=mesg)
+
+        return os.getenv(name, default=default)
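$lib.env.get() is deliberately narrow: it requires admin privileges and only resolves names carrying the SYN_STORM_ENV_ prefix, so Storm cannot read arbitrary process environment such as credentials of the hosting cell. Stripped of the Storm runtime plumbing, the gate reduces to this pattern (the variable name below is a hypothetical example):

import os

# Hypothetical deployment step: expose one value to Storm by prefixing it.
os.environ['SYN_STORM_ENV_TAG_PREFIX'] = 'acme'

def storm_env_get(name, default=None):
    # Same prefix gate as LibEnv._libEnvGet(), minus reqAdmin() and coercion.
    if not name.startswith('SYN_STORM_ENV_'):
        raise ValueError(f'Environment variable must start with SYN_STORM_ENV_ : {name}')
    return os.getenv(name, default=default)

assert storm_env_get('SYN_STORM_ENV_TAG_PREFIX') == 'acme'
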
synapse/lib/stormlib/gen.py
CHANGED
@@ -603,7 +603,7 @@ stormcmds = (
     {
         'name': 'gen.it.av.scan.result',
         'descr': '''
-            Lift (or create) the it:av:scan:result node by deconflicting the target and signature
+            Lift (or create) the it:av:scan:result node by deconflicting the target and signature name.

             The scan time and scanner name may also optionally be provided for deconfliction.
synapse/lib/stormlib/model.py
CHANGED
@@ -1090,7 +1090,7 @@ class LibModelMigrations(s_stormtypes.Lib, MigrationEditorMixin):
         self.runt.confirmPropSet(riskvuln.props['vuln'])
         self.runt.confirmPropSet(riskvuln.props['node'])

-        if
+        if seen := n.get('.seen'):
             self.runt.confirmPropSet(riskvuln.props['.seen'])
             props['.seen'] = seen
synapse/lib/stormtypes.py
CHANGED
@@ -4130,7 +4130,7 @@ class LibBase64(Lib):
                 return base64.urlsafe_b64encode(valu).decode('ascii')
             return base64.b64encode(valu).decode('ascii')
         except TypeError as e:
-            mesg = f'Error during base64 encoding - {str(e)}: {repr(valu)}'
+            mesg = f'Error during base64 encoding - {str(e)}: {s_common.trimText(repr(valu))}'
             raise s_exc.StormRuntimeError(mesg=mesg, urlsafe=urlsafe) from None

     @stormfunc(readonly=True)
@@ -4140,7 +4140,7 @@ class LibBase64(Lib):
                 return base64.urlsafe_b64decode(valu)
             return base64.b64decode(valu)
         except binascii.Error as e:
-            mesg = f'Error during base64 decoding - {str(e)}: {repr(valu)}'
+            mesg = f'Error during base64 decoding - {str(e)}: {s_common.trimText(repr(valu))}'
             raise s_exc.StormRuntimeError(mesg=mesg, urlsafe=urlsafe) from None

 @functools.total_ordering
@@ -4488,7 +4488,7 @@ class Str(Prim):
         try:
             return self.valu.encode(encoding, 'surrogatepass')
         except UnicodeEncodeError as e:
-            raise s_exc.StormRuntimeError(mesg=f'{e}: {repr(self.valu)}') from None
+            raise s_exc.StormRuntimeError(mesg=f'{e}: {s_common.trimText(repr(self.valu))}') from None

     @stormfunc(readonly=True)
     async def _methStrSplit(self, text, maxsplit=-1):
@@ -4733,7 +4733,7 @@ class Bytes(Prim):
         try:
             return self.valu.decode(encoding, errors)
         except UnicodeDecodeError as e:
-            raise s_exc.StormRuntimeError(mesg=f'{e}: {repr(self.valu)}') from None
+            raise s_exc.StormRuntimeError(mesg=f'{e}: {s_common.trimText(repr(self.valu))}') from None

     async def _methBunzip(self):
         return bz2.decompress(self.valu)
@@ -4763,7 +4763,7 @@ class Bytes(Prim):
             return json.loads(valu.decode(encoding, errors))

         except UnicodeDecodeError as e:
-            raise s_exc.StormRuntimeError(mesg=f'{e}: {repr(valu)}') from None
+            raise s_exc.StormRuntimeError(mesg=f'{e}: {s_common.trimText(repr(valu))}') from None

         except json.JSONDecodeError as e:
             mesg = f'Unable to decode bytes as json: {e.args[0]}'
@@ -6606,7 +6606,12 @@ class Layer(Prim):
         {'name': 'repr', 'desc': 'Get a string representation of the Layer.',
          'type': {'type': 'function', '_funcname': '_methLayerRepr',
                   'returns': {'type': 'str', 'desc': 'A string that can be printed, representing a Layer.', }}},
-        {'name': 'edits', 'desc': '
+        {'name': 'edits', 'desc': '''
+            Yield (offs, nodeedits) tuples from the given offset.
+
+            Notes:
+                Specifying reverse=(true) disables the wait behavior.
+            ''',
          'type': {'type': 'function', '_funcname': '_methLayerEdits',
                   'args': (
                       {'name': 'offs', 'type': 'int', 'desc': 'Offset to start getting nodeedits from the layer at.',
@@ -6616,9 +6621,14 @@ class Layer(Prim):
                        'otherwise exit the generator when there are no more edits.', },
                       {'name': 'size', 'type': 'int', 'desc': 'The maximum number of nodeedits to yield.',
                        'default': None, },
+                      {'name': 'reverse', 'type': 'boolean', 'desc': 'Yield the edits in reverse order.',
+                       'default': False, },
                   ),
                   'returns': {'name': 'Yields', 'type': 'list',
                               'desc': 'Yields offset, nodeedit tuples from a given offset.', }}},
+        {'name': 'edited', 'desc': 'Return the last time the layer was edited or null if no edits are present.',
+         'type': {'type': 'function', '_funcname': '_methLayerEdited',
+                  'returns': {'type': 'time', 'desc': 'The last time the layer was edited.', }}},
         {'name': 'addPush', 'desc': 'Configure the layer to push edits to a remote layer/feed.',
          'type': {'type': 'function', '_funcname': '_addPush',
                   'args': (
@@ -6901,6 +6911,7 @@ class Layer(Prim):
             'pack': self._methLayerPack,
             'repr': self._methLayerRepr,
             'edits': self._methLayerEdits,
+            'edited': self._methLayerEdited,
             'verify': self.verify,
             'addPush': self._addPush,
             'delPush': self._delPush,
@@ -7181,15 +7192,22 @@ class Layer(Prim):
         return layr.getTagPropValuCount(form, tag, prop.name, prop.type.stortype, norm)

     @stormfunc(readonly=True)
-    async def _methLayerEdits(self, offs=0, wait=True, size=None):
+    async def _methLayerEdits(self, offs=0, wait=True, size=None, reverse=False):
         offs = await toint(offs)
         wait = await tobool(wait)
-
-
-
+        reverse = await tobool(reverse)
+
+        layr = self.runt.snap.core.reqLayer(self.valu.get('iden'))
+
+        self.runt.confirm(('layer', 'edits', 'read'), gateiden=layr.iden)
+
+        if reverse:
+            wait = False
+            if offs == 0:
+                offs = 0xffffffffffffffff

         count = 0
-        async for item in
+        async for item in layr.syncNodeEdits(offs, wait=wait, reverse=reverse):

             yield item

@@ -7197,6 +7215,12 @@ class Layer(Prim):
             if size is not None and size == count:
                 break

+    @stormfunc(readonly=True)
+    async def _methLayerEdited(self):
+        layr = self.runt.snap.core.reqLayer(self.valu.get('iden'))
+        async for offs, edits, meta in layr.syncNodeEdits2(0xffffffffffffffff, wait=False, reverse=True):
+            return meta.get('time')
+
     @stormfunc(readonly=True)
     async def getStorNode(self, nodeid):
         nodeid = await tostr(nodeid)
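Layer.edits() reverses by starting at the maximum 64-bit offset and forcing wait=False, since there is nothing to tail when walking toward older entries; Layer.edited() is then just a single reverse step to pick up the timestamp of the newest edit. The same idea over a plain dict standing in for the nodeedit log (illustrative only):

# offset -> (edits, meta), a stand-in for the layer's nodeedit log.
edit_log = {
    0: ([], {'time': 1700000000000}),
    1: ([], {'time': 1700000005000}),
}

def last_edited(log):
    # Mirror of _methLayerEdited(): first item when scanning backwards,
    # or None when the log is empty.
    for offs in sorted(log, reverse=True):
        return log[offs][1].get('time')
    return None

assert last_edited(edit_log) == 1700000005000
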
synapse/lib/types.py
CHANGED
@@ -507,7 +507,7 @@ class Comp(Type):
         fields = self.opts.get('fields')
         if len(fields) != len(valu):
             raise s_exc.BadTypeValu(name=self.name, fields=fields, numitems=len(valu),
-                                    mesg=f'invalid number of fields given for norming: {repr(valu)}')
+                                    mesg=f'invalid number of fields given for norming: {s_common.trimText(repr(valu))}')

         subs = {}
         adds = []
@@ -1589,7 +1589,7 @@ class Data(Type):
             if self.validator is not None:
                 self.validator(valu)
         except (s_exc.MustBeJsonSafe, s_exc.SchemaViolation) as e:
-            raise s_exc.BadTypeValu(name=self.name, mesg=f'{e}: {repr(valu)}') from None
+            raise s_exc.BadTypeValu(name=self.name, mesg=f'{e}: {s_common.trimText(repr(valu))}') from None
         byts = s_msgpack.en(valu)
         return s_msgpack.un(byts), {}

@@ -1608,7 +1608,7 @@ class NodeProp(Type):

     def _normPyTuple(self, valu):
         if len(valu) != 2:
-            mesg = f'Must be a 2-tuple: {repr(valu)}'
+            mesg = f'Must be a 2-tuple: {s_common.trimText(repr(valu))}'
             raise s_exc.BadTypeValu(name=self.name, numitems=len(valu), mesg=mesg) from None

         propname, propvalu = valu
@@ -1650,7 +1650,7 @@ class Range(Type):

     def _normPyTuple(self, valu):
         if len(valu) != 2:
-            mesg = f'Must be a 2-tuple of type {self.subtype.name}: {repr(valu)}'
+            mesg = f'Must be a 2-tuple of type {self.subtype.name}: {s_common.trimText(repr(valu))}'
             raise s_exc.BadTypeValu(numitems=len(valu), name=self.name, mesg=mesg)

         minv = self.subtype.norm(valu[0])[0]
@@ -2262,11 +2262,11 @@ class Time(IntBase):
         '''

         if not isinstance(vals, (list, tuple)):
-            mesg = f'Must be a 2-tuple: {repr(vals)}'
+            mesg = f'Must be a 2-tuple: {s_common.trimText(repr(vals))}'
             raise s_exc.BadCmprValu(itemtype=type(vals), cmpr='range=', mesg=mesg)

         if len(vals) != 2:
-            mesg = f'Must be a 2-tuple: {repr(vals)}'
+            mesg = f'Must be a 2-tuple: {s_common.trimText(repr(vals))}'
             raise s_exc.BadCmprValu(itemtype=type(vals), cmpr='range=', mesg=mesg)

         tick, tock = self.getTickTock(vals)
synapse/lib/version.py
CHANGED
@@ -223,6 +223,6 @@ def reqVersion(valu, reqver,
 ##############################################################################
 # The following are touched during the release process by bumpversion.
 # Do not modify these directly.
-version = (2, 171, 0)
+version = (2, 172, 0)
 verstring = '.'.join([str(x) for x in version])
-commit = '
+commit = '3e33d8a8cbdfd0f4f6f9a31d664578d817d9ccb8'
synapse/lib/view.py
CHANGED
@@ -1444,11 +1444,7 @@ class View(s_nexus.Pusher): # type: ignore
         if user is None or user.isAdmin() or user.isAdmin(gateiden=parentlayr.iden):
             return

-
-        async for nodeedit in fromlayr.iterLayerNodeEdits():
-            for offs, perm in s_layer.getNodeEditPerms([nodeedit]):
-                self.parent._confirm(user, perm)
-            await asyncio.sleep(0)
+        await fromlayr.confirmLayerEditPerms(user, parentlayr.iden)

     async def wipeAllowed(self, user=None):
         '''
@@ -1457,10 +1453,8 @@ class View(s_nexus.Pusher): # type: ignore
         if user is None or user.isAdmin():
             return

-
-
-            self._confirm(user, perm)
-            await asyncio.sleep(0)
+        layer = self.layers[0]
+        await layer.confirmLayerEditPerms(user, layer.iden, delete=True)

     async def runTagAdd(self, node, tag, valu):

@@ -1564,14 +1558,14 @@ class View(s_nexus.Pusher): # type: ignore
     async def getTrigger(self, iden):
         trig = self.triggers.get(iden)
         if trig is None:
-            raise s_exc.NoSuchIden("Trigger not found")
+            raise s_exc.NoSuchIden(mesg=f"Trigger not found {iden=}", iden=iden)

         return trig

     async def delTrigger(self, iden):
         trig = self.triggers.get(iden)
         if trig is None:
-            raise s_exc.NoSuchIden("Trigger not found")
+            raise s_exc.NoSuchIden(mesg=f"Trigger not found {iden=}", iden=iden)

         return await self._push('trigger:del', iden)

@@ -1592,7 +1586,7 @@ class View(s_nexus.Pusher): # type: ignore
     async def setTriggerInfo(self, iden, name, valu):
         trig = self.triggers.get(iden)
         if trig is None:
-            raise s_exc.NoSuchIden("Trigger not found")
+            raise s_exc.NoSuchIden(mesg=f"Trigger not found {iden=}", iden=iden)
         await trig.set(name, valu)

         await self.core.feedBeholder('trigger:set', {'iden': trig.iden, 'view': trig.view.iden, 'name': name, 'valu': valu}, gates=[trig.iden])