synapse 2.170.0__py311-none-any.whl → 2.172.0__py311-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- synapse/common.py +20 -0
- synapse/cortex.py +98 -6
- synapse/lib/agenda.py +13 -7
- synapse/lib/ast.py +9 -8
- synapse/lib/cache.py +2 -2
- synapse/lib/cell.py +7 -3
- synapse/lib/coro.py +12 -0
- synapse/lib/layer.py +124 -84
- synapse/lib/lmdbslab.py +34 -10
- synapse/lib/node.py +1 -1
- synapse/lib/slabseqn.py +11 -5
- synapse/lib/storm.py +7 -71
- synapse/lib/stormhttp.py +1 -1
- synapse/lib/stormlib/auth.py +19 -0
- synapse/lib/stormlib/cell.py +42 -4
- synapse/lib/stormlib/compression.py +6 -6
- synapse/lib/stormlib/env.py +50 -0
- synapse/lib/stormlib/gen.py +1 -1
- synapse/lib/stormlib/model.py +1 -1
- synapse/lib/stormtypes.py +35 -11
- synapse/lib/types.py +7 -7
- synapse/lib/version.py +2 -2
- synapse/lib/view.py +13 -13
- synapse/models/base.py +13 -0
- synapse/models/biz.py +14 -0
- synapse/models/economic.py +3 -0
- synapse/models/inet.py +474 -4
- synapse/models/infotech.py +163 -22
- synapse/models/orgs.py +17 -0
- synapse/models/risk.py +15 -1
- synapse/models/transport.py +1 -1
- synapse/tests/test_common.py +15 -0
- synapse/tests/test_cortex.py +9 -0
- synapse/tests/test_lib_ast.py +2 -1
- synapse/tests/test_lib_cell.py +1 -1
- synapse/tests/test_lib_layer.py +168 -59
- synapse/tests/test_lib_lmdbslab.py +49 -0
- synapse/tests/test_lib_stormlib_auth.py +22 -0
- synapse/tests/test_lib_stormlib_cell.py +47 -0
- synapse/tests/test_lib_stormlib_env.py +25 -0
- synapse/tests/test_lib_stormtypes.py +12 -1
- synapse/tests/test_lib_types.py +1 -0
- synapse/tests/test_lib_view.py +9 -9
- synapse/tests/test_model_base.py +5 -3
- synapse/tests/test_model_economic.py +4 -0
- synapse/tests/test_model_files.py +1 -0
- synapse/tests/test_model_inet.py +405 -1
- synapse/tests/test_model_infotech.py +135 -3
- synapse/tests/test_model_orgs.py +6 -0
- synapse/tests/test_model_risk.py +8 -0
- synapse/tests/test_tools_storm.py +46 -8
- synapse/tests/utils.py +30 -9
- synapse/tools/storm.py +14 -6
- {synapse-2.170.0.dist-info → synapse-2.172.0.dist-info}/METADATA +1 -1
- {synapse-2.170.0.dist-info → synapse-2.172.0.dist-info}/RECORD +58 -56
- {synapse-2.170.0.dist-info → synapse-2.172.0.dist-info}/WHEEL +1 -1
- {synapse-2.170.0.dist-info → synapse-2.172.0.dist-info}/LICENSE +0 -0
- {synapse-2.170.0.dist-info → synapse-2.172.0.dist-info}/top_level.txt +0 -0
synapse/lib/lmdbslab.py
CHANGED

@@ -1253,18 +1253,18 @@ class Slab(s_base.Base):
         finally:
             self._relXactForReading()
 
-    def scanKeys(self, db=None):
+    def scanKeys(self, db=None, nodup=False):
 
-        with ScanKeys(self, db) as scan:
+        with ScanKeys(self, db, nodup=nodup) as scan:
 
             if not scan.first():
                 return
 
             yield from scan.iternext()
 
-    def scanKeysByPref(self, byts, db=None):
+    def scanKeysByPref(self, byts, db=None, nodup=False):
 
-        with ScanKeys(self, db) as scan:
+        with ScanKeys(self, db, nodup=nodup) as scan:
 
             if not scan.set_range(byts):
                 return
@@ -1283,7 +1283,7 @@ class Slab(s_base.Base):
         '''
         count = 0
         size = len(byts)
-        with ScanKeys(self, db) as scan:
+        with ScanKeys(self, db, nodup=True) as scan:
 
             if not scan.set_range(byts):
                 return 0
@@ -1293,7 +1293,7 @@ class Slab(s_base.Base):
             if lkey[:size] != byts:
                 return count
 
-            count += 1
+            count += scan.curs.count()
             if maxsize is not None and maxsize == count:
                 return count
@@ -1367,6 +1367,10 @@ class Slab(s_base.Base):
                 if not scan.set_range(nextbyts):
                     return
 
+                if scan.atitem[0] == nextbyts:
+                    if not scan.next_key():
+                        return
+
         except OverflowError:
             if not scan.first():
                 return
@@ -1752,14 +1756,21 @@ class ScanKeys(Scan):
     An iterator over the keys of the database. If the database is dupsort, a key with multiple values with be yielded
     once for each value.
     '''
+    def __init__(self, slab, db, nodup=False):
+        Scan.__init__(self, slab, db)
+        self.nodup = nodup
+
     def iterfunc(self):
         if self.dupsort:
-            return Scan.iterfunc(self)
+            if self.nodup:
+                return self.curs.iternext_nodup(keys=True, values=False)
+            else:
+                return Scan.iterfunc(self)
 
         return self.curs.iternext(keys=True, values=False)
 
     def resume(self):
-        if self.dupsort:
+        if self.dupsort and not self.nodup:
            return Scan.resume(self)
 
        return self.curs.set_range(self.atitem)
@@ -1768,13 +1779,13 @@ class ScanKeys(Scan):
        '''
        Returns if the cursor is at the value in atitem
        '''
-        if self.dupsort:
+        if self.dupsort and not self.nodup:
            return Scan.isatitem(self)
 
        return self.atitem == self.curs.key()
 
    def iternext(self):
-        if self.dupsort:
+        if self.dupsort and not self.nodup:
            yield from (item[0] for item in Scan.iternext(self))
            return
 
@@ -1810,6 +1821,19 @@ class ScanBack(Scan):
        self.atitem = next(self.genr)
        return True
 
+    def next_key(self):
+
+        if not self.curs.prev_nodup():
+            return False
+
+        if self.dupsort:
+            self.curs.last_dup()
+
+        self.genr = self.iterfunc()
+        self.atitem = next(self.genr)
+
+        return True
+
    def set_range(self, lkey):
 
        if not self.curs.set_range(lkey):
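The nodup keyword above lets callers of a dupsort database iterate distinct keys instead of seeing a key repeated once per duplicate value. A minimal sketch of the difference (not part of the package, assuming the existing Slab.anit/initdb/put API):

import os
import asyncio
import tempfile

import synapse.lib.lmdbslab as s_lmdbslab

async def main():
    with tempfile.TemporaryDirectory() as dirn:
        path = os.path.join(dirn, 'test.lmdb')
        async with await s_lmdbslab.Slab.anit(path, map_size=32 * 1024 * 1024) as slab:
            db = slab.initdb('stuff', dupsort=True)

            # one key with two duplicate values, plus a second key
            slab.put(b'foo', b'bar', dupdata=True, db=db)
            slab.put(b'foo', b'baz', dupdata=True, db=db)
            slab.put(b'hehe', b'haha', dupdata=True, db=db)

            # default behavior yields b'foo' once per duplicate value
            print(list(slab.scanKeys(db=db)))              # [b'foo', b'foo', b'hehe']

            # nodup=True yields each distinct key a single time
            print(list(slab.scanKeys(db=db, nodup=True)))  # [b'foo', b'hehe']

asyncio.run(main())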
synapse/lib/node.py
CHANGED

@@ -270,7 +270,7 @@ class Node:
 
         if self.form.isrunt:
             if prop.info.get('ro'):
-                mesg = f'Cannot set read-only props on runt nodes: {repr(valu)}'
+                mesg = f'Cannot set read-only props on runt nodes: {s_common.trimText(repr(valu))}'
                 raise s_exc.IsRuntForm(mesg=mesg, form=self.form.full, prop=name)
 
         await self.snap.core.runRuntPropSet(self, prop, valu)
synapse/lib/slabseqn.py
CHANGED

@@ -169,7 +169,7 @@ class SlabSeqn:
 
         return s_common.int64un(byts) + 1
 
-    def iter(self, offs):
+    def iter(self, offs, reverse=False):
         '''
         Iterate over items in a sequence from a given offset.
 
@@ -180,10 +180,16 @@ class SlabSeqn:
             (indx, valu): The index and valu of the item.
         '''
         startkey = s_common.int64en(offs)
-        for lkey, lval in self.slab.scanByRange(startkey, db=self.db):
-            offs = s_common.int64un(lkey)
-            valu = s_msgpack.un(lval)
-            yield offs, valu
+        if reverse:
+            for lkey, lval in self.slab.scanByRangeBack(startkey, db=self.db):
+                offs = s_common.int64un(lkey)
+                valu = s_msgpack.un(lval)
+                yield offs, valu
+        else:
+            for lkey, lval in self.slab.scanByRange(startkey, db=self.db):
+                offs = s_common.int64un(lkey)
+                valu = s_msgpack.un(lval)
+                yield offs, valu
 
     async def aiter(self, offs, wait=False, timeout=None):
         '''
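The new reverse flag walks a sequence from a given offset back toward zero instead of forward. A small sketch of the intent (not part of the package, assuming the SlabSeqn API used elsewhere in synapse):

import synapse.lib.slabseqn as s_slabseqn

# `slab` is assumed to be an open synapse.lib.lmdbslab.Slab instance (see the sketch above)
def demo(slab):
    seqn = s_slabseqn.SlabSeqn(slab, 'events')

    for valu in ('foo', 'bar', 'baz'):
        seqn.add(valu)

    print(list(seqn.iter(0)))                               # [(0, 'foo'), (1, 'bar'), (2, 'baz')]
    print(list(seqn.iter(seqn.index() - 1, reverse=True)))  # [(2, 'baz'), (1, 'bar'), (0, 'foo')]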
synapse/lib/storm.py
CHANGED

@@ -2195,92 +2195,28 @@ class Runtime(s_base.Base):
         return self.user.allowed(perms, gateiden=gateiden, default=default)
 
     def allowedReason(self, perms, gateiden=None, default=None):
-        '''
-        Similar to allowed, but always prefer the default value specified by the caller.
-        Default values are still pulled from permdefs if there is a match there; but still prefer caller default.
-        This results in a ternary response that can be used to know if a rule had a positive/negative or no match.
-        The matching reason metadata is also returned.
-        '''
         if self.asroot:
             return self._admin_reason
 
-
-        permdef = self.snap.core.getPermDef(perms)
-        if permdef:
-            default = permdef.get('default', default)
-
-        return self.user.getAllowedReason(perms, gateiden=gateiden, default=default)
+        return self.snap.core._propAllowedReason(self.user, perms, gateiden=gateiden, default=default)
 
     def confirmPropSet(self, prop, layriden=None):
+        if self.asroot:
+            return
 
         if layriden is None:
             layriden = self.snap.wlyr.iden
 
-        meta0 = self.allowedReason(prop.setperms[0], gateiden=layriden)
-
-        if meta0.isadmin:
-            return
-
-        allowed0 = meta0.value
-
-        meta1 = self.allowedReason(prop.setperms[1], gateiden=layriden)
-        allowed1 = meta1.value
-
-        if allowed0:
-            if allowed1:
-                return
-            elif allowed1 is False:
-                # This is a allow-with-precedence case.
-                # Inspect meta to determine if the rule a0 is more specific than rule a1
-                if len(meta0.rule) >= len(meta1.rule):
-                    return
-                self.user.raisePermDeny(prop.setperms[0], gateiden=layriden)
-            return
-
-        if allowed1:
-            if allowed0 is None:
-                return
-            # allowed0 here is False. This is a deny-with-precedence case.
-            # Inspect meta to determine if the rule a1 is more specific than rule a0
-            if len(meta1.rule) > len(meta0.rule):
-                return
-
-        self.user.raisePermDeny(prop.setperms[0], gateiden=layriden)
+        return self.snap.core.confirmPropSet(self.user, prop, layriden=layriden)
 
     def confirmPropDel(self, prop, layriden=None):
+        if self.asroot:
+            return
 
         if layriden is None:
             layriden = self.snap.wlyr.iden
 
-        meta0 = self.allowedReason(prop.delperms[0], gateiden=layriden)
-
-        if meta0.isadmin:
-            return
-
-        allowed0 = meta0.value
-        meta1 = self.allowedReason(prop.delperms[1], gateiden=layriden)
-        allowed1 = meta1.value
-
-        if allowed0:
-            if allowed1:
-                return
-            elif allowed1 is False:
-                # This is a allow-with-precedence case.
-                # Inspect meta to determine if the rule a0 is more specific than rule a1
-                if len(meta0.rule) >= len(meta1.rule):
-                    return
-                self.user.raisePermDeny(prop.delperms[0], gateiden=layriden)
-            return
-
-        if allowed1:
-            if allowed0 is None:
-                return
-            # allowed0 here is False. This is a deny-with-precedence case.
-            # Inspect meta to determine if the rule a1 is more specific than rule a0
-            if len(meta1.rule) > len(meta0.rule):
-                return
-
-        self.user.raisePermDeny(prop.delperms[0], gateiden=layriden)
+        return self.snap.core.confirmPropDel(self.user, prop, layriden=layriden)
 
     def confirmEasyPerm(self, item, perm, mesg=None):
         if not self.asroot:
synapse/lib/stormhttp.py
CHANGED

@@ -549,7 +549,7 @@ class HttpResp(s_stormtypes.Prim):
             return json.loads(valu.decode(encoding, errors))
 
         except UnicodeDecodeError as e:
-            raise s_exc.StormRuntimeError(mesg=f'{e}: {repr(valu)}') from None
+            raise s_exc.StormRuntimeError(mesg=f'{e}: {s_common.trimText(repr(valu))}') from None
 
         except json.JSONDecodeError as e:
             mesg = f'Unable to decode HTTP response as json: {e.args[0]}'
synapse/lib/stormlib/auth.py
CHANGED

@@ -959,6 +959,18 @@ class User(s_stormtypes.Prim):
                      {'name': 'locked', 'type': 'boolean', 'desc': 'True to lock the user, false to unlock them.', },
                  ),
                  'returns': {'type': 'null', }}},
+        {'name': 'setArchived', 'desc': '''
+            Set the archived status for a user.
+
+            Notes:
+                Setting a user as "archived" will also lock the user.
+                Removing a users "archived" status will not unlock the user.
+            ''',
+         'type': {'type': 'function', '_funcname': '_methUserSetArchived',
+                  'args': (
+                      {'name': 'archived', 'type': 'boolean', 'desc': 'True to archive the user, false to unarchive them.', },
+                  ),
+                  'returns': {'type': 'null', }}},
         {'name': 'setPasswd', 'desc': 'Set the Users password.',
          'type': {'type': 'function', '_funcname': '_methUserSetPasswd',
                   'args': (
@@ -1113,6 +1125,7 @@ class User(s_stormtypes.Prim):
             'setEmail': self._methUserSetEmail,
             'setLocked': self._methUserSetLocked,
             'setPasswd': self._methUserSetPasswd,
+            'setArchived': self._methUserSetArchived,
             'getAllowedReason': self._methGetAllowedReason,
             'genApiKey': self._methGenApiKey,
             'getApiKey': self._methGetApiKey,
@@ -1286,6 +1299,10 @@ class User(s_stormtypes.Prim):
         self.runt.confirm(('auth', 'user', 'set', 'locked'))
         await self.runt.snap.core.setUserLocked(self.valu, await s_stormtypes.tobool(locked))
 
+    async def _methUserSetArchived(self, archived):
+        self.runt.confirm(('auth', 'user', 'set', 'archived'))
+        await self.runt.snap.core.setUserArchived(self.valu, await s_stormtypes.tobool(archived))
+
     async def _methGenApiKey(self, name, duration=None):
         name = await s_stormtypes.tostr(name)
         duration = await s_stormtypes.toint(duration, noneok=True)
@@ -1703,6 +1720,8 @@ class LibUsers(s_stormtypes.Lib):
          'desc': 'Controls changing a user\'s email address.'},
         {'perm': ('auth', 'user', 'set', 'locked'), 'gate': 'cortex',
          'desc': 'Controls locking/unlocking a user account.'},
+        {'perm': ('auth', 'user', 'set', 'archived'), 'gate': 'cortex',
+         'desc': 'Controls archiving/unarchiving a user account.'},
         {'perm': ('auth', 'user', 'set', 'passwd'), 'gate': 'cortex',
          'desc': 'Controls changing a user password.'},
         {'perm': ('auth', 'user', 'set', 'rules'), 'gate': 'cortex',
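A hypothetical test-style sketch of the new setArchived() method (not taken from the release's test suite), assuming the standard synapse.tests.utils.SynTest helpers and the storm user APIs shown above:

import synapse.tests.utils as s_test

class ArchivedUserTest(s_test.SynTest):

    async def test_user_set_archived(self):
        async with self.getTestCore() as core:
            await core.callStorm('$lib.auth.users.add(ninja)')
            # archiving a user also locks the account (per the method notes above)
            await core.callStorm('$lib.auth.users.byname(ninja).setArchived($lib.true)')
            udef = await core.callStorm('return($lib.auth.users.byname(ninja).pack())')
            self.true(udef.get('archived'))
            self.true(udef.get('locked'))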
synapse/lib/stormlib/cell.py
CHANGED

@@ -2,11 +2,16 @@ import asyncio
 import logging
 
 import synapse.exc as s_exc
-import synapse.lib.
+import synapse.lib.autodoc as s_autodoc
 import synapse.lib.stormtypes as s_stormtypes
 
 logger = logging.getLogger(__name__)
 
+def prepHotfixDesc(txt):
+    lines = txt.split('\n')
+    lines = s_autodoc.scrubLines(lines)
+    lines = s_autodoc.ljuster(lines)
+    return lines
 
 storm_missing_autoadds = '''
 $absoluteOrder = $lib.view.list(deporder=$lib.true)
@@ -64,6 +69,17 @@ for $view in $views {
 }
 '''
 
+storm_migrate_riskhasvuln = '''
+for $view in $lib.view.list(deporder=$lib.true) {
+    view.exec $view.iden {
+        $layer = $lib.layer.get()
+        for ($buid, $sode) in $layer.getStorNodesByForm(risk:hasvuln) {
+            yield $buid
+            $lib.model.migration.s.riskHasVulnToVulnerable($node)
+        }
+    }
+}
+'''
 
 hotfixes = (
     ((1, 0, 0), {
@@ -78,6 +94,20 @@ hotfixes = (
         'desc': 'Populate it:sec:cpe:v2_2 properties from existing CPE where the property is not set.',
        'query': storm_missing_cpe22,
     }),
+    ((4, 0, 0), {
+        'desc': '''
+        Create risk:vulnerable nodes from existing risk:hasvuln nodes.
+
+        This hotfix should only be applied after all logic that would create
+        risk:hasvuln nodes has been updated. The hotfix uses the
+        $lib.model.migration.s.riskHasVulnToVulnerable() function,
+        which can be used directly for testing.
+
+        Tags, tag properties, edges, and node data will all be copied
+        to the risk:vulnerable nodes.
+        ''',
+        'query': storm_migrate_riskhasvuln,
+    }),
 )
 runtime_fixes_key = 'cortex:runtime:stormfixes'
 
@@ -174,7 +204,9 @@ class CellLib(s_stormtypes.Lib):
         assert desc is not None
         assert vars is not None
 
-
+        title = prepHotfixDesc(desc)[0]
+        await self.runt.printf(f'Applying hotfix {vers} for [{title}]')
+
         try:
             query = await self.runt.getStormQuery(text)
             async with self.runt.getSubRuntime(query, opts={'vars': vars}) as runt:
@@ -206,8 +238,14 @@ class CellLib(s_stormtypes.Lib):
                 continue
 
             dowork = True
-
-
+
+            desclines = prepHotfixDesc(info.get('desc'))
+            await self.runt.printf(f'Would apply fix {vers} for [{desclines[0]}]')
+            if len(desclines) > 1:
+                for line in desclines[1:]:
+                    await self.runt.printf(f' {line}' if line else '')
+            else:
+                await self.runt.printf('')
 
         return dowork
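The new (4, 0, 0) hotfix and the dry-run output above are driven by the existing $lib.cell.hotFixesCheck() and $lib.cell.hotFixesApply() storm APIs. A hedged sketch (not from the package) of how an admin might preview and then apply pending fixes from Python, assuming a Cortex handle with callStorm():

# Hypothetical admin workflow sketch: `core` is a Cortex (or a telepath proxy to one)
# and the calling user is an admin.
async def applyPendingHotfixes(core):
    # hotFixesCheck() prints the "Would apply fix ..." lines added above and
    # returns true when at least one hotfix still needs to run.
    if await core.callStorm('return($lib.cell.hotFixesCheck())'):
        await core.callStorm('$lib.cell.hotFixesApply()')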
synapse/lib/stormlib/compression.py
CHANGED

@@ -53,7 +53,7 @@ class Bzip2Lib(s_stormtypes.Lib):
         try:
             return bz2.compress(valu)
         except Exception as e:
-            mesg = f'Error during bzip2 compression - {str(e)}: {repr(valu)}'
+            mesg = f'Error during bzip2 compression - {str(e)}: {s_common.trimText(repr(valu))}'
             raise s_exc.StormRuntimeError(mesg=mesg) from None
 
     async def un(self, valu):
@@ -61,7 +61,7 @@ class Bzip2Lib(s_stormtypes.Lib):
         try:
             return bz2.decompress(valu)
         except Exception as e:
-            mesg = f'Error during bzip2 decompression - {str(e)}: {repr(valu)}'
+            mesg = f'Error during bzip2 decompression - {str(e)}: {s_common.trimText(repr(valu))}'
             raise s_exc.StormRuntimeError(mesg=mesg) from None
 
 @s_stormtypes.registry.registerLib
@@ -110,7 +110,7 @@ class GzipLib(s_stormtypes.Lib):
         try:
             return gzip.compress(valu)
         except Exception as e:
-            mesg = f'Error during gzip compression - {str(e)}: {repr(valu)}'
+            mesg = f'Error during gzip compression - {str(e)}: {s_common.trimText(repr(valu))}'
             raise s_exc.StormRuntimeError(mesg=mesg) from None
 
     async def un(self, valu):
@@ -118,7 +118,7 @@ class GzipLib(s_stormtypes.Lib):
         try:
             return gzip.decompress(valu)
         except Exception as e:
-            mesg = f'Error during gzip decompression - {str(e)}: {repr(valu)}'
+            mesg = f'Error during gzip decompression - {str(e)}: {s_common.trimText(repr(valu))}'
             raise s_exc.StormRuntimeError(mesg=mesg) from None
 
 @s_stormtypes.registry.registerLib
@@ -167,7 +167,7 @@ class ZlibLib(s_stormtypes.Lib):
         try:
             return zlib.compress(valu)
         except Exception as e:
-            mesg = f'Error during zlib compression - {str(e)}: {repr(valu)}'
+            mesg = f'Error during zlib compression - {str(e)}: {s_common.trimText(repr(valu))}'
             raise s_exc.StormRuntimeError(mesg=mesg) from None
 
     async def un(self, valu):
@@ -175,5 +175,5 @@ class ZlibLib(s_stormtypes.Lib):
         try:
             return zlib.decompress(valu)
         except Exception as e:
-            mesg = f'Error during zlib decompression - {str(e)}: {repr(valu)}'
+            mesg = f'Error during zlib decompression - {str(e)}: {s_common.trimText(repr(valu))}'
             raise s_exc.StormRuntimeError(mesg=mesg) from None
synapse/lib/stormlib/env.py
ADDED

@@ -0,0 +1,50 @@
+import os
+
+import synapse.exc as s_exc
+import synapse.common as s_common
+import synapse.lib.stormtypes as s_stormtypes
+
+@s_stormtypes.registry.registerLib
+class LibEnv(s_stormtypes.Lib):
+    '''
+    A Storm Library for accessing environment vars.
+    '''
+    _storm_locals = (
+        {'name': 'get', 'desc': '''
+            Retrieve an environment variable.
+
+            Notes:
+                Environment variables must begin with ``SYN_STORM_ENV_`` in
+                order to be accessed by this API.
+            ''',
+         'type': {
+             'type': 'function', '_funcname': '_libEnvGet',
+             'args': (
+                 {'name': 'name', 'type': 'str', 'desc': 'The name of the environment variable.', },
+                 {'name': 'default', 'type': 'obj', 'default': None,
+                  'desc': 'The value to return if the environment variable is not set.', },
+             ),
+             'returns': {'type': 'str', 'desc': 'The environment variable string.'},
+         },
+        },
+    )
+    _storm_lib_path = ('env',)
+
+    def getObjLocals(self):
+        return {
+            'get': self._libEnvGet,
+        }
+
+    @s_stormtypes.stormfunc(readonly=True)
+    async def _libEnvGet(self, name, default=None):
+
+        self.runt.reqAdmin(mesg='$lib.env.get() requires admin privileges.')
+
+        name = await s_stormtypes.tostr(name)
+        default = await s_stormtypes.toprim(default)
+
+        if not name.startswith('SYN_STORM_ENV_'):
+            mesg = f'Environment variable must start with SYN_STORM_ENV_ : {name}'
+            raise s_exc.BadArg(mesg=mesg)
+
+        return os.getenv(name, default=default)
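A minimal sketch (not from the package) of the new $lib.env.get() behavior, assuming an admin storm user and a Cortex handle with callStorm():

import os

# Only names prefixed with SYN_STORM_ENV_ may be read, and the call requires admin privileges.
os.environ['SYN_STORM_ENV_DEPLOY'] = 'staging'

async def readDeployEnv(core):
    # returns 'staging'
    valu = await core.callStorm('return($lib.env.get("SYN_STORM_ENV_DEPLOY"))')
    # unset variables fall back to the provided default; names without the prefix raise BadArg
    fallback = await core.callStorm('return($lib.env.get("SYN_STORM_ENV_MISSING", default="dev"))')
    return valu, fallback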
synapse/lib/stormlib/gen.py
CHANGED

@@ -603,7 +603,7 @@ stormcmds = (
     {
         'name': 'gen.it.av.scan.result',
         'descr': '''
-            Lift (or create) the it:av:scan:result node by deconflicting the target and signature
+            Lift (or create) the it:av:scan:result node by deconflicting the target and signature name.
 
             The scan time and scanner name may also optionally be provided for deconfliction.
 
synapse/lib/stormlib/model.py
CHANGED

@@ -1090,7 +1090,7 @@ class LibModelMigrations(s_stormtypes.Lib, MigrationEditorMixin):
             self.runt.confirmPropSet(riskvuln.props['vuln'])
             self.runt.confirmPropSet(riskvuln.props['node'])
 
-            if
+            if seen := n.get('.seen'):
                 self.runt.confirmPropSet(riskvuln.props['.seen'])
                 props['.seen'] = seen
 
synapse/lib/stormtypes.py
CHANGED

@@ -4130,7 +4130,7 @@ class LibBase64(Lib):
                 return base64.urlsafe_b64encode(valu).decode('ascii')
             return base64.b64encode(valu).decode('ascii')
         except TypeError as e:
-            mesg = f'Error during base64 encoding - {str(e)}: {repr(valu)}'
+            mesg = f'Error during base64 encoding - {str(e)}: {s_common.trimText(repr(valu))}'
             raise s_exc.StormRuntimeError(mesg=mesg, urlsafe=urlsafe) from None
 
     @stormfunc(readonly=True)
@@ -4140,7 +4140,7 @@ class LibBase64(Lib):
                 return base64.urlsafe_b64decode(valu)
             return base64.b64decode(valu)
         except binascii.Error as e:
-            mesg = f'Error during base64 decoding - {str(e)}: {repr(valu)}'
+            mesg = f'Error during base64 decoding - {str(e)}: {s_common.trimText(repr(valu))}'
             raise s_exc.StormRuntimeError(mesg=mesg, urlsafe=urlsafe) from None
 
 @functools.total_ordering
@@ -4488,7 +4488,7 @@ class Str(Prim):
         try:
             return self.valu.encode(encoding, 'surrogatepass')
         except UnicodeEncodeError as e:
-            raise s_exc.StormRuntimeError(mesg=f'{e}: {repr(self.valu)}') from None
+            raise s_exc.StormRuntimeError(mesg=f'{e}: {s_common.trimText(repr(self.valu))}') from None
 
     @stormfunc(readonly=True)
     async def _methStrSplit(self, text, maxsplit=-1):
@@ -4733,7 +4733,7 @@ class Bytes(Prim):
         try:
             return self.valu.decode(encoding, errors)
         except UnicodeDecodeError as e:
-            raise s_exc.StormRuntimeError(mesg=f'{e}: {repr(self.valu)}') from None
+            raise s_exc.StormRuntimeError(mesg=f'{e}: {s_common.trimText(repr(self.valu))}') from None
 
     async def _methBunzip(self):
         return bz2.decompress(self.valu)
@@ -4763,7 +4763,7 @@ class Bytes(Prim):
             return json.loads(valu.decode(encoding, errors))
 
         except UnicodeDecodeError as e:
-            raise s_exc.StormRuntimeError(mesg=f'{e}: {repr(valu)}') from None
+            raise s_exc.StormRuntimeError(mesg=f'{e}: {s_common.trimText(repr(valu))}') from None
 
         except json.JSONDecodeError as e:
             mesg = f'Unable to decode bytes as json: {e.args[0]}'
@@ -6606,7 +6606,12 @@ class Layer(Prim):
         {'name': 'repr', 'desc': 'Get a string representation of the Layer.',
          'type': {'type': 'function', '_funcname': '_methLayerRepr',
                   'returns': {'type': 'str', 'desc': 'A string that can be printed, representing a Layer.', }}},
-        {'name': 'edits', 'desc': '
+        {'name': 'edits', 'desc': '''
+            Yield (offs, nodeedits) tuples from the given offset.
+
+            Notes:
+                Specifying reverse=(true) disables the wait behavior.
+            ''',
          'type': {'type': 'function', '_funcname': '_methLayerEdits',
                   'args': (
                       {'name': 'offs', 'type': 'int', 'desc': 'Offset to start getting nodeedits from the layer at.',
@@ -6616,9 +6621,14 @@ class Layer(Prim):
                                'otherwise exit the generator when there are no more edits.', },
                       {'name': 'size', 'type': 'int', 'desc': 'The maximum number of nodeedits to yield.',
                        'default': None, },
+                      {'name': 'reverse', 'type': 'boolean', 'desc': 'Yield the edits in reverse order.',
+                       'default': False, },
                   ),
                   'returns': {'name': 'Yields', 'type': 'list',
                               'desc': 'Yields offset, nodeedit tuples from a given offset.', }}},
+        {'name': 'edited', 'desc': 'Return the last time the layer was edited or null if no edits are present.',
+         'type': {'type': 'function', '_funcname': '_methLayerEdited',
+                  'returns': {'type': 'time', 'desc': 'The last time the layer was edited.', }}},
         {'name': 'addPush', 'desc': 'Configure the layer to push edits to a remote layer/feed.',
          'type': {'type': 'function', '_funcname': '_addPush',
                   'args': (
@@ -6901,6 +6911,7 @@ class Layer(Prim):
             'pack': self._methLayerPack,
             'repr': self._methLayerRepr,
             'edits': self._methLayerEdits,
+            'edited': self._methLayerEdited,
             'verify': self.verify,
             'addPush': self._addPush,
             'delPush': self._delPush,
@@ -7181,15 +7192,22 @@ class Layer(Prim):
         return layr.getTagPropValuCount(form, tag, prop.name, prop.type.stortype, norm)
 
     @stormfunc(readonly=True)
-    async def _methLayerEdits(self, offs=0, wait=True, size=None):
+    async def _methLayerEdits(self, offs=0, wait=True, size=None, reverse=False):
         offs = await toint(offs)
         wait = await tobool(wait)
-        layr = self.runt.snap.core.reqLayer(self.valu.get('iden'))
-
-        self.runt.confirm(('layer', 'edits', 'read'), gateiden=layr.iden)
+        reverse = await tobool(reverse)
+
+        layr = self.runt.snap.core.reqLayer(self.valu.get('iden'))
+
+        self.runt.confirm(('layer', 'edits', 'read'), gateiden=layr.iden)
+
+        if reverse:
+            wait = False
+            if offs == 0:
+                offs = 0xffffffffffffffff
 
         count = 0
-        async for item in layr.syncNodeEdits(offs, wait=wait):
+        async for item in layr.syncNodeEdits(offs, wait=wait, reverse=reverse):
 
             yield item
 
@@ -7197,6 +7215,12 @@ class Layer(Prim):
             if size is not None and size == count:
                 break
 
+    @stormfunc(readonly=True)
+    async def _methLayerEdited(self):
+        layr = self.runt.snap.core.reqLayer(self.valu.get('iden'))
+        async for offs, edits, meta in layr.syncNodeEdits2(0xffffffffffffffff, wait=False, reverse=True):
+            return meta.get('time')
+
     @stormfunc(readonly=True)
     async def getStorNode(self, nodeid):
         nodeid = await tostr(nodeid)