synapse 2.177.0__py311-none-any.whl → 2.179.0__py311-none-any.whl
This diff shows the changes between two publicly released versions of the package, as they appear in their respective public registries, and is provided for informational purposes only.
Potentially problematic release.
This version of synapse might be problematic.
- synapse/cortex.py +170 -31
- synapse/datamodel.py +47 -1
- synapse/exc.py +1 -0
- synapse/lib/aha.py +362 -88
- synapse/lib/ast.py +26 -22
- synapse/lib/base.py +39 -12
- synapse/lib/cell.py +315 -119
- synapse/lib/config.py +15 -11
- synapse/lib/coro.py +27 -0
- synapse/lib/drive.py +551 -0
- synapse/lib/layer.py +0 -5
- synapse/lib/link.py +1 -1
- synapse/lib/lmdbslab.py +3 -3
- synapse/lib/nexus.py +24 -12
- synapse/lib/schemas.py +39 -0
- synapse/lib/snap.py +17 -7
- synapse/lib/storm.py +3 -1
- synapse/lib/stormhttp.py +1 -0
- synapse/lib/stormlib/imap.py +6 -2
- synapse/lib/stormlib/modelext.py +29 -3
- synapse/lib/stormlib/smtp.py +12 -2
- synapse/lib/stormlib/stix.py +40 -17
- synapse/lib/stormlib/vault.py +2 -2
- synapse/lib/stormtypes.py +1 -1
- synapse/lib/types.py +9 -0
- synapse/lib/version.py +2 -2
- synapse/lookup/pe.py +303 -38
- synapse/models/dns.py +24 -1
- synapse/models/geospace.py +4 -1
- synapse/models/infotech.py +26 -1
- synapse/telepath.py +32 -17
- synapse/tests/files/aha/certs/cas/synapse.crt +28 -0
- synapse/tests/files/aha/certs/cas/synapse.key +51 -0
- synapse/tests/files/aha/certs/hosts/00.aha.loop.vertex.link.crt +30 -0
- synapse/tests/files/aha/certs/hosts/00.aha.loop.vertex.link.key +51 -0
- synapse/tests/files/aha/certs/users/root@synapse.crt +29 -0
- synapse/tests/files/aha/certs/users/root@synapse.key +51 -0
- synapse/tests/files/rstorm/testsvc.py +1 -1
- synapse/tests/test_axon.py +1 -1
- synapse/tests/test_cortex.py +67 -60
- synapse/tests/test_lib_agenda.py +3 -3
- synapse/tests/test_lib_aha.py +353 -490
- synapse/tests/test_lib_base.py +20 -0
- synapse/tests/test_lib_cell.py +273 -22
- synapse/tests/test_lib_config.py +4 -3
- synapse/tests/test_lib_coro.py +12 -0
- synapse/tests/test_lib_nexus.py +8 -0
- synapse/tests/test_lib_stormhttp.py +40 -0
- synapse/tests/test_lib_stormlib_aha.py +35 -35
- synapse/tests/test_lib_stormlib_cell.py +4 -15
- synapse/tests/test_lib_stormlib_imap.py +14 -3
- synapse/tests/test_lib_stormlib_modelext.py +55 -3
- synapse/tests/test_lib_stormlib_smtp.py +51 -0
- synapse/tests/test_lib_stormlib_stix.py +15 -0
- synapse/tests/test_lib_stormlib_vault.py +11 -1
- synapse/tests/test_lib_stormtypes.py +5 -0
- synapse/tests/test_lib_types.py +9 -0
- synapse/tests/test_model_dns.py +8 -0
- synapse/tests/test_model_geospace.py +3 -1
- synapse/tests/test_model_infotech.py +47 -0
- synapse/tests/test_model_syn.py +11 -0
- synapse/tests/test_tools_aha.py +78 -101
- synapse/tests/test_utils_stormcov.py +1 -1
- synapse/tests/utils.py +86 -120
- synapse/tools/aha/clone.py +50 -0
- synapse/tools/aha/enroll.py +2 -1
- synapse/tools/backup.py +2 -2
- synapse/tools/changelog.py +31 -1
- {synapse-2.177.0.dist-info → synapse-2.179.0.dist-info}/METADATA +48 -48
- {synapse-2.177.0.dist-info → synapse-2.179.0.dist-info}/RECORD +73 -65
- {synapse-2.177.0.dist-info → synapse-2.179.0.dist-info}/WHEEL +1 -1
- {synapse-2.177.0.dist-info → synapse-2.179.0.dist-info}/LICENSE +0 -0
- {synapse-2.177.0.dist-info → synapse-2.179.0.dist-info}/top_level.txt +0 -0
synapse/lib/config.py
CHANGED
@@ -392,10 +392,16 @@ class Config(c_abc.MutableMapping):
         else:
             return

-    def reqConfValu(self, key):
+    def reqConfValu(self, key): # pragma: no cover
         '''
-
-
+        Deprecated. Use ``req(key)`` API instead.
+        '''
+        s_common.deprecated('Config.reqConfValu(), use req() instead.')
+        return self.req(key)
+
+    def req(self, key):
+        '''
+        Get a configuration value. If that value is not present in the schema or is not set, then raise an exception.

         Args:
             key (str): The key to require.
@@ -403,17 +409,15 @@ class Config(c_abc.MutableMapping):
         Returns:
             The requested value.
         '''
-        # Ensure that the key is in self.json_schema
         if key not in self.json_schema.get('properties', {}):
-            raise s_exc.BadArg(mesg='
-
+            raise s_exc.BadArg(mesg=f'The {key} configuration option is not present in the configuration schema.',
+                               name=key)

-
-        if
-
-            key=key)
+        valu = self.conf.get(key, s_common.novalu)
+        if valu is not s_common.novalu:
+            return valu

-
+        raise s_exc.NeedConfValu(mesg=f'The {key} configuration option is required.', name=key)

     def reqKeyValid(self, key, value):
         '''
synapse/lib/coro.py
CHANGED
@@ -39,6 +39,20 @@ async def agen(item):
         for x in item:
             yield x

+async def chunks(genr, size=100):
+
+    retn = []
+    async for item in genr:
+
+        retn.append(item)
+
+        if len(retn) == size:
+            yield retn
+            retn = []
+
+    if retn:
+        yield retn
+
 async def pause(genr, iterations=10):
     idx = 0

@@ -148,6 +162,19 @@ async def ornot(func, *args, **kwargs):
         return await retn
     return retn

+bgtasks = set()
+def create_task(coro):
+
+    task = asyncio.get_running_loop().create_task(coro)
+    bgtasks.add(task)
+
+    def done(t):
+        bgtasks.remove(t)
+
+    task.add_done_callback(done)
+
+    return task
+
 class GenrHelp:

     def __init__(self, genr):
synapse/lib/drive.py
ADDED
@@ -0,0 +1,551 @@
+import regex
+import asyncio
+
+import synapse.exc as s_exc
+import synapse.common as s_common
+
+import synapse.lib.base as s_base
+import synapse.lib.config as s_config
+import synapse.lib.msgpack as s_msgpack
+import synapse.lib.schemas as s_schemas
+
+nameregex = regex.compile(s_schemas.re_drivename)
+def reqValidName(name):
+    if nameregex.match(name) is None:
+        mesg = f'Name {name} is invalid. It must match: {s_schemas.re_drivename}.'
+        raise s_exc.BadName(mesg=mesg)
+    return name
+
+LKEY_TYPE = b'\x00' # <type> = <schema>
+LKEY_DIRN = b'\x01' # <bidn> <name> = <kid>
+LKEY_INFO = b'\x02' # <bidn> = <info>
+LKEY_DATA = b'\x03' # <bidn> <vers> = <data>
+LKEY_VERS = b'\x04' # <bidn> <vers> = <versinfo>
+LKEY_INFO_BYTYPE = b'\x05' # <type> 00 <bidn> = 01
+
+rootdir = '00000000000000000000000000000000'
+
+def getVersIndx(vers):
+    maji = vers[0].to_bytes(3, 'big')
+    mini = vers[1].to_bytes(3, 'big')
+    pati = vers[2].to_bytes(3, 'big')
+    return maji + mini + pati
+
+class Drive(s_base.Base):
+    '''
+    Drive is a hierarchical storage abstraction which:
+
+    * Provides enveloping which includes meta data for each item:
+        * creator iden / time
+        * updated iden / time / version
+        * number of children
+        * data type for the item
+        * easy perms (enforcement is up to the caller)
+
+    * Enforces schemas for data
+    * Allows storage of historical versions of data
+    * Provides a "path traversal" based API
+    * Provides an iden based API that does not require traversal
+    '''
+    async def __anit__(self, slab, name):
+        await s_base.Base.__anit__(self)
+        self.slab = slab
+        self.dbname = slab.initdb(f'drive:{name}')
+        self.validators = {}
+
+    def getPathNorm(self, path):
+
+        if isinstance(path, str):
+            path = path.strip().strip('/').split('/')
+
+        return [reqValidName(p.strip().lower()) for p in path]
+
+    def getItemInfo(self, iden):
+        return self._getItemInfo(s_common.uhex(iden))
+
+    def _getItemInfo(self, bidn):
+        byts = self.slab.get(LKEY_INFO + bidn, db=self.dbname)
+        if byts is not None:
+            return s_msgpack.un(byts)
+
+    def reqItemInfo(self, iden):
+        return self._reqItemInfo(s_common.uhex(iden))
+
+    def _reqItemInfo(self, bidn):
+        info = self._getItemInfo(bidn)
+        if info is not None:
+            return info
+
+        mesg = f'No drive item with ID {s_common.ehex(bidn)}.'
+        raise s_exc.NoSuchIden(mesg=mesg)
+
+    async def setItemPath(self, iden, path):
+        '''
+        Move an existing item to the given path.
+        '''
+        return await self._setItemPath(s_common.uhex(iden), path)
+
+    async def getItemPath(self, iden):
+        pathinfo = []
+        while iden is not None:
+
+            info = self.reqItemInfo(iden)
+
+            pathinfo.append(info)
+            iden = info.get('parent')
+            if iden == rootdir:
+                break
+
+        pathinfo.reverse()
+        return pathinfo
+
+    async def _setItemPath(self, bidn, path, reldir=rootdir):
+
+        path = self.getPathNorm(path)
+
+        # new parent iden / bidn
+        parinfo = None
+        pariden = reldir
+
+        pathinfo = await self.getPathInfo(path[:-1], reldir=reldir)
+        if pathinfo:
+            parinfo = pathinfo[-1]
+            pariden = parinfo.get('iden')
+
+        parbidn = s_common.uhex(pariden)
+
+        self._reqFreeStep(parbidn, path[-1])
+
+        info = self._reqItemInfo(bidn)
+
+        oldp = info.get('parent')
+        oldb = s_common.uhex(oldp)
+        oldname = info.get('name')
+
+        name = path[-1]
+
+        info['name'] = name
+        info['parent'] = pariden
+
+        s_schemas.reqValidDriveInfo(info)
+
+        rows = [
+            (LKEY_INFO + bidn, s_msgpack.en(info)),
+            (LKEY_DIRN + parbidn + name.encode(), bidn),
+        ]
+
+        if parinfo is not None:
+            parinfo['kids'] += 1
+            s_schemas.reqValidDriveInfo(parinfo)
+            rows.append((LKEY_INFO + parbidn, s_msgpack.en(parinfo)))
+
+        # if old parent is rootdir this may be None
+        oldpinfo = self._getItemInfo(oldb)
+        if oldpinfo is not None:
+            oldpinfo['kids'] -= 1
+            s_schemas.reqValidDriveInfo(oldpinfo)
+            rows.append((LKEY_INFO + oldb, s_msgpack.en(oldpinfo)))
+
+        self.slab.delete(LKEY_DIRN + oldb + oldname.encode(), db=self.dbname)
+        self.slab.putmulti(rows, db=self.dbname)
+
+        pathinfo.append(info)
+        return pathinfo
+
+    def _hasStepItem(self, bidn, name):
+        return self.slab.has(LKEY_DIRN + bidn + name.encode(), db=self.dbname)
+
+    def getStepInfo(self, iden, name):
+        return self._getStepInfo(s_common.uhex(iden), name)
+
+    def _getStepInfo(self, bidn, name):
+        step = self.slab.get(LKEY_DIRN + bidn + name.encode(), db=self.dbname)
+        if step is None:
+            return None
+
+        byts = self.slab.get(LKEY_INFO + step, db=self.dbname)
+        if byts is not None:
+            return s_msgpack.un(byts)
+
+    def _addStepInfo(self, parbidn, parinfo, info):
+
+        newbidn = s_common.uhex(info.get('iden'))
+
+        # name must already be normalized
+        name = info.get('name')
+        typename = info.get('type')
+
+        self._reqFreeStep(parbidn, name)
+
+        rows = [
+            (LKEY_DIRN + parbidn + name.encode(), newbidn),
+            (LKEY_INFO + newbidn, s_msgpack.en(info)),
+        ]
+
+        if parinfo is not None:
+            parinfo['kids'] += 1
+            rows.append((LKEY_INFO + parbidn, s_msgpack.en(parinfo)))
+
+        if typename is not None:
+            typekey = LKEY_INFO_BYTYPE + typename.encode() + b'\x00' + newbidn
+            rows.append((typekey, b'\x01'))
+
+        self.slab.putmulti(rows, db=self.dbname)
+
+    def setItemPerm(self, iden, perm):
+        return self._setItemPerm(s_common.uhex(iden), perm)
+
+    def _setItemPerm(self, bidn, perm):
+        info = self._reqItemInfo(bidn)
+        info['perm'] = perm
+        s_schemas.reqValidDriveInfo(info)
+        self.slab.put(LKEY_INFO + bidn, s_msgpack.en(info), db=self.dbname)
+        return info
+
+    async def getPathInfo(self, path, reldir=rootdir):
+        '''
+        Return a list of item info for each step in the given path
+        relative to rootdir.
+
+        This API is designed to allow the caller to retrieve the path info
+        and potentially check permissions on each level to control access.
+        '''
+
+        path = self.getPathNorm(path)
+        parbidn = s_common.uhex(reldir)
+
+        pathinfo = []
+        for part in path:
+            await asyncio.sleep(0)
+
+            info = self._getStepInfo(parbidn, part)
+            if info is None:
+                mesg = f'Path step not found: {part}'
+                raise s_exc.NoSuchPath(mesg=mesg)
+
+            pathinfo.append(info)
+            parbidn = s_common.uhex(info.get('iden'))
+
+        return pathinfo
+
+    def hasItemInfo(self, iden):
+        return self._hasItemInfo(s_common.uhex(iden))
+
+    def _hasItemInfo(self, bidn):
+        return self.slab.has(LKEY_INFO + bidn, db=self.dbname)
+
+    async def hasPathInfo(self, path, reldir=rootdir):
+        '''
+        Check for a path existing relative to reldir.
+        '''
+        path = self.getPathNorm(path)
+        parbidn = s_common.uhex(reldir)
+
+        for part in path:
+
+            await asyncio.sleep(0)
+
+            info = self._getStepInfo(parbidn, part)
+            if info is None:
+                return False
+
+            parbidn = s_common.uhex(info.get('iden'))
+
+        return True
+
+    async def addItemInfo(self, info, path=None, reldir=rootdir):
+        '''
+        Add a new item at the specified path relative to reldir.
+        '''
+        pariden = reldir
+        pathinfo = []
+
+        if path is not None:
+            path = self.getPathNorm(path)
+            pathinfo = await self.getPathInfo(path, reldir=reldir)
+            if pathinfo:
+                pariden = pathinfo[-1].get('iden')
+
+        parbidn = s_common.uhex(pariden)
+        parinfo = self._getItemInfo(parbidn)
+
+        info['size'] = 0
+        info['kids'] = 0
+        info['parent'] = pariden
+
+        info.setdefault('perm', {'users': {}, 'roles': {}})
+        info.setdefault('version', (0, 0, 0))
+
+        s_schemas.reqValidDriveInfo(info)
+
+        iden = info.get('iden')
+        typename = info.get('type')
+
+        bidn = s_common.uhex(iden)
+
+        if typename is not None:
+            self.reqTypeValidator(typename)
+
+        if self._getItemInfo(bidn) is not None:
+            mesg = f'A drive entry with ID {iden} already exists.'
+            raise s_exc.DupIden(mesg=mesg)
+
+        self._addStepInfo(parbidn, parinfo, info)
+
+        pathinfo.append(info)
+        return pathinfo
+
+    def reqFreeStep(self, iden, name):
+        return self._reqFreeStep(s_common.uhex(iden), name)
+
+    def _reqFreeStep(self, bidn, name):
+        if self._hasStepItem(bidn, name):
+            mesg = f'A drive entry with name {name} already exists in parent {s_common.ehex(bidn)}.'
+            raise s_exc.DupName(mesg=mesg)
+
+    async def delItemInfo(self, iden):
+        '''
+        Recursively remove the info and all associated data versions.
+        '''
+        return await self._delItemInfo(s_common.uhex(iden))
+
+    async def _delItemInfo(self, bidn):
+        async for info in self._walkItemInfo(bidn):
+            await self._delOneInfo(info)
+
+    async def _delOneInfo(self, info):
+        iden = info.get('iden')
+        parent = info.get('parent')
+
+        bidn = s_common.uhex(iden)
+        parbidn = s_common.uhex(parent)
+
+        name = info.get('name').encode()
+
+        self.slab.delete(LKEY_INFO + bidn, db=self.dbname)
+        self.slab.delete(LKEY_DIRN + parbidn + name, db=self.dbname)
+
+        pref = LKEY_VERS + bidn
+        for lkey in self.slab.scanKeysByPref(pref, db=self.dbname):
+            self.slab.delete(lkey, db=self.dbname)
+            await asyncio.sleep(0)
+
+        pref = LKEY_DATA + bidn
+        for lkey in self.slab.scanKeysByPref(pref, db=self.dbname):
+            self.slab.delete(lkey, db=self.dbname)
+            await asyncio.sleep(0)
+
+    async def walkItemInfo(self, iden):
+        async for item in self._walkItemInfo(s_common.uhex(iden)):
+            yield item
+
+    async def _walkItemInfo(self, bidn):
+        async for knfo in self._walkItemKids(bidn):
+            yield knfo
+        yield self._getItemInfo(bidn)
+
+    async def walkPathInfo(self, path, reldir=rootdir):
+
+        path = self.getPathNorm(path)
+        pathinfo = await self.getPathInfo(path, reldir=reldir)
+
+        bidn = s_common.uhex(pathinfo[-1].get('iden'))
+        async for info in self._walkItemKids(bidn):
+            yield info
+
+        yield pathinfo[-1]
+
+    async def getItemKids(self, iden):
+        '''
+        Yield each of the children of the specified item.
+        '''
+        bidn = s_common.uhex(iden)
+        for lkey, bidn in self.slab.scanByPref(LKEY_DIRN + bidn, db=self.dbname):
+            await asyncio.sleep(0)
+
+            info = self._getItemInfo(bidn)
+            if info is None: # pragma no cover
+                continue
+
+            yield info
+
+    async def _walkItemKids(self, bidn):
+
+        for lkey, bidn in self.slab.scanByPref(LKEY_DIRN + bidn, db=self.dbname):
+            await asyncio.sleep(0)
+
+            info = self._getItemInfo(bidn)
+            if info is None: # pragma: no cover
+                continue
+
+            nidn = s_common.uhex(info.get('iden'))
+            async for item in self._walkItemKids(nidn):
+                yield item
+
+            yield info
+
+    def setItemData(self, iden, versinfo, data):
+        return self._setItemData(s_common.uhex(iden), versinfo, data)
+
+    def _setItemData(self, bidn, versinfo, data):
+
+        info = self._reqItemInfo(bidn)
+
+        typename = info.get('type')
+
+        self.reqValidData(typename, data)
+
+        byts = s_msgpack.en(data)
+
+        size = len(byts)
+
+        versinfo['size'] = size
+
+        s_schemas.reqValidDriveDataVers(versinfo)
+
+        curvers = info.get('version')
+        datavers = versinfo.get('version')
+
+        versindx = getVersIndx(datavers)
+
+        rows = [
+            (LKEY_DATA + bidn + versindx, s_msgpack.en(data)),
+            (LKEY_VERS + bidn + versindx, s_msgpack.en(versinfo)),
+        ]
+
+        # if new version is greater than the one we have stored
+        # update the info with the newest version info...
+        if datavers >= curvers:
+            info.update(versinfo)
+            rows.append((LKEY_INFO + bidn, s_msgpack.en(info)))
+
+        self.slab.putmulti(rows, db=self.dbname)
+
+        return info, versinfo
+
+    def getItemData(self, iden, vers=None):
+        '''
+        Return a (versinfo, data) tuple for the given iden. If
+        version is not specified, the current version is returned.
+        '''
+        return self._getItemData(s_common.uhex(iden), vers=vers)
+
+    def _getItemData(self, bidn, vers=None):
+
+        if vers is None:
+            info = self._getItemInfo(bidn)
+            vers = info.get('version')
+
+        versindx = getVersIndx(vers)
+        versbyts = self.slab.get(LKEY_VERS + bidn + versindx, db=self.dbname)
+        if versbyts is None: # pragma: no cover
+            return None
+
+        databyts = self.slab.get(LKEY_DATA + bidn + versindx, db=self.dbname)
+        if databyts is None: # pragma: no cover
+            return None
+
+        return s_msgpack.un(versbyts), s_msgpack.un(databyts)
+
+    def delItemData(self, iden, vers=None):
+        return self._delItemData(s_common.uhex(iden), vers=vers)
+
+    def _delItemData(self, bidn, vers=None):
+
+        info = self._reqItemInfo(bidn)
+        if vers is None:
+            vers = info.get('version')
+
+        versindx = getVersIndx(vers)
+
+        self.slab.delete(LKEY_VERS + bidn + versindx, db=self.dbname)
+        self.slab.delete(LKEY_DATA + bidn + versindx, db=self.dbname)
+
+        # back down or revert to 0.0.0
+        if vers == info.get('version'):
+            versinfo = self._getLastDataVers(bidn)
+            if versinfo is None:
+                info['size'] = 0
+                info['version'] = (0, 0, 0)
+                info.pop('updated', None)
+                info.pop('updater', None)
+            else:
+                info.update(versinfo)
+
+            self.slab.put(LKEY_INFO + bidn, s_msgpack.en(info), db=self.dbname)
+        return info
+
+    def _getLastDataVers(self, bidn):
+        for lkey, byts in self.slab.scanByPrefBack(LKEY_VERS + bidn, db=self.dbname):
+            return s_msgpack.un(byts)
+
+    async def getItemDataVersions(self, iden):
+        '''
+        Yield data version info in reverse created order.
+        '''
+        bidn = s_common.uhex(iden)
+        pref = LKEY_VERS + bidn
+        for lkey, byts in self.slab.scanByPrefBack(pref, db=self.dbname):
+            yield s_msgpack.un(byts)
+            await asyncio.sleep(0)
+
+    def getTypeSchema(self, typename):
+        byts = self.slab.get(LKEY_TYPE + typename.encode(), db=self.dbname)
+        if byts is not None:
+            return s_msgpack.un(byts)
+
+    async def setTypeSchema(self, typename, schema, callback=None):
+
+        reqValidName(typename)
+
+        vtor = s_config.getJsValidator(schema)
+
+        self.validators[typename] = vtor
+
+        lkey = LKEY_TYPE + typename.encode()
+
+        self.slab.put(lkey, s_msgpack.en(schema), db=self.dbname)
+
+        if callback is not None:
+            async for info in self.getItemsByType(typename):
+                bidn = s_common.uhex(info.get('iden'))
+                for lkey, byts in self.slab.scanByPref(LKEY_VERS + bidn, db=self.dbname):
+                    versindx = lkey[-9:]
+                    databyts = self.slab.get(LKEY_DATA + bidn + versindx, db=self.dbname)
+                    data = await callback(info, s_msgpack.un(byts), s_msgpack.un(databyts))
+                    vtor(data)
+                    self.slab.put(LKEY_DATA + bidn + versindx, s_msgpack.en(data), db=self.dbname)
+                    await asyncio.sleep(0)
+
+    async def getItemsByType(self, typename):
+        tkey = typename.encode() + b'\x00'
+        for lkey in self.slab.scanKeysByPref(LKEY_INFO_BYTYPE + tkey, db=self.dbname):
+            bidn = lkey[-16:]
+            info = self._getItemInfo(bidn)
+            if info is not None:
+                yield info
+
+    def getTypeValidator(self, typename):
+        vtor = self.validators.get(typename)
+        if vtor is not None:
+            return vtor
+
+        schema = self.getTypeSchema(typename)
+        if schema is None:
+            return None
+
+        vtor = s_config.getJsValidator(schema)
+        self.validators[typename] = vtor
+
+        return vtor
+
+    def reqTypeValidator(self, typename):
+        vtor = self.getTypeValidator(typename)
+        if vtor is not None:
+            return vtor
+
+        mesg = f'No schema registered with name: {typename}'
+        raise s_exc.NoSuchType(mesg=mesg)
+
+    def reqValidData(self, typename, item):
+        self.reqTypeValidator(typename)(item)
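Since drive.py is entirely new, here is a rough usage sketch built only from calls visible in this diff (Drive.anit, setTypeSchema, addItemInfo, setItemData, getItemData). The slab argument, the JSON schema, and the item/version fields beyond iden, name, type, and version (created, creator, updated, updater) are assumptions about what s_schemas.reqValidDriveInfo() and reqValidDriveDataVers() accept, not documented requirements:

import synapse.common as s_common
import synapse.lib.drive as s_drive

async def demo(slab):
    # `slab` is assumed to be an already-open synapse LMDB slab.
    drive = await s_drive.Drive.anit(slab, 'demo')

    # Register a JSON schema so reqValidData() can enforce item payloads.
    await drive.setTypeSchema('note', {
        'type': 'object',
        'properties': {'text': {'type': 'string'}},
        'required': ['text'],
    })

    # Fields beyond iden/name/type are guesses at what reqValidDriveInfo() accepts.
    iden = s_common.guid()
    info = {
        'iden': iden,
        'name': 'todo',
        'type': 'note',
        'created': s_common.now(),
        'creator': s_common.guid(),
    }
    # With no path argument, the item is created under the root directory.
    await drive.addItemInfo(info)

    # Store version 1.0.0 of the data, then read the current version back.
    versinfo = {'version': (1, 0, 0), 'updated': s_common.now(), 'updater': info['creator']}
    drive.setItemData(iden, versinfo, {'text': 'ship it'})

    versinfo, data = drive.getItemData(iden)
    return data['text']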
synapse/lib/layer.py
CHANGED
@@ -1431,9 +1431,6 @@ class Layer(s_nexus.Pusher):
         self.growsize = self.layrinfo.get('growsize')
         self.logedits = self.layrinfo.get('logedits')

-        self.mapasync = core.conf.get('layer:lmdb:map_async')
-        self.maxreplaylog = core.conf.get('layer:lmdb:max_replay_log')
-
         # slim hooks to avoid async/fire
         self.nodeAddHook = None
         self.nodeDelHook = None
@@ -2728,8 +2725,6 @@
         slabopts = {
             'readahead': True,
             'lockmemory': self.lockmemory,
-            'map_async': self.mapasync,
-            'max_replay_log': self.maxreplaylog,
         }

         if self.growsize is not None:
synapse/lib/link.py
CHANGED
@@ -164,7 +164,7 @@ class Link(s_base.Base):
             self.reader._transport.abort()
             try:
                 await self.writer.wait_closed()
-            except
+            except Exception as e:
                 logger.debug('Link error waiting on close: %s', str(e))

         self.onfini(fini)
synapse/lib/lmdbslab.py
CHANGED
@@ -838,7 +838,7 @@ class Slab(s_base.Base):
                 'recovering': slab.recovering,
                 'maxsize': slab.maxsize,
                 'growsize': slab.growsize,
-                'mapasync':
+                'mapasync': True,

            })
        return retn
@@ -851,6 +851,8 @@ class Slab(s_base.Base):
        kwargs.setdefault('lockmemory', False)
        kwargs.setdefault('map_async', True)

+        assert kwargs.get('map_async')
+
        opts = kwargs

        self.path = path
@@ -895,8 +897,6 @@ class Slab(s_base.Base):
            logger.info(f'SYN_LOCKMEM_DISABLE envar set, skipping lockmem for {self.path}')
            self.lockmemory = False

-        self.mapasync = opts.setdefault('map_async', True)
-
        self.mapsize = _mapsizeround(mapsize)
        if self.maxsize is not None:
            self.mapsize = min(self.mapsize, self.maxsize)