synapse 2.178.0__py311-none-any.whl → 2.180.0__py311-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of synapse might be problematic.
- synapse/cortex.py +166 -31
- synapse/datamodel.py +47 -1
- synapse/exc.py +1 -0
- synapse/lib/aha.py +2 -1
- synapse/lib/ast.py +110 -76
- synapse/lib/base.py +12 -3
- synapse/lib/cell.py +150 -11
- synapse/lib/coro.py +14 -0
- synapse/lib/drive.py +551 -0
- synapse/lib/layer.py +1 -1
- synapse/lib/lmdbslab.py +2 -0
- synapse/lib/modelrev.py +5 -1
- synapse/lib/node.py +14 -4
- synapse/lib/schemas.py +97 -0
- synapse/lib/snap.py +36 -11
- synapse/lib/storm.py +9 -5
- synapse/lib/stormhttp.py +1 -0
- synapse/lib/stormlib/modelext.py +29 -3
- synapse/lib/stormlib/stix.py +44 -17
- synapse/lib/stormlib/vault.py +2 -2
- synapse/lib/stormtypes.py +1 -1
- synapse/lib/types.py +9 -0
- synapse/lib/version.py +2 -2
- synapse/lookup/pe.py +303 -38
- synapse/models/auth.py +2 -0
- synapse/models/dns.py +24 -1
- synapse/models/geopol.py +3 -0
- synapse/models/geospace.py +4 -1
- synapse/models/inet.py +1 -0
- synapse/models/infotech.py +135 -92
- synapse/models/person.py +5 -2
- synapse/models/telco.py +3 -0
- synapse/tests/test_cortex.py +45 -1
- synapse/tests/test_lib_aha.py +17 -0
- synapse/tests/test_lib_ast.py +231 -0
- synapse/tests/test_lib_cell.py +225 -0
- synapse/tests/test_lib_coro.py +12 -0
- synapse/tests/test_lib_layer.py +22 -0
- synapse/tests/test_lib_modelrev.py +7 -0
- synapse/tests/test_lib_node.py +12 -1
- synapse/tests/test_lib_storm.py +32 -7
- synapse/tests/test_lib_stormhttp.py +40 -0
- synapse/tests/test_lib_stormlib_modelext.py +55 -3
- synapse/tests/test_lib_stormlib_stix.py +15 -0
- synapse/tests/test_lib_stormlib_vault.py +11 -1
- synapse/tests/test_lib_stormtypes.py +5 -0
- synapse/tests/test_lib_types.py +9 -0
- synapse/tests/test_model_dns.py +8 -0
- synapse/tests/test_model_geopol.py +2 -0
- synapse/tests/test_model_geospace.py +3 -1
- synapse/tests/test_model_inet.py +10 -1
- synapse/tests/test_model_infotech.py +47 -0
- synapse/tests/test_model_person.py +2 -0
- synapse/tests/test_model_syn.py +11 -0
- synapse/tests/test_model_telco.py +2 -1
- synapse/tests/test_utils_stormcov.py +1 -1
- synapse/tools/changelog.py +28 -0
- {synapse-2.178.0.dist-info → synapse-2.180.0.dist-info}/METADATA +1 -1
- {synapse-2.178.0.dist-info → synapse-2.180.0.dist-info}/RECORD +62 -61
- {synapse-2.178.0.dist-info → synapse-2.180.0.dist-info}/WHEEL +1 -1
- {synapse-2.178.0.dist-info → synapse-2.180.0.dist-info}/LICENSE +0 -0
- {synapse-2.178.0.dist-info → synapse-2.180.0.dist-info}/top_level.txt +0 -0
synapse/lib/drive.py
ADDED
@@ -0,0 +1,551 @@
import regex
import asyncio

import synapse.exc as s_exc
import synapse.common as s_common

import synapse.lib.base as s_base
import synapse.lib.config as s_config
import synapse.lib.msgpack as s_msgpack
import synapse.lib.schemas as s_schemas

nameregex = regex.compile(s_schemas.re_drivename)
def reqValidName(name):
    if nameregex.match(name) is None:
        mesg = f'Name {name} is invalid. It must match: {s_schemas.re_drivename}.'
        raise s_exc.BadName(mesg=mesg)
    return name

LKEY_TYPE = b'\x00' # <type> = <schema>
LKEY_DIRN = b'\x01' # <bidn> <name> = <kid>
LKEY_INFO = b'\x02' # <bidn> = <info>
LKEY_DATA = b'\x03' # <bidn> <vers> = <data>
LKEY_VERS = b'\x04' # <bidn> <vers> = <versinfo>
LKEY_INFO_BYTYPE = b'\x05' # <type> 00 <bidn> = 01

rootdir = '00000000000000000000000000000000'

def getVersIndx(vers):
    maji = vers[0].to_bytes(3, 'big')
    mini = vers[1].to_bytes(3, 'big')
    pati = vers[2].to_bytes(3, 'big')
    return maji + mini + pati

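# NOTE: illustrative comment, not part of the packaged file. Encoding each
# version component as a 3-byte big-endian integer makes lexicographic byte
# order match semantic version order under the LKEY_DATA / LKEY_VERS prefixes:
#   getVersIndx((1, 2, 3))  -> b'\x00\x00\x01\x00\x00\x02\x00\x00\x03'
#   getVersIndx((1, 10, 0)) -> b'\x00\x00\x01\x00\x00\x0a\x00\x00\x00'
# That is why _getLastDataVers() can return the first hit of a backwards prefix scan.
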
class Drive(s_base.Base):
    '''
    Drive is a hierarchical storage abstraction which:

    * Provides enveloping which includes meta data for each item:
        * creator iden / time
        * updated iden / time / version
        * number of children
        * data type for the item
        * easy perms (enforcement is up to the caller)

    * Enforces schemas for data
    * Allows storage of historical versions of data
    * Provides a "path traversal" based API
    * Provides an iden based API that does not require traversal
    '''
    async def __anit__(self, slab, name):
        await s_base.Base.__anit__(self)
        self.slab = slab
        self.dbname = slab.initdb(f'drive:{name}')
        self.validators = {}

    def getPathNorm(self, path):

        if isinstance(path, str):
            path = path.strip().strip('/').split('/')

        return [reqValidName(p.strip().lower()) for p in path]

    def getItemInfo(self, iden):
        return self._getItemInfo(s_common.uhex(iden))

    def _getItemInfo(self, bidn):
        byts = self.slab.get(LKEY_INFO + bidn, db=self.dbname)
        if byts is not None:
            return s_msgpack.un(byts)

    def reqItemInfo(self, iden):
        return self._reqItemInfo(s_common.uhex(iden))

    def _reqItemInfo(self, bidn):
        info = self._getItemInfo(bidn)
        if info is not None:
            return info

        mesg = f'No drive item with ID {s_common.ehex(bidn)}.'
        raise s_exc.NoSuchIden(mesg=mesg)

    async def setItemPath(self, iden, path):
        '''
        Move an existing item to the given path.
        '''
        return await self._setItemPath(s_common.uhex(iden), path)

    async def getItemPath(self, iden):
        pathinfo = []
        while iden is not None:

            info = self.reqItemInfo(iden)

            pathinfo.append(info)
            iden = info.get('parent')
            if iden == rootdir:
                break

        pathinfo.reverse()
        return pathinfo

    async def _setItemPath(self, bidn, path, reldir=rootdir):

        path = self.getPathNorm(path)

        # new parent iden / bidn
        parinfo = None
        pariden = reldir

        pathinfo = await self.getPathInfo(path[:-1], reldir=reldir)
        if pathinfo:
            parinfo = pathinfo[-1]
            pariden = parinfo.get('iden')

        parbidn = s_common.uhex(pariden)

        self._reqFreeStep(parbidn, path[-1])

        info = self._reqItemInfo(bidn)

        oldp = info.get('parent')
        oldb = s_common.uhex(oldp)
        oldname = info.get('name')

        name = path[-1]

        info['name'] = name
        info['parent'] = pariden

        s_schemas.reqValidDriveInfo(info)

        rows = [
            (LKEY_INFO + bidn, s_msgpack.en(info)),
            (LKEY_DIRN + parbidn + name.encode(), bidn),
        ]

        if parinfo is not None:
            parinfo['kids'] += 1
            s_schemas.reqValidDriveInfo(parinfo)
            rows.append((LKEY_INFO + parbidn, s_msgpack.en(parinfo)))

        # if old parent is rootdir this may be None
        oldpinfo = self._getItemInfo(oldb)
        if oldpinfo is not None:
            oldpinfo['kids'] -= 1
            s_schemas.reqValidDriveInfo(oldpinfo)
            rows.append((LKEY_INFO + oldb, s_msgpack.en(oldpinfo)))

        self.slab.delete(LKEY_DIRN + oldb + oldname.encode(), db=self.dbname)
        self.slab.putmulti(rows, db=self.dbname)

        pathinfo.append(info)
        return pathinfo

    def _hasStepItem(self, bidn, name):
        return self.slab.has(LKEY_DIRN + bidn + name.encode(), db=self.dbname)

    def getStepInfo(self, iden, name):
        return self._getStepInfo(s_common.uhex(iden), name)

    def _getStepInfo(self, bidn, name):
        step = self.slab.get(LKEY_DIRN + bidn + name.encode(), db=self.dbname)
        if step is None:
            return None

        byts = self.slab.get(LKEY_INFO + step, db=self.dbname)
        if byts is not None:
            return s_msgpack.un(byts)

    def _addStepInfo(self, parbidn, parinfo, info):

        newbidn = s_common.uhex(info.get('iden'))

        # name must already be normalized
        name = info.get('name')
        typename = info.get('type')

        self._reqFreeStep(parbidn, name)

        rows = [
            (LKEY_DIRN + parbidn + name.encode(), newbidn),
            (LKEY_INFO + newbidn, s_msgpack.en(info)),
        ]

        if parinfo is not None:
            parinfo['kids'] += 1
            rows.append((LKEY_INFO + parbidn, s_msgpack.en(parinfo)))

        if typename is not None:
            typekey = LKEY_INFO_BYTYPE + typename.encode() + b'\x00' + newbidn
            rows.append((typekey, b'\x01'))

        self.slab.putmulti(rows, db=self.dbname)

    def setItemPerm(self, iden, perm):
        return self._setItemPerm(s_common.uhex(iden), perm)

    def _setItemPerm(self, bidn, perm):
        info = self._reqItemInfo(bidn)
        info['perm'] = perm
        s_schemas.reqValidDriveInfo(info)
        self.slab.put(LKEY_INFO + bidn, s_msgpack.en(info), db=self.dbname)
        return info

    async def getPathInfo(self, path, reldir=rootdir):
        '''
        Return a list of item info for each step in the given path
        relative to rootdir.

        This API is designed to allow the caller to retrieve the path info
        and potentially check permissions on each level to control access.
        '''

        path = self.getPathNorm(path)
        parbidn = s_common.uhex(reldir)

        pathinfo = []
        for part in path:
            await asyncio.sleep(0)

            info = self._getStepInfo(parbidn, part)
            if info is None:
                mesg = f'Path step not found: {part}'
                raise s_exc.NoSuchPath(mesg=mesg)

            pathinfo.append(info)
            parbidn = s_common.uhex(info.get('iden'))

        return pathinfo

    def hasItemInfo(self, iden):
        return self._hasItemInfo(s_common.uhex(iden))

    def _hasItemInfo(self, bidn):
        return self.slab.has(LKEY_INFO + bidn, db=self.dbname)

    async def hasPathInfo(self, path, reldir=rootdir):
        '''
        Check for a path existing relative to reldir.
        '''
        path = self.getPathNorm(path)
        parbidn = s_common.uhex(reldir)

        for part in path:

            await asyncio.sleep(0)

            info = self._getStepInfo(parbidn, part)
            if info is None:
                return False

            parbidn = s_common.uhex(info.get('iden'))

        return True

    async def addItemInfo(self, info, path=None, reldir=rootdir):
        '''
        Add a new item at the specified path relative to reldir.
        '''
        pariden = reldir
        pathinfo = []

        if path is not None:
            path = self.getPathNorm(path)
            pathinfo = await self.getPathInfo(path, reldir=reldir)
            if pathinfo:
                pariden = pathinfo[-1].get('iden')

        parbidn = s_common.uhex(pariden)
        parinfo = self._getItemInfo(parbidn)

        info['size'] = 0
        info['kids'] = 0
        info['parent'] = pariden

        info.setdefault('perm', {'users': {}, 'roles': {}})
        info.setdefault('version', (0, 0, 0))

        s_schemas.reqValidDriveInfo(info)

        iden = info.get('iden')
        typename = info.get('type')

        bidn = s_common.uhex(iden)

        if typename is not None:
            self.reqTypeValidator(typename)

        if self._getItemInfo(bidn) is not None:
            mesg = f'A drive entry with ID {iden} already exists.'
            raise s_exc.DupIden(mesg=mesg)

        self._addStepInfo(parbidn, parinfo, info)

        pathinfo.append(info)
        return pathinfo

    def reqFreeStep(self, iden, name):
        return self._reqFreeStep(s_common.uhex(iden), name)

    def _reqFreeStep(self, bidn, name):
        if self._hasStepItem(bidn, name):
            mesg = f'A drive entry with name {name} already exists in parent {s_common.ehex(bidn)}.'
            raise s_exc.DupName(mesg=mesg)

    async def delItemInfo(self, iden):
        '''
        Recursively remove the info and all associated data versions.
        '''
        return await self._delItemInfo(s_common.uhex(iden))

    async def _delItemInfo(self, bidn):
        async for info in self._walkItemInfo(bidn):
            await self._delOneInfo(info)

    async def _delOneInfo(self, info):
        iden = info.get('iden')
        parent = info.get('parent')

        bidn = s_common.uhex(iden)
        parbidn = s_common.uhex(parent)

        name = info.get('name').encode()

        self.slab.delete(LKEY_INFO + bidn, db=self.dbname)
        self.slab.delete(LKEY_DIRN + parbidn + name, db=self.dbname)

        pref = LKEY_VERS + bidn
        for lkey in self.slab.scanKeysByPref(pref, db=self.dbname):
            self.slab.delete(lkey, db=self.dbname)
            await asyncio.sleep(0)

        pref = LKEY_DATA + bidn
        for lkey in self.slab.scanKeysByPref(pref, db=self.dbname):
            self.slab.delete(lkey, db=self.dbname)
            await asyncio.sleep(0)

    async def walkItemInfo(self, iden):
        async for item in self._walkItemInfo(s_common.uhex(iden)):
            yield item

    async def _walkItemInfo(self, bidn):
        async for knfo in self._walkItemKids(bidn):
            yield knfo
        yield self._getItemInfo(bidn)

    async def walkPathInfo(self, path, reldir=rootdir):

        path = self.getPathNorm(path)
        pathinfo = await self.getPathInfo(path, reldir=reldir)

        bidn = s_common.uhex(pathinfo[-1].get('iden'))
        async for info in self._walkItemKids(bidn):
            yield info

        yield pathinfo[-1]

    async def getItemKids(self, iden):
        '''
        Yield each of the children of the specified item.
        '''
        bidn = s_common.uhex(iden)
        for lkey, bidn in self.slab.scanByPref(LKEY_DIRN + bidn, db=self.dbname):
            await asyncio.sleep(0)

            info = self._getItemInfo(bidn)
            if info is None: # pragma no cover
                continue

            yield info

    async def _walkItemKids(self, bidn):

        for lkey, bidn in self.slab.scanByPref(LKEY_DIRN + bidn, db=self.dbname):
            await asyncio.sleep(0)

            info = self._getItemInfo(bidn)
            if info is None: # pragma: no cover
                continue

            nidn = s_common.uhex(info.get('iden'))
            async for item in self._walkItemKids(nidn):
                yield item

            yield info

    def setItemData(self, iden, versinfo, data):
        return self._setItemData(s_common.uhex(iden), versinfo, data)

    def _setItemData(self, bidn, versinfo, data):

        info = self._reqItemInfo(bidn)

        typename = info.get('type')

        self.reqValidData(typename, data)

        byts = s_msgpack.en(data)

        size = len(byts)

        versinfo['size'] = size

        s_schemas.reqValidDriveDataVers(versinfo)

        curvers = info.get('version')
        datavers = versinfo.get('version')

        versindx = getVersIndx(datavers)

        rows = [
            (LKEY_DATA + bidn + versindx, s_msgpack.en(data)),
            (LKEY_VERS + bidn + versindx, s_msgpack.en(versinfo)),
        ]

        # if new version is greater than the one we have stored
        # update the info with the newest version info...
        if datavers >= curvers:
            info.update(versinfo)
            rows.append((LKEY_INFO + bidn, s_msgpack.en(info)))

        self.slab.putmulti(rows, db=self.dbname)

        return info, versinfo

    def getItemData(self, iden, vers=None):
        '''
        Return a (versinfo, data) tuple for the given iden. If
        version is not specified, the current version is returned.
        '''
        return self._getItemData(s_common.uhex(iden), vers=vers)

    def _getItemData(self, bidn, vers=None):

        if vers is None:
            info = self._getItemInfo(bidn)
            vers = info.get('version')

        versindx = getVersIndx(vers)
        versbyts = self.slab.get(LKEY_VERS + bidn + versindx, db=self.dbname)
        if versbyts is None: # pragma: no cover
            return None

        databyts = self.slab.get(LKEY_DATA + bidn + versindx, db=self.dbname)
        if databyts is None: # pragma: no cover
            return None

        return s_msgpack.un(versbyts), s_msgpack.un(databyts)

    def delItemData(self, iden, vers=None):
        return self._delItemData(s_common.uhex(iden), vers=vers)

    def _delItemData(self, bidn, vers=None):

        info = self._reqItemInfo(bidn)
        if vers is None:
            vers = info.get('version')

        versindx = getVersIndx(vers)

        self.slab.delete(LKEY_VERS + bidn + versindx, db=self.dbname)
        self.slab.delete(LKEY_DATA + bidn + versindx, db=self.dbname)

        # back down or revert to 0.0.0
        if vers == info.get('version'):
            versinfo = self._getLastDataVers(bidn)
            if versinfo is None:
                info['size'] = 0
                info['version'] = (0, 0, 0)
                info.pop('updated', None)
                info.pop('updater', None)
            else:
                info.update(versinfo)

            self.slab.put(LKEY_INFO + bidn, s_msgpack.en(info), db=self.dbname)
        return info

    def _getLastDataVers(self, bidn):
        for lkey, byts in self.slab.scanByPrefBack(LKEY_VERS + bidn, db=self.dbname):
            return s_msgpack.un(byts)

    async def getItemDataVersions(self, iden):
        '''
        Yield data version info in reverse created order.
        '''
        bidn = s_common.uhex(iden)
        pref = LKEY_VERS + bidn
        for lkey, byts in self.slab.scanByPrefBack(pref, db=self.dbname):
            yield s_msgpack.un(byts)
            await asyncio.sleep(0)

    def getTypeSchema(self, typename):
        byts = self.slab.get(LKEY_TYPE + typename.encode(), db=self.dbname)
        if byts is not None:
            return s_msgpack.un(byts)

    async def setTypeSchema(self, typename, schema, callback=None):

        reqValidName(typename)

        vtor = s_config.getJsValidator(schema)

        self.validators[typename] = vtor

        lkey = LKEY_TYPE + typename.encode()

        self.slab.put(lkey, s_msgpack.en(schema), db=self.dbname)

        if callback is not None:
            async for info in self.getItemsByType(typename):
                bidn = s_common.uhex(info.get('iden'))
                for lkey, byts in self.slab.scanByPref(LKEY_VERS + bidn, db=self.dbname):
                    versindx = lkey[-9:]
                    databyts = self.slab.get(LKEY_DATA + bidn + versindx, db=self.dbname)
                    data = await callback(info, s_msgpack.un(byts), s_msgpack.un(databyts))
                    vtor(data)
                    self.slab.put(LKEY_DATA + bidn + versindx, s_msgpack.en(data), db=self.dbname)
                    await asyncio.sleep(0)

    async def getItemsByType(self, typename):
        tkey = typename.encode() + b'\x00'
        for lkey in self.slab.scanKeysByPref(LKEY_INFO_BYTYPE + tkey, db=self.dbname):
            bidn = lkey[-16:]
            info = self._getItemInfo(bidn)
            if info is not None:
                yield info

    def getTypeValidator(self, typename):
        vtor = self.validators.get(typename)
        if vtor is not None:
            return vtor

        schema = self.getTypeSchema(typename)
        if schema is None:
            return None

        vtor = s_config.getJsValidator(schema)
        self.validators[typename] = vtor

        return vtor

    def reqTypeValidator(self, typename):
        vtor = self.getTypeValidator(typename)
        if vtor is not None:
            return vtor

        mesg = f'No schema registered with name: {typename}'
        raise s_exc.NoSuchType(mesg=mesg)

    def reqValidData(self, typename, item):
        self.reqTypeValidator(typename)(item)
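A rough usage sketch of the new Drive API follows. It is illustrative only and not part of the diff: the required item info fields, the 'notes' schema name, and the 'research'/'reports' items are assumptions inferred from the code above, not from synapse/lib/schemas.py.

import synapse.common as s_common
import synapse.lib.drive as s_drive

async def example(slab):

    # Drive objects are synapse Base objects, so they are constructed via anit().
    drive = await s_drive.Drive.anit(slab, 'example')

    # Register a JSON schema for a data type before adding items that use it.
    await drive.setTypeSchema('notes', {'type': 'object'})

    # Add a directory-like item at the root, then a typed child beneath it.
    # The exact fields required by the drive info schema are an assumption.
    root = {'iden': s_common.guid(), 'name': 'research',
            'creator': s_common.guid(), 'created': s_common.now()}
    await drive.addItemInfo(root)

    item = {'iden': s_common.guid(), 'name': 'reports', 'type': 'notes',
            'creator': root['creator'], 'created': s_common.now()}
    pathinfo = await drive.addItemInfo(item, path='research')

    # Each step of the returned path carries its own easy-perm dict, so the
    # caller can enforce permissions level by level.
    for step in pathinfo:
        print(step.get('name'), step.get('perm'))

    # Store a schema-validated, versioned payload and read it back.
    versinfo = {'version': (1, 0, 0), 'updater': root['creator'], 'updated': s_common.now()}
    drive.setItemData(item['iden'], versinfo, {'text': 'hello'})

    versinfo, data = drive.getItemData(item['iden'])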
synapse/lib/layer.py
CHANGED
synapse/lib/lmdbslab.py
CHANGED
@@ -834,6 +834,7 @@ class Slab(s_base.Base):
            'xactops': len(slab.xactops),
            'mapsize': slab.mapsize,
            'readonly': slab.readonly,
+           'readahead': slab.readahead,
            'lockmemory': slab.lockmemory,
            'recovering': slab.recovering,
            'maxsize': slab.maxsize,

@@ -889,6 +890,7 @@ class Slab(s_base.Base):
        self.growsize = opts.pop('growsize', self.DEFAULT_GROWSIZE)

        self.readonly = opts.get('readonly', False)
+       self.readahead = opts.get('readahead', True)
        self.lockmemory = opts.pop('lockmemory', False)

        if self.lockmemory:
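A minimal sketch of passing the new option when opening a slab (illustrative only; the directory argument and the choice to disable readahead are assumptions, and readahead=True remains the default):

import synapse.lib.lmdbslab as s_lmdbslab

async def openslab(dirn):
    # Pass readahead=False for slabs with mostly random access patterns.
    # How the flag is consumed by the LMDB environment is not shown in this hunk.
    return await s_lmdbslab.Slab.anit(dirn, readahead=False)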
synapse/lib/modelrev.py
CHANGED
@@ -8,7 +8,7 @@ import synapse.lib.layer as s_layer

logger = logging.getLogger(__name__)

-maxvers = (0, 2, 26)
+maxvers = (0, 2, 27)

class ModelRev:

@@ -40,6 +40,7 @@ class ModelRev:
            ((0, 2, 24), self.revModel_0_2_24),
            ((0, 2, 25), self.revModel_0_2_25),
            ((0, 2, 26), self.revModel_0_2_26),
+           ((0, 2, 27), self.revModel_0_2_27),
        )

    async def _uniqSortArray(self, todoprops, layers):

@@ -783,6 +784,9 @@ class ModelRev:
        logger.info(f'Updating ndef indexing for {name}')
        await self._updatePropStortype(layers, prop.full)

+   async def revModel_0_2_27(self, layers):
+       await self._normPropValu(layers, 'it:dev:repo:commit:id')
+
    async def runStorm(self, text, opts=None):
        '''
        Run storm code in a schedcoro and log the output messages.
synapse/lib/node.py
CHANGED
@@ -708,11 +708,16 @@ class Path:
    '''
    A path context tracked through the storm runtime.
    '''
-   def __init__(self, vars, nodes):
+   def __init__(self, vars, nodes, links=None):

        self.node = None
        self.nodes = nodes

+       if links is not None:
+           self.links = links
+       else:
+           self.links = []
+
        if len(nodes):
            self.node = nodes[-1]

@@ -765,19 +770,24 @@ class Path:
        info = await s_stormtypes.toprim(dict(self.metadata))
        if path:
            info['nodes'] = [node.iden() for node in self.nodes]
+
        return info

-   def fork(self, node):
+   def fork(self, node, link):
+
+       links = list(self.links)
+       if self.node is not None and link is not None:
+           links.append((self.node.iden(), link))

        nodes = list(self.nodes)
        nodes.append(node)

-       path = Path(self.vars.copy(), nodes)
+       path = Path(self.vars.copy(), nodes, links=links)

        return path

    def clone(self):
-       path = Path(copy.copy(self.vars), copy.copy(self.nodes))
+       path = Path(copy.copy(self.vars), copy.copy(self.nodes), copy.copy(self.links))
        path.frames = [v.copy() for v in self.frames]
        return path

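A rough sketch of the effect of the Path.fork() change (illustrative only; node0 and node1 stand for existing synapse.lib.node.Node objects from a storm runtime, and the link dict shape is a hypothetical example, not the runtime's exact structure):

import synapse.lib.node as s_node

def showlinks(node0, node1):
    # Build a path rooted at node0, then fork to node1 with a link descriptor.
    path = s_node.Path({}, [node0])
    forked = path.fork(node1, {'type': 'edge', 'verb': 'refs'})

    # Each entry pairs the iden of the node the hop started from with the link
    # info passed to fork(); clone() now copies the accumulated list as well.
    return forked.links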