synapse 2.200.0__py311-none-any.whl → 2.202.0__py311-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of synapse might be problematic.
- synapse/cortex.py +30 -23
- synapse/datamodel.py +2 -3
- synapse/lib/agenda.py +24 -5
- synapse/lib/ast.py +7 -10
- synapse/lib/base.py +2 -12
- synapse/lib/cell.py +9 -13
- synapse/lib/parser.py +2 -1
- synapse/lib/schemas.py +1 -0
- synapse/lib/storm.lark +5 -4
- synapse/lib/storm.py +2 -9
- synapse/lib/storm_format.py +2 -1
- synapse/lib/version.py +2 -2
- synapse/models/dns.py +1 -1
- synapse/models/economic.py +23 -23
- synapse/models/files.py +2 -2
- synapse/models/inet.py +2 -2
- synapse/models/infotech.py +7 -7
- synapse/models/person.py +1 -1
- synapse/models/proj.py +3 -2
- synapse/models/risk.py +1 -1
- synapse/models/transport.py +3 -3
- synapse/telepath.py +75 -16
- synapse/tests/test_cortex.py +26 -3
- synapse/tests/test_lib_agenda.py +41 -0
- synapse/tests/test_lib_ast.py +3 -0
- synapse/tests/test_lib_cell.py +11 -0
- synapse/tests/test_lib_grammar.py +4 -0
- synapse/tests/test_lib_storm.py +7 -1
- synapse/tests/test_model_risk.py +4 -0
- synapse/tests/test_telepath.py +56 -34
- synapse/tests/test_tools_autodoc.py +5 -0
- synapse/tests/test_utils_getrefs.py +35 -28
- synapse/tests/utils.py +7 -7
- synapse/tools/autodoc.py +16 -1
- synapse/utils/getrefs.py +4 -2
- {synapse-2.200.0.dist-info → synapse-2.202.0.dist-info}/METADATA +1 -1
- {synapse-2.200.0.dist-info → synapse-2.202.0.dist-info}/RECORD +40 -40
- {synapse-2.200.0.dist-info → synapse-2.202.0.dist-info}/WHEEL +1 -1
- {synapse-2.200.0.dist-info → synapse-2.202.0.dist-info}/LICENSE +0 -0
- {synapse-2.200.0.dist-info → synapse-2.202.0.dist-info}/top_level.txt +0 -0
synapse/cortex.py
CHANGED
@@ -5874,38 +5874,45 @@ class Cortex(s_oauth.OAuthMixin, s_cell.Cell):  # type: ignore
         if self.stormpool is None:  # pragma: no cover
             return None

-
+        size = self.stormpool.size()
+        if size == 0:
             logger.warning('Storm query mirror pool is empty, running query locally.')
             return None

-
+        for _ in range(size):

-
-
-
-
-
-
-
+            try:
+                timeout = self.stormpoolopts.get('timeout:connection')
+                proxy = await self.stormpool.proxy(timeout=timeout)
+                proxyname = proxy._ahainfo.get('name')
+                if proxyname is not None and proxyname == self.ahasvcname:
+                    # we are part of the pool and were selected. Convert to local use.
+                    return None
+
+            except TimeoutError:
+                logger.warning('Timeout waiting for pool mirror proxy.')
+                continue
+
+            try:
+
+                curoffs = opts.setdefault('nexsoffs', await self.getNexsIndx() - 1)
+                miroffs = await s_common.wait_for(proxy.getNexsIndx(), timeout) - 1
+                if (delta := curoffs - miroffs) <= MAX_NEXUS_DELTA:
+                    return proxy

-
-                miroffs = await s_common.wait_for(proxy.getNexsIndx(), timeout) - 1
-                if (delta := curoffs - miroffs) > MAX_NEXUS_DELTA:
-                    mesg = (f'Pool mirror [{proxyname}] Nexus offset delta too large '
-                            f'({delta} > {MAX_NEXUS_DELTA}), running query locally.')
+                mesg = f'Pool mirror [{proxyname}] is too far out of sync. Skipping.'
                 logger.warning(mesg, extra=await self.getLogExtra(delta=delta, mirror=proxyname, mirror_offset=miroffs))
-                return None

-
+            except s_exc.IsFini:
+                mesg = f'Proxy for pool mirror [{proxyname}] was shutdown. Skipping.'
+                logger.warning(mesg, extra=await self.getLogExtra(mirror=proxyname))

-
-
-                logger.warning('Timeout waiting for pool mirror, running query locally.')
-            else:
-                mesg = f'Timeout waiting for pool mirror [{proxyname}] Nexus offset, running query locally.'
+            except TimeoutError:
+                mesg = f'Timeout waiting for pool mirror [{proxyname}] Nexus offset.'
                 logger.warning(mesg, extra=await self.getLogExtra(mirror=proxyname))
-
-
+
+        logger.warning('Pool members exhausted. Running query locally.', extra=await self.getLogExtra())
+        return None

     async def storm(self, text, opts=None):
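For context, a compressed sketch of the selection loop above: each pool member is tried at most once, members that time out, are shut down, or lag too far behind are skipped, and the query falls back to running locally when the pool is exhausted. The names pick_mirror and usable are illustrative only, not the Cortex API.

    # minimal sketch, assuming a pool object with size() and proxy() as in the diff above
    async def pick_mirror(pool, usable):
        for _ in range(pool.size()):
            proxy = await pool.proxy()
            if await usable(proxy):     # e.g. in sync and not shut down
                return proxy
        return None                     # caller runs the query locally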
synapse/datamodel.py
CHANGED
@@ -1160,10 +1160,9 @@ class Model:
         for propname, typedef, propinfo in iface.get('props', ()):

             # allow form props to take precedence
-            if form.prop(propname) is
-
+            if (prop := form.prop(propname)) is None:
+                prop = self._addFormProp(form, propname, typedef, propinfo)

-            prop = self._addFormProp(form, propname, typedef, propinfo)
             self.ifaceprops[f'{name}:{propname}'].append(prop.full)

         if subifaces is not None:
synapse/lib/agenda.py
CHANGED
@@ -260,6 +260,7 @@ class _Appt:
         'created',
         'enabled',
         'errcount',
+        'loglevel',
         'nexttime',
         'lasterrs',
         'isrunning',
@@ -269,7 +270,7 @@ class _Appt:
         'lastfinishtime',
     }

-    def __init__(self, stor, iden, recur, indx, query, creator, recs, nexttime=None, view=None, created=None, pool=False):
+    def __init__(self, stor, iden, recur, indx, query, creator, recs, nexttime=None, view=None, created=None, pool=False, loglevel=None):
         self.doc = ''
         self.name = ''
         self.task = None
@@ -284,6 +285,7 @@ class _Appt:
         self._recidxnexttime = None  # index of rec who is up next
         self.view = view
         self.created = created
+        self.loglevel = loglevel

         if self.recur and not self.recs:
             raise s_exc.BadTime(mesg='A recurrent appointment with no records')
@@ -364,7 +366,10 @@ class _Appt:
         if val['ver'] != 1:
             raise s_exc.BadStorageVersion(mesg=f"Found version {val['ver']}")  # pragma: no cover
         recs = [ApptRec.unpack(tupl) for tupl in val['recs']]
-
+        # TODO: MOAR INSANITY
+        loglevel = val.get('loglevel', 'WARNING')
+        appt = cls(stor, val['iden'], val['recur'], val['indx'], val['query'], val['creator'], recs,
+                   nexttime=val['nexttime'], view=val.get('view'), loglevel=loglevel)
         appt.doc = val.get('doc', '')
         appt.name = val.get('name', '')
         appt.pool = val.get('pool', False)
@@ -373,6 +378,7 @@ class _Appt:
         appt.lastfinishtime = val['lastfinishtime']
         appt.lastresult = val['lastresult']
         appt.enabled = val['enabled']
+        appt.lasterrs = list(val.get('lasterrs', []))

         return appt

@@ -422,8 +428,10 @@ class _Appt:
                 logger.warning('_Appt.edits() Invalid attribute received: %s = %r', name, valu, extra=extra)
                 continue

-
-
+            if name == 'lasterrs' and not isinstance(valu, list):
+                valu = list(valu)
+
+            setattr(self, name, valu)

         await self.save()

@@ -559,6 +567,7 @@ class Agenda(s_base.Base):
         creator = cdef.get('creator')
         view = cdef.get('view')
         created = cdef.get('created')
+        loglevel = cdef.get('loglevel', 'WARNING')

         pool = cdef.get('pool', False)

@@ -603,7 +612,9 @@ class Agenda(s_base.Base):
                incvals = (incvals, )
            recs.extend(ApptRec(rd, incunit, v) for (rd, v) in itertools.product(reqdicts, incvals))

-
+        # TODO: this is insane. Make _Appt take the cdef directly...
+        appt = _Appt(self, iden, recur, indx, query, creator, recs, nexttime=nexttime, view=view,
+                     created=created, pool=pool, loglevel=loglevel)
         self._addappt(iden, appt)

         appt.doc = cdef.get('doc', '')
@@ -841,7 +852,10 @@ class Agenda(s_base.Base):
                     extra={'synapse': {'iden': appt.iden, 'name': appt.name, 'user': user.iden, 'text': appt.query,
                                        'username': user.name, 'view': appt.view}})
         starttime = self._getNowTick()
+
         success = False
+        loglevel = s_common.normLogLevel(appt.loglevel)
+
         try:
             opts = {
                 'user': user.iden,
@@ -861,6 +875,11 @@ class Agenda(s_base.Base):
                     if mesg[0] == 'node':
                         count += 1

+                    elif mesg[0] == 'warn' and loglevel <= logging.WARNING:
+                        text = mesg[1].get('mesg', '<missing message>')
+                        extra = await self.core.getLogExtra(cron=appt.iden, **mesg[1])
+                        logger.warning(f'Cron job {appt.iden} issued warning: {text}', extra=extra)
+
                     elif mesg[0] == 'err':
                         excname, errinfo = mesg[1]
                         errinfo.pop('eline', None)
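For context, a rough sketch of the warning gate added above, using a simplified stand-in for s_common.normLogLevel (the real helper lives in synapse.common): a cron job's configured loglevel is normalized to a numeric level, and storm warn messages are only forwarded to the logger when that level is at or below WARNING.

    import logging

    def norm_loglevel(valu):
        # simplified stand-in for s_common.normLogLevel, assuming a level name string
        return logging.getLevelName(str(valu).upper())

    assert norm_loglevel('WARNING') <= logging.WARNING      # warn messages get logged
    assert not (norm_loglevel('ERROR') <= logging.WARNING)  # warn messages are skipped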
synapse/lib/ast.py
CHANGED
@@ -217,13 +217,10 @@ class Query(AstNode):
             genr = await stack.enter_async_context(contextlib.aclosing(oper.run(runt, genr)))

         async for node, path in genr:
-            runt.tick()
             yield node, path

     async def iterNodePaths(self, runt, genr=None):

-        count = 0
-
         self.optimize()
         self.validate(runt)

@@ -231,18 +228,18 @@ class Query(AstNode):
         if genr is None:
             genr = runt.getInput()

+        count = 0
+        limit = runt.getOpt('limit')
+
         async with contextlib.aclosing(self.run(runt, genr)) as agen:
             async for node, path in agen:

-                runt.tick()
-
                 yield node, path

-
-
-
-
-                    break
+                if limit is not None:
+                    count += 1
+                    if count >= limit:
+                        break

 class Lookup(Query):
     '''
synapse/lib/base.py
CHANGED
@@ -128,7 +128,7 @@ class Base:

         self.isfini = False
         self.anitted = True  # For assertion purposes
-        self.finievt = None
+        self.finievt = asyncio.Event()
         self.entered = False

         # hold a weak ref to other bases we should fini if they
@@ -431,10 +431,7 @@ class Base:
         self._syn_funcs.clear()
         self._fini_funcs.clear()

-
-
-        if fevt is not None:
-            fevt.set()
+        self.finievt.set()

         return 0

@@ -468,13 +465,6 @@ class Base:
             base.waitfini(timeout=30)

         '''
-
-        if self.isfini:
-            return True
-
-        if self.finievt is None:
-            self.finievt = asyncio.Event()
-
         return await s_coro.event_wait(self.finievt, timeout)

     def schedCoro(self, coro):
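For context, a minimal sketch of the simplification above: because the event is now created eagerly in __init__, waiters no longer need the lazy-creation and isfini fast-path branches. Illustrative only, not the full Base class.

    import asyncio

    class Thing:
        def __init__(self):
            self.isfini = False
            self.finievt = asyncio.Event()   # created eagerly, never None

        def fini(self):
            self.isfini = True
            self.finievt.set()

        async def waitfini(self, timeout=None):
            # fini() sets the event, so this returns immediately once fini'd
            try:
                await asyncio.wait_for(self.finievt.wait(), timeout)
                return True
            except asyncio.TimeoutError:
                return False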
synapse/lib/cell.py
CHANGED
@@ -2711,6 +2711,12 @@ class Cell(s_nexus.Pusher, s_telepath.Aware):
             raise s_exc.BadArg(mesg=mesg, arg='path', valu=path)
         await self._streamBackupArchive(path, user, name)

+    async def _removeStreamingBackup(self, path):
+        logger.debug(f'Removing {path}')
+        await s_coro.executor(shutil.rmtree, path, ignore_errors=True)
+        logger.debug(f'Removed {path}')
+        self.backupstreaming = False
+
     async def iterNewBackupArchive(self, user, name=None, remove=False):

         if self.backupstreaming:
@@ -2728,23 +2734,13 @@ class Cell(s_nexus.Pusher, s_telepath.Aware):
                 mesg = 'Backup with name already exists'
                 raise s_exc.BadArg(mesg=mesg)

-
-            await self.runBackup(name)
-        except Exception:
-            if remove:
-                logger.debug(f'Removing {path}')
-                await s_coro.executor(shutil.rmtree, path, ignore_errors=True)
-                logger.debug(f'Removed {path}')
-            raise
-
+            await self.runBackup(name)
             await self._streamBackupArchive(path, user, name)

         finally:
             if remove:
-
-                await
-                logger.debug(f'Removed {path}')
-                self.backupstreaming = False
+                self.removetask = asyncio.create_task(self._removeStreamingBackup(path))
+                await asyncio.shield(self.removetask)

     async def isUserAllowed(self, iden, perm, gateiden=None, default=False):
         user = self.auth.user(iden)  # type: s_auth.User
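For context, a minimal sketch of the cleanup pattern introduced above: the removal runs in its own task and is awaited through asyncio.shield(), so a cancellation of the streaming coroutine does not cancel the removal itself. The names are illustrative, not the Cell API.

    import asyncio

    async def remove_backup(path):
        # stand-in for the executor-based shutil.rmtree call in _removeStreamingBackup()
        await asyncio.sleep(0)

    async def stream_backup(path):
        try:
            pass  # stream the archive to the caller
        finally:
            task = asyncio.create_task(remove_backup(path))
            await asyncio.shield(task)  # cleanup survives cancellation of stream_backup()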
synapse/lib/parser.py
CHANGED
@@ -75,7 +75,7 @@ terminalEnglishMap = {
     'MODSET': '+= or -=',
     'MODSETMULTI': '++= or --=',
     'NONQUOTEWORD': 'unquoted value',
-    '
+    'NOTOP': 'not',
     'NULL': 'null',
     'NUMBER': 'number',
     'OCTNUMBER': 'number',
@@ -134,6 +134,7 @@ terminalEnglishMap = {
     '_LPARNOSPACE': '(',
     '_MATCHHASH': '#',
     '_MATCHHASHWILD': '#',
+    '_NOT': 'not',
     '_RETURN': 'return',
     '_REVERSE': 'reverse',
     '_RIGHTJOIN': '-+>',
synapse/lib/schemas.py
CHANGED
synapse/lib/storm.lark
CHANGED
@@ -437,7 +437,8 @@ _cond: notcond | "(" _condexpr ")"
     | condsubq | arraycond
     | _varvalu | _reqdollarexprs

-
+_NOT: "not"
+notcond: _NOT _cond

 hasrelpropcond: relprop | univprop
 relpropcond: relpropvalue _cmpr _valu
@@ -627,15 +628,15 @@ OCTNUMBER.1: /
 /x

 BOOL.2: /(true|false)(?=$|[\s\),\]}\|\=])/
-NULL.2:
-
+NULL.2: /null(?=$|[\s\),\]}\|\=])/
+NOTOP.2: /not(?=$|[\s\),\]}\|\=])/
 OR.2: "or"
 AND.2: "and"

 // $ expression rules in increasing order of precedence (modeled on Python's order)
 ?expror: exprand | expror OR exprand
 ?exprand: exprnot | exprand AND exprnot
-?exprnot: exprcmp |
+?exprnot: exprcmp | NOTOP exprcmp
 ?exprcmp: exprsum | exprcmp (CMPR | EQSPACE | EQNOSPACE) exprsum
 ?exprsum: exprproduct | exprsum (EXPRPLUS | EXPRMINUS) exprproduct
 ?exprproduct: exprunary | exprproduct (EXPRTIMES | EXPRDIVIDE | EXPRMODULO) exprunary
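For context, the new NOTOP terminal uses the same lookahead as the BOOL and NULL terminals, so `not` only matches as an operator when followed by the end of input or a delimiter. A small Python check of that regex, illustrative only and outside the Lark runtime:

    import re

    notop = re.compile(r'not(?=$|[\s\),\]}\|\=])')

    assert notop.match('not $x') is not None   # `not` followed by whitespace matches
    assert notop.match('not') is not None      # end of input also matches
    assert notop.match('nothing') is None      # no match inside a longer word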
synapse/lib/storm.py
CHANGED
@@ -1,7 +1,6 @@
 import types
 import pprint
 import asyncio
-import hashlib
 import logging
 import argparse
 import contextlib
@@ -22,13 +21,10 @@ import synapse.lib.snap as s_snap
 import synapse.lib.cache as s_cache
 import synapse.lib.layer as s_layer
 import synapse.lib.scope as s_scope
-import synapse.lib.config as s_config
 import synapse.lib.autodoc as s_autodoc
-import synapse.lib.grammar as s_grammar
 import synapse.lib.msgpack as s_msgpack
 import synapse.lib.schemas as s_schemas
 import synapse.lib.spooled as s_spooled
-import synapse.lib.version as s_version
 import synapse.lib.hashitem as s_hashitem
 import synapse.lib.stormctrl as s_stormctrl
 import synapse.lib.stormtypes as s_stormtypes
@@ -1726,9 +1722,6 @@ class Runtime(s_base.Base):
     async def warnonce(self, mesg, **info):
         return await self.snap.warnonce(mesg, **info)

-    def tick(self):
-        pass
-
     def cancel(self):
         self.task.cancel()

@@ -1964,7 +1957,6 @@ class Runtime(s_base.Base):
                 nodegenr = subgraph.run(self, nodegenr)

             async for item in nodegenr:
-                self.tick()
                 yield item

         except RecursionError:
@@ -2847,7 +2839,8 @@ class BatchCmd(Cmd):
             mesg = f'Specified batch size ({size}) is above the maximum (10000).'
             raise s_exc.StormRuntimeError(mesg=mesg)

-
+        _query = await s_stormtypes.tostr(self.opts.query)
+        query = await runt.getStormQuery(_query)
         doyield = await s_stormtypes.tobool(self.opts.cond)

         async with runt.getSubRuntime(query, opts={'vars': {'nodes': []}}) as subr:
synapse/lib/storm_format.py
CHANGED
@@ -57,7 +57,7 @@ TerminalPygMap = {
     'MODSET': p_t.Operator,
     'MODSETMULTI': p_t.Operator,
     'NONQUOTEWORD': p_t.Literal,
-    '
+    'NOTOP': p_t.Operator,
     'NULL': p_t.Keyword,
     'NUMBER': p_t.Literal.Number,
     'OCTNUMBER': p_t.Literal.Number,
@@ -115,6 +115,7 @@ TerminalPygMap = {
     '_LPARNOSPACE': p_t.Punctuation,
     '_MATCHHASH': p_t.Punctuation,
     '_MATCHHASHWILD': p_t.Punctuation,
+    '_NOT': p_t.Keyword,
     '_RETURN': p_t.Keyword,
     '_REVERSE': p_t.Keyword,
     '_RIGHTJOIN': p_t.Punctuation,
synapse/lib/version.py
CHANGED
@@ -223,6 +223,6 @@ def reqVersion(valu, reqver,
 ##############################################################################
 # The following are touched during the release process by bumpversion.
 # Do not modify these directly.
-version = (2, 200, 0)
+version = (2, 202, 0)
 verstring = '.'.join([str(x) for x in version])
-commit = '
+commit = '744d404357c48b663bd03528e35c6f0e186edb0e'
synapse/models/dns.py
CHANGED
@@ -147,7 +147,7 @@ class DnsModule(s_module.CoreModule):

         ('inet:dns:txt', ('comp', {'fields': (('fqdn', 'inet:fqdn'), ('txt', 'str'))}), {
             'ex': '(hehe.vertex.link,"fancy TXT record")',
-            'doc': 'The result of a DNS
+            'doc': 'The result of a DNS TXT record lookup.'}),

         ('inet:dns:type', ('int', {}), {
             'doc': 'A DNS query/answer type integer.'}),
synapse/models/economic.py
CHANGED
@@ -67,10 +67,10 @@ class EconModule(s_module.CoreModule):
                 'doc': 'A financial security which is typically traded on an exchange.'}),

             ('econ:fin:bar', ('guid', {}), {
-                'doc': 'A sample of the open, close, high, low prices of a security in a specific time window'}),
+                'doc': 'A sample of the open, close, high, low prices of a security in a specific time window.'}),

             ('econ:fin:tick', ('guid', {}), {
-                'doc': 'A sample of the price of a security at a single moment in time'}),
+                'doc': 'A sample of the price of a security at a single moment in time.'}),

             ('econ:bank:account:type:taxonomy', ('taxonomy', {}), {
                 'doc': 'A bank account type taxonomy.'}),
@@ -192,10 +192,10 @@ class EconModule(s_module.CoreModule):
                     'doc': 'The campaign that the purchase was in support of.'}),

                 ('price', ('econ:price', {}), {
-                    'doc': 'The econ:price of the purchase'}),
+                    'doc': 'The econ:price of the purchase.'}),

                 ('currency', ('econ:currency', {}), {
-                    'doc': 'The econ:price of the purchase'}),
+                    'doc': 'The econ:price of the purchase.'}),

                 ('listing', ('biz:listing', {}), {
                     'doc': 'The purchase was made based on the given listing.'}),
@@ -283,10 +283,10 @@ class EconModule(s_module.CoreModule):
                     'doc': 'The purchase which the payment was paying for.'}),

                 ('amount', ('econ:price', {}), {
-                    'doc': 'The amount of money transferred in the payment'}),
+                    'doc': 'The amount of money transferred in the payment.'}),

                 ('currency', ('econ:currency', {}), {
-                    'doc': 'The currency of the payment'}),
+                    'doc': 'The currency of the payment.'}),

                 ('memo', ('str', {}), {
                     'doc': 'A small note specified by the payer common in financial transactions.'}),
@@ -339,66 +339,66 @@ class EconModule(s_module.CoreModule):
             ('econ:fin:exchange', {}, (

                 ('name', ('str', {'lower': True, 'strip': True}), {
-                    'doc': 'A simple name for the exchange',
+                    'doc': 'A simple name for the exchange.',
                     'ex': 'nasdaq'}),

                 ('org', ('ou:org', {}), {
-                    'doc': 'The organization that operates the exchange'}),
+                    'doc': 'The organization that operates the exchange.'}),

                 ('currency', ('econ:currency', {}), {
-                    'doc': 'The currency used for all transactions in the exchange',
+                    'doc': 'The currency used for all transactions in the exchange.',
                     'ex': 'usd'}),
             )),

             ('econ:fin:security', {}, (

                 ('exchange', ('econ:fin:exchange', {}), {
-                    'doc': 'The exchange on which the security is traded'}),
+                    'doc': 'The exchange on which the security is traded.'}),

                 ('ticker', ('str', {'lower': True, 'strip': True}), {
-                    'doc': 'The identifier for this security within the exchange'}),
+                    'doc': 'The identifier for this security within the exchange.'}),

                 ('type', ('str', {'lower': True, 'strip': True}), {
-                    'doc': 'A user defined type such as stock, bond, option, future, or forex'}),
+                    'doc': 'A user defined type such as stock, bond, option, future, or forex.'}),

                 ('price', ('econ:price', {}), {
-                    'doc': 'The last known/available price of the security'}),
+                    'doc': 'The last known/available price of the security.'}),

                 ('time', ('time', {}), {
-                    'doc': 'The time of the last know price sample'}),
+                    'doc': 'The time of the last know price sample.'}),
             )),

             ('econ:fin:tick', {}, (

                 ('security', ('econ:fin:security', {}), {
-                    'doc': 'The security measured by the tick'}),
+                    'doc': 'The security measured by the tick.'}),

                 ('time', ('time', {}), {
-                    'doc': 'The time the price was sampled'}),
+                    'doc': 'The time the price was sampled.'}),

                 ('price', ('econ:price', {}), {
-                    'doc': 'The price of the security at the time'}),
+                    'doc': 'The price of the security at the time.'}),
             )),

             ('econ:fin:bar', {}, (

                 ('security', ('econ:fin:security', {}), {
-                    'doc': 'The security measured by the bar'}),
+                    'doc': 'The security measured by the bar.'}),

                 ('ival', ('ival', {}), {
-                    'doc': 'The interval of measurement'}),
+                    'doc': 'The interval of measurement.'}),

                 ('price:open', ('econ:price', {}), {
-                    'doc': 'The opening price of the security'}),
+                    'doc': 'The opening price of the security.'}),

                 ('price:close', ('econ:price', {}), {
-                    'doc': 'The closing price of the security'}),
+                    'doc': 'The closing price of the security.'}),

                 ('price:low', ('econ:price', {}), {
-                    'doc': 'The low price of the security'}),
+                    'doc': 'The low price of the security.'}),

                 ('price:high', ('econ:price', {}), {
-                    'doc': 'The high price of the security'}),
+                    'doc': 'The high price of the security.'}),
             )),

             ('econ:acct:invoice', {}, (
synapse/models/files.py
CHANGED
@@ -270,7 +270,7 @@ class FileModule(s_module.CoreModule):
                 ('file', ('file:bytes', {}), {
                     'doc': 'The Mach-O file containing the load command.'}),
                 ('type', ('int', {'enums': s_l_macho.getLoadCmdTypes()}), {
-                    'doc': 'The type of the load command'}),
+                    'doc': 'The type of the load command.'}),
                 ('size', ('int', {}), {
                     'doc': 'The size of the load command structure in bytes.'}),
             ),
@@ -724,7 +724,7 @@
                 ('sha256', ('hash:sha256', {}), {
                     'doc': 'The sha256 hash of the bytes of the Mach-O section.'}),
                 ('offset', ('int', {}), {
-                    'doc': 'The file offset to the beginning of the section'}),
+                    'doc': 'The file offset to the beginning of the section.'}),
             )),

             ('file:mime:lnk', {}, (
synapse/models/inet.py
CHANGED
@@ -1332,7 +1332,7 @@ class InetModule(s_module.CoreModule):

             ('inet:ssl:cert', ('comp', {'fields': (('server', 'inet:server'), ('file', 'file:bytes'))}), {
                 'deprecated': True,
-                'doc': 'Deprecated. Please use inet:tls:servercert or inet:tls:clientcert',
+                'doc': 'Deprecated. Please use inet:tls:servercert or inet:tls:clientcert.',
             }),

             ('inet:port', ('int', {'min': 0, 'max': 0xffff}), {
@@ -3802,7 +3802,7 @@ class InetModule(s_module.CoreModule):

                 ('client:address', ('inet:client', {}), {
                     'deprecated': True,
-                    'doc': 'Deprecated. Please use :client'}),
+                    'doc': 'Deprecated. Please use :client.'}),

                 ('client:software', ('it:prod:softver', {}), {
                     'doc': 'The client software version used to send the message.'}),
|