synapse 2.200.0__py311-none-any.whl → 2.202.0__py311-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.

This version of synapse might be problematic.

Files changed (40)
  1. synapse/cortex.py +30 -23
  2. synapse/datamodel.py +2 -3
  3. synapse/lib/agenda.py +24 -5
  4. synapse/lib/ast.py +7 -10
  5. synapse/lib/base.py +2 -12
  6. synapse/lib/cell.py +9 -13
  7. synapse/lib/parser.py +2 -1
  8. synapse/lib/schemas.py +1 -0
  9. synapse/lib/storm.lark +5 -4
  10. synapse/lib/storm.py +2 -9
  11. synapse/lib/storm_format.py +2 -1
  12. synapse/lib/version.py +2 -2
  13. synapse/models/dns.py +1 -1
  14. synapse/models/economic.py +23 -23
  15. synapse/models/files.py +2 -2
  16. synapse/models/inet.py +2 -2
  17. synapse/models/infotech.py +7 -7
  18. synapse/models/person.py +1 -1
  19. synapse/models/proj.py +3 -2
  20. synapse/models/risk.py +1 -1
  21. synapse/models/transport.py +3 -3
  22. synapse/telepath.py +75 -16
  23. synapse/tests/test_cortex.py +26 -3
  24. synapse/tests/test_lib_agenda.py +41 -0
  25. synapse/tests/test_lib_ast.py +3 -0
  26. synapse/tests/test_lib_cell.py +11 -0
  27. synapse/tests/test_lib_grammar.py +4 -0
  28. synapse/tests/test_lib_storm.py +7 -1
  29. synapse/tests/test_model_risk.py +4 -0
  30. synapse/tests/test_telepath.py +56 -34
  31. synapse/tests/test_tools_autodoc.py +5 -0
  32. synapse/tests/test_utils_getrefs.py +35 -28
  33. synapse/tests/utils.py +7 -7
  34. synapse/tools/autodoc.py +16 -1
  35. synapse/utils/getrefs.py +4 -2
  36. {synapse-2.200.0.dist-info → synapse-2.202.0.dist-info}/METADATA +1 -1
  37. {synapse-2.200.0.dist-info → synapse-2.202.0.dist-info}/RECORD +40 -40
  38. {synapse-2.200.0.dist-info → synapse-2.202.0.dist-info}/WHEEL +1 -1
  39. {synapse-2.200.0.dist-info → synapse-2.202.0.dist-info}/LICENSE +0 -0
  40. {synapse-2.200.0.dist-info → synapse-2.202.0.dist-info}/top_level.txt +0 -0
synapse/models/infotech.py CHANGED
@@ -641,10 +641,10 @@ class ItModule(s_module.CoreModule):
  'doc': 'Semantic Version type.',
  }),
  ('it:sec:cpe', 'synapse.models.infotech.Cpe23Str', {}, {
- 'doc': 'A NIST CPE 2.3 Formatted String',
+ 'doc': 'A NIST CPE 2.3 Formatted String.',
  }),
  ('it:sec:cpe:v2_2', 'synapse.models.infotech.Cpe22Str', {}, {
- 'doc': 'A NIST CPE 2.2 Formatted String',
+ 'doc': 'A NIST CPE 2.2 Formatted String.',
  }),
  ),
  'types': (
@@ -697,7 +697,7 @@ class ItModule(s_module.CoreModule):
  'ex': 'cve-2012-0158'
  }),
  ('it:sec:cwe', ('str', {'regex': r'^CWE-[0-9]{1,8}$'}), {
- 'doc': 'NIST NVD Common Weaknesses Enumeration Specification',
+ 'doc': 'NIST NVD Common Weaknesses Enumeration Specification.',
  'ex': 'CWE-120',
  }),

@@ -1297,7 +1297,7 @@ class ItModule(s_module.CoreModule):
  )),
  ('it:account', {}, (
  ('user', ('inet:user', {}), {
- 'doc': 'The username associated with the account',
+ 'doc': 'The username associated with the account.',
  }),
  ('contact', ('ps:contact', {}), {
  'doc': 'Additional contact information associated with this account.',
@@ -1977,7 +1977,7 @@ class ItModule(s_module.CoreModule):
  'doc': 'The commit that produced this diff.'}),

  ('file', ('file:bytes', {}), {
- 'doc': 'The file after the commit has been applied'}),
+ 'doc': 'The file after the commit has been applied.'}),

  ('path', ('file:path', {}), {
  'doc': 'The path to the file in the repo that the diff is being applied to.'}),
@@ -2272,7 +2272,7 @@ class ItModule(s_module.CoreModule):
  'disp': {'hint': 'text'},
  }),
  ('cpe', ('it:sec:cpe', {}), {
- 'doc': 'The NIST CPE 2.3 string specifying this software version',
+ 'doc': 'The NIST CPE 2.3 string specifying this software version.',
  }),
  ('cves', ('array', {'type': 'it:sec:cve', 'uniq': True, 'sorted': True}), {
  'doc': 'A list of CVEs that apply to this software version.',
@@ -2420,7 +2420,7 @@ class ItModule(s_module.CoreModule):
  'doc': 'Set if this result was part of running multiple scanners.'}),

  ('multi:count', ('int', {'min': 0}), {
- 'doc': 'The total number of scanners which were run by a multi-scanner'}),
+ 'doc': 'The total number of scanners which were run by a multi-scanner.'}),

  ('multi:count:benign', ('int', {'min': 0}), {
  'doc': 'The number of scanners which returned a benign verdict.'}),
synapse/models/person.py CHANGED
@@ -198,7 +198,7 @@ class PsModule(s_module.CoreModule):
  'doc': 'The last date the student attended a class.',
  }),
  ('classes', ('array', {'type': 'edu:class', 'uniq': True, 'sorted': True}), {
- 'doc': 'The classes attended by the student',
+ 'doc': 'The classes attended by the student.',
  }),
  ('achievement', ('ps:achievement', {}), {
  'doc': 'The achievement awarded to the individual.',
synapse/models/proj.py CHANGED
@@ -173,7 +173,7 @@ class ProjectModule(s_module.CoreModule):
  'doc': 'The ticket the comment was added to.'}),

  ('text', ('str', {}), {
- 'doc': 'The text of the comment'}),
+ 'doc': 'The text of the comment.'}),
  # -(refs)> thing comment is about
  )),

@@ -244,7 +244,8 @@ class ProjectModule(s_module.CoreModule):
  'doc': 'The sprint that contains the ticket.'}),

  ('type', ('str', {'lower': True, 'strip': True}), {
- 'doc': 'The type of ticket. (eg story / bug)'}),
+ 'doc': 'The type of ticket.',
+ 'ex': 'bug'}),
  )),
  ),
  }),
synapse/models/risk.py CHANGED
@@ -97,7 +97,7 @@ class RiskModule(s_module.CoreModule):
  },
  }),
  ('risk:mitigation:type:taxonomy', ('taxonomy', {}), {
- 'interaces': ('taxonomy',),
+ 'interfaces': ('meta:taxonomy',),
  'doc': 'A taxonomy of mitigation types.',
  }),
  ('risk:mitigation', ('guid', {}), {
synapse/models/transport.py CHANGED
@@ -102,7 +102,7 @@ class TransportModule(s_module.CoreModule):
  'doc': 'An individual sea vessel.'}),

  ('transport:sea:mmsi', ('str', {'regex': '[0-9]{9}'}), {
- 'doc': 'A Maritime Mobile Service Identifier'}),
+ 'doc': 'A Maritime Mobile Service Identifier.'}),

  ('transport:sea:imo', ('str', {'lower': True, 'strip': True, 'replace': ((' ', ''),), 'regex': '^imo[0-9]{7}$'}), {
  'doc': 'An International Maritime Organization registration number.'}),
@@ -349,7 +349,7 @@ class TransportModule(s_module.CoreModule):
  )),
  ('transport:air:port', {}, (
  ('name', ('str', {'lower': True, 'onespace': True}), {
- 'doc': 'The name of the airport'}),
+ 'doc': 'The name of the airport.'}),
  ('place', ('geo:place', {}), {
  'doc': 'The place where the IATA airport code is assigned.'}),
  )),
@@ -462,7 +462,7 @@ class TransportModule(s_module.CoreModule):
  'doc': 'Deprecated. Please use :phys:length.'}),

  ('beam', ('geo:dist', {}), {
- 'doc': 'The official overall vessel beam'}),
+ 'doc': 'The official overall vessel beam.'}),

  ('flag', ('iso:3166:cc', {}), {
  'doc': 'The country the vessel is flagged to.'}),
synapse/telepath.py CHANGED
@@ -31,6 +31,8 @@ televers = (3, 0)

  aha_clients = {}

+ LINK_CULL_INTERVAL = 10
+
  async def addAhaUrl(url):
  '''
  Add (incref) an aha registry URL.
@@ -590,6 +592,10 @@ class Proxy(s_base.Base):
  valu = proxy.getFooValu(x, y)

  '''
+ _link_task = None
+ _link_event = asyncio.Event()
+ _all_proxies = set()
+
  async def __anit__(self, link, name):

  await s_base.Base.__anit__(self)
@@ -608,11 +614,15 @@ class Proxy(s_base.Base):
  self.methinfo = {}

  self.sess = None
+
  self.links = collections.deque()
- self._link_poolsize = 4
+ self.alllinks = collections.deque()
+
+ self._link_add = 0 # counter for pending links being connected
+ self._links_min = 4 # low water mark for the link pool
+ self._links_max = 12 # high water mark for the link pool

  self.synack = None
- self.syndone = asyncio.Event()

  self.handlers = {
  'task:fini': self._onTaskFini,
@@ -626,19 +636,52 @@ class Proxy(s_base.Base):
  await item.fini()

  mesg = ('task:fini', {'retn': (False, ('IsFini', {}))})
- for name, task in list(self.tasks.items()):
+ for iden, task in list(self.tasks.items()): # pragma: no cover
  task.reply(mesg)
- del self.tasks[name]
+ del self.tasks[iden]

- for link in self.links:
- await link.fini()
+ # fini all the links from a different task to prevent
+ # delaying the proxy shutdown...
+ s_coro.create_task(self._finiAllLinks())

- del self.syndone
- await self.link.fini()
+ if self in self._all_proxies:
+ self._all_proxies.remove(self)
+
+ if not Proxy._all_proxies and Proxy._link_task is not None:
+ Proxy._link_task.cancel()
+ Proxy._link_task = None
+
+ Proxy._all_proxies.add(self)

  self.onfini(fini)
  self.link.onfini(self.fini)

+ if Proxy._link_task is None:
+ Proxy._link_task = s_coro.create_task(Proxy._linkLoopTask())
+
+ @classmethod
+ async def _linkLoopTask(clas):
+ while True:
+ try:
+ await s_coro.event_wait(Proxy._link_event, timeout=LINK_CULL_INTERVAL)
+
+ for proxy in list(Proxy._all_proxies):
+
+ if proxy.isfini:
+ continue
+
+ # close one link per proxy per period if the number of
+ # available links is greater than _links_max...
+ if len(proxy.links) > proxy._links_max:
+ link = proxy.links.popleft()
+ await link.fini()
+ await proxy.fire('pool:link:fini', link=link)
+
+ Proxy._link_event.clear()
+
+ except Exception: # pragma: no cover
+ logger.exception('synapse.telepath.Proxy.linkLoopTask()')
+
  def _hasTeleFeat(self, name, vers=1):
  return self._features.get(name, 0) >= vers

@@ -692,6 +735,12 @@ class Proxy(s_base.Base):

  link = self.links.popleft()

+ # fire a task to replace the link if we are
+ # below the low-water mark for link count.
+ if len(self.links) + self._link_add < self._links_min:
+ self._link_add += 1
+ self.schedCoro(self._addPoolLink())
+
  if link.isfini:
  continue

@@ -700,6 +749,11 @@ class Proxy(s_base.Base):
  # we need a new one...
  return await self._initPoolLink()

+ async def _addPoolLink(self):
+ link = await self._initPoolLink()
+ self.links.append(link)
+ self._link_add -= 1
+
  async def getPipeline(self, genr, name=None):
  '''
  Construct a proxy API call pipeline in order to make
@@ -724,8 +778,6 @@ class Proxy(s_base.Base):

  async def _initPoolLink(self):

- # TODO loop / backoff
-
  if self.link.get('unix'):

  path = self.link.get('path')
@@ -739,19 +791,26 @@ class Proxy(s_base.Base):

  link = await s_link.connect(host, port, ssl=ssl)

- self.onfini(link)
+ self.alllinks.append(link)
+ async def fini():
+ self.alllinks.remove(link)
+ if link in self.links:
+ self.links.remove(link)
+
+ link.onfini(fini)

  return link

+ async def _finiAllLinks(self):
+ for link in list(self.alllinks):
+ await link.fini()
+ await self.link.fini()
+
  async def _putPoolLink(self, link):

  if link.isfini:
  return

- # If we've exceeded our poolsize, discard the current link.
- if len(self.links) >= self._link_poolsize:
- return await link.fini()
-
  self.links.append(link)

  def __enter__(self):
@@ -862,6 +921,7 @@ class Proxy(s_base.Base):

  except GeneratorExit:
  # if they bail early on the genr, fini the link
+ # TODO: devise a tx/rx strategy to recover these links...
  await link.fini()

  return s_coro.GenrHelp(genrloop())
@@ -1321,7 +1381,6 @@ class Client(s_base.Base):
  info.update(self._t_opts)
  self._t_proxy = await openinfo(info)
  self._t_methinfo = self._t_proxy.methinfo
- self._t_proxy._link_poolsize = self._t_conf.get('link_poolsize', 4)

  async def fini():
  if self._t_named_meths:
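
Taken together, the telepath.py changes above replace the fixed Proxy._link_poolsize with low/high watermarks (_links_min / _links_max), replenish the pool in the background whenever a link is taken while the pool is below the low mark, and add a shared cull task that closes at most one surplus link per proxy every LINK_CULL_INTERVAL seconds. The following is a minimal, self-contained sketch of that pooling pattern for illustration only; it is not Synapse's API, and all of the names in it (LinkPool, connect, isfini) are hypothetical.

    import asyncio
    import collections

    CULL_INTERVAL = 1.0   # seconds between cull passes (the diff uses LINK_CULL_INTERVAL = 10)

    class LinkPool:
        # A toy pool with low/high watermarks and a background cull task.

        def __init__(self, connect, low=4, high=12):
            self.connect = connect            # coroutine that builds a new "link"
            self.low = low                    # replenish when available links drop below this
            self.high = high                  # cull when available links exceed this
            self.links = collections.deque()  # available links
            self.pending = 0                  # links currently being connected
            self.culler = asyncio.create_task(self._cull_loop())

        async def get(self):
            # kick off a background replenish if we are under the low watermark
            if len(self.links) + self.pending < self.low:
                self.pending += 1
                asyncio.create_task(self._add_link())

            while self.links:
                link = self.links.popleft()
                if not link['isfini']:
                    return link

            # nothing usable in the pool; connect a fresh link
            return await self.connect()

        def put(self, link):
            # no size check on return; the cull loop trims the pool lazily
            if not link['isfini']:
                self.links.append(link)

        async def _add_link(self):
            self.links.append(await self.connect())
            self.pending -= 1

        async def _cull_loop(self):
            # close at most one surplus link per pass
            while True:
                await asyncio.sleep(CULL_INTERVAL)
                if len(self.links) > self.high:
                    self.links.popleft()['isfini'] = True

    async def main():
        async def connect():
            return {'isfini': False}   # stand-in for a real connected link

        pool = LinkPool(connect)
        link = await pool.get()
        pool.put(link)
        print('available links:', len(pool.links))

    asyncio.run(main())

The design choice mirrored here is that returning a link never closes it immediately; surplus links are trimmed lazily by the cull loop, while acquisition eagerly tops the pool back up toward the low watermark.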
synapse/tests/test_cortex.py CHANGED
@@ -7397,6 +7397,9 @@ class CortexBasicTest(s_t_utils.SynTest):
  self.len(1, await core.nodes('media:news -(refs)> *', opts={'view': altview}))
  self.eq(2, await proxy.feedFromAxon(sha256))

+ opts['limit'] = 1
+ self.len(1, await alist(proxy.exportStorm('media:news inet:email', opts=opts)))
+
  async with self.getHttpSess(port=port) as sess:
  resp = await sess.post(f'https://localhost:{port}/api/v1/storm/export')
  self.eq(401, resp.status)
@@ -8501,6 +8504,26 @@ class CortexBasicTest(s_t_utils.SynTest):
  self.notin('Timeout waiting for pool mirror', data)
  self.notin('Timeout waiting for query mirror', data)

+ orig = s_telepath.ClientV2.proxy
+ async def finidproxy(self, timeout=None):
+ prox = await orig(self, timeout=timeout)
+ await prox.fini()
+ return prox
+
+ with patch('synapse.telepath.ClientV2.proxy', finidproxy):
+ with self.getLoggerStream('synapse') as stream:
+ msgs = await alist(core00.storm('inet:asn=0'))
+ self.len(1, [m for m in msgs if m[0] == 'node'])
+
+ stream.seek(0)
+ data = stream.read()
+ self.isin('Proxy for pool mirror [01.core.synapse] was shutdown. Skipping.', data)
+
+ msgs = await core00.stormlist('cortex.storm.pool.set --connection-timeout 1 --sync-timeout 1 aha://pool00...')
+ self.stormHasNoWarnErr(msgs)
+ self.stormIsInPrint('Storm pool configuration set.', msgs)
+ await core00.stormpool.waitready(timeout=12)
+
  core01.nexsroot.nexslog.indx = 0

  with patch('synapse.cortex.MAX_NEXUS_DELTA', 1):
@@ -8513,8 +8536,7 @@ class CortexBasicTest(s_t_utils.SynTest):

  stream.seek(0)
  data = stream.read()
- explog = (f'Pool mirror [01.core.synapse] Nexus offset delta too large '
- f'({nexsoffs} > 1), running query locally')
+ explog = ('Pool mirror [01.core.synapse] is too far out of sync. Skipping.')
  self.isin(explog, data)
  self.notin('Offloading Storm query', data)

@@ -8572,7 +8594,8 @@ class CortexBasicTest(s_t_utils.SynTest):

  stream.seek(0)
  data = stream.read()
- self.isin('Timeout waiting for pool mirror, running query locally', data)
+ self.isin('Timeout waiting for pool mirror proxy.', data)
+ self.isin('Pool members exhausted. Running query locally.', data)

  await core01.fini()

synapse/tests/test_lib_agenda.py CHANGED
@@ -1098,6 +1098,15 @@ class AgendaTest(s_t_utils.SynTest):
  self.gt(cdef00['laststarttime'], 0)
  self.eq(cdef00['laststarttime'], cdef01['laststarttime'])

+ async def test_agenda_warnings(self):
+
+ async with self.getTestCore() as core:
+ with self.getAsyncLoggerStream('synapse.lib.agenda', 'issued warning: oh hai') as stream:
+ q = '$lib.warn("oh hai")'
+ msgs = await core.stormlist('cron.at --now $q', opts={'vars': {'q': q}})
+ self.stormHasNoWarnErr(msgs)
+ self.true(await stream.wait(timeout=6))
+
  async def test_agenda_graceful_promotion_with_running_cron(self):

  async with self.getTestAha() as aha:
@@ -1221,3 +1230,35 @@ class AgendaTest(s_t_utils.SynTest):

  crons = await core.callStorm('return($lib.cron.list())')
  self.len(1, crons)
+
+ async def test_agenda_lasterrs(self):
+
+ async with self.getTestCore() as core:
+
+ cdef = {
+ 'iden': 'test',
+ 'creator': core.auth.rootuser.iden,
+ 'storm': '[ test:str=foo ]',
+ 'reqs': {},
+ 'incunit': s_tu.MINUTE,
+ 'incvals': 1
+ }
+
+ await core.agenda.add(cdef)
+ appt = await core.agenda.get('test')
+
+ self.true(isinstance(appt.lasterrs, list))
+ self.eq(appt.lasterrs, [])
+
+ edits = {
+ 'lasterrs': ('error1', 'error2'),
+ }
+ await appt.edits(edits)
+
+ self.true(isinstance(appt.lasterrs, list))
+ self.eq(appt.lasterrs, ['error1', 'error2'])
+
+ await core.agenda._load_all()
+ appt = await core.agenda.get('test')
+ self.true(isinstance(appt.lasterrs, list))
+ self.eq(appt.lasterrs, ['error1', 'error2'])
synapse/tests/test_lib_ast.py CHANGED
@@ -1077,6 +1077,9 @@ class AstTest(s_test.SynTest):
  self.len(1, await core.nodes('inet:proto:request:sandbox:file'))
  self.len(1, await core.nodes('it:host:activity:sandbox:file'))

+ self.len(1, await core.nodes('[ it:exec:reg:get=* :host=(host,) ]'))
+ self.len(4, await core.nodes('it:host:activity:host=(host,)'))
+
  async def test_ast_edge_walknjoin(self):

  async with self.getTestCore() as core:
synapse/tests/test_lib_cell.py CHANGED
@@ -1817,8 +1817,16 @@ class CellTest(s_t_utils.SynTest):

  with open(bkuppath3, 'wb') as bkup3:
  async for msg in proxy.iterNewBackupArchive('bkup3', remove=True):
+ self.true(core.backupstreaming)
  bkup3.write(msg)

+ async def streamdone():
+ while core.backupstreaming:
+ await asyncio.sleep(0)
+
+ task = core.schedCoro(streamdone())
+ await asyncio.wait_for(task, 15)
+
  self.eq(('bkup', 'bkup2'), sorted(await proxy.getBackups()))
  self.false(os.path.isdir(os.path.join(backdirn, 'bkup3')))

@@ -1830,6 +1838,9 @@ class CellTest(s_t_utils.SynTest):
  async for msg in proxy.iterNewBackupArchive(remove=True):
  bkup4.write(msg)

+ task = core.schedCoro(streamdone())
+ await asyncio.wait_for(task, 15)
+
  self.eq(('bkup', 'bkup2'), sorted(await proxy.getBackups()))

  # Start another backup while one is already running
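
The test additions above wait for the cell's backupstreaming flag to clear by polling it in a scheduled coroutine and bounding the wait with asyncio.wait_for. A generic sketch of that wait-for-flag pattern; the obj/attr names are hypothetical, not Synapse's API:

    import asyncio

    async def wait_attr_clear(obj, attr, timeout=15):
        # Poll until the attribute goes falsey, yielding to the event loop on
        # each pass, and raise asyncio.TimeoutError if it never clears.
        async def poll():
            while getattr(obj, attr):
                await asyncio.sleep(0)
        await asyncio.wait_for(poll(), timeout)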
synapse/tests/test_lib_grammar.py CHANGED
@@ -746,6 +746,8 @@ Queries = [
  '[test:str=foo :bar--=(foo, bar)]',
  '[test:str=foo :bar?++=$baz]',
  '[test:str=foo :bar?--={[it:dev:str=foo]}]',
+ '$foo=(notime,)',
+ '$foo=(nulltime,)',
  ]

  # Generated with print_parse_list below
@@ -1394,6 +1396,8 @@ _ParseResults = [
  'Query: [EditNodeAdd: [FormName: [Const: test:str], Const: =, Const: foo], EditPropSetMulti: [RelProp: [Const: bar], Const: --=, List: [Const: foo, Const: bar]]]',
  'Query: [EditNodeAdd: [FormName: [Const: test:str], Const: =, Const: foo], EditPropSetMulti: [RelProp: [Const: bar], Const: ?++=, VarValue: [Const: baz]]]',
  'Query: [EditNodeAdd: [FormName: [Const: test:str], Const: =, Const: foo], EditPropSetMulti: [RelProp: [Const: bar], Const: ?--=, SubQuery: [Query: [EditNodeAdd: [FormName: [Const: it:dev:str], Const: =, Const: foo]]]]]',
+ 'Query: [SetVarOper: [Const: foo, List: [Const: notime]]]',
+ 'Query: [SetVarOper: [Const: foo, List: [Const: nulltime]]]',
  ]

  class GrammarTest(s_t_utils.SynTest):
synapse/tests/test_lib_storm.py CHANGED
@@ -167,6 +167,12 @@ class StormTest(s_t_utils.SynTest):
  self.len(1, await core.nodes('ou:org=({"name": "origname"}) [ :name=newname ]'))
  self.len(0, await core.nodes('ou:org=({"name": "origname"})'))

+ nodes = await core.nodes('[ it:exec:proc=(notime,) ]')
+ self.len(1, nodes)
+
+ nodes = await core.nodes('[ it:exec:proc=(nulltime,) ]')
+ self.len(1, nodes)
+
  async def test_lib_storm_jsonexpr(self):
  async with self.getTestCore() as core:

@@ -5038,7 +5044,7 @@ class StormTest(s_t_utils.SynTest):
  q = '''
  for $i in $lib.range(12) {[ test:str=$i ]}

- batch $lib.true --size 5 {
+ batch $lib.true --size 5 ${
  $vals=([])
  for $n in $nodes { $vals.append($n.repr()) }
  $lib.print($lib.str.join(',', $vals))
synapse/tests/test_model_risk.py CHANGED
@@ -610,6 +610,10 @@ class RiskModelTest(s_t_utils.SynTest):
  self.len(1, await core.nodes('risk:mitigation -> it:mitre:attack:mitigation'))
  self.len(1, await core.nodes('risk:mitigation -> risk:mitigation:type:taxonomy'))

+ nodes = await core.nodes('risk:mitigation:type:taxonomy=foo.bar [ :desc="foo that bars"]')
+ self.len(1, nodes)
+ self.eq('foo that bars', nodes[0].get('desc'))
+
  async def test_model_risk_tool_software(self):

  async with self.getTestCore() as core:
synapse/tests/test_telepath.py CHANGED
@@ -318,6 +318,10 @@ class TeleTest(s_t_utils.SynTest):
  link.onfini(evt.set)
  s_glob.sync(proxy._putPoolLink(link))

+ # Grab the fresh link from the pool so our original link is up next again
+ link2 = s_glob.sync(proxy.getPoolLink())
+ s_glob.sync(proxy._putPoolLink(link2))
+
  q = f'{form} | sleep 0.1'

  # Break from the generator right away, causing a
@@ -935,7 +939,6 @@ class TeleTest(s_t_utils.SynTest):

  # Validate the Proxy behavior then the client override
  prox = await s_telepath.openurl(url) # type: Foo
- prox._link_poolsize = 2

  # Start with no links
  self.len(0, prox.links)
@@ -947,53 +950,72 @@ class TeleTest(s_t_utils.SynTest):
  genr = await prox.genr() # type: s_coro.GenrHelp
  self.eq(await genr.genr.__anext__(), 10)

- # The link is being used by the genr
- self.len(0, prox.links)
-
- # and upon exhuastion, that link is put back
- self.eq(await genr.list(), (20, 30))
+ # A new link is in the pool
  self.len(1, prox.links)
- self.true(prox.links[0] is l0)

- # Grab the existing link, then do two more calls
- genr0 = await prox.genr() # contains l0
- genr1 = await prox.genr()
- genr2 = await prox.genr()
- self.len(0, prox.links)
- # Consume two of the three generators
- self.eq(await genr2.list(), (10, 20, 30))
- self.len(1, prox.links)
- self.eq(await genr1.list(), (10, 20, 30))
+ # and upon exhuastion, the first link is put back
+ self.eq(await genr.list(), (20, 30))
  self.len(2, prox.links)
- # Exhausting the lsat generator results in his
- # link not being placed back into the pool
- self.eq(await genr0.list(), (10, 20, 30))
+ self.true(prox.links[1] is l0)
+
+ # Grabbing a link will still spin up another since we are below low watermark
+ genr = await prox.genr() # type: s_coro.GenrHelp
+ self.eq(await genr.genr.__anext__(), 10)
+
  self.len(2, prox.links)
- links = set(lnk for lnk in prox.links)
- self.notin(l0, links)
- # And that link l0 has been fini'd
- self.true(l0.isfini)
+
+ self.eq(await genr.list(), (20, 30))
+ self.len(3, prox.links)
+
+ # Fill up pool above low watermark
+ genrs = [await prox.genr() for _ in range(2)]
+ [await genr.list() for genr in genrs]
+ self.len(5, prox.links)
+
+ # Grabbing a link no longer spins up a replacement
+ genr = await prox.genr() # type: s_coro.GenrHelp
+ self.eq(await genr.genr.__anext__(), 10)
+ self.len(4, prox.links)
+
+ self.eq(await genr.list(), (20, 30))
+ self.len(5, prox.links)

  # Tear down a link by hand and place it back
  # into the pool - that will fail b/c the link
  # has been down down.
  l1 = await prox.getPoolLink()
- self.len(1, prox.links)
+ self.len(4, prox.links)
  await l1.fini()
  await prox._putPoolLink(l1)
- self.len(1, prox.links)
+ self.len(4, prox.links)

  # And all our links are torn down on fini
  await prox.fini()
- self.len(1, prox.links)
- for link in prox.links:
- self.true(link.isfini)
+ self.len(4, prox.links)
+ for link in list(prox.links):
+ self.true(await link.waitfini(1))
+ self.len(0, prox.links)
+
+ with mock.patch('synapse.telepath.LINK_CULL_INTERVAL', 1):
+ async with self.getTestDmon() as dmon:
+ dmon.share('foo', foo)
+ url = f'tcp://127.0.0.1:{dmon.addr[1]}/foo'
+
+ prox = await s_telepath.openurl(url)

- # The telepath Client passes through this value as a configuration parameter
- conf = {'link_poolsize': 2, 'timeout': 2}
- async with await s_telepath.Client.anit(url, conf=conf) as client:
- await client.waitready()
- self.true(client._t_proxy._link_poolsize, 2)
+ # Fill up pool above high watermark
+ genrs = [await prox.genr() for _ in range(13)]
+ [await genr.list() for genr in genrs]
+ self.len(13, prox.links)
+
+ # Add a fini'd proxy for coverage
+ prox2 = await s_telepath.openurl(url)
+ await prox2.fini()
+ prox2._all_proxies.add(prox2)
+
+ wait = prox.waiter(1, 'pool:link:fini')
+ self.len(1, await wait.wait(timeout=5))
+ self.len(12, prox.links)

  async def test_link_fini_breaking_tasks(self):
  foo = Foo()
@@ -1062,7 +1084,7 @@ class TeleTest(s_t_utils.SynTest):

  self.eq(vals, (40, 50, 60))

- self.eq(1, len(proxy.links))
+ self.eq(2, len(proxy.links))
  self.eq(160, await proxy.bar(80, 80))

  async def boomgenr():
synapse/tests/test_tools_autodoc.py CHANGED
@@ -40,6 +40,11 @@ class TestAutoDoc(s_t_utils.SynTest):
  self.isin('+==========+', s)
  self.isin('+deprecated+', s)

+ self.isin('''This type implements the following interfaces:
+
+ * ``inet:service:object``
+ * ``phys:object``''', s)
+
  with s_common.genfile(path, 'datamodel_forms.rst') as fd:
  buf = fd.read()