synapse 2.192.0__py311-none-any.whl → 2.193.0__py311-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

This version of synapse has been flagged as potentially problematic.

Files changed (37)
  1. synapse/common.py +15 -0
  2. synapse/cortex.py +16 -18
  3. synapse/exc.py +6 -1
  4. synapse/lib/agenda.py +0 -2
  5. synapse/lib/ast.py +25 -11
  6. synapse/lib/cell.py +31 -85
  7. synapse/lib/cli.py +20 -11
  8. synapse/lib/parser.py +1 -1
  9. synapse/lib/snap.py +4 -4
  10. synapse/lib/storm.py +34 -17
  11. synapse/lib/stormlib/json.py +5 -2
  12. synapse/lib/stormtypes.py +19 -0
  13. synapse/lib/version.py +2 -2
  14. synapse/models/inet.py +17 -1
  15. synapse/models/infotech.py +14 -4
  16. synapse/models/risk.py +16 -2
  17. synapse/tests/test_cortex.py +3 -3
  18. synapse/tests/test_exc.py +3 -0
  19. synapse/tests/test_lib_agenda.py +157 -1
  20. synapse/tests/test_lib_ast.py +43 -1
  21. synapse/tests/test_lib_cell.py +71 -1
  22. synapse/tests/test_lib_storm.py +72 -30
  23. synapse/tests/test_lib_stormlib_json.py +20 -0
  24. synapse/tests/test_lib_stormlib_scrape.py +2 -2
  25. synapse/tests/test_model_inet.py +40 -5
  26. synapse/tests/test_model_risk.py +2 -0
  27. synapse/tests/test_tools_storm.py +95 -0
  28. synapse/tests/test_utils_getrefs.py +1 -1
  29. synapse/utils/getrefs.py +14 -3
  30. synapse/vendor/cpython/lib/http/__init__.py +0 -0
  31. synapse/vendor/cpython/lib/http/cookies.py +59 -0
  32. synapse/vendor/cpython/lib/test/test_http_cookies.py +49 -0
  33. {synapse-2.192.0.dist-info → synapse-2.193.0.dist-info}/METADATA +2 -2
  34. {synapse-2.192.0.dist-info → synapse-2.193.0.dist-info}/RECORD +37 -34
  35. {synapse-2.192.0.dist-info → synapse-2.193.0.dist-info}/WHEEL +1 -1
  36. {synapse-2.192.0.dist-info → synapse-2.193.0.dist-info}/LICENSE +0 -0
  37. {synapse-2.192.0.dist-info → synapse-2.193.0.dist-info}/top_level.txt +0 -0
synapse/tests/test_lib_storm.py CHANGED
@@ -4,6 +4,7 @@ import asyncio
  import datetime
  import itertools
  import urllib.parse as u_parse
+ import unittest.mock as mock

  import synapse.exc as s_exc
  import synapse.common as s_common
@@ -646,32 +647,6 @@ class StormTest(s_t_utils.SynTest):
  self.none(task['info'].get('opts'))
  self.eq(core.view.iden, task['info'].get('view'))

- # test the parallel command
- nodes = await core.nodes('parallel --size 4 { [ ou:org=* ] }')
- self.len(4, nodes)
-
- # check that subquery validation happens
- with self.raises(s_exc.NoSuchVar):
- await core.nodes('parallel --size 4 { [ ou:org=$foo ] }')
-
- # check that an exception on inbound percolates correctly
- with self.raises(s_exc.BadTypeValu):
- await core.nodes('[ ou:org=* ou:org=foo ] | parallel { [:name=bar] }')
-
- # check that an exception in the parallel pipeline percolates correctly
- with self.raises(s_exc.BadTypeValu):
- await core.nodes('parallel { [ou:org=foo] }')
-
- nodes = await core.nodes('ou:org | parallel {[ :name=foo ]}')
- self.true(all([n.get('name') == 'foo' for n in nodes]))
-
- # Runtsafety test
- q = '[ inet:fqdn=www.vertex.link ] $q=:domain | parallel $q'
- await self.asyncraises(s_exc.StormRuntimeError, core.nodes(q))
-
- nodes = await core.nodes('ou:org | parallel ${ $foo=bar [ :name=$foo ]}')
- self.true(all([n.get('name') == 'bar' for n in nodes]))
-
  # test $lib.exit() and the StormExit handlers
  msgs = [m async for m in core.view.storm('$lib.exit()')]
  self.eq(msgs[-1][0], 'fini')
@@ -789,10 +764,10 @@
  },
  )
  }
- await core.loadStormPkg(emptypkg)
+ core.loadStormPkg(emptypkg)
  await core.addStormPkg(strverpkg)

- await core.loadStormPkg(pkg0)
+ core.loadStormPkg(pkg0)

  await core.nodes('$lib.import(foo.baz)', opts=opts)
  await core.nodes('$lib.import(foo.baz, reqvers="==0.0.1")', opts=opts)
@@ -3437,6 +3412,73 @@ class StormTest(s_t_utils.SynTest):
  q = '[ inet:fqdn=www.vertex.link ] $q=:domain | tee $q'
  await self.asyncraises(s_exc.StormRuntimeError, core.nodes(q))

+ async def test_storm_parallel(self):
+
+ async with self.getTestCore() as core:
+
+ nodes = await core.nodes('parallel --size 4 { [ ou:org=* ] }')
+ self.len(4, nodes)
+
+ # check that subquery validation happens
+ with self.raises(s_exc.NoSuchVar):
+ await core.nodes('parallel --size 4 { [ ou:org=$foo ] }')
+
+ # check that an exception on inbound percolates correctly
+ with self.raises(s_exc.BadTypeValu):
+ await core.nodes('[ ou:org=(foo,) ou:org=foo ] | parallel { [:name=bar] }')
+
+ with self.raises(s_exc.BadTypeValu):
+ await core.nodes('[ ou:org=(foo,) ou:org=foo ] | parallel --size 1 { [:name=bar] }')
+
+ # check that an exception in the parallel pipeline percolates correctly
+ with self.raises(s_exc.BadTypeValu):
+ await core.nodes('parallel { [ou:org=foo] }')
+
+ nodes = await core.nodes('ou:org | parallel {[ :name=foo ]}')
+ self.true(all([n.get('name') == 'foo' for n in nodes]))
+
+ # Runtsafety test
+ q = '[ inet:fqdn=www.vertex.link ] $q=:domain | parallel $q'
+ await self.asyncraises(s_exc.StormRuntimeError, core.nodes(q))
+
+ nodes = await core.nodes('ou:org | parallel ${ $foo=bar [ :name=$foo ]}')
+ self.true(all([n.get('name') == 'bar' for n in nodes]))
+
+ orig = s_storm.ParallelCmd.pipeline
+ tsks = {'cnt': 0}
+
+ async def pipecnt(self, runt, query, inq, outq):
+ tsks['cnt'] += 1
+ await orig(self, runt, query, inq, outq)
+
+ with mock.patch('synapse.lib.storm.ParallelCmd.pipeline', pipecnt):
+
+ nodes = await core.nodes('ou:org parallel --size 4 {[ :name=bar ]}')
+ self.len(5, nodes)
+ self.true(all([n.get('name') == 'bar' for n in nodes]))
+ self.eq(4, tsks['cnt'])
+
+ tsks['cnt'] = 0
+
+ nodes = await core.nodes('ou:org parallel --size 5 {[ :name=bar ]}')
+ self.len(5, nodes)
+ self.true(all([n.get('name') == 'bar' for n in nodes]))
+ self.eq(5, tsks['cnt'])
+
+ tsks['cnt'] = 0
+
+ # --size greater than number of nodes only creates a pipeline for each node
+ nodes = await core.nodes('ou:org parallel --size 10 {[ :name=foo ]}')
+ self.len(5, nodes)
+ self.true(all([n.get('name') == 'foo' for n in nodes]))
+ self.eq(5, tsks['cnt'])
+
+ tsks['cnt'] = 0
+
+ nodes = await core.nodes('parallel --size 4 {[ ou:org=* ]}')
+ self.len(4, nodes)
+ self.eq(4, tsks['cnt'])
+
  async def test_storm_yieldvalu(self):

  async with self.getTestCore() as core:
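The new test_storm_parallel test counts how many pipelines the parallel command actually spins up by patching ParallelCmd.pipeline with a wrapper that increments a counter and then delegates to the original coroutine. A minimal, self-contained sketch of that counting pattern (the Worker class and names below are illustrative, not Synapse code):

```python
# Illustrative only: wrap an async method with unittest.mock so every call is
# tallied while the original behavior is preserved.
import asyncio
import unittest.mock as mock

class Worker:
    async def pipeline(self, item):
        await asyncio.sleep(0)
        return item * 2

async def main():
    orig = Worker.pipeline
    calls = {'cnt': 0}

    async def counted(self, item):
        calls['cnt'] += 1
        return await orig(self, item)

    # The Synapse test patches by dotted path ('synapse.lib.storm.ParallelCmd.pipeline');
    # patch.object() is the same idea for a local class.
    with mock.patch.object(Worker, 'pipeline', counted):
        w = Worker()
        results = await asyncio.gather(*(w.pipeline(i) for i in range(4)))

    assert results == [0, 2, 4, 6]
    assert calls['cnt'] == 4

asyncio.run(main())
```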
@@ -3882,7 +3924,7 @@
  )},
  ),
  }
- await core.loadStormPkg(pdef)
+ core.loadStormPkg(pdef)
  msgs = await core.stormlist('woot --help')
  helptext = '\n'.join([m[1].get('mesg') for m in msgs if m[0] == 'print'])
  self.isin('Inputs:\n\n hehe:haha\n hoho:lol - We know whats up', helptext)
@@ -4656,7 +4698,7 @@
  async def test_storm_cmdscope(self):

  async with self.getTestCore() as core:
- await core.loadStormPkg({
+ core.loadStormPkg({
  'name': 'testpkg',
  'version': '0.0.1',
  'commands': (
synapse/tests/test_lib_stormlib_json.py CHANGED
@@ -12,6 +12,26 @@ class JsonTest(s_test.SynTest):

  self.eq(((1, 2, 3)), await core.callStorm('return($lib.json.load("[1, 2, 3]"))'))
  self.eq(('["foo", "bar", "baz"]'), await core.callStorm('return($lib.json.save((foo, bar, baz)))'))
+ self.eq(('{"foo": 1, "bar": {"baz": "hello"}}'), await core.callStorm('return($lib.json.save(({"foo": 1, "bar": {"baz": "hello"}})))'))
+ self.eq(('{"foo": 1, "bar": {"baz": "hello"}}'), await core.callStorm('return($lib.json.save(({"foo": 1, "bar": {"baz": "hello"}}), (null)))'))
+ self.eq((
+ '''{
+ "foo": 1,
+ "bar": {
+ "baz": "hello"
+ }
+ }'''), await core.callStorm('return($lib.json.save(({"foo": 1, "bar": {"baz": "hello"}}), indent=(4)))'))
+
+ self.eq((
+ '''{
+ "foo": 1,
+ "bar": {
+ "baz": "hello"
+ }
+ }'''), await core.callStorm('return($lib.json.save(({"foo": 1, "bar": {"baz": "hello"}}), indent=2))'))
+
+ with self.raises(s_exc.BadCast):
+ await core.callStorm('return($lib.json.save(({"foo": 1, "bar": {"baz": "hello"}}), indent=x))')

  with self.raises(s_exc.BadJsonText):
  await core.callStorm('return($lib.json.load(foo))')
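The expected strings in these new assertions match what Python's json.dumps() produces for the same object and indent values, which is presumably what backs $lib.json.save() here; a quick illustration:

```python
# Quick illustration (assumes $lib.json.save() maps onto json.dumps()).
import json

obj = {"foo": 1, "bar": {"baz": "hello"}}

print(json.dumps(obj))            # '{"foo": 1, "bar": {"baz": "hello"}}'
print(json.dumps(obj, indent=4))  # multi-line output, 4-space indent, as asserted above
print(json.dumps(obj, indent=2))  # multi-line output, 2-space indent
```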
synapse/tests/test_lib_stormlib_scrape.py CHANGED
@@ -92,7 +92,7 @@ class StormScrapeTest(s_test.SynTest):
  self.len(0, mods)
  self.len(0, core.modsbyiface.get('scrape'))

- await core.loadStormPkg(pkgdef)
+ core.loadStormPkg(pkgdef)

  mods = await core.getStormIfaces('scrape')
  self.len(2, mods)
@@ -131,7 +131,7 @@
  conf = {'storm:interface:scrape': False, }
  async with self.getTestCore(conf=conf) as core:

- await core.loadStormPkg(pkgdef)
+ core.loadStormPkg(pkgdef)

  mods = await core.getStormIfaces('scrape')
  self.len(2, mods)
synapse/tests/test_model_inet.py CHANGED
@@ -10,17 +10,40 @@ class InetModelTest(s_t_utils.SynTest):

  async def test_model_inet_basics(self):
  async with self.getTestCore() as core:
+ self.len(1, await core.nodes('[ inet:web:hashtag="#🫠" ]'))
+ self.len(1, await core.nodes('[ inet:web:hashtag="#🫠🫠" ]'))
+ self.len(1, await core.nodes('[ inet:web:hashtag="#·bar"]'))
+ self.len(1, await core.nodes('[ inet:web:hashtag="#foo·"]'))
+ self.len(1, await core.nodes('[ inet:web:hashtag="#foo〜"]'))
  self.len(1, await core.nodes('[ inet:web:hashtag="#hehe" ]'))
  self.len(1, await core.nodes('[ inet:web:hashtag="#foo·bar"]')) # note the interpunct
+ self.len(1, await core.nodes('[ inet:web:hashtag="#foo〜bar"]')) # note the wave dash
  self.len(1, await core.nodes('[ inet:web:hashtag="#fo·o·······b·ar"]'))
  with self.raises(s_exc.BadTypeValu):
  await core.nodes('[ inet:web:hashtag="foo" ]')
+
  with self.raises(s_exc.BadTypeValu):
- await core.nodes('[ inet:web:hashtag="#foo bar" ]')
- with self.raises(s_exc.BadTypeValu):
- self.len(1, await core.nodes('[ inet:web:hashtag="#·bar"]'))
- with self.raises(s_exc.BadTypeValu):
- self.len(1, await core.nodes('[ inet:web:hashtag="#foo·"]'))
+ await core.nodes('[ inet:web:hashtag="#foo#bar" ]')
+
+ # All unicode whitespace from:
+ # https://www.compart.com/en/unicode/category/Zl
+ # https://www.compart.com/en/unicode/category/Zp
+ # https://www.compart.com/en/unicode/category/Zs
+ whitespace = [
+ '\u0020', '\u00a0', '\u1680', '\u2000', '\u2001', '\u2002', '\u2003', '\u2004',
+ '\u2005', '\u2006', '\u2007', '\u2008', '\u2009', '\u200a', '\u202f', '\u205f',
+ '\u3000', '\u2028', '\u2029',
+ ]
+ for char in whitespace:
+ with self.raises(s_exc.BadTypeValu):
+ await core.callStorm(f'[ inet:web:hashtag="#foo{char}bar" ]')
+
+ with self.raises(s_exc.BadTypeValu):
+ await core.callStorm(f'[ inet:web:hashtag="#{char}bar" ]')
+
+ # These are allowed because strip=True
+ await core.callStorm(f'[ inet:web:hashtag="#foo{char}" ]')
+ await core.callStorm(f'[ inet:web:hashtag=" #foo{char}" ]')

  nodes = await core.nodes('''
  [ inet:web:instance=(foo,)
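These new assertions reject any Unicode separator character (categories Zl, Zp, and Zs) inside a hashtag while still allowing characters such as the interpunct and wave dash, and they rely on strip=True to forgive leading or trailing whitespace. A hypothetical validator sketch (not the actual inet:web:hashtag regex) that captures the same rules:

```python
# Hypothetical sketch only; the real model type uses its own regex.
import unicodedata

def looks_like_valid_hashtag(text: str) -> bool:
    text = text.strip()                      # mirrors strip=True on the type
    if len(text) < 2 or not text.startswith('#'):
        return False
    body = text[1:]
    if '#' in body:                          # '#foo#bar' is rejected
        return False
    # Reject Unicode separators: Zl (line), Zp (paragraph), Zs (space).
    return not any(unicodedata.category(ch) in ('Zl', 'Zp', 'Zs') for ch in body)

assert looks_like_valid_hashtag('#foo·bar')           # interpunct allowed
assert not looks_like_valid_hashtag('#foo\u2028bar')  # line separator rejected
assert looks_like_valid_hashtag(' #foo\u2009 ')       # stripped, then valid
```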
@@ -457,6 +480,7 @@
  :raw=((10), (20))
  :src:txfiles={[ file:attachment=* :name=foo.exe ]}
  :dst:txfiles={[ file:attachment=* :name=bar.exe ]}
+ :capture:host=*
  )]'''
  nodes = await core.nodes(q, opts={'vars': {'valu': valu, 'p': props}})
  self.len(1, nodes)
@@ -500,11 +524,13 @@
  self.eq(node.get('src:rdp:hostname'), 'syncoder')
  self.eq(node.get('src:rdp:keyboard:layout'), 'azerty')
  self.eq(node.get('raw'), (10, 20))
+ self.nn(node.get('capture:host'))
  self.len(2, await core.nodes('inet:flow -> crypto:x509:cert'))
  self.len(1, await core.nodes('inet:flow :src:ssh:key -> crypto:key'))
  self.len(1, await core.nodes('inet:flow :dst:ssh:key -> crypto:key'))
  self.len(1, await core.nodes('inet:flow :src:txfiles -> file:attachment +:name=foo.exe'))
  self.len(1, await core.nodes('inet:flow :dst:txfiles -> file:attachment +:name=bar.exe'))
+ self.len(1, await core.nodes('inet:flow :capture:host -> it:host'))

  async def test_fqdn(self):
  formname = 'inet:fqdn'
@@ -2746,6 +2772,7 @@
  q = '''
  [
  inet:email:message="*"
+ :id="Woot-12345 "
  :to=woot@woot.com
  :from=visi@vertex.link
  :replyto=root@root.com
@@ -2767,6 +2794,7 @@
  nodes = await core.nodes(q, opts={'vars': {'flow': flow}})
  self.len(1, nodes)

+ self.eq(nodes[0].get('id'), 'Woot-12345')
  self.eq(nodes[0].get('cc'), ('baz@faz.org', 'foo@bar.com'))
  self.eq(nodes[0].get('received:from:ipv6'), '::1')
  self.eq(nodes[0].get('received:from:ipv4'), 0x01020304)
@@ -2847,6 +2875,7 @@
  nodes = await core.nodes('''
  [ inet:egress=*
  :host = *
+ :host:iface = *
  :client=1.2.3.4
  :client:ipv6="::1"
  ]
@@ -2854,10 +2883,14 @@

  self.len(1, nodes)
  self.nn(nodes[0].get('host'))
+ self.nn(nodes[0].get('host:iface'))
  self.eq(nodes[0].get('client'), 'tcp://1.2.3.4')
  self.eq(nodes[0].get('client:ipv4'), 0x01020304)
  self.eq(nodes[0].get('client:ipv6'), '::1')

+ self.len(1, await core.nodes('inet:egress -> it:host'))
+ self.len(1, await core.nodes('inet:egress -> inet:iface'))
+
  async def test_model_inet_tls_handshake(self):

  async with self.getTestCore() as core:
@@ -2976,6 +3009,7 @@
  (inet:service:account=(blackout, account, vertex, slack)
  :id=U7RN51U1J
  :user=blackout
+ :url=https://vertex.link/users/blackout
  :email=blackout@vertex.link
  :profile={ gen.ps.contact.email vertex.employee blackout@vertex.link }
  :tenant={[ inet:service:tenant=({"id": "VS-31337"}) ]}
@@ -3003,6 +3037,7 @@
  self.eq(accounts[0].ndef, ('inet:service:account', s_common.guid(('blackout', 'account', 'vertex', 'slack'))))
  self.eq(accounts[0].get('id'), 'U7RN51U1J')
  self.eq(accounts[0].get('user'), 'blackout')
+ self.eq(accounts[0].get('url'), 'https://vertex.link/users/blackout')
  self.eq(accounts[0].get('email'), 'blackout@vertex.link')
  self.eq(accounts[0].get('profile'), blckprof.ndef[1])

synapse/tests/test_model_risk.py CHANGED
@@ -430,6 +430,7 @@
  :disclosed=20231102
  :owner={ gen.ou.org.hq acme }
  :leaker={ gen.ou.org.hq wikileaks }
+ :recipient={ gen.ou.org.hq everyone }
  :type=public
  :goal={[ ou:goal=* :name=publicity ]}
  :compromise={[ risk:compromise=* :target={ gen.ou.org.hq acme } ]}
@@ -458,6 +459,7 @@
  self.len(1, await core.nodes('risk:leak -> risk:leak:type:taxonomy'))
  self.len(1, await core.nodes('risk:leak :owner -> ps:contact +:orgname=acme'))
  self.len(1, await core.nodes('risk:leak :leaker -> ps:contact +:orgname=wikileaks'))
+ self.len(1, await core.nodes('risk:leak :recipient -> ps:contact +:orgname=everyone'))
  self.len(1, await core.nodes('risk:leak -> ou:goal +:name=publicity'))
  self.len(1, await core.nodes('risk:leak -> risk:compromise :target -> ps:contact +:orgname=acme'))
  self.len(1, await core.nodes('risk:leak :reporter -> ou:org +:name=vertex'))
synapse/tests/test_tools_storm.py CHANGED
@@ -1,4 +1,9 @@
  import os
+ import sys
+ import signal
+ import asyncio
+ import multiprocessing
+
  import synapse.tests.utils as s_test

  from prompt_toolkit.document import Document
@@ -6,10 +11,49 @@ from prompt_toolkit.completion import Completion, CompleteEvent

  import synapse.exc as s_exc
  import synapse.common as s_common
+ import synapse.telepath as s_telepath
+
+ import synapse.lib.coro as s_coro
  import synapse.lib.output as s_output
  import synapse.lib.msgpack as s_msgpack
  import synapse.tools.storm as s_t_storm

+ def run_cli_till_print(url, evt1):
+ '''
+ Run the stormCLI until we get a print mesg then set the event.
+
+ This is a Process target.
+ '''
+ async def main():
+ outp = s_output.OutPutStr() # Capture output instead of sending it to stdout
+ async with await s_telepath.openurl(url) as proxy:
+ async with await s_t_storm.StormCli.anit(proxy, outp=outp) as scli:
+ cmdqueue = asyncio.Queue()
+ await cmdqueue.put('while (true) { $lib.print(go) $lib.time.sleep(1) }')
+ await cmdqueue.put('!quit')
+
+ async def fake_prompt():
+ return await cmdqueue.get()
+
+ scli.prompt = fake_prompt
+
+ d = {'evt1': False}
+ async def onmesg(event):
+ if d.get('evt1'):
+ return
+ mesg = event[1].get('mesg')
+ if mesg[0] != 'print':
+ return
+ evt1.set()
+ d['evt1'] = True
+
+ with scli.onWith('storm:mesg', onmesg):
+ await scli.addSignalHandlers()
+ await scli.runCmdLoop()
+
+ asyncio.run(main())
+ sys.exit(137)
+
  class StormCliTest(s_test.SynTest):

  async def test_tools_storm(self):
@@ -378,3 +422,54 @@
  ),
  vals
  )
+
+ async def test_storm_cmdloop_interrupt(self):
+ '''
+ Test interrupting a long-running query in the command loop
+ '''
+ async with self.getTestCore() as core:
+
+ async with core.getLocalProxy() as proxy:
+
+ outp = s_test.TstOutPut()
+ async with await s_t_storm.StormCli.anit(proxy, outp=outp) as scli:
+
+ cmdqueue = asyncio.Queue()
+ await cmdqueue.put('while (true) { $lib.time.sleep(1) }')
+ await cmdqueue.put('!quit')
+
+ async def fake_prompt():
+ return await cmdqueue.get()
+ scli.prompt = fake_prompt
+
+ cmdloop_task = asyncio.create_task(scli.runCmdLoop())
+ await asyncio.sleep(0.1)
+
+ if scli.cmdtask is not None:
+ scli.cmdtask.cancel()
+
+ await cmdloop_task
+
+ outp.expect('<ctrl-c>')
+ outp.expect('o/')
+ self.true(scli.isfini)
+
+ async def test_storm_cmdloop_sigint(self):
+ '''
+ Test interrupting a long-running query in the command loop with a process target and SIGINT.
+ '''
+
+ async with self.getTestCore() as core:
+ url = core.getLocalUrl()
+
+ ctx = multiprocessing.get_context('spawn')
+
+ evt1 = ctx.Event()
+
+ proc = ctx.Process(target=run_cli_till_print, args=(url, evt1,))
+ proc.start()
+
+ self.true(await s_coro.executor(evt1.wait, timeout=30))
+ os.kill(proc.pid, signal.SIGINT)
+ proc.join(timeout=30)
+ self.eq(proc.exitcode, 137)
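The SIGINT test drives the CLI from a separate 'spawn' process, waits on a shared Event until the child is ready, then delivers the signal and checks the exit code the child chose (137). A stripped-down, POSIX-only sketch of that coordination pattern, independent of Synapse:

```python
# Stripped-down sketch of the spawn + Event + SIGINT pattern (POSIX only).
import os
import sys
import signal
import multiprocessing

def child(evt):
    # Exit with the same sentinel code the test above expects from the CLI child.
    signal.signal(signal.SIGINT, lambda *args: sys.exit(137))
    evt.set()          # tell the parent we are ready for the signal
    signal.pause()     # block until a signal arrives

if __name__ == '__main__':
    ctx = multiprocessing.get_context('spawn')
    evt = ctx.Event()
    proc = ctx.Process(target=child, args=(evt,))
    proc.start()

    evt.wait(timeout=30)
    os.kill(proc.pid, signal.SIGINT)
    proc.join(timeout=30)
    assert proc.exitcode == 137
```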
synapse/tests/test_utils_getrefs.py CHANGED
@@ -24,7 +24,7 @@ class TestUtilsGetrefs(s_utils.SynTest):
  cm = myvcr.use_cassette(fp)
  return cm

- async def test_basics(self):
+ def test_basics(self):

  args = s_getrefs.parse_args([
  s_data.path('attack-flow', 'attack-flow-schema-2.0.0.json')
synapse/utils/getrefs.py CHANGED
@@ -1,11 +1,12 @@
  import sys
  import json
  import urllib
+ import asyncio
  import logging
  import pathlib
  import argparse

- import requests
+ import aiohttp

  import synapse.exc as s_exc
  import synapse.data as s_data
@@ -20,7 +21,13 @@ def download_refs_handler(uri):
  This function downloads the JSON schema at the given URI, parses the given
  URI to get the path component, and then saves the referenced schema to the
  'jsonschemas' directory of synapse.data.
+
+ This function runs its own asyncio loop for each URI being requested.
  '''
+ ret = asyncio.run(_download_refs_handler(uri))
+ return ret
+
+ async def _download_refs_handler(uri):

  try:
  parts = urllib.parse.urlparse(uri)
@@ -45,8 +52,12 @@ def download_refs_handler(uri):

  # Get the data from the interwebs
  logger.info(f'Downloading schema from {uri}.')
- resp = requests.get(uri)
- data = resp.json()
+ async with aiohttp.ClientSession() as session:
+ async with session.get(uri) as resp:
+ resp.raise_for_status()
+ buf = await resp.read()
+
+ data = json.loads(buf.decode())

  # Save the json schema to disk
  with filepath.open('w') as fp:
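getrefs.py keeps its synchronous handler signature but now spins up a short-lived asyncio loop around an aiohttp request for each URI instead of calling requests. The general shape of that pattern, reduced to a standalone helper (the URL below is just a placeholder):

```python
# Standalone sketch of the sync-wrapper-around-aiohttp pattern used above.
import json
import asyncio
import aiohttp

def fetch_json(uri):
    # Synchronous entry point: each call runs its own event loop.
    return asyncio.run(_fetch_json(uri))

async def _fetch_json(uri):
    async with aiohttp.ClientSession() as session:
        async with session.get(uri) as resp:
            resp.raise_for_status()
            buf = await resp.read()
    return json.loads(buf.decode())

if __name__ == '__main__':
    # Placeholder URL for illustration.
    print(fetch_json('https://json-schema.org/draft/2020-12/schema').get('$id'))
```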
synapse/vendor/cpython/lib/http/__init__.py (file without changes)

synapse/vendor/cpython/lib/http/cookies.py ADDED
@@ -0,0 +1,59 @@
+ ##############################################################################
+ # Taken from the cpython 3.11 source branch after the 3.11.10 release.
+ ##############################################################################
+ ####
+ # Copyright 2000 by Timothy O'Malley <timo@alum.mit.edu>
+ #
+ # All Rights Reserved
+ #
+ # Permission to use, copy, modify, and distribute this software
+ # and its documentation for any purpose and without fee is hereby
+ # granted, provided that the above copyright notice appear in all
+ # copies and that both that copyright notice and this permission
+ # notice appear in supporting documentation, and that the name of
+ # Timothy O'Malley not be used in advertising or publicity
+ # pertaining to distribution of the software without specific, written
+ # prior permission.
+ #
+ # Timothy O'Malley DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS
+ # SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
+ # AND FITNESS, IN NO EVENT SHALL Timothy O'Malley BE LIABLE FOR
+ # ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+ # WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS,
+ # WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS
+ # ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
+ # PERFORMANCE OF THIS SOFTWARE.
+ #
+
+ #
+ # Import our required modules
+ #
+ import re
+
+ _unquote_sub = re.compile(r'\\(?:([0-3][0-7][0-7])|(.))').sub
+
+ def _unquote_replace(m):
+ if m[1]:
+ return chr(int(m[1], 8))
+ else:
+ return m[2]
+
+ def _unquote(str):
+ # If there aren't any doublequotes,
+ # then there can't be any special characters. See RFC 2109.
+ if str is None or len(str) < 2:
+ return str
+ if str[0] != '"' or str[-1] != '"':
+ return str
+
+ # We have to assume that we must decode this string.
+ # Down to work.
+
+ # Remove the "s
+ str = str[1:-1]
+
+ # Check for special sequences. Examples:
+ # \012 --> \n
+ # \" --> "
+ #
+ return _unquote_sub(_unquote_replace, str)
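The vendored cookies.py replaces the older unquoting loop in http.cookies with a single precompiled regex substitution, which is what lets the large-input test below (a million escapes) finish quickly. A standalone replica of just that helper, for illustration:

```python
# Standalone replica of the single-pass unquote helper shown above.
import re

_unquote_sub = re.compile(r'\\(?:([0-3][0-7][0-7])|(.))').sub

def _unquote_replace(m):
    # Group 1: octal escape such as \042 -> '"'; group 2: any escaped character.
    return chr(int(m[1], 8)) if m[1] else m[2]

def unquote(value):
    if value is None or len(value) < 2:
        return value
    if value[0] != '"' or value[-1] != '"':
        return value
    return _unquote_sub(_unquote_replace, value[1:-1])

assert unquote(r'"b=\042"') == 'b="'
assert unquote(r'"b=\\"') == 'b=\\'
assert unquote('plain') == 'plain'
```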
synapse/vendor/cpython/lib/test/test_http_cookies.py ADDED
@@ -0,0 +1,49 @@
+ ##############################################################################
+ # Taken from the cpython 3.11 source branch after the 3.11.10 release.
+ # It has been modified for vendored imports and vendored test harness.
+ ##############################################################################
+
+ # Simple test suite for http/cookies.py
+
+ from http import cookies
+
+ # s_v_utils runs the monkeypatch
+ import synapse.vendor.utils as s_v_utils
+
+ class CookieTests(s_v_utils.VendorTest):
+
+ def test_unquote(self):
+ cases = [
+ (r'a="b=\""', 'b="'),
+ (r'a="b=\\"', 'b=\\'),
+ (r'a="b=\="', 'b=='),
+ (r'a="b=\n"', 'b=n'),
+ (r'a="b=\042"', 'b="'),
+ (r'a="b=\134"', 'b=\\'),
+ (r'a="b=\377"', 'b=\xff'),
+ (r'a="b=\400"', 'b=400'),
+ (r'a="b=\42"', 'b=42'),
+ (r'a="b=\\042"', 'b=\\042'),
+ (r'a="b=\\134"', 'b=\\134'),
+ (r'a="b=\\\""', 'b=\\"'),
+ (r'a="b=\\\042"', 'b=\\"'),
+ (r'a="b=\134\""', 'b=\\"'),
+ (r'a="b=\134\042"', 'b=\\"'),
+ ]
+ for encoded, decoded in cases:
+ with self.subTest(encoded):
+ C = cookies.SimpleCookie()
+ C.load(encoded)
+ self.assertEqual(C['a'].value, decoded)
+
+ def test_unquote_large(self):
+ n = 10**6
+ for encoded in r'\\', r'\134':
+ with self.subTest(encoded):
+ data = 'a="b=' + encoded * n + ';"'
+ C = cookies.SimpleCookie()
+ C.load(data)
+ value = C['a'].value
+ self.assertEqual(value[:3], 'b=\\')
+ self.assertEqual(value[-2:], '\\;')
+ self.assertEqual(len(value), n + 3)
{synapse-2.192.0.dist-info → synapse-2.193.0.dist-info}/METADATA CHANGED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: synapse
- Version: 2.192.0
+ Version: 2.193.0
  Summary: Synapse Intelligence Analysis Framework
  Author-email: The Vertex Project LLC <root@vertex.link>
  License: Apache License 2.0
@@ -31,7 +31,7 @@ Requires-Dist: aiohttp<4.0,>=3.10.0
  Requires-Dist: aiohttp-socks<0.10.0,>=0.9.0
  Requires-Dist: aioimaplib<1.2.0,>=1.1.0
  Requires-Dist: aiosmtplib<3.1.0,>=3.0.0
- Requires-Dist: prompt-toolkit<3.1.0,>=3.0.4
+ Requires-Dist: prompt_toolkit<3.1.0,>=3.0.29
  Requires-Dist: lark==1.2.2
  Requires-Dist: Pygments<2.18.0,>=2.7.4
  Requires-Dist: packaging<25.0,>=20.0