synapse 2.202.0__py311-none-any.whl → 2.203.0__py311-none-any.whl

This diff shows the changes between publicly released package versions as they appear in their respective public registries, and is provided for informational purposes only.

This version of synapse might be problematic.

Files changed (100)
  1. synapse/axon.py +4 -4
  2. synapse/cmds/cortex.py +4 -6
  3. synapse/cmds/hive.py +10 -10
  4. synapse/common.py +17 -58
  5. synapse/cortex.py +6 -6
  6. synapse/data/__init__.py +3 -2
  7. synapse/data/iana.uris.mpk +1 -0
  8. synapse/lib/autodoc.py +3 -3
  9. synapse/lib/cli.py +2 -2
  10. synapse/lib/config.py +2 -2
  11. synapse/lib/encoding.py +4 -3
  12. synapse/lib/httpapi.py +7 -11
  13. synapse/lib/json.py +224 -0
  14. synapse/lib/lmdbslab.py +1 -1
  15. synapse/lib/oauth.py +176 -54
  16. synapse/lib/rstorm.py +18 -14
  17. synapse/lib/schemas.py +87 -1
  18. synapse/lib/scrape.py +35 -13
  19. synapse/lib/snap.py +2 -1
  20. synapse/lib/storm.py +2 -2
  21. synapse/lib/stormhttp.py +11 -13
  22. synapse/lib/stormlib/aha.py +4 -4
  23. synapse/lib/stormlib/auth.py +1 -1
  24. synapse/lib/stormlib/cache.py +2 -2
  25. synapse/lib/stormlib/cortex.py +5 -5
  26. synapse/lib/stormlib/graph.py +1 -1
  27. synapse/lib/stormlib/imap.py +1 -1
  28. synapse/lib/stormlib/json.py +8 -11
  29. synapse/lib/stormlib/model.py +1 -1
  30. synapse/lib/stormlib/notifications.py +2 -2
  31. synapse/lib/stormlib/oauth.py +105 -2
  32. synapse/lib/stormlib/stats.py +4 -0
  33. synapse/lib/stormlib/stix.py +3 -4
  34. synapse/lib/stormlib/vault.py +6 -6
  35. synapse/lib/stormlib/xml.py +2 -2
  36. synapse/lib/stormtypes.py +19 -28
  37. synapse/lib/structlog.py +3 -3
  38. synapse/lib/types.py +2 -1
  39. synapse/lib/version.py +2 -2
  40. synapse/lib/view.py +7 -3
  41. synapse/models/base.py +51 -2
  42. synapse/telepath.py +5 -3
  43. synapse/tests/files/__init__.py +0 -1
  44. synapse/tests/test_axon.py +1 -1
  45. synapse/tests/test_cmds_cortex.py +3 -2
  46. synapse/tests/test_cmds_hive.py +4 -4
  47. synapse/tests/test_common.py +29 -19
  48. synapse/tests/test_cortex.py +5 -5
  49. synapse/tests/test_lib_ast.py +3 -3
  50. synapse/tests/test_lib_autodoc.py +5 -5
  51. synapse/tests/test_lib_base.py +1 -1
  52. synapse/tests/test_lib_cell.py +16 -10
  53. synapse/tests/test_lib_config.py +2 -2
  54. synapse/tests/test_lib_encoding.py +2 -2
  55. synapse/tests/test_lib_grammar.py +64 -64
  56. synapse/tests/test_lib_httpapi.py +13 -13
  57. synapse/tests/test_lib_json.py +219 -0
  58. synapse/tests/test_lib_multislabseqn.py +2 -1
  59. synapse/tests/test_lib_node.py +2 -2
  60. synapse/tests/test_lib_scrape.py +50 -0
  61. synapse/tests/test_lib_storm.py +6 -6
  62. synapse/tests/test_lib_stormhttp.py +4 -4
  63. synapse/tests/test_lib_stormlib_auth.py +3 -2
  64. synapse/tests/test_lib_stormlib_cortex.py +10 -12
  65. synapse/tests/test_lib_stormlib_infosec.py +2 -3
  66. synapse/tests/test_lib_stormlib_json.py +18 -21
  67. synapse/tests/test_lib_stormlib_log.py +1 -1
  68. synapse/tests/test_lib_stormlib_oauth.py +603 -1
  69. synapse/tests/test_lib_stormlib_stats.py +13 -3
  70. synapse/tests/test_lib_stormlib_stix.py +5 -5
  71. synapse/tests/test_lib_stormtypes.py +4 -4
  72. synapse/tests/test_lib_structlog.py +5 -6
  73. synapse/tests/test_lib_view.py +8 -0
  74. synapse/tests/test_model_base.py +32 -0
  75. synapse/tests/test_model_infotech.py +2 -2
  76. synapse/tests/test_telepath.py +0 -1
  77. synapse/tests/test_tools_cryo_cat.py +4 -3
  78. synapse/tests/test_tools_docker_validate.py +4 -2
  79. synapse/tests/test_tools_feed.py +30 -2
  80. synapse/tests/test_tools_genpkg.py +1 -1
  81. synapse/tests/test_tools_healthcheck.py +8 -7
  82. synapse/tests/test_utils.py +2 -2
  83. synapse/tests/utils.py +3 -3
  84. synapse/tools/autodoc.py +3 -3
  85. synapse/tools/changelog.py +2 -2
  86. synapse/tools/cryo/cat.py +3 -3
  87. synapse/tools/csvtool.py +2 -3
  88. synapse/tools/docker/validate.py +5 -5
  89. synapse/tools/feed.py +2 -1
  90. synapse/tools/genpkg.py +3 -2
  91. synapse/tools/healthcheck.py +2 -3
  92. synapse/tools/json2mpk.py +2 -2
  93. synapse/utils/getrefs.py +6 -6
  94. synapse/vendor/cpython/lib/json.py +35 -0
  95. synapse/vendor/cpython/lib/test/test_json.py +22 -0
  96. {synapse-2.202.0.dist-info → synapse-2.203.0.dist-info}/METADATA +2 -1
  97. {synapse-2.202.0.dist-info → synapse-2.203.0.dist-info}/RECORD +100 -95
  98. {synapse-2.202.0.dist-info → synapse-2.203.0.dist-info}/WHEEL +1 -1
  99. {synapse-2.202.0.dist-info → synapse-2.203.0.dist-info}/LICENSE +0 -0
  100. {synapse-2.202.0.dist-info → synapse-2.203.0.dist-info}/top_level.txt +0 -0
@@ -242,7 +242,7 @@ class StatsTest(s_test.SynTest):
  $tally.inc(foo, 1)
  $tally.inc(bar, 2)
  $tally.inc(baz, 3)
- return($tally.sorted(reverse=$lib.true))
+ return($tally.sorted(reverse=(true)))
  '''
  vals = await core.callStorm(q)
  self.eq(vals, [('baz', 3), ('bar', 2), ('foo', 1)])
@@ -252,7 +252,7 @@ class StatsTest(s_test.SynTest):
  $tally.inc(foo, 1)
  $tally.inc(bar, 2)
  $tally.inc(baz, 3)
- return($tally.sorted(byname=$lib.true))
+ return($tally.sorted(byname=(true)))
  '''
  vals = await core.callStorm(q)
  self.eq(vals, [('bar', 2), ('baz', 3), ('foo', 1)])
@@ -262,11 +262,21 @@ class StatsTest(s_test.SynTest):
  $tally.inc(foo, 1)
  $tally.inc(bar, 2)
  $tally.inc(baz, 3)
- return($tally.sorted(byname=$lib.true, reverse=$lib.true))
+ return($tally.sorted(byname=(true), reverse=(true)))
  '''
  vals = await core.callStorm(q)
  self.eq(vals, [('foo', 1), ('baz', 3), ('bar', 2)])

+ q = '''
+ $tally = $lib.stats.tally()
+ $tally.inc(foo, 1)
+ $tally.inc(bar, 1)
+ $tally.inc(foo, 1)
+ return($tally.sorted(reverse=true)) // String coercion to true
+ '''
+ vals = await core.callStorm(q)
+ self.eq(vals, [('foo', 2), ('bar', 1)])
+
  tally = s_stormlib_stats.StatTally()
  await tally.inc('foo')

@@ -1,9 +1,9 @@
  import copy
- import json

  import synapse.exc as s_exc
  import synapse.common as s_common

+ import synapse.lib.json as s_json
  import synapse.lib.stormlib.stix as s_stix

  import synapse.tests.utils as s_test
@@ -39,12 +39,12 @@ class StormLibStixTest(s_test.SynTest):
  def getTestBundle(self, name):
  path = self.getTestFilePath('stix_export', name)
  with open(path, 'r') as fd:
- return json.load(fd)
+ return s_json.load(fd)

  def setTestBundle(self, name, bund):
  path = self.getTestFilePath('stix_export', name)
  with open(path, 'w') as fd:
- json.dump(bund, fd, sort_keys=True, indent=2)
+ s_json.dump(bund, fd, sort_keys=True, indent=True)

  def reqValidStix(self, item):
  resp = s_stix.validateStix(item)
@@ -188,7 +188,7 @@ class StormLibStixTest(s_test.SynTest):
  self.isin('Error validating bundle', resp.get('mesg'))

  self.len(14, bund.get('objects'))
- self.isin(s_stix.SYN_STIX_EXTENSION_ID, json.dumps(bund))
+ self.isin(s_stix.SYN_STIX_EXTENSION_ID, s_json.dumps(bund).decode())
  nodes = await core.nodes('yield $lib.stix.lift($bundle)', {'vars': {'bundle': bund}})
  self.len(10, nodes)

@@ -217,7 +217,7 @@ class StormLibStixTest(s_test.SynTest):
  self.reqValidStix(bund_noext)
  nodes = await core.nodes('yield $lib.stix.lift($bundle)', {'vars': {'bundle': bund_noext}})
  self.len(0, nodes)
- self.notin(s_stix.SYN_STIX_EXTENSION_ID, json.dumps(bund_noext))
+ self.notin(s_stix.SYN_STIX_EXTENSION_ID, s_json.dumps(bund_noext).decode())

  # test some sad paths...
  self.none(await core.callStorm('return($lib.stix.export.bundle().add($lib.true))'))
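
Note: the changes above are part of a broader migration from the stdlib json module to the new synapse.lib.json helpers added in this release (synapse/lib/json.py, +224 lines). The sketch below is a minimal illustration of the calling pattern implied by the diffs — dumps() returning bytes, indent=True as a pretty-print flag, and load()/dump() operating on open file objects — with those signatures treated as assumptions read off the diff rather than documented API.

    import synapse.lib.json as s_json

    bund = {'type': 'bundle', 'objects': []}

    # dumps() appears to return bytes, so callers .decode() when a str is needed.
    text = s_json.dumps(bund, sort_keys=True).decode()

    # indent=True (a flag rather than a width) selects pretty-printed output.
    with open('bundle.json', 'w') as fd:
        s_json.dump(bund, fd, sort_keys=True, indent=True)

    # load() mirrors json.load() for reading the file object back.
    with open('bundle.json', 'r') as fd:
        assert s_json.load(fd) == bund
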
@@ -1,7 +1,6 @@
  import re
  import bz2
  import gzip
- import json
  import base64
  import struct
  import asyncio
@@ -16,6 +15,7 @@ from unittest import mock
  import synapse.exc as s_exc
  import synapse.common as s_common

+ import synapse.lib.json as s_json
  import synapse.lib.time as s_time
  import synapse.lib.storm as s_storm
  import synapse.lib.hashset as s_hashset
@@ -585,7 +585,7 @@ class StormTypesTest(s_test.SynTest):
  self.eq(0x01020304, await core.callStorm('return($lib.trycast(inet:ipv4, 1.2.3.4).1)'))

  # trycast/cast a property instead of a form/type
- flow = json.loads(s_test_files.getAssetStr('attack_flow/CISA AA22-138B VMWare Workspace (Alt).json'))
+ flow = s_json.loads(s_test_files.getAssetStr('attack_flow/CISA AA22-138B VMWare Workspace (Alt).json'))
  opts = {'vars': {'flow': flow}}
  self.true(await core.callStorm('return($lib.trycast(it:mitre:attack:flow:data, $flow).0)', opts=opts))
  self.false(await core.callStorm('return($lib.trycast(it:mitre:attack:flow:data, {}).0)'))
@@ -1502,7 +1502,7 @@ class StormTypesTest(s_test.SynTest):
  async with self.getTestCore() as core:

  foo = {'a': 'ohhai'}
- ghstr = json.dumps(foo)
+ ghstr = s_json.dumps(foo).decode()
  valu = s_common.guid()
  n2 = s_common.guid()

@@ -6232,7 +6232,7 @@ words\tword\twrd'''

  self.eq({
  'file:count': 9,
- 'size:bytes': 651,
+ 'size:bytes': 646,
  }, await core.callStorm('return($lib.axon.metrics())'))

  bin_buf = b'\xbb/$\xc0A\xf1\xbf\xbc\x00_\x82v4\xf6\xbd\x1b'
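
The smaller 'size:bytes' expectation above (651 down to 646) is consistent with the new serializer writing compact JSON without spaces after separators, which also matches the '[null,{"key":0}]' expectation in the cryo cat test further down. A purely illustrative stdlib comparison (not synapse code):

    import json

    item = [None, {'key': 0}]

    # Stdlib default puts a space after ',' and ':'.
    spaced = json.dumps(item)                          # '[null, {"key": 0}]'
    compact = json.dumps(item, separators=(',', ':'))  # '[null,{"key":0}]'

    print(len(spaced), len(compact))  # the compact form is a few bytes shorter
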
@@ -1,9 +1,10 @@
  import io
- import json
  import time
  import logging

  import synapse.common as s_common
+
+ import synapse.lib.json as s_json
  import synapse.lib.structlog as s_structlog

  import synapse.tests.utils as s_test
@@ -46,7 +47,7 @@ class StructLogTest(s_test.SynTest):

  # There is a trailing \n on the stream
  raw_mesgs = [m for m in data.split('\n') if m]
- mesgs = [json.loads(m) for m in raw_mesgs]
+ mesgs = [s_json.loads(m) for m in raw_mesgs]
  self.len(5, mesgs)

  mesg = mesgs[0]
@@ -83,10 +84,8 @@ class StructLogTest(s_test.SynTest):
  self.eq(erfo.get('args'), (1, 0))
  self.eq(erfo.get('buffer'), "b'vertex'")

- mesg = mesgs[4]
  rawm = raw_mesgs[4]
- self.isin(r'Unicode is cool for \u7a0b\u5e8f\u5458!', rawm)
- self.eq(mesg.get('message'), 'Unicode is cool for 程序员!')
+ self.isin('"message":"Unicode is cool for 程序员!"', rawm)

  logger.removeHandler(handler)

@@ -105,7 +104,7 @@ class StructLogTest(s_test.SynTest):

  # There is a trailing \n on the stream
  raw_mesgs = [m for m in data.split('\n') if m]
- mesgs = [json.loads(m) for m in raw_mesgs]
+ mesgs = [s_json.loads(m) for m in raw_mesgs]
  self.len(1, mesgs)
  ptime = time.strptime(mesgs[0].get('time'), datefmt)
  self.eq(now.tm_year, ptime.tm_year)
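
The structured-log assertion above now expects raw UTF-8 in the serialized record instead of \uXXXX escapes. With the stdlib this corresponds to ensure_ascii=False; whether synapse.lib.json sets that by default is an assumption here, so the illustration sticks to the stdlib:

    import json

    mesg = {'message': 'Unicode is cool for 程序员!'}

    print(json.dumps(mesg))                      # escaped: ...\u7a0b\u5e8f\u5458...
    print(json.dumps(mesg, ensure_ascii=False))  # raw UTF-8: ...程序员...
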
@@ -906,6 +906,14 @@ class ViewTest(s_t_utils.SynTest):
  with self.raises(s_exc.BadState):
  await core.callStorm('return($lib.view.get().insertParentFork().iden)')

+ pname = view01.parent.info.get('name')
+ vdef = await view01.insertParentFork(visi.iden)
+ self.eq(vdef.get('name'), f'inserted fork of {pname}')
+
+ piden = view03.parent.iden
+ vdef = await view03.insertParentFork(visi.iden)
+ self.eq(vdef.get('name'), f'inserted fork of {piden}')
+
  async def test_view_children(self):

  async with self.getTestCore() as core:
@@ -413,3 +413,35 @@ class BaseTest(s_t_utils.SynTest):
  self.eq('bottles.', nodes[0].get('type'))
  self.eq(1706832000000, nodes[0].get('time'))
  self.len(1, await core.nodes('meta:aggregate -> meta:aggregate:type:taxonomy'))
+
+ async def test_model_feed(self):
+
+ async with self.getTestCore() as core:
+ nodes = await core.nodes('''[
+ meta:feed=*
+ :name="woot (foo bar baz)"
+ :type=foo.bar.baz
+ :source={[ meta:source=* :name=woot ]}
+ :url=https://v.vtx.lk/slack
+ :query="Hi There"
+ :opts=({"foo": "bar"})
+ :period=(2024,2025)
+ :latest=2025
+ :offset=17
+ :cursor=FooBar
+ ]''')
+ self.len(1, nodes)
+ self.nn(nodes[0].get('source'))
+
+ self.eq(nodes[0].get('name'), 'woot (foo bar baz)')
+ self.eq(nodes[0].get('type'), 'foo.bar.baz.')
+ self.eq(nodes[0].get('url'), 'https://v.vtx.lk/slack')
+ self.eq(nodes[0].get('query'), 'Hi There')
+ self.eq(nodes[0].get('opts'), {"foo": "bar"})
+ self.eq(nodes[0].get('period'), (1704067200000, 1735689600000))
+ self.eq(nodes[0].get('latest'), 1735689600000)
+ self.eq(nodes[0].get('offset'), 17)
+ self.eq(nodes[0].get('cursor'), 'FooBar')
+
+ self.len(1, await core.nodes('meta:feed -> meta:source +:name=woot'))
+ self.len(1, await core.nodes('meta:feed -> meta:feed:type:taxonomy'))
@@ -1,9 +1,9 @@
- import json
  import hashlib

  import synapse.exc as s_exc
  import synapse.common as s_common

+ import synapse.lib.json as s_json
  import synapse.lib.const as s_const
  import synapse.lib.scrape as s_scrape

@@ -1954,7 +1954,7 @@ class InfotechModelTest(s_t_utils.SynTest):
  # Test 2.2->2.3 and 2.3->2.2 conversions
  filename = s_t_files.getAssetPath('cpedata.json')
  with open(filename, 'r') as fp:
- cpedata = json.load(fp)
+ cpedata = s_json.load(fp)

  for (_cpe22, _cpe23) in cpedata:
  # Convert cpe22 -> cpe23
@@ -1,6 +1,5 @@
  import os
  import ssl
- import json
  import socket
  import asyncio
  import logging
@@ -1,10 +1,11 @@
  import io
- import json

  import msgpack

  import unittest.mock as mock

+ import synapse.exc as s_exc
+
  import synapse.lib.msgpack as s_msgpack
  import synapse.tests.utils as s_t_utils
  import synapse.tools.cryo.cat as s_cryocat
@@ -34,7 +35,7 @@ class CryoCatTest(s_t_utils.SynTest):
  argv = ['--ingest', '--jsonl', cryourl]
  inp = io.StringIO('{"foo: "bar"}\n[]\n')
  with self.redirectStdin(inp):
- with self.raises(json.decoder.JSONDecodeError):
+ with self.raises(s_exc.BadJsonText):
  retn, outp = await self.execToolMain(s_cryocat.main, argv)

  # Happy path msgpack ingest
@@ -74,7 +75,7 @@ class CryoCatTest(s_t_utils.SynTest):

  argv = ['--offset', '0', '--jsonl', '--size', '2', '--omit-offset', cryourl]
  retn, outp = await self.execToolMain(s_cryocat.main, argv)
- self.true(outp.expect('[null, {"key": 0}]\n[null, {"key": 1}]\n'))
+ self.true(outp.expect('[null,{"key":0}]\n[null,{"key":1}]\n'))

  argv = ['--offset', '0', '--size', '20', cryourl]
  retn, outp = await self.execToolMain(s_cryocat.main, argv)
@@ -1,10 +1,12 @@
  import base64
- import json
+
  import unittest.mock as mock

  import cryptography.hazmat.primitives.serialization as c_serialization

+ import synapse.lib.json as s_json
  import synapse.lib.certdir as s_certdir
+
  import synapse.tests.utils as s_t_utils
  import synapse.tools.docker.validate as s_t_d_validate

@@ -48,7 +50,7 @@ class TestDockerValidate(s_t_utils.SynTest):
  # getCosignSignature
  outp = self.getTestOutp()
  with mock.patch('subprocess.run') as patch:
- test_stdout = json.dumps(test_resp).encode()
+ test_stdout = s_json.dumps(test_resp)
  mock_stdout = mock.MagicMock(stdout=test_stdout)
  patch.return_value = mock_stdout
  ret = s_t_d_validate.getCosignSignature(outp, 'hehe/haha:tag')
@@ -1,4 +1,3 @@
- import json
  import hashlib

  from unittest import mock
@@ -6,6 +5,7 @@ from unittest import mock
  import synapse.exc as s_exc
  import synapse.common as s_common

+ import synapse.lib.json as s_json
  import synapse.lib.msgpack as s_msgpack

  import synapse.tools.feed as s_feed
@@ -33,7 +33,7 @@ class FeedTest(s_t_utils.SynTest):
  with s_common.genfile(jsonlfp) as fd:
  for i in range(20):
  pode = (('test:int', i), {})
- _ = fd.write(json.dumps(pode).encode() + b'\n')
+ _ = fd.write(s_json.dumps(pode, newline=True))

  argv = ['--cortex', curl,
  '--format', 'syn.nodes',
@@ -138,3 +138,31 @@ class FeedTest(s_t_utils.SynTest):

  nodes = await core.nodes('test:int', opts={'view': oldview})
  self.len(0, nodes)
+
+ async def test_synnodes_json(self):
+ async with self.getTestCore() as core:
+
+ await self.addCreatorDeleterRoles(core)
+
+ host, port = await core.dmon.listen('tcp://127.0.0.1:0/')
+
+ curl = f'tcp://icanadd:secret@{host}:{port}/'
+
+ with self.getTestDir() as dirn:
+
+ jsonfp = s_common.genpath(dirn, 'podes.json')
+ with s_common.genfile(jsonfp) as fd:
+ podes = [(('test:int', ii), {}) for ii in range(20)]
+ s_json.dump(podes, fd)
+
+ argv = ['--cortex', curl,
+ '--format', 'syn.nodes',
+ '--modules', 'synapse.tests.utils.TestModule',
+ '--chunksize', '3',
+ jsonfp]
+
+ outp = self.getTestOutp()
+ self.eq(await s_feed.main(argv, outp=outp), 0)
+
+ nodes = await core.nodes('test:int')
+ self.len(20, nodes)
@@ -65,7 +65,7 @@ class GenPkgTest(s_test.SynTest):
  ymlpath = s_common.genpath(dirname, 'files', 'stormpkg', 'badcmdname.yaml')
  await s_genpkg.main((ymlpath,))

- with self.raises(s_exc.BadArg):
+ with self.raises(s_exc.MustBeJsonSafe):
  ymlpath = s_common.genpath(dirname, 'files', 'stormpkg', 'badjsonpkg.yaml')
  await s_genpkg.main((ymlpath,))

@@ -1,9 +1,10 @@
- import json
  import asyncio
  import logging

  import unittest.mock as mock

+ import synapse.lib.json as s_json
+
  import synapse.tests.utils as s_t_utils

  import synapse.tools.healthcheck as s_t_healthcheck
@@ -25,7 +26,7 @@ class HealthcheckTest(s_t_utils.SynTest):

  retn = await s_t_healthcheck.main(argv, outp)
  self.eq(retn, 0)
- resp = json.loads(str(outp))
+ resp = s_json.loads(str(outp))
  self.isinstance(resp, dict)

  mod = core.modules.get('synapse.tests.utils.TestModule') # type: s_t_utils.TestModule
@@ -34,7 +35,7 @@ class HealthcheckTest(s_t_utils.SynTest):
  outp.clear()
  retn = await s_t_healthcheck.main(argv, outp)
  self.eq(retn, 1)
- resp = json.loads(str(outp))
+ resp = s_json.loads(str(outp))
  self.isinstance(resp, dict)

  # Sad paths
@@ -47,7 +48,7 @@ class HealthcheckTest(s_t_utils.SynTest):
  outp.clear()
  retn = await s_t_healthcheck.main(['-c', curl, '-t', '0.4'], outp)
  self.eq(retn, 1)
- resp = json.loads(str(outp))
+ resp = s_json.loads(str(outp))
  self.eq(resp.get('components')[0].get('name'), 'error')
  m = 'Timeout getting health information from cell.'
  self.eq(resp.get('components')[0].get('mesg'), m)
@@ -60,7 +61,7 @@ class HealthcheckTest(s_t_utils.SynTest):
  await root.setPasswd('secret')
  retn = await s_t_healthcheck.main(['-c', f'tcp://root:newp@127.0.0.1:{port}/cortex', '-t', '0.4'], outp)
  self.eq(retn, 1)
- resp = json.loads(str(outp))
+ resp = s_json.loads(str(outp))
  self.eq(resp.get('components')[0].get('name'), 'error')
  m = 'Synapse error encountered.'
  self.eq(resp.get('components')[0].get('mesg'), m)
@@ -72,7 +73,7 @@ class HealthcheckTest(s_t_utils.SynTest):
  outp.clear()
  retn = await s_t_healthcheck.main(['-c', f'tcp://visi:secret@127.0.0.1:{port}/cortex', '-t', '0.4'], outp)
  self.eq(retn, 1)
- resp = json.loads(str(outp))
+ resp = s_json.loads(str(outp))
  self.eq(resp.get('components')[0].get('name'), 'error')
  m = 'Synapse error encountered.'
  self.eq(resp.get('components')[0].get('mesg'), m)
@@ -85,7 +86,7 @@ class HealthcheckTest(s_t_utils.SynTest):
  outp.clear()
  retn = await s_t_healthcheck.main(['-c', curl, '-t', '0.4'], outp)
  self.eq(retn, 1)
- resp = json.loads(str(outp))
+ resp = s_json.loads(str(outp))
  self.eq(resp.get('components')[0].get('name'), 'error')
  m = 'Unable to connect to cell'
  self.isin(m, resp.get('components')[0].get('mesg'))
@@ -1,12 +1,12 @@
  import os
  import time
- import json
  import logging
  import unittest

  import synapse.common as s_common

  import synapse.lib.base as s_base
+ import synapse.lib.json as s_json
  import synapse.lib.output as s_output
  import synapse.lib.certdir as s_certdir

@@ -126,7 +126,7 @@ class TestUtils(s_t_utils.SynTest):
  self.notin('notthere', mesgs)

  with self.getLoggerStream('synapse.tests.test_utils', 'Test Message') as stream:
- thr = logathing(json.dumps({'mesg': 'Test Message'}))
+ thr = logathing(s_json.dumps({'mesg': 'Test Message'}).decode())
  self.true(stream.wait(10))
  thr.join()

synapse/tests/utils.py CHANGED
@@ -21,13 +21,11 @@ import io
  import os
  import sys
  import copy
- import json
  import math
  import types
  import shutil
  import typing
  import asyncio
- import hashlib
  import inspect
  import logging
  import tempfile
@@ -59,6 +57,7 @@ import synapse.lib.cell as s_cell
  import synapse.lib.coro as s_coro
  import synapse.lib.cmdr as s_cmdr
  import synapse.lib.hive as s_hive
+ import synapse.lib.json as s_json
  import synapse.lib.task as s_task
  import synapse.lib.const as s_const
  import synapse.lib.layer as s_layer
@@ -101,7 +100,7 @@ def deguidify(x):

  def jsonlines(text: str):
  lines = [k for k in text.split('\n') if k]
- return [json.loads(line) for line in lines]
+ return [s_json.loads(line) for line in lines]

  async def waitForBehold(core, events):
  async for mesg in core.behold():
@@ -457,6 +456,7 @@ testmodel = {
  ('tick', ('test:time', {}), {}),
  ('hehe', ('str', {}), {}),
  ('ndefs', ('array', {'type': 'ndef'}), {}),
+ ('somestr', ('test:str', {}), {}),
  )),

  ('test:migr', {}, (
synapse/tools/autodoc.py CHANGED
@@ -1,6 +1,5 @@
  import sys
  import copy
- import json
  import asyncio
  import logging
  import argparse
@@ -13,6 +12,7 @@ import synapse.common as s_common
  import synapse.cortex as s_cortex
  import synapse.telepath as s_telepath

+ import synapse.lib.json as s_json
  import synapse.lib.storm as s_storm
  import synapse.lib.config as s_config
  import synapse.lib.output as s_output
@@ -262,7 +262,7 @@ def processTypes(rst, dochelp, types):
  rst.addLines(f' * {key}: ``{valu}``')
  continue
  lines = [f' * {key}:\n', ' ::\n\n']
- json_lines = json.dumps(valu, indent=1, sort_keys=True)
+ json_lines = s_json.dumps(valu, indent=True, sort_keys=True).decode()
  json_lines = [' ' + line for line in json_lines.split('\n')]
  lines.extend(json_lines)
  lines.append('\n')
@@ -810,7 +810,7 @@ async def docConfdefs(ctor):
  data = {k: v for k, v in conf.items() if k not in (
  'description', 'default', 'type', 'hideconf', 'hidecmdl',
  )}
- parts = json.dumps(data, sort_keys=True, indent=2).split('\n')
+ parts = s_json.dumps(data, sort_keys=True, indent=True).decode().split('\n')
  lines.append(' ::')
  lines.append('\n')
  lines.extend([f' {p}' for p in parts])
@@ -476,7 +476,7 @@ async def gen(opts: argparse.Namespace,
  if opts.verbose:
  outp.printf('Validating data against schema')

- s_schemas._reqChanglogSchema(data)
+ s_schemas._reqChangelogSchema(data)

  if opts.verbose:
  outp.printf('Saving the following information:')
@@ -835,7 +835,7 @@ async def format(opts: argparse.Namespace,

  files_processed.append(fp)

- s_schemas._reqChanglogSchema(data)
+ s_schemas._reqChangelogSchema(data)

  data.setdefault('prs', [])
  prs = data.get('prs')
synapse/tools/cryo/cat.py CHANGED
@@ -1,5 +1,4 @@
  import sys
- import json
  import pprint
  import asyncio
  import argparse
@@ -7,6 +6,7 @@ import logging

  import synapse.telepath as s_telepath

+ import synapse.lib.json as s_json
  import synapse.lib.output as s_output
  import synapse.lib.msgpack as s_msgpack

@@ -50,14 +50,14 @@ async def main(argv, outp=s_output.stdout):
  await tank.puts(items)
  return 0

- items = [json.loads(line) for line in sys.stdin]
+ items = [s_json.loads(line) for line in sys.stdin]
  await tank.puts(items)
  return 0

  async for item in tank.slice(opts.offset, opts.size):

  if opts.jsonl:
- outp.printf(json.dumps(item[1], sort_keys=True))
+ outp.printf(s_json.dumps(item[1], sort_keys=True).decode())

  elif opts.msgpack:
  sys.stdout.buffer.write(s_msgpack.en(item[1]))
synapse/tools/csvtool.py CHANGED
@@ -1,6 +1,5 @@
  import csv
  import sys
- import json
  import asyncio
  import contextlib

@@ -12,6 +11,7 @@ import synapse.telepath as s_telepath
  import synapse.lib.cmd as s_cmd
  import synapse.lib.base as s_base
  import synapse.lib.cmdr as s_cmdr
+ import synapse.lib.json as s_json
  import synapse.lib.output as s_output
  import synapse.lib.version as s_version

@@ -114,8 +114,7 @@ async def runCsvImport(opts, outp, text, stormopts):
  outp.printf(repr(mesg))

  if logfd is not None:
- byts = json.dumps(mesg).encode('utf8')
- logfd.write(byts + b'\n')
+ logfd.write(s_json.dumps(mesg, newline=True))

  if opts.cli:
  await s_cmdr.runItemCmdr(core, outp, True)
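
The csvtool change above folds the encode-and-append-newline pattern into one call. A minimal before/after sketch, assuming s_json.dumps() returns bytes and newline=True appends the trailing b'\n' (behavior inferred from the surrounding diffs, not from documentation):

    import json

    import synapse.lib.json as s_json

    mesg = ('node', (('test:int', 1), {}))

    # Old pattern from the removed lines: serialize, encode, append a newline.
    old = json.dumps(mesg).encode('utf8') + b'\n'

    # New pattern: a single bytes result with the trailing newline included.
    new = s_json.dumps(mesg, newline=True)
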
@@ -1,7 +1,6 @@
  import os
  import re
  import sys
- import json
  import base64
  import pprint
  import argparse
@@ -11,6 +10,7 @@ import synapse.exc as s_exc
  import synapse.data as s_data
  import synapse.common as s_common

+ import synapse.lib.json as s_json
  import synapse.lib.output as s_outp
  import synapse.lib.certdir as s_certdir

@@ -50,8 +50,8 @@ def getCosignSignature(outp, image):

  blob = proc.stdout
  try:
- sigd = json.loads(blob)
- except json.JSONDecodeError as e:
+ sigd = s_json.loads(blob)
+ except s_exc.BadJsonText as e:
  outp.printf(f'Error decoding blob: {blob}: {e}')
  return None
  if not isinstance(sigd, dict):
@@ -104,8 +104,8 @@ def checkCosignSignature(outp, pubk_byts, image_to_verify):
  except subprocess.CalledProcessError as e: # pragma: no cover
  outp.printf(f'Error calling {" ".join(args)}: {e}')
  return None
- blob = json.loads(proc.stdout.decode())
- outp.printf(f'Cosign output:')
+ blob = s_json.loads(proc.stdout)
+ outp.printf('Cosign output:')
  outp.printf(pprint.pformat(blob))
  return True
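
Error handling shifts with the migration as well: callers that previously caught json.JSONDecodeError now catch synapse.exc.BadJsonText, and s_json.loads() is handed the raw subprocess bytes directly. A hedged sketch of that pattern, mirroring getCosignSignature above:

    import synapse.exc as s_exc
    import synapse.lib.json as s_json

    def parse_blob(blob):
        # blob may be bytes (subprocess stdout); the diff passes it to
        # s_json.loads() without an explicit .decode().
        try:
            return s_json.loads(blob)
        except s_exc.BadJsonText as e:
            print(f'Error decoding blob: {blob}: {e}')
            return None
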
synapse/tools/feed.py CHANGED
@@ -11,6 +11,7 @@ import synapse.cortex as s_cortex
  import synapse.telepath as s_telepath

  import synapse.lib.cmdr as s_cmdr
+ import synapse.lib.json as s_json
  import synapse.lib.output as s_output
  import synapse.lib.msgpack as s_msgpack
  import synapse.lib.version as s_version
@@ -24,7 +25,7 @@ def getItems(*paths):
  items = []
  for path in paths:
  if path.endswith('.json'):
- item = s_common.jsload(path)
+ item = s_json.jsload(path)
  if not isinstance(item, list):
  item = [item]
  items.append((path, item))
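
Whole-file JSON loading in the feed tool likewise moves from s_common.jsload() to s_json.jsload(). A short sketch of the call, assuming jsload() takes a filesystem path and returns the parsed object just as the call it replaces did (the path below is hypothetical):

    import synapse.lib.json as s_json

    # Hypothetical path for illustration; the tool receives paths via argv.
    item = s_json.jsload('podes.json')
    if not isinstance(item, list):
        item = [item]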