synapse 2.224.0-py311-none-any.whl → 2.225.0-py311-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release. This version of synapse might be problematic.
- synapse/axon.py +10 -5
- synapse/lib/cell.py +1 -1
- synapse/lib/const.py +4 -0
- synapse/lib/multislabseqn.py +36 -1
- synapse/lib/nexus.py +67 -8
- synapse/lib/queue.py +4 -1
- synapse/lib/rstorm.py +2 -2
- synapse/lib/schemas.py +11 -1
- synapse/lib/slabseqn.py +28 -0
- synapse/lib/storm.py +5 -1
- synapse/lib/stormhttp.py +7 -1
- synapse/lib/version.py +2 -2
- synapse/models/inet.py +4 -0
- synapse/models/media.py +4 -0
- synapse/models/risk.py +3 -0
- synapse/tests/test_cortex.py +2 -2
- synapse/tests/test_lib_agenda.py +1 -1
- synapse/tests/test_lib_cell.py +1 -1
- synapse/tests/test_lib_certdir.py +1 -1
- synapse/tests/test_lib_httpapi.py +1 -1
- synapse/tests/test_lib_layer.py +1 -1
- synapse/tests/test_lib_multislabseqn.py +22 -0
- synapse/tests/test_lib_nexus.py +42 -1
- synapse/tests/test_lib_slabseqn.py +30 -1
- synapse/tests/test_lib_storm.py +59 -1
- synapse/tests/test_lib_stormhttp.py +16 -0
- synapse/tests/test_lib_stormlib_oauth.py +1 -1
- synapse/tests/test_lib_stormsvc.py +1 -1
- synapse/tests/test_lib_trigger.py +1 -1
- synapse/tests/test_model_inet.py +6 -0
- synapse/tests/test_model_media.py +4 -1
- synapse/tests/test_model_risk.py +2 -0
- synapse/tests/{test_tools_axon2axon.py → test_tools_axon_copy.py} +4 -4
- synapse/tests/{test_tools_pullfile.py → test_tools_axon_get.py} +4 -4
- synapse/tests/{test_tools_pushfile.py → test_tools_axon_put.py} +7 -7
- synapse/tests/{test_tools_csvtool.py → test_tools_cortex_csv.py} +12 -3
- synapse/tests/{test_tools_feed.py → test_tools_cortex_feed.py} +2 -2
- synapse/tests/{test_tools_apikey.py → test_tools_service_apikey.py} +1 -4
- synapse/tests/{test_tools_backup.py → test_tools_service_backup.py} +5 -5
- synapse/tests/{test_tools_demote.py → test_tools_service_demote.py} +1 -1
- synapse/tests/{test_tools_healthcheck.py → test_tools_service_healthcheck.py} +1 -1
- synapse/tests/{test_tools_livebackup.py → test_tools_service_livebackup.py} +1 -1
- synapse/tests/{test_tools_modrole.py → test_tools_service_modrole.py} +1 -1
- synapse/tests/{test_tools_moduser.py → test_tools_service_moduser.py} +1 -1
- synapse/tests/{test_tools_promote.py → test_tools_service_promote.py} +1 -1
- synapse/tests/{test_tools_reload.py → test_tools_service_reload.py} +1 -1
- synapse/tests/{test_tools_shutdown.py → test_tools_service_shutdown.py} +1 -1
- synapse/tests/{test_tools_snapshot.py → test_tools_service_snapshot.py} +1 -1
- synapse/tests/{test_tools_storm.py → test_tools_storm_cli.py} +1 -1
- synapse/tests/{test_tools_pkgs_gendocs.py → test_tools_storm_pkg_doc.py} +12 -3
- synapse/tests/{test_tools_genpkg.py → test_tools_storm_pkg_gen.py} +1 -1
- synapse/tests/{test_tools_autodoc.py → test_tools_utils_autodoc.py} +1 -1
- synapse/tests/test_tools_utils_changelog.py +454 -0
- synapse/tests/{test_tools_easycert.py → test_tools_utils_easycert.py} +48 -46
- synapse/tests/{test_tools_guid.py → test_tools_utils_guid.py} +3 -3
- synapse/tests/{test_tools_json2mpk.py → test_tools_utils_json2mpk.py} +3 -3
- synapse/tests/{test_tools_rstorm.py → test_tools_utils_rstorm.py} +6 -1
- synapse/tests/utils.py +3 -1
- synapse/tools/apikey.py +4 -83
- synapse/tools/autodoc.py +3 -1031
- synapse/tools/axon/copy.py +44 -0
- synapse/tools/axon/get.py +64 -0
- synapse/tools/axon/put.py +122 -0
- synapse/tools/axon2axon.py +3 -36
- synapse/tools/backup.py +6 -176
- synapse/tools/changelog.py +3 -1098
- synapse/tools/cortex/csv.py +236 -0
- synapse/tools/cortex/feed.py +151 -0
- synapse/tools/csvtool.py +3 -227
- synapse/tools/demote.py +4 -40
- synapse/tools/docker/validate.py +3 -3
- synapse/tools/easycert.py +4 -129
- synapse/tools/feed.py +3 -140
- synapse/tools/genpkg.py +3 -307
- synapse/tools/guid.py +7 -6
- synapse/tools/healthcheck.py +3 -101
- synapse/tools/json2mpk.py +6 -38
- synapse/tools/livebackup.py +4 -27
- synapse/tools/modrole.py +3 -108
- synapse/tools/moduser.py +3 -179
- synapse/tools/pkgs/gendocs.py +3 -164
- synapse/tools/promote.py +4 -41
- synapse/tools/pullfile.py +3 -56
- synapse/tools/pushfile.py +3 -114
- synapse/tools/reload.py +4 -61
- synapse/tools/rstorm.py +3 -26
- synapse/tools/service/__init__.py +0 -0
- synapse/tools/service/apikey.py +90 -0
- synapse/tools/service/backup.py +181 -0
- synapse/tools/service/demote.py +47 -0
- synapse/tools/service/healthcheck.py +109 -0
- synapse/tools/service/livebackup.py +34 -0
- synapse/tools/service/modrole.py +116 -0
- synapse/tools/service/moduser.py +184 -0
- synapse/tools/service/promote.py +48 -0
- synapse/tools/service/reload.py +68 -0
- synapse/tools/service/shutdown.py +51 -0
- synapse/tools/service/snapshot.py +64 -0
- synapse/tools/shutdown.py +5 -45
- synapse/tools/snapshot.py +4 -57
- synapse/tools/storm/__init__.py +0 -0
- synapse/tools/storm/__main__.py +5 -0
- synapse/tools/{storm.py → storm/_cli.py} +0 -3
- synapse/tools/storm/pkg/__init__.py +0 -0
- synapse/tools/{pkgs/pandoc_filter.py → storm/pkg/_pandoc_filter.py} +1 -1
- synapse/tools/storm/pkg/doc.py +176 -0
- synapse/tools/storm/pkg/gen.py +315 -0
- synapse/tools/utils/__init__.py +0 -0
- synapse/tools/utils/autodoc.py +1040 -0
- synapse/tools/utils/changelog.py +1124 -0
- synapse/tools/utils/easycert.py +136 -0
- synapse/tools/utils/guid.py +11 -0
- synapse/tools/utils/json2mpk.py +46 -0
- synapse/tools/utils/rstorm.py +35 -0
- {synapse-2.224.0.dist-info → synapse-2.225.0.dist-info}/METADATA +1 -1
- {synapse-2.224.0.dist-info → synapse-2.225.0.dist-info}/RECORD +120 -91
- synapse/tests/test_tools_changelog.py +0 -196
- /synapse/tests/{test_tools_axon.py → test_tools_axon_dump_load.py} +0 -0
- {synapse-2.224.0.dist-info → synapse-2.225.0.dist-info}/WHEEL +0 -0
- {synapse-2.224.0.dist-info → synapse-2.225.0.dist-info}/licenses/LICENSE +0 -0
- {synapse-2.224.0.dist-info → synapse-2.225.0.dist-info}/top_level.txt +0 -0
synapse/tools/promote.py
CHANGED
@@ -1,48 +1,11 @@
-import synapse.exc as s_exc
-
-import synapse.telepath as s_telepath
+import synapse.common as s_common

 import synapse.lib.cmd as s_cmd
-import synapse.lib.output as s_output
-import synapse.lib.urlhelp as s_urlhelp
-
-descr = '''
-Promote a mirror to the leader.
-
-Example (being run from a Cortex mirror docker container):
-    python -m synapse.tools.promote
-'''
-
-async def main(argv, outp=s_output.stdout):
-
-    pars = s_cmd.Parser(prog='synapse.tools.promote', outp=outp, description=descr)
-
-    pars.add_argument('--svcurl', default='cell:///vertex/storage',
-                      help='The telepath URL of the Synapse service.')
-
-    pars.add_argument('--failure', default=False, action='store_true',
-                      help='Promotion is due to leader being offline. Graceful handoff is not possible.')
-
-    opts = pars.parse_args(argv)
-
-    async with s_telepath.withTeleEnv():
-
-        async with await s_telepath.openurl(opts.svcurl) as cell:
-
-            graceful = not opts.failure

-
-            try:
-                await cell.promote(graceful=graceful)
-            except s_exc.BadState as e:
-                mesg = f'Failed to promote service to being a leader; {e.get("mesg")}'
-                outp.printf(mesg)
-                return 1
-            except s_exc.SynErr as e:
-                outp.printf(f'Failed to promote service {s_urlhelp.sanitizeUrl(opts.svcurl)}: {e}')
-                return 1
+from synapse.tools.service.promote import main

-
+s_common.deprecated('synapse.tools.promote is deprecated. Please use synapse.tools.service.promote instead.',
+                    curv='v2.225.0')

 if __name__ == '__main__':  # pragma: no cover
     s_cmd.exitmain(main)
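
Each relocated CLI keeps a shim like the one above: the old module now only re-exports main from its new home and calls s_common.deprecated at import time. A minimal migration sketch, assuming the relocated main keeps the old (argv, outp) signature and the --svcurl/--failure arguments (the service URL below is the tool's documented default):

    import asyncio

    import synapse.lib.output as s_output

    # New home of the promote CLI; synapse.tools.promote now only re-exports it.
    from synapse.tools.service.promote import main

    async def promote_mirror(svcurl='cell:///vertex/storage'):
        # Add '--failure' to the argv list to skip the graceful handoff.
        return await main(['--svcurl', svcurl], outp=s_output.stdout)

    if __name__ == '__main__':
        asyncio.run(promote_mirror())
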
synapse/tools/pullfile.py
CHANGED
@@ -1,64 +1,11 @@
-import pathlib
-
 import synapse.common as s_common
-import synapse.telepath as s_telepath

 import synapse.lib.cmd as s_cmd
-import synapse.lib.output as s_output
-
-
-async def main(argv, outp=s_output.stdout):
-    pars = getArgParser(outp)
-    opts = pars.parse_args(argv)
-
-    if opts.output is None:
-        opts.output = '.'
-
-    outdir = pathlib.Path(opts.output)
-
-    s_common.gendir(opts.output)
-
-    async with s_telepath.withTeleEnv():
-
-        async with await s_telepath.openurl(opts.axon) as axon:
-
-            # reminder: these are the hashes *not* available
-
-            awants = await axon.wants([s_common.uhex(h) for h in opts.hashes])
-            for a in awants:
-                outp.printf(f'{s_common.ehex(a)} not in axon store')
-
-            exists = [h for h in opts.hashes if s_common.uhex(h) not in awants]
-
-            for h in exists:
-
-                try:
-                    outp.printf(f'Fetching {h} to file')
-
-                    with open(outdir.joinpath(h), 'wb') as fd:
-                        async for b in axon.get(s_common.uhex(h)):
-                            fd.write(b)
-
-                    outp.printf(f'Fetched {h} to file')
-
-                except Exception as e:
-                    outp.printf('Error: Hit Exception: %s' % (str(e),))
-                    continue
-
-    return 0
-

-def getArgParser(outp):
-    desc = 'Fetches file from the given axon'
-    pars = s_cmd.Parser(prog='synapse.tools.pullfile', outp=outp, description=desc)
-    pars.add_argument('-a', '--axon', type=str, dest='axon', required=True,
-                      help='URL to the axon blob store')
-    pars.add_argument('-o', '--output', type=str, dest='output',
-                      help='Directory to output files to')
-    pars.add_argument('-l', '--list-hashes', dest='hashes', action='append', default=[],
-                      help='List of hashes to pull from axon')
+from synapse.tools.axon.get import main

-    return pars
+s_common.deprecated('synapse.tools.pullfile is deprecated. Please use synapse.tools.axon.get instead.',
+                    curv='v2.225.0')

 if __name__ == '__main__':  # pragma: no cover
     s_cmd.exitmain(main)
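
The deleted body above is a thin wrapper over the public Axon API: axon.wants() reports the hashes the Axon does not have, and axon.get() streams the bytes. A minimal sketch of the same fetch done directly over telepath; the URL, digest, and output path below are placeholders:

    import asyncio

    import synapse.common as s_common
    import synapse.telepath as s_telepath

    async def pull(axonurl, sha256hex, outpath):
        async with s_telepath.withTeleEnv():
            async with await s_telepath.openurl(axonurl) as axon:
                # wants() returns the hashes the Axon does *not* have.
                if await axon.wants([s_common.uhex(sha256hex)]):
                    return False
                with open(outpath, 'wb') as fd:
                    async for byts in axon.get(s_common.uhex(sha256hex)):
                        fd.write(byts)
                return True

    # asyncio.run(pull('cell:///vertex/storage', '<sha256 hex>', './outfile'))
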
synapse/tools/pushfile.py
CHANGED
@@ -1,122 +1,11 @@
-import os
-import glob
-import logging
-
-import synapse.exc as s_exc
 import synapse.common as s_common
-import synapse.telepath as s_telepath

 import synapse.lib.cmd as s_cmd
-import synapse.lib.output as s_output
-import synapse.lib.hashset as s_hashset
-
-logger = logging.getLogger(__name__)
-
-
-async def main(argv, outp=s_output.stdout):
-    pars = getArgParser(outp)
-    opts = pars.parse_args(argv)
-
-    async with s_telepath.withTeleEnv():
-
-        axon = await s_telepath.openurl(opts.axon)
-
-        core = None
-        if opts.cortex:
-            core = await s_telepath.openurl(opts.cortex)
-
-        tags = set()
-        if opts.tags:
-            for tag in opts.tags.split(','):
-                tags.add(tag)
-
-        tags = tuple(tags)
-        if tags:
-            outp.printf(f'adding tags: {tags}')
-
-        filepaths = set()
-        for item in opts.filenames:
-            paths = glob.glob(item, recursive=opts.recursive)
-
-            if not paths:
-                outp.printf(f'filepath does not contain any files: {item}')
-                continue
-
-            filepaths.update([path for path in paths if os.path.isfile(path)])
-
-        for path in filepaths:
-
-            bname = os.path.basename(path)
-
-            hset = s_hashset.HashSet()
-            with s_common.reqfile(path) as fd:
-                hset.eatfd(fd)
-
-            fhashes = {htyp: hasher.hexdigest() for htyp, hasher in hset.hashes}
-
-            sha256 = fhashes.get('sha256')
-            bsha256 = s_common.uhex(sha256)
-
-            if not await axon.has(bsha256):
-
-                async with await axon.upload() as upfd:
-
-                    with s_common.genfile(path) as fd:
-                        for byts in s_common.iterfd(fd):
-                            await upfd.write(byts)
-
-                    size, hashval = await upfd.save()
-
-                if hashval != bsha256:  # pragma: no cover
-                    raise s_exc.SynErr(mesg='hashes do not match',
-                                       ehash=s_common.ehex(hashval),
-                                       ahash=hashval)
-
-                outp.printf(f'Uploaded [{bname}] to axon')
-            else:
-                outp.printf(f'Axon already had [{bname}]')
-
-            if core:
-                opts = {'vars': {
-                    'md5': fhashes.get('md5'),
-                    'sha1': fhashes.get('sha1'),
-                    'sha256': fhashes.get('sha256'),
-                    'size': hset.size,
-                    'name': bname,
-                    'tags': tags,
-                }}
-
-                q = '[file:bytes=$sha256 :md5=$md5 :sha1=$sha1 :size=$size :name=$name] ' \
-                    '{ for $tag in $tags { [+#$tag] } }'
-
-                msgs = await core.storm(q, opts=opts).list()
-                node = [m[1] for m in msgs if m[0] == 'node'][0]
-
-                iden = node[0][1]
-                size = node[1]['props']['size']
-                name = node[1]['props']['name']
-                mesg = f'file: {bname} ({size}) added to core ({iden}) as {name}'
-                outp.printf(mesg)
-
-        await axon.fini()
-        if core:
-            await core.fini()

-    return 0
+from synapse.tools.axon.put import main

-def getArgParser(outp):
-
-           'file:bytes in a Cortex.'
-    pars = s_cmd.Parser(prog='synapse.tools.pushfile', outp=outp, description=desc)
-    pars.add_argument('-a', '--axon', required=True, type=str, dest='axon',
-                      help='URL for a target Axon to store files at.')
-    pars.add_argument('-c', '--cortex', default=None, type=str, dest='cortex',
-                      help='URL for a target Cortex to make file:bytes nodes.')
-    pars.add_argument('filenames', nargs='+', help='File names (or glob patterns) to upload')
-    pars.add_argument('-r', '--recursive', action='store_true',
-                      help='Recursively search paths to upload files.')
-    pars.add_argument('-t', '--tags', help='comma separated list of tags to add to the nodes')
-    return pars
+s_common.deprecated('synapse.tools.pushfile is deprecated. Please use synapse.tools.axon.put instead.',
+                    curv='v2.225.0')

 if __name__ == '__main__':  # pragma: no cover
     s_cmd.exitmain(main)
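
The upload path removed above maps onto axon.upload(): write chunks to the upload handle, then save() returns the stored size and SHA256. A minimal sketch of the same push done directly, with a placeholder URL and file path:

    import asyncio

    import synapse.common as s_common
    import synapse.telepath as s_telepath

    async def push(axonurl, path):
        async with s_telepath.withTeleEnv():
            async with await s_telepath.openurl(axonurl) as axon:
                async with await axon.upload() as upfd:
                    with s_common.genfile(path) as fd:
                        for byts in s_common.iterfd(fd):
                            await upfd.write(byts)
                    # save() returns the stored size and SHA256 digest.
                    return await upfd.save()

    # size, sha256 = asyncio.run(push('cell:///vertex/storage', '/tmp/sample.bin'))
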
synapse/tools/reload.py
CHANGED
@@ -1,68 +1,11 @@
-import synapse.telepath as s_telepath
+import synapse.common as s_common

 import synapse.lib.cmd as s_cmd
-import synapse.lib.output as s_output
-import synapse.lib.urlhelp as s_urlhelp

-descr = '''
-List or execute reload subsystems on a Synapse service.
-'''
+from synapse.tools.service.reload import main

-async def main(argv, outp=s_output.stdout):
-
-    pars = getArgParser(outp)
-    opts = pars.parse_args(argv)
-
-    async with s_telepath.withTeleEnv():
-
-        async with await s_telepath.openurl(opts.svcurl) as cell:
-
-            if opts.cmd == 'list':
-                names = await cell.getReloadableSystems()
-                if names:
-                    outp.printf(f'Cell at {s_urlhelp.sanitizeUrl(opts.svcurl)} has the following reload subsystems:')
-                    for name in names:
-                        outp.printf(name)
-                else:
-                    outp.printf(f'Cell at {s_urlhelp.sanitizeUrl(opts.svcurl)} has no registered reload subsystems.')
-
-            if opts.cmd == 'reload':
-                outp.printf(f'Reloading cell at {s_urlhelp.sanitizeUrl(opts.svcurl)}')
-                try:
-                    ret = await cell.reload(subsystem=opts.name)
-                except Exception as e:
-                    outp.printf(f'Error reloading cell: {e}')
-                    return 1
-
-                if not ret:
-                    outp.printf('No subsystems reloaded.')
-                else:
-                    outp.printf(f'{"Name:".ljust(40)}{"Result:".ljust(10)}Value:')
-                    for name, (isok, valu) in ret.items():
-                        if isok:
-                            mesg = str(valu)
-                            result = 'Success'
-                        else:
-                            mesg = valu[1].get('mesg')
-                            if mesg is None:
-                                mesg = valu[0]
-                            result = 'Failed'
-
-                        outp.printf(f'{name.ljust(40)}{result.ljust(10)}{mesg}')
-    return 0
-
-def getArgParser(outp):
-    pars = s_cmd.Parser(prog='synapse.tools.reload', outp=outp, description=descr)
-    pars.add_argument('--svcurl', default='cell:///vertex/storage', help='The telepath URL of the Synapse service.')
-
-    subpars = pars.add_subparsers(required=True,
-                                  title='subcommands',
-                                  dest='cmd',)
-    pars_list = subpars.add_parser('list', help='List subsystems which can be reloaded.')
-    reld_list = subpars.add_parser('reload', help='Reload registered subsystems.')
-    reld_list.add_argument('-n', '--name', type=str, help='Name of a subsystem to reload.')
-
-    return pars
+s_common.deprecated('synapse.tools.reload is deprecated. Please use synapse.tools.service.reload instead.',
+                    curv='v2.225.0')

 if __name__ == '__main__':  # pragma: no cover
     s_cmd.exitmain(main)
|
synapse/tools/rstorm.py
CHANGED
|
@@ -1,34 +1,11 @@
|
|
|
1
|
-
import logging
|
|
2
|
-
|
|
3
1
|
import synapse.common as s_common
|
|
4
2
|
|
|
5
3
|
import synapse.lib.cmd as s_cmd
|
|
6
|
-
import synapse.lib.output as s_output
|
|
7
|
-
import synapse.lib.rstorm as s_rstorm
|
|
8
|
-
|
|
9
|
-
logger = logging.getLogger(__name__)
|
|
10
|
-
|
|
11
|
-
prog = 'synapse.tools.rstorm'
|
|
12
|
-
descr = 'An RST pre-processor that allows you to embed storm directives.'
|
|
13
|
-
|
|
14
|
-
async def main(argv, outp=s_output.stdout):
|
|
15
|
-
|
|
16
|
-
pars = s_cmd.Parser(prog=prog, outp=outp, description=descr)
|
|
17
|
-
pars.add_argument('rstfile', help='Input RST file with storm directives.')
|
|
18
|
-
pars.add_argument('--save', help='Output file to save (default: stdout)')
|
|
19
|
-
|
|
20
|
-
opts = pars.parse_args(argv)
|
|
21
4
|
|
|
22
|
-
|
|
23
|
-
lines = await rstorm.run()
|
|
5
|
+
from synapse.tools.utils.rstorm import logger, main
|
|
24
6
|
|
|
25
|
-
|
|
26
|
-
|
|
27
|
-
fd.truncate(0)
|
|
28
|
-
[fd.write(line) for line in lines]
|
|
29
|
-
else:
|
|
30
|
-
for line in lines:
|
|
31
|
-
outp.printf(line, addnl=False)
|
|
7
|
+
s_common.deprecated('synapse.tools.rstorm is deprecated. Please use synapse.tools.utils.rstorm instead.',
|
|
8
|
+
curv='v2.225.0')
|
|
32
9
|
|
|
33
10
|
if __name__ == '__main__': # pragma: no cover
|
|
34
11
|
s_common.setlogging(logger)
|
|
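
Usage of the preprocessor is unchanged apart from the module path. A minimal sketch, assuming the relocated main is still an async (argv, outp) entry point with the rstfile/--save arguments shown above (the file names are placeholders):

    import asyncio

    from synapse.tools.utils.rstorm import main

    # Render an RST file containing storm directives; --save writes the result
    # to a file instead of printing it to stdout.
    asyncio.run(main(['guide.rstorm', '--save', 'guide.rst']))
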
synapse/tools/service/__init__.py
File without changes
synapse/tools/service/apikey.py
ADDED
@@ -0,0 +1,90 @@
+import synapse.exc as s_exc
+import synapse.telepath as s_telepath
+
+import synapse.lib.cmd as s_cmd
+import synapse.lib.time as s_time
+import synapse.lib.output as s_output
+
+descr = '''
+Add, list, or delete user API keys from a Synapse service.
+'''
+
+def printkey(outp, info, apikey=None):
+    iden = info.get('iden')
+    name = info.get('name')
+    created = info.get('created')
+    updated = info.get('updated')
+    expires = info.get('expires')
+
+    outp.printf(f'Iden: {iden}')
+    if apikey:
+        outp.printf(f' API Key: {apikey}')
+    outp.printf(f' Name: {name}')
+    outp.printf(f' Created: {s_time.repr(created)}')
+    outp.printf(f' Updated: {s_time.repr(updated)}')
+    if expires:
+        outp.printf(f' Expires: {s_time.repr(expires)}')
+
+    outp.printf('')
+
+async def main(argv, outp=s_output.stdout):
+
+    pars = s_cmd.Parser(prog='synapse.tools.service.apikey', outp=outp, description=descr)
+    pars.add_argument('--svcurl', default='cell:///vertex/storage', help='The telepath URL of the Synapse service.')
+
+    subpars = pars.add_subparsers(dest='action', required=True)
+
+    addp = subpars.add_parser('add', help='Add a user API key.')
+    addp.add_argument('-d', '--duration', type=int, help='The duration of the API key in seconds.')
+    addp.add_argument('-u', '--username', type=str, help='The username to add an API key to (restricted to admins).')
+    addp.add_argument('name', help='The name of the API key to add.')
+
+    listp = subpars.add_parser('list', help='List user API keys.')
+    listp.add_argument('-u', '--username', type=str, help='The username to list API keys for (restricted to admins).')
+
+    delp = subpars.add_parser('del', help='Delete a user API key.')
+    delp.add_argument('iden', help='The iden of the API key to delete.')
+
+    opts = pars.parse_args(argv)
+
+    async with s_telepath.withTeleEnv():
+
+        async with await s_telepath.openurl(opts.svcurl) as cell:
+
+            try:
+                useriden = None
+                if opts.action in ('add', 'list') and opts.username:
+                    user = await cell.getUserInfo(opts.username)
+                    useriden = user.get('iden')
+
+                if opts.action == 'add':
+                    if (duration := opts.duration) is not None:
+                        # Convert from seconds to milliseconds
+                        duration *= 1000
+
+                    apikey, info = await cell.addUserApiKey(opts.name, duration=duration, useriden=useriden)
+                    outp.printf(f'Successfully added API key with name={opts.name}.')
+                    printkey(outp, info, apikey)
+
+                elif opts.action == 'del':
+                    await cell.delUserApiKey(opts.iden)
+                    outp.printf(f'Successfully deleted API key with iden={opts.iden}.')
+
+                elif opts.action == 'list':
+                    apikeys = await cell.listUserApiKeys(useriden=useriden)
+                    if not apikeys:
+                        outp.printf('No API keys found.')
+                        return 0
+
+                    for info in apikeys:
+                        printkey(outp, info)
+
+            except s_exc.SynErr as exc:
+                mesg = exc.get('mesg')
+                outp.printf(f'ERROR: {exc.__class__.__name__}: {mesg}')
+                return 1
+
+    return 0
+
+if __name__ == '__main__':  # pragma: no cover
+    s_cmd.exitmain(main)
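
Based on the argument parser above, a minimal sketch of driving the new API key tool programmatically (the key name, duration, and iden are illustrative):

    import asyncio

    from synapse.tools.service.apikey import main

    # Add a key named "automation" that expires in 3600 seconds; the tool
    # converts the duration to milliseconds before calling addUserApiKey().
    asyncio.run(main(['add', '--duration', '3600', 'automation']))

    # List keys for the calling user; delete one by iden when done.
    asyncio.run(main(['list']))
    # asyncio.run(main(['del', '<iden>']))
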
synapse/tools/service/backup.py
ADDED
@@ -0,0 +1,181 @@
+import os
+import sys
+import glob
+import time
+import shutil
+import fnmatch
+import logging
+import argparse
+import contextlib
+
+import lmdb
+
+import synapse.common as s_common
+
+import synapse.lib.cmd as s_cmd
+
+logger = logging.getLogger(__name__)
+
+def backup(srcdir, dstdir, skipdirs=None):
+    '''
+    Create a backup of a Synapse application.
+
+    Args:
+        srcdir (str): Path to the directory to backup.
+        dstdir (str): Path to backup target directory.
+        skipdirs (list or None): Optional list of relative directory name glob patterns to exclude from the backup.
+
+    Note:
+        Running this method from the same process as a running user of the directory may lead to a segmentation fault
+    '''
+    with capturelmdbs(srcdir, skipdirs=skipdirs) as lmdbinfo:
+        txnbackup(lmdbinfo, srcdir, dstdir, skipdirs=skipdirs)
+
+@contextlib.contextmanager
+def capturelmdbs(srcdir, skipdirs=None, onlydirs=None):
+    '''
+    A context manager that opens all the lmdb files under a srcdir and makes a read transaction. All transactions are
+    aborted and environments closed when the context is exited.
+
+    Yields:
+        Dict[str, Tuple[lmdb.Environment, lmdb.Transaction]]: Maps path to environment, transaction
+    '''
+    if onlydirs:
+        lmdbpaths = onlydirs
+
+    else:
+        if skipdirs is None:
+            skipdirs = []
+
+        srcdir = glob.escape(os.path.abspath(srcdir))
+        skipdirs.append(os.path.join(srcdir, 'tmp/*'))
+        skipdirs.append(os.path.join(srcdir, '*/tmp/*'))
+
+        srcdirglob = s_common.genpath(srcdir, '**/data.mdb')
+        fniter = glob.iglob(srcdirglob, recursive=True)
+        lmdbpaths = [os.path.dirname(fn) for fn in fniter if not
+                     any([fnmatch.fnmatch(fn, pattern) for pattern in skipdirs])]
+
+    lmdbinfo = {}
+
+    with contextlib.ExitStack() as stack:
+        for path in lmdbpaths:
+            logger.debug(f'Capturing txn for {path}')
+            datafile = os.path.join(path, 'data.mdb')
+            stat = os.stat(datafile)
+            map_size = stat.st_size
+            env = stack.enter_context(
+                lmdb.open(path, map_size=map_size, max_dbs=16384, create=False, readonly=True))
+            txn = stack.enter_context(env.begin())
+            assert path not in lmdbinfo
+            lmdbinfo[path] = (env, txn)
+
+        yield lmdbinfo
+
+def txnbackup(lmdbinfo, srcdir, dstdir, skipdirs=None):
+    '''
+    Create a backup of a Synapse application under a (hopefully consistent) set of transactions.
+
+    Args:
+        lmdbinfo(Dict[str, Tuple[lmdb.Environment, lmdb.Transaction]]): Maps of path to environment, transaction
+        srcdir (str): Path to the directory to backup.
+        dstdir (str): Path to backup target directory.
+        skipdirs (list or None): Optional list of relative directory name glob patterns to exclude from the backup.
+
+    Note:
+        Running this method from the same process as a running user of the directory may lead to a segmentation fault
+    '''
+    tick = s_common.now()
+
+    srcdir = s_common.reqdir(srcdir)
+    dstdir = s_common.gendir(dstdir)
+
+    if skipdirs is None:
+        skipdirs = []
+
+    # Always avoid backing up temporary and backup directories
+    skipdirs.append('**/tmp')
+    skipdirs.append('**/backups')
+
+    logger.debug(f'Starting backup of [{srcdir}]')
+    logger.debug(f'Destination dir: [{dstdir}]')
+
+    for root, dnames, fnames in os.walk(srcdir, topdown=True):
+
+        relpath = os.path.relpath(root, start=srcdir)
+
+        for name in list(dnames):
+
+            srcpath = s_common.genpath(root, name)
+
+            relname = os.path.join(relpath, name)
+
+            if any([fnmatch.fnmatch(relname, pattern) for pattern in skipdirs]):
+                logger.debug(f'skipping dir:{srcpath}')
+                dnames.remove(name)
+                continue
+
+            dstpath = s_common.genpath(dstdir, relname)
+
+            info = lmdbinfo.get(os.path.abspath(srcpath))
+
+            if info is not None:
+                logger.debug('backing up lmdb file: %s', srcpath)
+                dnames.remove(name)
+                env, txn = info
+                backup_lmdb(env, dstpath, txn=txn)
+                continue
+
+            if name.endswith('.lmdb'):
+                logger.warning('lmdb file %s not copied', srcpath)
+                dnames.remove(name)
+                continue
+
+            logger.debug(f'making dir:{dstpath}')
+            s_common.gendir(dstpath)
+
+        for name in fnames:
+
+            srcpath = s_common.genpath(root, name)
+            # skip unix sockets etc...
+            if not os.path.isfile(srcpath):
+                continue
+
+            dstpath = s_common.genpath(dstdir, relpath, name)
+            logger.debug(f'copying: {srcpath} -> {dstpath}')
+            shutil.copy(srcpath, dstpath)
+
+    tock = s_common.now()
+
+    logger.debug(f'Backup complete. Took [{tock-tick:.2f}] for [{srcdir}]')
+    return
+
+def backup_lmdb(env: lmdb.Environment, dstdir: str, txn=None):
+
+    tick = time.time()
+
+    s_common.gendir(dstdir)
+
+    env.copy(dstdir, compact=True, txn=txn)
+
+    tock = time.time()
+    logger.info(f'backup of: {env.path()} took: {tock-tick:.2f} seconds')
+
+async def main(argv):
+    args = parse_args(argv)
+    backup(args.srcdir, args.dstdir, args.skipdirs)
+    return 0
+
+def parse_args(argv):
+    desc = 'Create an optimized backup of a Synapse directory.'
+    parser = argparse.ArgumentParser('synapse.tools.service.backup', description=desc)
+    parser.add_argument('srcdir', help='Path to the Synapse directory to backup.')
+    parser.add_argument('dstdir', help='Path to the backup target directory.')
+    parser.add_argument('--skipdirs', nargs='+',
+                        help='Glob patterns of relative directory names to exclude from the backup.')
+    args = parser.parse_args(argv)
+    return args
+
+if __name__ == '__main__':  # pragma: no cover
+    s_common.setlogging(logger, defval='DEBUG')
+    s_cmd.exitmain(main)
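
The module doubles as a library: backup() captures read transactions on every LMDB environment under srcdir and then copies the tree. A minimal sketch of a programmatic call, with illustrative paths and patterns:

    from synapse.tools.service.backup import backup

    # skipdirs takes relative glob patterns; tmp/ and backups/ directories are
    # always excluded by the tool itself.
    backup('/srv/syn/cortex00', '/backups/cortex00-snap', skipdirs=['**/scratch'])
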
synapse/tools/service/demote.py
ADDED
@@ -0,0 +1,47 @@
+import synapse.exc as s_exc
+
+import synapse.telepath as s_telepath
+
+import synapse.lib.cmd as s_cmd
+import synapse.lib.output as s_output
+import synapse.lib.urlhelp as s_urlhelp
+
+descr = '''
+Automatically select a new leader and demote this service.
+
+Example:
+    python -m synapse.tools.service.demote
+'''
+
+async def main(argv, outp=s_output.stdout):
+
+    pars = s_cmd.Parser(prog='synapse.tools.service.demote', outp=outp, description=descr)
+
+    pars.add_argument('--url', default='cell:///vertex/storage',
+                      help='The telepath URL of the Synapse service.')
+
+    pars.add_argument('--timeout', type=int, default=60,
+                      help='The timeout to use awaiting network connections.')
+
+    opts = pars.parse_args(argv)
+
+    async with s_telepath.withTeleEnv():
+
+        try:
+
+            async with await s_telepath.openurl(opts.url) as cell:
+
+                outp.printf(f'Demoting leader: {opts.url}')
+
+                if await cell.demote(timeout=opts.timeout):
+                    return 0
+
+        except s_exc.SynErr as e:
+            outp.printf(f'Error while demoting service {s_urlhelp.sanitizeUrl(opts.url)}: {e}')
+            return 1
+
+    outp.printf(f'Failed to demote service {s_urlhelp.sanitizeUrl(opts.url)}')
+    return 1
+
+if __name__ == '__main__':  # pragma: no cover
+    s_cmd.exitmain(main)