synapse 2.224.0__py311-none-any.whl → 2.225.0__py311-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of synapse might be problematic. Click here for more details.
- synapse/axon.py +10 -5
- synapse/lib/cell.py +1 -1
- synapse/lib/const.py +4 -0
- synapse/lib/multislabseqn.py +36 -1
- synapse/lib/nexus.py +67 -8
- synapse/lib/queue.py +4 -1
- synapse/lib/rstorm.py +2 -2
- synapse/lib/schemas.py +11 -1
- synapse/lib/slabseqn.py +28 -0
- synapse/lib/storm.py +5 -1
- synapse/lib/stormhttp.py +7 -1
- synapse/lib/version.py +2 -2
- synapse/models/inet.py +4 -0
- synapse/models/media.py +4 -0
- synapse/models/risk.py +3 -0
- synapse/tests/test_cortex.py +2 -2
- synapse/tests/test_lib_agenda.py +1 -1
- synapse/tests/test_lib_cell.py +1 -1
- synapse/tests/test_lib_certdir.py +1 -1
- synapse/tests/test_lib_httpapi.py +1 -1
- synapse/tests/test_lib_layer.py +1 -1
- synapse/tests/test_lib_multislabseqn.py +22 -0
- synapse/tests/test_lib_nexus.py +42 -1
- synapse/tests/test_lib_slabseqn.py +30 -1
- synapse/tests/test_lib_storm.py +59 -1
- synapse/tests/test_lib_stormhttp.py +16 -0
- synapse/tests/test_lib_stormlib_oauth.py +1 -1
- synapse/tests/test_lib_stormsvc.py +1 -1
- synapse/tests/test_lib_trigger.py +1 -1
- synapse/tests/test_model_inet.py +6 -0
- synapse/tests/test_model_media.py +4 -1
- synapse/tests/test_model_risk.py +2 -0
- synapse/tests/{test_tools_axon2axon.py → test_tools_axon_copy.py} +4 -4
- synapse/tests/{test_tools_pullfile.py → test_tools_axon_get.py} +4 -4
- synapse/tests/{test_tools_pushfile.py → test_tools_axon_put.py} +7 -7
- synapse/tests/{test_tools_csvtool.py → test_tools_cortex_csv.py} +12 -3
- synapse/tests/{test_tools_feed.py → test_tools_cortex_feed.py} +2 -2
- synapse/tests/{test_tools_apikey.py → test_tools_service_apikey.py} +1 -4
- synapse/tests/{test_tools_backup.py → test_tools_service_backup.py} +5 -5
- synapse/tests/{test_tools_demote.py → test_tools_service_demote.py} +1 -1
- synapse/tests/{test_tools_healthcheck.py → test_tools_service_healthcheck.py} +1 -1
- synapse/tests/{test_tools_livebackup.py → test_tools_service_livebackup.py} +1 -1
- synapse/tests/{test_tools_modrole.py → test_tools_service_modrole.py} +1 -1
- synapse/tests/{test_tools_moduser.py → test_tools_service_moduser.py} +1 -1
- synapse/tests/{test_tools_promote.py → test_tools_service_promote.py} +1 -1
- synapse/tests/{test_tools_reload.py → test_tools_service_reload.py} +1 -1
- synapse/tests/{test_tools_shutdown.py → test_tools_service_shutdown.py} +1 -1
- synapse/tests/{test_tools_snapshot.py → test_tools_service_snapshot.py} +1 -1
- synapse/tests/{test_tools_storm.py → test_tools_storm_cli.py} +1 -1
- synapse/tests/{test_tools_pkgs_gendocs.py → test_tools_storm_pkg_doc.py} +12 -3
- synapse/tests/{test_tools_genpkg.py → test_tools_storm_pkg_gen.py} +1 -1
- synapse/tests/{test_tools_autodoc.py → test_tools_utils_autodoc.py} +1 -1
- synapse/tests/test_tools_utils_changelog.py +454 -0
- synapse/tests/{test_tools_easycert.py → test_tools_utils_easycert.py} +48 -46
- synapse/tests/{test_tools_guid.py → test_tools_utils_guid.py} +3 -3
- synapse/tests/{test_tools_json2mpk.py → test_tools_utils_json2mpk.py} +3 -3
- synapse/tests/{test_tools_rstorm.py → test_tools_utils_rstorm.py} +6 -1
- synapse/tests/utils.py +3 -1
- synapse/tools/apikey.py +4 -83
- synapse/tools/autodoc.py +3 -1031
- synapse/tools/axon/copy.py +44 -0
- synapse/tools/axon/get.py +64 -0
- synapse/tools/axon/put.py +122 -0
- synapse/tools/axon2axon.py +3 -36
- synapse/tools/backup.py +6 -176
- synapse/tools/changelog.py +3 -1098
- synapse/tools/cortex/csv.py +236 -0
- synapse/tools/cortex/feed.py +151 -0
- synapse/tools/csvtool.py +3 -227
- synapse/tools/demote.py +4 -40
- synapse/tools/docker/validate.py +3 -3
- synapse/tools/easycert.py +4 -129
- synapse/tools/feed.py +3 -140
- synapse/tools/genpkg.py +3 -307
- synapse/tools/guid.py +7 -6
- synapse/tools/healthcheck.py +3 -101
- synapse/tools/json2mpk.py +6 -38
- synapse/tools/livebackup.py +4 -27
- synapse/tools/modrole.py +3 -108
- synapse/tools/moduser.py +3 -179
- synapse/tools/pkgs/gendocs.py +3 -164
- synapse/tools/promote.py +4 -41
- synapse/tools/pullfile.py +3 -56
- synapse/tools/pushfile.py +3 -114
- synapse/tools/reload.py +4 -61
- synapse/tools/rstorm.py +3 -26
- synapse/tools/service/__init__.py +0 -0
- synapse/tools/service/apikey.py +90 -0
- synapse/tools/service/backup.py +181 -0
- synapse/tools/service/demote.py +47 -0
- synapse/tools/service/healthcheck.py +109 -0
- synapse/tools/service/livebackup.py +34 -0
- synapse/tools/service/modrole.py +116 -0
- synapse/tools/service/moduser.py +184 -0
- synapse/tools/service/promote.py +48 -0
- synapse/tools/service/reload.py +68 -0
- synapse/tools/service/shutdown.py +51 -0
- synapse/tools/service/snapshot.py +64 -0
- synapse/tools/shutdown.py +5 -45
- synapse/tools/snapshot.py +4 -57
- synapse/tools/storm/__init__.py +0 -0
- synapse/tools/storm/__main__.py +5 -0
- synapse/tools/{storm.py → storm/_cli.py} +0 -3
- synapse/tools/storm/pkg/__init__.py +0 -0
- synapse/tools/{pkgs/pandoc_filter.py → storm/pkg/_pandoc_filter.py} +1 -1
- synapse/tools/storm/pkg/doc.py +176 -0
- synapse/tools/storm/pkg/gen.py +315 -0
- synapse/tools/utils/__init__.py +0 -0
- synapse/tools/utils/autodoc.py +1040 -0
- synapse/tools/utils/changelog.py +1124 -0
- synapse/tools/utils/easycert.py +136 -0
- synapse/tools/utils/guid.py +11 -0
- synapse/tools/utils/json2mpk.py +46 -0
- synapse/tools/utils/rstorm.py +35 -0
- {synapse-2.224.0.dist-info → synapse-2.225.0.dist-info}/METADATA +1 -1
- {synapse-2.224.0.dist-info → synapse-2.225.0.dist-info}/RECORD +120 -91
- synapse/tests/test_tools_changelog.py +0 -196
- /synapse/tests/{test_tools_axon.py → test_tools_axon_dump_load.py} +0 -0
- {synapse-2.224.0.dist-info → synapse-2.225.0.dist-info}/WHEEL +0 -0
- {synapse-2.224.0.dist-info → synapse-2.225.0.dist-info}/licenses/LICENSE +0 -0
- {synapse-2.224.0.dist-info → synapse-2.225.0.dist-info}/top_level.txt +0 -0
synapse/tools/shutdown.py
CHANGED
|
@@ -1,51 +1,11 @@
|
|
|
1
|
-
import synapse.
|
|
2
|
-
import synapse.telepath as s_telepath
|
|
1
|
+
import synapse.common as s_common
|
|
3
2
|
|
|
4
3
|
import synapse.lib.cmd as s_cmd
|
|
5
|
-
import synapse.lib.output as s_output
|
|
6
4
|
|
|
7
|
-
|
|
8
|
-
Initiate a graceful shutdown of a service.
|
|
5
|
+
from synapse.tools.service.shutdown import main
|
|
9
6
|
|
|
10
|
-
|
|
11
|
-
|
|
12
|
-
no new tasks are created. Without a timeout, it can block forever if
|
|
13
|
-
tasks do not exit.
|
|
7
|
+
s_common.deprecated('synapse.tools.shutdown is deprecated. Please use synapse.tools.service.shutdown instead.',
|
|
8
|
+
curv='v2.225.0')
|
|
14
9
|
|
|
15
|
-
|
|
16
|
-
exit code 1 if a timeout was specified and was hit. Upon hitting the timeout
|
|
17
|
-
the system resumes normal operation.
|
|
18
|
-
|
|
19
|
-
NOTE: This will also demote the service if run on a leader with mirrors.
|
|
20
|
-
'''
|
|
21
|
-
|
|
22
|
-
async def main(argv, outp=s_output.stdout):
|
|
23
|
-
|
|
24
|
-
pars = s_cmd.Parser(prog='synapse.tools.shutdown', outp=outp, description=desc)
|
|
25
|
-
|
|
26
|
-
pars.add_argument('--url', default='cell:///vertex/storage',
|
|
27
|
-
help='The telepath URL to connect to the service.')
|
|
28
|
-
|
|
29
|
-
pars.add_argument('--timeout', default=None, type=int,
|
|
30
|
-
help='An optional timeout in seconds. If timeout is reached, the shutdown is aborted.')
|
|
31
|
-
|
|
32
|
-
opts = pars.parse_args(argv)
|
|
33
|
-
|
|
34
|
-
async with s_telepath.withTeleEnv():
|
|
35
|
-
|
|
36
|
-
try:
|
|
37
|
-
|
|
38
|
-
async with await s_telepath.openurl(opts.url) as proxy:
|
|
39
|
-
|
|
40
|
-
if await proxy.shutdown(timeout=opts.timeout):
|
|
41
|
-
return 0
|
|
42
|
-
|
|
43
|
-
return 1
|
|
44
|
-
|
|
45
|
-
except Exception as e: # pragma: no cover
|
|
46
|
-
text = s_exc.reprexc(e)
|
|
47
|
-
outp.printf(f'Error while attempting graceful shutdown: {text}')
|
|
48
|
-
return 1
|
|
49
|
-
|
|
50
|
-
if __name__ == '__main__': # pragma: no cover
|
|
10
|
+
if __name__ == '__main__': # pragma: no cover
|
|
51
11
|
s_cmd.exitmain(main)
|
synapse/tools/snapshot.py
CHANGED
|
@@ -1,64 +1,11 @@
|
|
|
1
|
-
import
|
|
2
|
-
import synapse.exc as s_exc
|
|
3
|
-
import synapse.telepath as s_telepath
|
|
1
|
+
import synapse.common as s_common
|
|
4
2
|
|
|
5
3
|
import synapse.lib.cmd as s_cmd
|
|
6
|
-
import synapse.lib.output as s_output
|
|
7
4
|
|
|
8
|
-
|
|
5
|
+
from synapse.tools.service.snapshot import main
|
|
9
6
|
|
|
10
|
-
|
|
11
|
-
|
|
12
|
-
system admins to generate a transactionally consistent volume
|
|
13
|
-
snapshot using 3rd party tools.
|
|
14
|
-
|
|
15
|
-
The use pattern should be::
|
|
16
|
-
|
|
17
|
-
python -m synapse.tools.snapshot freeze
|
|
18
|
-
|
|
19
|
-
<generate volume snapshot using 3rd party tools>
|
|
20
|
-
|
|
21
|
-
python -m synapse.tools.snapshot resume
|
|
22
|
-
|
|
23
|
-
The tool will set the process exit code to 0 on success.
|
|
24
|
-
'''
|
|
25
|
-
|
|
26
|
-
async def main(argv, outp=s_output.stdout):
|
|
27
|
-
|
|
28
|
-
pars = s_cmd.Parser(prog='synapse.tools.snapshot', outp=outp, description=desc)
|
|
29
|
-
|
|
30
|
-
subs = pars.add_subparsers(required=True, title='commands', dest='cmd')
|
|
31
|
-
|
|
32
|
-
freeze = subs.add_parser('freeze', help='Suspend edits and sync changes to disk.')
|
|
33
|
-
freeze.add_argument('--timeout', type=int, default=120,
|
|
34
|
-
help='Maximum time to wait for the nexus lock.')
|
|
35
|
-
|
|
36
|
-
freeze.add_argument('--svcurl', default='cell:///vertex/storage',
|
|
37
|
-
help='The telepath URL of the Synapse service.')
|
|
38
|
-
|
|
39
|
-
resume = subs.add_parser('resume', help='Resume edits and continue normal operation.')
|
|
40
|
-
resume.add_argument('--svcurl', default='cell:///vertex/storage',
|
|
41
|
-
help='The telepath URL of the Synapse service.')
|
|
42
|
-
|
|
43
|
-
opts = pars.parse_args(argv)
|
|
44
|
-
|
|
45
|
-
try:
|
|
46
|
-
async with s_telepath.withTeleEnv():
|
|
47
|
-
|
|
48
|
-
async with await s_telepath.openurl(opts.svcurl) as proxy:
|
|
49
|
-
|
|
50
|
-
if opts.cmd == 'freeze':
|
|
51
|
-
await proxy.freeze(timeout=opts.timeout)
|
|
52
|
-
return 0
|
|
53
|
-
|
|
54
|
-
if opts.cmd == 'resume':
|
|
55
|
-
await proxy.resume()
|
|
56
|
-
return 0
|
|
57
|
-
|
|
58
|
-
except s_exc.SynErr as e:
|
|
59
|
-
mesg = e.errinfo.get('mesg')
|
|
60
|
-
outp.printf(f'ERROR {e.__class__.__name__}: {mesg}')
|
|
61
|
-
return 1
|
|
7
|
+
s_common.deprecated('synapse.tools.snapshot is deprecated. Please use synapse.tools.service.snapshot instead.',
|
|
8
|
+
curv='v2.225.0')
|
|
62
9
|
|
|
63
10
|
if __name__ == '__main__': # pragma: no cover
|
|
64
11
|
s_cmd.exitmain(main)
|
|
File without changes
|
|
File without changes
|
|
@@ -33,7 +33,7 @@ def main():
|
|
|
33
33
|
|
|
34
34
|
Usage:
|
|
35
35
|
|
|
36
|
-
pandoc -f rst -t markdown --filter ./synapse/tools/pkg/
|
|
36
|
+
pandoc -f rst -t markdown --filter ./synapse/tools/storm/pkg/_pandoc_filter.py -o foo.md foo.rst
|
|
37
37
|
'''
|
|
38
38
|
|
|
39
39
|
ast = json.load(sys.stdin)
|
|
@@ -0,0 +1,176 @@
|
|
|
1
|
+
import os
|
|
2
|
+
import shutil
|
|
3
|
+
import logging
|
|
4
|
+
import subprocess
|
|
5
|
+
|
|
6
|
+
import regex as re
|
|
7
|
+
|
|
8
|
+
import synapse.exc as s_exc
|
|
9
|
+
import synapse.common as s_common
|
|
10
|
+
|
|
11
|
+
import synapse.lib.cmd as s_cmd
|
|
12
|
+
import synapse.lib.output as s_output
|
|
13
|
+
|
|
14
|
+
import synapse.tools.utils.rstorm as s_rstorm
|
|
15
|
+
import synapse.tools.utils.autodoc as s_autodoc
|
|
16
|
+
|
|
17
|
+
logger = logging.getLogger(__name__)
|
|
18
|
+
|
|
19
|
+
_TOOLDIR = os.path.split(__file__)[0]
|
|
20
|
+
PANDOC_FILTER = os.path.join(_TOOLDIR, '_pandoc_filter.py')
|
|
21
|
+
|
|
22
|
+
# see https://www.sphinx-doc.org/en/master/usage/restructuredtext/field-lists.html#file-wide-metadata
|
|
23
|
+
re_sphinx_metadata_fields = re.compile(r'^:(tocdepth|nocomments|orphan|nosearch):( \w+)?\n\n',
|
|
24
|
+
flags=re.MULTILINE)
|
|
25
|
+
|
|
26
|
+
def hasPandoc():
    '''
    Return True if a working pandoc executable is available, False otherwise.
    '''
    # Use subprocess.run() rather than os.system() so that no shell is
    # spawned and pandoc's version banner is not dumped onto the tool's
    # stdout as a side effect of the availability check.
    try:
        proc = subprocess.run(['pandoc', '--version'], capture_output=True)
    except FileNotFoundError:
        return False
    return proc.returncode == 0
|
|
30
|
+
|
|
31
|
+
async def buildPkgDocs(outp, pkgpath: str, rst_only: bool =False):
    '''
    Build markdown docs for a storm package from its RStorm/RST sources.

    Args:
        outp: An output object used to relay pandoc stderr lines.
        pkgpath (str): Path to the storm package prototype yaml file.
        rst_only (bool): If true, stop after generating the .rst files
            (no pandoc conversion or markdown post-processing).

    Raises:
        s_exc.BadArg: If pkgpath does not exist or contains no yaml.
        s_exc.SynErr: If pandoc fails to convert an .rst file.
    '''
    logger.info(f'Building pkg for {pkgpath}')
    pkgdef = s_common.yamlload(pkgpath)
    if pkgdef is None:
        raise s_exc.BadArg(mesg=f'Package does not exist or does not contain yaml: {pkgpath}')

    dirn = os.path.dirname(s_common.genpath(pkgpath))

    docsdir = os.path.join(dirn, 'docs')
    builddir = os.path.join(dirn, 'docs', '_build')

    # Start from a clean build directory every run.
    shutil.rmtree(builddir, ignore_errors=True)

    s_common.gendir(builddir)

    # touch any files we need in order to load a package, due to
    # rstorm needing to load the package using genpkg tool. This
    # does mean that standalone builds of a storm package from this
    # must be done after using this buildpkg tool.
    stormpkg_md_present = False
    for dnfo in pkgdef.get('docs', ()):
        fpath = dnfo.get('path')
        with s_common.genfile(dirn, fpath) as fd:
            pass
        if fpath.endswith('stormpackage.md'):
            stormpkg_md_present = True

    # Generate the build .RST for stormpackage.md
    if stormpkg_md_present:
        logger.info(f'Generating stormpackage.rst for {pkgpath}')
        pkgdocs, pkgname = await s_autodoc.docStormpkg(pkgpath)
        with s_common.genfile(docsdir, 'stormpackage.rst') as fd:
            text = pkgdocs.getRstText()
            if rst_only is False:
                # Leave this in place if we're only generating RST
                text = text.replace('.. highlight:: none\n', '')
            fd.write(text.encode())
        logger.info('Generated the stormpackage.rst file!')

    # Run rstorm over each .rst source, then (unless rst_only) convert
    # the result to markdown and post-process it.
    for name in os.listdir(docsdir):

        if not name.endswith('.rst'):
            continue

        docpath = os.path.join(docsdir, name)

        basename = name.rsplit('.', 1)[0]

        builtmd = os.path.join(builddir, f'{basename}.md')
        builtrst = os.path.join(builddir, name)

        argv = (docpath, '--save', builtrst)
        logger.info(f'Executing rstorm for {argv}')
        await s_rstorm.main(argv)

        if rst_only:
            logger.info(f'rst_only enabled, done processing {name}')
            continue

        logger.info('Preprocessing rstorm output')
        with s_common.genfile(builtrst) as fd:
            buf = fd.read().decode()

        # Remove highlight:: none directives
        buf = buf.replace('.. highlight:: none\n', '')

        # Remove sphinx metadata fields
        buf = re_sphinx_metadata_fields.sub('', buf)

        lines = buf.splitlines(keepends=True)

        # Remove lines which start with explicit sphinx rst targets
        nlines1 = []
        for line in lines:
            if line.startswith('.. _') and line.strip().endswith(':'):
                logger.info(f'Dropping: [{line.strip()}]')
                continue
            nlines1.append(line)

        buf = ''.join(nlines1)

        # Rewrite the preprocessed rst in place.
        with s_common.genfile(builtrst) as fd:
            fd.truncate()
            _ = fd.write(buf.encode())

        logger.info(f'Converting {builtrst} to markdown')
        # The stormpackage doc gets the pandoc filter applied to it.
        if name == 'stormpackage.rst':
            args = ['pandoc', '--filter', PANDOC_FILTER, '-f', 'rst', '-t', 'markdown', '-o', builtmd, builtrst]
        else:
            args = ['pandoc', '-f', 'rst', '-t', 'markdown', '-o', builtmd, builtrst]

        r = subprocess.run(args, capture_output=True)

        # Re-write stderr (logging) to our outp
        for line in r.stderr.decode().splitlines():
            outp.printf(f'ERR: {line}')

        if r.returncode != 0:
            raise s_exc.SynErr(mesg=f'Error converting {builtrst} to {builtmd}')

        logger.info(f'Done converting {builtrst} to {builtmd}')

        # Strip out / manipulate the md content
        with s_common.genfile(builtmd) as fd:
            buf = fd.read().decode()

        lines = buf.splitlines(keepends=True)

        # Remove lines which only have a single `:` left in them
        nlines1 = [line for line in lines if line.strip() != ':']

        buf = ''.join(nlines1)

        # Rewrite the cleaned markdown in place.
        with s_common.genfile(builtmd) as fd:
            fd.truncate()
            _ = fd.write(buf.encode())

        logger.info('Done manipulating markdown')

    logger.info(f'buildPkgDocs complete for {pkgpath}.')
|
|
152
|
+
|
|
153
|
+
prog = 'synapse.tools.storm.pkg.doc'
desc = 'A tool for building storm package docs from RStorm into markdown. This tool requires pandoc to be available.'

async def main(argv, outp=s_output.stdout):
    '''
    CLI entry point for building storm package docs.

    Args:
        argv (list): Command line arguments.
        outp: Output object for printing results.

    Returns:
        int: 0 on success, 1 if pandoc is required but unavailable.
    '''
    pars = s_cmd.Parser(prog=prog, outp=outp, description=desc)
    pars.add_argument('pkgfile', metavar='<pkgfile>', help='Path to a storm package prototype yml file.')
    pars.add_argument('--rst-only', default=False, action='store_true',
                      help='Stops building after the .rst files have been generated.')

    opts = pars.parse_args(argv)

    # Pandoc is only required for the markdown conversion stage.
    if opts.rst_only is False and not hasPandoc():
        logger.error('Pandoc is not available, can only run rst/rstorm output.')
        return 1

    await buildPkgDocs(outp, opts.pkgfile, rst_only=opts.rst_only)

    return 0

if __name__ == '__main__':  # pragma: no cover
    s_common.setlogging(logger, 'DEBUG')
    # The vcr library is chatty at DEBUG; quiet it down.
    logging.getLogger('vcr').setLevel(logging.WARNING)
    s_cmd.exitmain(main)
|
|
@@ -0,0 +1,315 @@
|
|
|
1
|
+
import io
|
|
2
|
+
import os
|
|
3
|
+
import base64
|
|
4
|
+
import logging
|
|
5
|
+
|
|
6
|
+
import regex
|
|
7
|
+
|
|
8
|
+
import synapse.exc as s_exc
|
|
9
|
+
import synapse.common as s_common
|
|
10
|
+
import synapse.telepath as s_telepath
|
|
11
|
+
|
|
12
|
+
import synapse.lib.cmd as s_cmd
|
|
13
|
+
import synapse.lib.json as s_json
|
|
14
|
+
import synapse.lib.output as s_output
|
|
15
|
+
import synapse.lib.certdir as s_certdir
|
|
16
|
+
import synapse.lib.dyndeps as s_dyndeps
|
|
17
|
+
import synapse.lib.schemas as s_schemas
|
|
18
|
+
import synapse.lib.version as s_version
|
|
19
|
+
|
|
20
|
+
logger = logging.getLogger(__name__)
|
|
21
|
+
|
|
22
|
+
wflownamere = regex.compile(r'^([\w-]+)\.yaml$')
|
|
23
|
+
|
|
24
|
+
def getStormStr(fn):
    '''
    Read a storm file from disk and return its contents as a str.

    Args:
        fn (str): Path to the storm file.

    Returns:
        str: The decoded contents of the file.

    Raises:
        s_exc.NoSuchFile: If fn is not a regular file.
    '''
    if os.path.isfile(fn):
        with open(fn, 'rb') as fd:
            return fd.read().decode()

    raise s_exc.NoSuchFile(mesg='Storm file {} not found'.format(fn), path=fn)
|
|
30
|
+
|
|
31
|
+
def loadOpticFiles(pkgdef, path):
    '''
    Walk an Optic directory tree and add each file to the package definition.

    Files are stored base64 encoded under pkgdef['optic']['files'], keyed by
    their path relative to *path*. Hidden files are skipped.
    '''
    filedefs = pkgdef['optic']['files']

    basedir = s_common.genpath(path)
    prefix = len(basedir) + 1

    for dirpath, dirnames, filenames in os.walk(path):

        for fname in filenames:

            # Ignore hidden files (e.g. editor swap files).
            if fname.startswith('.'):  # pragma: no cover
                continue

            srcpath = s_common.genpath(dirpath, fname)
            if not os.path.isfile(srcpath):  # pragma: no cover
                continue

            relname = srcpath[prefix:]

            with open(srcpath, 'rb') as fd:
                filedefs[relname] = {
                    'file': base64.b64encode(fd.read()).decode(),
                }
|
|
53
|
+
|
|
54
|
+
def loadOpticWorkflows(pkgdef, path):
    '''
    Load Optic workflow yaml files from a directory into the package definition.

    Each file matching <name>.yaml is parsed and stored under
    pkgdef['optic']['workflows'][<name>]; non-matching files are skipped
    with a warning.
    '''
    wdefs = pkgdef['optic']['workflows']

    for dirpath, dirnames, filenames in os.walk(path):

        for fname in filenames:

            match = wflownamere.match(fname)

            if match is None:
                logger.warning('Skipping workflow "%s" that does not match pattern "%s"' % (fname, wflownamere.pattern))
                continue

            wname = match.groups()[0]

            wpath = s_common.genpath(dirpath, fname)
            if not os.path.isfile(wpath):  # pragma: no cover
                continue

            wdefs[wname] = s_common.yamlload(wpath)
|
|
75
|
+
|
|
76
|
+
def tryLoadPkgProto(fp, opticdir=None, readonly=False):
    '''
    Try to get a Storm Package prototype from disk with or without inline documentation.

    Args:
        fp (str): Path to the package .yaml file on disk.
        opticdir (str): Path to optional Optic module code to add to the Storm Package.
        readonly (bool): If set, open files in read-only mode. If files are missing, that will raise a NoSuchFile
                         exception.

    Returns:
        dict: A Storm package definition.
    '''
    try:
        return loadPkgProto(fp, opticdir=opticdir, readonly=readonly)
    except s_exc.NoSuchFile:
        # A referenced doc file was missing; retry without inline docs.
        pass
    return loadPkgProto(fp, opticdir=opticdir, no_docs=True, readonly=readonly)
|
|
93
|
+
|
|
94
|
+
def loadPkgProto(path, opticdir=None, no_docs=False, readonly=False):
    '''
    Get a Storm Package definition from disk.

    Args:
        path (str): Path to the package .yaml file on disk.
        opticdir (str): Path to optional Optic module code to add to the Storm Package.
        no_docs (bool): If true, omit inline documentation content if it is not present on disk.
        readonly (bool): If set, open files in read-only mode. If files are missing, that will raise a NoSuchFile
        exception.

    Returns:
        dict: A Storm package definition.
    '''

    full = s_common.genpath(path)
    pkgdef = s_common.yamlload(full)
    if pkgdef is None:
        raise s_exc.NoSuchFile(mesg=f'File {full} does not exist or is empty.', path=full)

    # Normalize a (major, minor, patch) sequence version into a string.
    version = pkgdef.get('version')
    if isinstance(version, (tuple, list)):
        pkgdef['version'] = '%d.%d.%d' % tuple(version)

    protodir = os.path.dirname(full)
    pkgname = pkgdef.get('name')

    # genopts are build-time only options and are removed from the pkgdef.
    genopts = pkgdef.pop('genopts', {})

    # Stamp build info into the pkgdef if it doesn't already exist
    pkgdef.setdefault('build', {})
    pkgdef['build'].setdefault('time', s_common.now())
    pkgdef['build'].setdefault('synapse:version', s_version.verstring)
    pkgdef['build'].setdefault('synapse:commit', s_version.commit)

    # Inline the logo file contents (base64) if a path was given.
    logodef = pkgdef.get('logo')
    if logodef is not None:

        path = logodef.pop('path', None)

        if path is not None:
            with s_common.reqfile(protodir, path) as fd:
                logodef['file'] = base64.b64encode(fd.read()).decode()

        if logodef.get('mime') is None:
            mesg = 'Mime type must be specified for logo file.'
            raise s_exc.BadPkgDef(mesg=mesg)

        if logodef.get('file') is None:
            mesg = 'Logo def must contain path or file.'
            raise s_exc.BadPkgDef(mesg=mesg)

    # Inline doc content (or blank it out when no_docs is set).
    for docdef in pkgdef.get('docs', ()):

        if docdef.get('title') is None:
            mesg = 'Each entry in docs must have a title.'
            raise s_exc.BadPkgDef(mesg=mesg)

        if no_docs:
            docdef['content'] = ''
            continue

        path = docdef.pop('path', None)
        if path is not None:
            with s_common.reqfile(protodir, path) as fd:
                docdef['content'] = fd.read().decode()

        if docdef.get('content') is None:
            mesg = 'Docs entry has no path or content.'
            raise s_exc.BadPkgDef(mesg=mesg)

    # Load module storm source from storm/modules/<name>[.storm].
    for mod in pkgdef.get('modules', ()):

        name = mod.get('name')

        basename = name
        if genopts.get('dotstorm', False):
            basename = f'{basename}.storm'

        mod_path = s_common.genpath(protodir, 'storm', 'modules', basename)
        if readonly:
            mod['storm'] = getStormStr(mod_path)
        else:
            # genfile creates a missing file rather than raising.
            with s_common.genfile(mod_path) as fd:
                mod['storm'] = fd.read().decode()

    # Fold external modules into the regular modules list, namespaced
    # under the package name.
    for extmod in pkgdef.get('external_modules', ()):
        fpth = extmod.get('file_path')
        if fpth is not None:
            extmod['storm'] = getStormStr(fpth)
        else:
            path = extmod.get('package_path')
            extpkg = s_dyndeps.tryDynMod(extmod.get('package'))
            extmod['storm'] = extpkg.getAssetStr(path)

        extname = extmod.get('name')
        extmod['name'] = f'{pkgname}.{extname}'

        pkgdef.setdefault('modules', [])
        pkgdef['modules'].append(extmod)

    pkgdef.pop('external_modules', None)

    # Load command storm source from storm/commands/<name>[.storm].
    for cmd in pkgdef.get('commands', ()):
        name = cmd.get('name')

        basename = name
        if genopts.get('dotstorm'):
            basename = f'{basename}.storm'

        cmd_path = s_common.genpath(protodir, 'storm', 'commands', basename)
        if readonly:
            cmd['storm'] = getStormStr(cmd_path)
        else:
            with s_common.genfile(cmd_path) as fd:
                cmd['storm'] = fd.read().decode()

    # Graph defs get a deterministic iden and power-up scoping.
    for gdef in pkgdef.get('graphs', ()):
        gdef['iden'] = s_common.guid((pkgname, gdef.get('name')))
        gdef['scope'] = 'power-up'
        gdef['power-up'] = pkgname

    # Init versions must be strictly increasing.
    inits = pkgdef.get('inits')
    if inits is not None:
        lastver = None
        for initdef in inits.get('versions'):
            curver = initdef.get('version')
            if lastver is not None and not curver > lastver:
                raise s_exc.BadPkgDef(mesg='Init versions must be monotonically increasing.', version=curver)
            lastver = curver

    # Pick up Optic workflows from a conventional workflows/ directory.
    wflowdir = s_common.genpath(protodir, 'workflows')
    if os.path.isdir(wflowdir):
        pkgdef.setdefault('optic', {})
        pkgdef['optic'].setdefault('workflows', {})
        loadOpticWorkflows(pkgdef, wflowdir)

    # Pick up Optic files from the given dir, or a conventional optic/ dir.
    if opticdir is None:
        opticdir = s_common.genpath(protodir, 'optic')

    if os.path.isdir(opticdir):
        pkgdef.setdefault('optic', {})
        pkgdef['optic'].setdefault('files', {})
        loadOpticFiles(pkgdef, opticdir)

    s_schemas.reqValidPkgdef(pkgdef)

    # Ensure the package is json safe and tuplify it.
    s_json.reqjsonsafe(pkgdef, strict=True)
    pkgdef = s_common.tuplify(pkgdef)
    return pkgdef
|
|
245
|
+
|
|
246
|
+
|
|
247
|
+
desc = 'A tool for generating/pushing storm packages from YAML prototypes.'

async def main(argv, outp=s_output.stdout):
    '''
    CLI entry point: build (or load) a storm package definition, optionally
    code-sign it, and save it to disk and/or push it to a Cortex/PkgRepo.

    Returns:
        int: 0 on success, 1 on a usage or load error.
    '''
    pars = s_cmd.Parser(prog='synapse.tools.storm.pkg.gen', outp=outp, description=desc)
    pars.add_argument('--push', metavar='<url>', help='A telepath URL of a Cortex or PkgRepo.')
    pars.add_argument('--push-verify', default=False, action='store_true',
                      help='Tell the Cortex to verify the package signature.')
    pars.add_argument('--save', metavar='<path>', help='Save the completed package JSON to a file.')
    pars.add_argument('--optic', metavar='<path>', help='Load Optic module files from a directory.')
    pars.add_argument('--signas', metavar='<name>', help='Specify a code signing identity to use from ~/.syn/certs/code.')
    pars.add_argument('--certdir', metavar='<dir>', default='~/.syn/certs',
                      help='Specify an alternate certdir to ~/.syn/certs.')
    pars.add_argument('--no-build', action='store_true',
                      help='Treat pkgfile argument as an already-built package')
    pars.add_argument('--no-docs', default=False, action='store_true',
                      help='Do not require docs to be present and replace any doc content with empty strings.')
    pars.add_argument('pkgfile', metavar='<pkgfile>',
                      help='Path to a storm package prototype .yaml file, or a completed package .json/.yaml file.')

    opts = pars.parse_args(argv)

    if opts.no_build:
        # The pkgfile is already a completed package; load it verbatim.
        pkgdef = s_common.yamlload(opts.pkgfile)
        if not pkgdef:
            outp.printf(f'Unable to load pkgdef from [{opts.pkgfile}]')
            return 1
        if opts.save:
            outp.printf(f'File {opts.pkgfile} is treated as already built (--no-build); incompatible with --save.')
            return 1
    else:
        pkgdef = loadPkgProto(opts.pkgfile, opticdir=opts.optic, no_docs=opts.no_docs)

    # Optionally code-sign the package with a local code-signing identity.
    if opts.signas is not None:

        s_certdir.addCertPath(opts.certdir)
        certdir = s_certdir.getCertDir()

        pkey = certdir.getCodeKey(opts.signas)
        with io.open(certdir.getCodeCertPath(opts.signas)) as fd:
            cert = fd.read()

        sign = s_common.ehex(pkey.signitem(pkgdef))

        pkgdef['codesign'] = {
            'cert': cert,
            'sign': sign,
        }

    s_schemas.reqValidPkgdef(pkgdef)

    if not opts.save and not opts.push:
        outp.printf('Neither --push nor --save provided. Nothing to do.')
        return 1

    if opts.save:
        s_json.jssave(pkgdef, opts.save)

    if opts.push:

        async with s_telepath.withTeleEnv():

            async with await s_telepath.openurl(opts.push) as core:
                await core.addStormPkg(pkgdef, verify=opts.push_verify)

    return 0

if __name__ == '__main__':  # pragma: no cover
    s_cmd.exitmain(main)
|
|
File without changes
|