synapse 2.213.0__py311-none-any.whl → 2.214.0__py311-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- synapse/cortex.py +37 -6
- synapse/daemon.py +6 -6
- synapse/exc.py +13 -1
- synapse/lib/aha.py +5 -0
- synapse/lib/ast.py +2 -6
- synapse/lib/boss.py +47 -2
- synapse/lib/cell.py +193 -3
- synapse/lib/certdir.py +44 -1
- synapse/lib/cmd.py +24 -0
- synapse/lib/coro.py +8 -2
- synapse/lib/drive.py +7 -2
- synapse/lib/link.py +11 -3
- synapse/lib/schemas.py +1 -1
- synapse/lib/snap.py +76 -65
- synapse/lib/storm.py +2 -1
- synapse/lib/stormlib/imap.py +3 -2
- synapse/lib/stormlib/spooled.py +1 -0
- synapse/lib/task.py +1 -0
- synapse/lib/version.py +2 -2
- synapse/models/inet.py +5 -0
- synapse/tests/files/testpkg_build_docs/docs/bar.rst +15 -0
- synapse/tests/files/testpkg_build_docs/docs/foo.rst +4 -0
- synapse/tests/files/testpkg_build_docs/storm/commands/testcmd.storm +0 -0
- synapse/tests/files/testpkg_build_docs/storm/modules/apimod.storm +0 -0
- synapse/tests/files/testpkg_build_docs/storm/modules/testmod.storm +0 -0
- synapse/tests/files/testpkg_build_docs/storm/testcmd.storm +5 -0
- synapse/tests/files/testpkg_build_docs/testpkg.yaml +69 -0
- synapse/tests/test_cortex.py +20 -1
- synapse/tests/test_daemon.py +1 -1
- synapse/tests/test_exc.py +6 -0
- synapse/tests/test_lib_ast.py +69 -14
- synapse/tests/test_lib_boss.py +8 -0
- synapse/tests/test_lib_cell.py +104 -5
- synapse/tests/test_lib_certdir.py +8 -0
- synapse/tests/test_lib_coro.py +5 -0
- synapse/tests/test_lib_httpapi.py +10 -2
- synapse/tests/test_lib_link.py +1 -1
- synapse/tests/test_lib_storm.py +121 -1
- synapse/tests/test_lib_stormlib_spooled.py +20 -0
- synapse/tests/test_lib_types.py +1 -1
- synapse/tests/test_model_inet.py +7 -0
- synapse/tests/test_telepath.py +32 -5
- synapse/tests/test_tools_axon.py +304 -0
- synapse/tests/test_tools_cortex_layer.py +419 -0
- synapse/tests/test_tools_demote.py +114 -0
- synapse/tests/test_tools_pkgs_gendocs.py +100 -0
- synapse/tests/test_tools_shutdown.py +95 -0
- synapse/tests/test_utils.py +22 -1
- synapse/tests/utils.py +44 -29
- synapse/tools/aha/easycert.py +2 -0
- synapse/tools/aha/enroll.py +3 -0
- synapse/tools/axon/__init__.py +0 -0
- synapse/tools/axon/dump.py +155 -0
- synapse/tools/axon/load.py +89 -0
- synapse/tools/cortex/__init__.py +0 -0
- synapse/tools/cortex/layer/__init__.py +0 -0
- synapse/tools/cortex/layer/dump.py +184 -0
- synapse/tools/cortex/layer/load.py +129 -0
- synapse/tools/demote.py +52 -0
- synapse/tools/healthcheck.py +1 -1
- synapse/tools/pkgs/gendocs.py +176 -0
- synapse/tools/pkgs/pandoc_filter.py +79 -0
- synapse/tools/shutdown.py +52 -0
- {synapse-2.213.0.dist-info → synapse-2.214.0.dist-info}/METADATA +1 -1
- {synapse-2.213.0.dist-info → synapse-2.214.0.dist-info}/RECORD +68 -45
- {synapse-2.213.0.dist-info → synapse-2.214.0.dist-info}/WHEEL +0 -0
- {synapse-2.213.0.dist-info → synapse-2.214.0.dist-info}/licenses/LICENSE +0 -0
- {synapse-2.213.0.dist-info → synapse-2.214.0.dist-info}/top_level.txt +0 -0

synapse/tools/cortex/layer/dump.py
ADDED

@@ -0,0 +1,184 @@
+import os
+import tempfile
+import contextlib
+
+import synapse.exc as s_exc
+import synapse.common as s_common
+import synapse.telepath as s_telepath
+
+import synapse.lib.cmd as s_cmd
+import synapse.lib.output as s_output
+import synapse.lib.msgpack as s_msgpack
+
+descr = '''
+Export node edits from a Synapse layer.
+'''
+
+
+@contextlib.contextmanager
+def _tmpfile(dirn: str | None = None, prefix: str | None = None):
+    '''
+    Context manager to create a temporary file and close it when finished. If an
+    error occurs within the scope of the context manager, the tempfile will be
+    automatically deleted.
+
+    Args:
+        dirn: The optional directory name to create the tempfile in.
+        prefix: The optional tempfile name prefix.
+    '''
+    (_fd, path) = tempfile.mkstemp(dir=dirn, prefix=prefix)
+
+    try:
+        with contextlib.closing(os.fdopen(_fd, 'wb+')) as fd:
+            yield (fd, path)
+
+    except Exception: # pragma: no cover
+        os.unlink(path)
+        raise
+
+async def exportLayer(opts, outp):
+
+    async with await s_telepath.openurl(opts.url) as cell:
+
+        info = await cell.getCellInfo()
+
+        if (celltype := info['cell']['type']) != 'cortex':
+            mesg = f'Layer dump tool only works on cortexes, not {celltype}'
+            raise s_exc.TypeMismatch(mesg=mesg)
+
+        celliden = info['cell']['iden']
+        cellvers = info['cell']['version']
+
+        # Find and read state file
+        state = {}
+        statefile = opts.statefile
+        if statefile is None:
+            statefile = s_common.genpath(opts.outdir, f'{celliden}.{opts.iden}.yaml')
+
+        if (data := s_common.yamlload(statefile)) is not None:
+            state = data
+
+        if (soffs := opts.offset) is None:
+            soffs = state.get('offset:next', 0)
+
+        eoffs = None
+
+        async with await s_telepath.openurl(opts.url, name=f'*/layer/{opts.iden}') as layer:
+
+            # Handle no edits to export
+            if soffs >= await layer.getEditIndx():
+                mesg = f'No edits to export starting from offset ({soffs})'
+                raise s_exc.BadArg(mesg=mesg)
+
+            finished = False
+
+            genr = layer.syncNodeEdits2(soffs, wait=False)
+
+            nodeiter = aiter(genr)
+
+            while not finished:
+
+                try:
+                    # Pull the first edit so we can get the starting offset
+                    first = await anext(nodeiter)
+                except StopAsyncIteration: # pragma: no cover
+                    break
+
+                soffs = first[0]
+
+                with _tmpfile(dirn=opts.outdir, prefix='layer.dump') as (fd, tmppath):
+
+                    # Write header to file
+                    fd.write(s_msgpack.en((
+                        'init',
+                        {
+                            'hdrvers': 1,
+                            'celliden': celliden,
+                            'cellvers': cellvers,
+                            'layriden': opts.iden,
+                            'offset': soffs,
+                            'chunksize': opts.chunksize,
+                            'tick': s_common.now(),
+                        }
+                    )))
+
+                    # Now write the first edit that we already pulled
+                    fd.write(s_msgpack.en(('edit', first)))
+
+                    count = 1
+
+                    async for nodeedit in nodeiter:
+
+                        # Write individual edits to file
+                        fd.write(s_msgpack.en(('edit', nodeedit)))
+
+                        eoffs = nodeedit[0]
+
+                        count += 1
+
+                        if opts.chunksize and count % opts.chunksize == 0:
+                            break
+
+                    else:
+                        finished = True
+
+                    # Write footer to file
+                    fd.write(s_msgpack.en(('fini', {
+                        'offset': eoffs,
+                        'tock': s_common.now(),
+                    })))
+
+                path = s_common.genpath(opts.outdir, f'{celliden}.{opts.iden}.{soffs}-{eoffs}.nodeedits')
+                os.rename(tmppath, path)
+                outp.printf(f'Wrote layer node edits {soffs}-{eoffs} to {path}.')
+
+                # Save state file after each export file
+                state['offset:next'] = eoffs + 1
+                s_common.yamlsave(state, statefile)
+
+    return 0
+
+async def main(argv, outp=s_output.stdout):
+
+    pars = s_cmd.Parser(prog='layer.dump', outp=outp, description=descr)
+    pars.add_argument('--url', default='cell:///vertex/storage',
+                      help='The telepath URL of the Synapse service.')
+    pars.add_argument('--offset', default=None, type=int,
+                      help='The starting offset of the node edits to export.')
+    pars.add_argument('--chunksize', default=0, type=int,
+                      help='The number of node edits to store in a single file. Zero to disable chunking.')
+    pars.add_argument('--statefile', type=str, default=None,
+                      help='Path to the state tracking file for this layer dump.')
+
+    pars.add_argument('iden', help='The iden of the layer to export.')
+    pars.add_argument('outdir', help='The directory to save the exported node edits to.')
+
+    opts = pars.parse_args(argv)
+
+    if os.path.exists(opts.outdir) and not os.path.isdir(opts.outdir):
+        mesg = f'Specified output directory {opts.outdir} exists but is not a directory.'
+        outp.printf(f'ERROR: {mesg}')
+        return 1
+
+    os.makedirs(opts.outdir, exist_ok=True)
+
+    async with s_telepath.withTeleEnv():
+        try:
+            await exportLayer(opts, outp)
+
+        except s_exc.SynErr as exc:
+            mesg = exc.get('mesg')
+            outp.printf(f'ERROR: {mesg}.')
+            return 1
+
+        except Exception as exc: # pragma: no cover
+            mesg = str(exc)
+            outp.printf(f'ERROR: {mesg}.')
+            return 1
+
+    outp.printf(f'Successfully exported layer {opts.iden}.')
+
+    return 0
+
+if __name__ == '__main__': # pragma: no cover
+    s_cmd.exitmain(main)
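
The new dump tool writes msgpack-framed export files: one ('init', {...}) header, a stream of ('edit', ...) records, and a ('fini', {...}) footer, rotating files per --chunksize and tracking progress in a YAML state file. As a rough sketch (the layer iden and output directory below are hypothetical placeholders, not values from this diff), it can be driven the same way the CLI entry point runs it:

    import asyncio

    import synapse.tools.cortex.layer.dump as s_dump

    # Hypothetical layer iden and output directory for illustration only.
    argv = ['--chunksize', '1000', '<layer-iden>', '/tmp/layerdump']
    asyncio.run(s_dump.main(argv))

Re-running with the same state file resumes from the saved offset:next value, per the state handling above.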

synapse/tools/cortex/layer/load.py
ADDED

@@ -0,0 +1,129 @@
+import os
+
+import synapse.exc as s_exc
+import synapse.telepath as s_telepath
+
+import synapse.lib.cmd as s_cmd
+import synapse.lib.time as s_time
+import synapse.lib.output as s_output
+import synapse.lib.msgpack as s_msgpack
+import synapse.lib.version as s_version
+
+descr = '''
+Import node edits to a Synapse layer.
+'''
+
+async def importLayer(infiles, opts, outp):
+
+    async with await s_telepath.openurl(opts.url) as cell:
+
+        info = await cell.getCellInfo()
+
+        if (celltype := info['cell']['type']) != 'cortex':
+            mesg = f'Layer load tool only works on cortexes, not {celltype}.'
+            raise s_exc.TypeMismatch(mesg=mesg)
+
+        # Get the highest cellvers from all the input files
+        reqver = max([infile[0].get('cellvers') for infile in infiles])
+
+        if (synver := info.get('cell').get('version')) < reqver:
+            synstr = s_version.fmtVersion(*synver)
+            reqstr = s_version.fmtVersion(*reqver)
+            mesg = f'Synapse version mismatch ({synstr} < {reqstr}).'
+            raise s_exc.BadVersion(mesg=mesg)
+
+    async with await s_telepath.openurl(opts.url, name=f'*/layer/{opts.iden}') as layer:
+        for header, filename, genr in infiles:
+            soffs = header.get('offset')
+            tick = header.get('tick')
+
+            outp.printf(f'Loading {filename}, offset={soffs}, tick={s_time.repr(tick)}.')
+
+            eoffs = soffs
+            fini = None
+
+            for item in genr:
+                match item:
+                    case ('edit', (eoffs, edit, meta)):
+                        if opts.dryrun:
+                            continue
+
+                        await layer.saveNodeEdits(edit, meta=meta)
+
+                    case ('fini', info):
+                        fini = info
+                        break
+
+                    case _:
+                        mtype = item[0]
+                        mesg = f'Unexpected message type: {mtype}.'
+                        raise s_exc.BadMesgFormat(mesg=mesg)
+
+            if fini is None:
+                mesg = f'Incomplete/corrupt export: {filename}.'
+                raise s_exc.BadDataValu(mesg=mesg)
+
+            elif (offset := fini.get('offset')) != eoffs:
+                mesg = f'Incomplete/corrupt export: {filename}. Expected offset {offset}, got {eoffs}.'
+                raise s_exc.BadDataValu(mesg=mesg)
+
+            else:
+                if opts.dryrun:
+                    outp.printf(f'Successfully read {filename} as a dryrun test.')
+                else:
+                    outp.printf(f'Successfully loaded {filename} with {eoffs + 1 - soffs} edits ({soffs} - {eoffs}).')
+
+async def main(argv, outp=s_output.stdout):
+
+    pars = s_cmd.Parser(prog='layer.load', outp=outp, description=descr)
+    pars.add_argument('--dryrun', action='store_true', help="Process files but don't apply changes.")
+    pars.add_argument('--url', default='cell:///vertex/storage', help='The telepath URL of the Synapse service.')
+
+    pars.add_argument('iden', help='The iden of the layer to import to.')
+    pars.add_argument('files', nargs='+', help='The .nodeedits files to import from.')
+
+    opts = pars.parse_args(argv)
+
+    infiles = []
+
+    try:
+        # Load the files
+        for filename in opts.files:
+            if not os.path.exists(filename) or not os.path.isfile(filename):
+                mesg = f'Invalid input file specified: {filename}.'
+                raise s_exc.NoSuchFile(mesg=mesg)
+
+            genr = s_msgpack.iterfile(filename)
+            header = next(genr)
+            if header[0] != 'init':
+                mesg = f'Invalid header in {filename}.'
+                raise s_exc.BadMesgFormat(mesg=mesg)
+
+            infiles.append((header[1], filename, genr))
+
+        # Sort the files based on their offset
+        infiles = sorted(infiles, key=lambda x: x[0].get('offset'))
+
+        outp.printf('Processing the following nodeedits:')
+        outp.printf('Offset           | Filename')
+        outp.printf('-----------------|----------')
+        for header, filename, genr in infiles:
+            offset = header.get('offset')
+            outp.printf(f'{offset:<16d} | {filename}')
+
+        async with s_telepath.withTeleEnv():
+            await importLayer(infiles, opts, outp)
+
+        return 0
+
+    except s_exc.SynErr as exc:
+        mesg = exc.get('mesg')
+        outp.printf(f'ERROR: {mesg}.')
+        return 1
+
+    except Exception as exc: # pragma: no cover
+        outp.printf(f'ERROR: {str(exc)}.')
+        return 1
+
+if __name__ == '__main__': # pragma: no cover
+    s_cmd.exitmain(main)
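
Since the export files are plain msgpack streams, they can also be inspected offline without a Cortex. A hedged sketch using s_msgpack.iterfile, the same helper the load tool uses above (the file path is a hypothetical placeholder):

    import synapse.lib.msgpack as s_msgpack

    # Hypothetical path; real dumps are named <celliden>.<layriden>.<start>-<end>.nodeedits
    for mesg in s_msgpack.iterfile('/tmp/layerdump/example.nodeedits'):
        match mesg:
            case ('init', header):
                print('header:', header)
            case ('edit', (offs, edits, meta)):
                print('edit at offset', offs)
            case ('fini', footer):
                print('footer:', footer)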

synapse/tools/demote.py
ADDED

@@ -0,0 +1,52 @@
+import sys
+import asyncio
+import argparse
+
+import synapse.exc as s_exc
+
+import synapse.telepath as s_telepath
+
+import synapse.lib.cmd as s_cmd
+import synapse.lib.output as s_output
+import synapse.lib.urlhelp as s_urlhelp
+
+descr = '''
+Automatically select a new leader and demote this service.
+
+Example:
+    python -m synapse.tools.demote
+'''
+
+async def main(argv, outp=s_output.stdout):
+
+    pars = argparse.ArgumentParser(prog='synapse.tools.demote', description=descr,
+                                   formatter_class=argparse.RawDescriptionHelpFormatter)
+
+    pars.add_argument('--url', default='cell:///vertex/storage',
+                      help='The telepath URL of the Synapse service.')
+
+    pars.add_argument('--timeout', type=int, default=60,
+                      help='The timeout to use awaiting network connections.')
+
+    opts = pars.parse_args(argv)
+
+    async with s_telepath.withTeleEnv():
+
+        try:
+
+            async with await s_telepath.openurl(opts.url) as cell:
+
+                outp.printf(f'Demoting leader: {opts.url}')
+
+                if await cell.demote(timeout=opts.timeout):
+                    return 0
+
+        except s_exc.SynErr as e:
+            outp.printf(f'Error while demoting service {s_urlhelp.sanitizeUrl(opts.url)}: {e}')
+            return 1
+
+    outp.printf(f'Failed to demote service {s_urlhelp.sanitizeUrl(opts.url)}')
+    return 1
+
+if __name__ == '__main__': # pragma: no cover
+    s_cmd.exitmain(main)
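
Like the other cell tools, demote can be invoked as a module against a running service. A minimal sketch, assuming the default URL points at the current leader:

    import asyncio

    import synapse.tools.demote as s_demote

    # Runs against the default cell:///vertex/storage URL; returns 0 on success.
    asyncio.run(s_demote.main(['--timeout', '60']))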

synapse/tools/healthcheck.py
CHANGED

@@ -44,7 +44,7 @@ async def main(argv, outp=s_output.stdout):
 
         prox = await s_common.wait_for(s_telepath.openurl(url),
                                        timeout=opts.timeout)
-    except (
+    except (s_exc.LinkErr, s_exc.NoSuchPath, socket.gaierror) as e:
         mesg = f'Unable to connect to cell @ {sanitized_url}.'
         ret = {'status': 'failed',
                'iden': opts.cell,
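
The one-line change widens the except clause so DNS resolution failures are reported as a failed health check rather than escaping as an unhandled exception. For context, socket.gaierror is what the resolver raises for an unknown host (a standalone illustration, not part of the diff):

    import socket

    try:
        socket.getaddrinfo('no-such-host.invalid', 443)
    except socket.gaierror as e:
        print(f'DNS resolution failed: {e}')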

synapse/tools/pkgs/gendocs.py
ADDED

@@ -0,0 +1,176 @@
+import os
+import shutil
+import logging
+import subprocess
+
+import regex as re
+
+import synapse.exc as s_exc
+import synapse.common as s_common
+
+import synapse.lib.cmd as s_cmd
+import synapse.lib.output as s_output
+
+import synapse.tools.rstorm as s_rstorm
+import synapse.tools.autodoc as s_autodoc
+
+logger = logging.getLogger(__name__)
+
+_TOOLDIR = os.path.split(__file__)[0]
+PANDOC_FILTER = os.path.join(_TOOLDIR, 'pandoc_filter.py')
+
+# see https://www.sphinx-doc.org/en/master/usage/restructuredtext/field-lists.html#file-wide-metadata
+re_sphinx_metadata_fields = re.compile(r'^:(tocdepth|nocomments|orphan|nosearch):( \w+)?\n\n',
+                                       flags=re.MULTILINE)
+
+def hasPandoc():
+    if os.system('pandoc --version') == 0:
+        return True
+    return False
+
+async def buildPkgDocs(outp: s_output.OutPut, pkgpath: str, rst_only: bool =False):
+
+    logger.info(f'Building pkg for {pkgpath}')
+    pkgdef = s_common.yamlload(pkgpath)
+    if pkgdef is None:
+        raise s_exc.BadArg(mesg=f'Package does not exist or does not contain yaml: {pkgpath}')
+
+    dirn = os.path.dirname(s_common.genpath(pkgpath))
+
+    docsdir = os.path.join(dirn, 'docs')
+    builddir = os.path.join(dirn, 'docs', '_build')
+
+    shutil.rmtree(builddir, ignore_errors=True)
+
+    s_common.gendir(builddir)
+
+    # touch any files we need in order to load a package, due to
+    # rstorm needing to load the package using genpkg tool. This
+    # does mean that standalone builds of a storm package from this
+    # must be done after using this buildpkg tool.
+    stormpkg_md_present = False
+    for dnfo in pkgdef.get('docs', ()):
+        fpath = dnfo.get('path')
+        with s_common.genfile(dirn, fpath) as fd:
+            pass
+        if fpath.endswith('stormpackage.md'):
+            stormpkg_md_present = True
+
+    # Generate the build .RST for stormpackage.md
+    if stormpkg_md_present:
+        logger.info(f'Generating stormpackage.rst for {pkgpath}')
+        pkgdocs, pkgname = await s_autodoc.docStormpkg(pkgpath)
+        with s_common.genfile(docsdir, 'stormpackage.rst') as fd:
+            text = pkgdocs.getRstText()
+            if rst_only is False:
+                # Leave this in place if we're only generating RST
+                text = text.replace('.. highlight:: none\n', '')
+            fd.write(text.encode())
+        logger.info('Generated the stormpackage.rst file!')
+
+    for name in os.listdir(docsdir):
+
+        if not name.endswith('.rst'):
+            continue
+
+        docpath = os.path.join(docsdir, name)
+
+        basename = name.rsplit('.', 1)[0]
+
+        builtmd = os.path.join(builddir, f'{basename}.md')
+        builtrst = os.path.join(builddir, name)
+
+        argv = (docpath, '--save', builtrst)
+        logger.info(f'Executing rstorm for {argv}')
+        await s_rstorm.main(argv)
+
+        if rst_only:
+            logger.info(f'rst_only enabled, done processing {name}')
+            continue
+
+        logger.info('Preprocessing rstorm output')
+        with s_common.genfile(builtrst) as fd:
+            buf = fd.read().decode()
+
+        # Remove highglight:: none directives
+        buf = buf.replace('.. highlight:: none\n', '')
+
+        # Remove sphinx metadata fields
+        buf = re_sphinx_metadata_fields.sub('', buf)
+
+        lines = buf.splitlines(keepends=True)
+
+        # Remove lines which start with explicit sphinx rst targets
+        nlines1 = []
+        for line in lines:
+            if line.startswith('.. _') and line.strip().endswith(':'):
+                logger.info(f'Dropping: [{line.strip()}]')
+                continue
+            nlines1.append(line)
+
+        buf = ''.join(nlines1)
+
+        with s_common.genfile(builtrst) as fd:
+            fd.truncate()
+            _ = fd.write(buf.encode())
+
+        logger.info(f'Converting {builtrst} to markdown')
+        if name == 'stormpackage.rst':
+            args = ['pandoc', '--filter', PANDOC_FILTER, '-f', 'rst', '-t', 'markdown', '-o', builtmd, builtrst]
+        else:
+            args = ['pandoc', '-f', 'rst', '-t', 'markdown', '-o', builtmd, builtrst]
+
+        r = subprocess.run(args, capture_output=True)
+
+        # Re-write stderr (logging) to our outp
+        for line in r.stderr.decode().splitlines():
+            outp.printf(f'ERR: {line}')
+
+        if r.returncode != 0:
+            raise s_exc.SynErr(mesg=f'Error converting {builtrst} to {builtmd}')
+
+        logger.info(f'Done converting {builtrst} to {builtmd}')
+
+        # Strip out / manipulate the md content
+        with s_common.genfile(builtmd) as fd:
+            buf = fd.read().decode()
+
+        lines = buf.splitlines(keepends=True)
+
+        # Remove lines which only have a single `:` left in them
+        nlines1 = [line for line in lines if line.strip() != ':']
+
+        buf = ''.join(nlines1)
+
+        with s_common.genfile(builtmd) as fd:
+            fd.truncate()
+            _ = fd.write(buf.encode())
+
+        logger.info('Done manipulating markdown')
+
+    logger.info(f'buildPkgDocs complete for {pkgpath}.')
+
+prog = 'synapse.tools.pkgs.gendocs'
+desc = 'A tool for building storm package docs from RStorm into markdown. This tool requires pandoc to be available.'
+
+async def main(argv, outp=s_output.stdout):
+
+    pars = s_cmd.Parser(prog=prog, outp=outp, description=desc)
+    pars.add_argument('pkgfile', metavar='<pkgfile>', help='Path to a storm package prototype yml file.')
+    pars.add_argument('--rst-only', default=False, action='store_true',
+                      help='Stops building after the .rst files have been generated.')
+
+    opts = pars.parse_args(argv)
+
+    if opts.rst_only is False and not hasPandoc():
+        logger.error('Pandoc is not available, can only run rst/rstorm output.')
+        return 1
+
+    await buildPkgDocs(outp, opts.pkgfile, rst_only=opts.rst_only)
+
+    return 0
+
+if __name__ == '__main__': # pragma: no cover
+    s_common.setlogging(logger, 'DEBUG')
+    logging.getLogger('vcr').setLevel(logging.WARNING)
+    s_cmd.exitmain(main)
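
A minimal sketch of driving the doc build from Python (the package path is a hypothetical placeholder; a full build requires pandoc on the PATH, while --rst-only stops after the rstorm pass):

    import asyncio

    import synapse.tools.pkgs.gendocs as s_gendocs

    # Hypothetical package prototype path for illustration.
    asyncio.run(s_gendocs.main(['--rst-only', '/path/to/testpkg.yaml']))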

synapse/tools/pkgs/pandoc_filter.py
ADDED

@@ -0,0 +1,79 @@
+import sys
+import json
+
+import packaging.version as p_version
+import packaging.specifiers as p_specifiers
+
+PANDOC_API_REQVERS = '>=1.23.0,<1.24.0'
+
+def walk(elem):
+    '''
+    Walk the pandoc AST, yielding (type, content) tuples.
+    Ref: https://pandoc.org/using-the-pandoc-api.html#walking-the-ast
+    '''
+
+    if isinstance(elem, list):
+        for subelem in elem:
+            yield from walk(subelem)
+        return
+
+    if isinstance(elem, dict):
+        if 't' in elem:
+            yield elem['t'], elem.get('c')
+        for v in elem.values():
+            yield from walk(v)
+        return
+
+def main():
+    '''
+    A pandoc filter reads the intermediate JSON-formatted AST generated from the source, makes any modifications,
+    and then writes the JSON-formatted AST to be used to generate the target.
+
+    Ref: https://pandoc.org/filters.html
+
+    Usage:
+
+        pandoc -f rst -t markdown --filter ./synapse/tools/pkg/pandoc_filter.py -o foo.md foo.rst
+    '''
+
+    ast = json.load(sys.stdin)
+
+    spec = p_specifiers.SpecifierSet(PANDOC_API_REQVERS)
+    vers = p_version.Version('.'.join(str(part) for part in ast['pandoc-api-version']))
+    if vers not in spec:
+        raise Exception(f'Pandoc API version {vers} does not match required version {PANDOC_API_REQVERS}')
+
+    for type_, content in walk(ast['blocks']):
+
+        if type_ != 'DefinitionList':
+            continue
+
+        # An RST term with multiple definitions gets combined into one -> split
+        # ( Only Para types should get split )
+        for term, defs in content:
+
+            newdefs = []
+            newdef = []
+
+            for def_ in defs[0]:
+
+                if def_['t'] == 'Para' and newdef:
+                    # we are on a new paragraph so save the
+                    # previous term/def group
+                    newdefs.append(newdef.copy())
+                    newdef.clear()
+
+                newdef.append(def_)
+
+            if newdef:
+                newdefs.append(newdef.copy())
+
+            defs.clear()
+            defs.extend(newdefs)
+
+    sys.stdout.write(json.dumps(ast))
+
+    return 0
+
+if __name__ == '__main__':
+    sys.exit(main())
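
To see what walk() yields, a tiny hand-built fragment in pandoc's JSON AST shape (illustrative only, not a real pandoc document):

    from synapse.tools.pkgs.pandoc_filter import walk

    # Each pandoc AST node is a dict with a type tag 't' and content 'c'.
    ast = [{'t': 'Para', 'c': [{'t': 'Str', 'c': 'hello'}]}]
    for mtype, content in walk(ast):
        print(mtype, content)

    # Para [{'t': 'Str', 'c': 'hello'}]
    # Str hello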

synapse/tools/shutdown.py
ADDED

@@ -0,0 +1,52 @@
+import synapse.exc as s_exc
+import synapse.common as s_common
+import synapse.telepath as s_telepath
+
+import synapse.lib.cmd as s_cmd
+import synapse.lib.output as s_output
+
+desc = '''
+Initiate a graceful shutdown of a service.
+
+This tool is designed to put the service into a state where
+any non-background tasks will be allowed to complete while ensuring
+no new tasks are created. Without a timeout, it can block forever if
+tasks do not exit.
+
+The command exits with code 0 if the graceful shutdown was successful and
+exit code 1 if a timeout was specified and was hit. Upon hitting the timeout
+the system resumes normal operation.
+
+NOTE: This will also demote the service if run on a leader with mirrors.
+'''
+
+async def main(argv, outp=s_output.stdout):
+
+    pars = s_cmd.Parser('synapse.tools.shutdown', outp=outp, description=desc)
+
+    pars.add_argument('--url', default='cell:///vertex/storage',
+                      help='The telepath URL to connect to the service.')
+
+    pars.add_argument('--timeout', default=None, type=int,
+                      help='An optional timeout in seconds. If timeout is reached, the shutdown is aborted.')
+
+    opts = pars.parse_args(argv)
+
+    async with s_telepath.withTeleEnv():
+
+        try:
+
+            async with await s_telepath.openurl(opts.url) as proxy:
+
+                if await proxy.shutdown(timeout=opts.timeout):
+                    return 0
+
+                return 1
+
+        except Exception as e: # pragma: no cover
+            text = s_exc.reprexc(e)
+            outp.printf(f'Error while attempting graceful shutdown: {text}')
+            return 1
+
+if __name__ == '__main__': # pragma: no cover
+    s_cmd.exitmain(main)
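
Usage mirrors the demote tool. A minimal sketch that aborts the graceful shutdown if it has not completed within five minutes:

    import asyncio

    import synapse.tools.shutdown as s_shutdown

    # Returns 0 on a completed shutdown, 1 if the timeout was hit.
    retn = asyncio.run(s_shutdown.main(['--timeout', '300']))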