synapse 2.224.0__py311-none-any.whl → 2.226.0__py311-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of synapse has been flagged as possibly problematic.
- synapse/axon.py +10 -5
- synapse/cortex.py +6 -8
- synapse/lib/agenda.py +28 -15
- synapse/lib/ast.py +4 -8
- synapse/lib/cell.py +1 -1
- synapse/lib/const.py +4 -0
- synapse/lib/multislabseqn.py +36 -1
- synapse/lib/nexus.py +67 -8
- synapse/lib/platforms/linux.py +2 -0
- synapse/lib/queue.py +4 -1
- synapse/lib/rstorm.py +2 -2
- synapse/lib/schemas.py +11 -1
- synapse/lib/slabseqn.py +28 -0
- synapse/lib/storm.py +16 -4
- synapse/lib/stormhttp.py +7 -1
- synapse/lib/stormlib/aha.py +3 -3
- synapse/lib/stormtypes.py +10 -3
- synapse/lib/types.py +20 -0
- synapse/lib/version.py +2 -2
- synapse/models/base.py +3 -0
- synapse/models/inet.py +62 -5
- synapse/models/infotech.py +18 -0
- synapse/models/media.py +4 -0
- synapse/models/risk.py +3 -0
- synapse/tests/test_cortex.py +115 -2
- synapse/tests/test_lib_agenda.py +141 -28
- synapse/tests/test_lib_cell.py +1 -1
- synapse/tests/test_lib_certdir.py +1 -1
- synapse/tests/test_lib_httpapi.py +1 -1
- synapse/tests/test_lib_layer.py +1 -1
- synapse/tests/test_lib_lmdbslab.py +2 -0
- synapse/tests/test_lib_multislabseqn.py +22 -0
- synapse/tests/test_lib_nexus.py +42 -1
- synapse/tests/test_lib_platforms_linux.py +4 -0
- synapse/tests/test_lib_slabseqn.py +30 -1
- synapse/tests/test_lib_storm.py +65 -1
- synapse/tests/test_lib_stormhttp.py +16 -0
- synapse/tests/test_lib_stormlib_aha.py +6 -2
- synapse/tests/test_lib_stormlib_oauth.py +1 -1
- synapse/tests/test_lib_stormsvc.py +1 -1
- synapse/tests/test_lib_trigger.py +1 -1
- synapse/tests/test_model_inet.py +37 -0
- synapse/tests/test_model_infotech.py +15 -1
- synapse/tests/test_model_media.py +4 -1
- synapse/tests/test_model_risk.py +2 -0
- synapse/tests/test_tools_aha.py +2 -2
- synapse/tests/{test_tools_axon2axon.py → test_tools_axon_copy.py} +4 -4
- synapse/tests/{test_tools_pullfile.py → test_tools_axon_get.py} +4 -4
- synapse/tests/{test_tools_pushfile.py → test_tools_axon_put.py} +7 -7
- synapse/tests/{test_tools_csvtool.py → test_tools_cortex_csv.py} +12 -3
- synapse/tests/{test_tools_feed.py → test_tools_cortex_feed.py} +2 -2
- synapse/tests/{test_tools_apikey.py → test_tools_service_apikey.py} +1 -4
- synapse/tests/{test_tools_backup.py → test_tools_service_backup.py} +5 -5
- synapse/tests/{test_tools_demote.py → test_tools_service_demote.py} +1 -1
- synapse/tests/{test_tools_healthcheck.py → test_tools_service_healthcheck.py} +1 -1
- synapse/tests/{test_tools_livebackup.py → test_tools_service_livebackup.py} +1 -1
- synapse/tests/{test_tools_modrole.py → test_tools_service_modrole.py} +1 -1
- synapse/tests/{test_tools_moduser.py → test_tools_service_moduser.py} +1 -1
- synapse/tests/{test_tools_promote.py → test_tools_service_promote.py} +1 -1
- synapse/tests/{test_tools_reload.py → test_tools_service_reload.py} +1 -1
- synapse/tests/{test_tools_shutdown.py → test_tools_service_shutdown.py} +1 -1
- synapse/tests/{test_tools_snapshot.py → test_tools_service_snapshot.py} +1 -1
- synapse/tests/{test_tools_storm.py → test_tools_storm_cli.py} +1 -1
- synapse/tests/{test_tools_pkgs_gendocs.py → test_tools_storm_pkg_doc.py} +12 -3
- synapse/tests/{test_tools_genpkg.py → test_tools_storm_pkg_gen.py} +1 -1
- synapse/tests/{test_tools_autodoc.py → test_tools_utils_autodoc.py} +1 -1
- synapse/tests/test_tools_utils_changelog.py +454 -0
- synapse/tests/{test_tools_easycert.py → test_tools_utils_easycert.py} +48 -46
- synapse/tests/{test_tools_guid.py → test_tools_utils_guid.py} +3 -3
- synapse/tests/{test_tools_json2mpk.py → test_tools_utils_json2mpk.py} +3 -3
- synapse/tests/{test_tools_rstorm.py → test_tools_utils_rstorm.py} +6 -1
- synapse/tests/utils.py +15 -1
- synapse/tools/aha/mirror.py +1 -1
- synapse/tools/apikey.py +4 -83
- synapse/tools/autodoc.py +3 -1031
- synapse/tools/axon/copy.py +44 -0
- synapse/tools/axon/get.py +64 -0
- synapse/tools/axon/put.py +122 -0
- synapse/tools/axon2axon.py +3 -36
- synapse/tools/backup.py +6 -176
- synapse/tools/changelog.py +3 -1098
- synapse/tools/cortex/csv.py +236 -0
- synapse/tools/cortex/feed.py +151 -0
- synapse/tools/csvtool.py +3 -227
- synapse/tools/demote.py +4 -40
- synapse/tools/docker/validate.py +3 -3
- synapse/tools/easycert.py +4 -129
- synapse/tools/feed.py +3 -140
- synapse/tools/genpkg.py +3 -307
- synapse/tools/guid.py +7 -6
- synapse/tools/healthcheck.py +3 -101
- synapse/tools/json2mpk.py +6 -38
- synapse/tools/livebackup.py +4 -27
- synapse/tools/modrole.py +3 -108
- synapse/tools/moduser.py +3 -179
- synapse/tools/pkgs/gendocs.py +3 -164
- synapse/tools/promote.py +4 -41
- synapse/tools/pullfile.py +3 -56
- synapse/tools/pushfile.py +3 -114
- synapse/tools/reload.py +4 -61
- synapse/tools/rstorm.py +3 -26
- synapse/tools/service/__init__.py +0 -0
- synapse/tools/service/apikey.py +90 -0
- synapse/tools/service/backup.py +181 -0
- synapse/tools/service/demote.py +47 -0
- synapse/tools/service/healthcheck.py +109 -0
- synapse/tools/service/livebackup.py +34 -0
- synapse/tools/service/modrole.py +116 -0
- synapse/tools/service/moduser.py +184 -0
- synapse/tools/service/promote.py +48 -0
- synapse/tools/service/reload.py +68 -0
- synapse/tools/service/shutdown.py +51 -0
- synapse/tools/service/snapshot.py +64 -0
- synapse/tools/shutdown.py +5 -45
- synapse/tools/snapshot.py +4 -57
- synapse/tools/storm/__init__.py +0 -0
- synapse/tools/storm/__main__.py +5 -0
- synapse/tools/{storm.py → storm/_cli.py} +0 -3
- synapse/tools/storm/pkg/__init__.py +0 -0
- synapse/tools/{pkgs/pandoc_filter.py → storm/pkg/_pandoc_filter.py} +1 -1
- synapse/tools/storm/pkg/doc.py +176 -0
- synapse/tools/storm/pkg/gen.py +315 -0
- synapse/tools/utils/__init__.py +0 -0
- synapse/tools/utils/autodoc.py +1040 -0
- synapse/tools/utils/changelog.py +1124 -0
- synapse/tools/utils/easycert.py +136 -0
- synapse/tools/utils/guid.py +11 -0
- synapse/tools/utils/json2mpk.py +46 -0
- synapse/tools/utils/rstorm.py +35 -0
- {synapse-2.224.0.dist-info → synapse-2.226.0.dist-info}/METADATA +1 -1
- {synapse-2.224.0.dist-info → synapse-2.226.0.dist-info}/RECORD +135 -106
- synapse/tests/test_tools_changelog.py +0 -196
- /synapse/tests/{test_tools_axon.py → test_tools_axon_dump_load.py} +0 -0
- {synapse-2.224.0.dist-info → synapse-2.226.0.dist-info}/WHEEL +0 -0
- {synapse-2.224.0.dist-info → synapse-2.226.0.dist-info}/licenses/LICENSE +0 -0
- {synapse-2.224.0.dist-info → synapse-2.226.0.dist-info}/top_level.txt +0 -0
synapse/tools/cortex/csv.py
ADDED
@@ -0,0 +1,236 @@
+import csv
+
+import synapse.exc as s_exc
+import synapse.cortex as s_cortex
+import synapse.common as s_common
+import synapse.telepath as s_telepath
+
+import synapse.lib.cmd as s_cmd
+import synapse.lib.cmdr as s_cmdr
+import synapse.lib.coro as s_coro
+import synapse.lib.json as s_json
+import synapse.lib.output as s_output
+import synapse.lib.version as s_version
+
+reqver = '>=0.2.0,<3.0.0'
+prog = 'synapse.tools.cortex.csv'
+desc = '''Command line tool for ingesting csv files into a cortex
+
+The storm file is run with the CSV rows specified in the variable "rows" so most
+storm files will use a variable based for loop to create edit nodes. For example:
+
+for ($fqdn, $ipv4, $tag) in $rows {
+
+    [ inet:dns:a=($fqdn, $ipv4) +#$tag ]
+
+}
+
+More advanced uses may include switch cases to provide different logic based on
+a column value.
+
+for ($type, $valu, $info) in $rows {
+
+    switch $type {
+
+        fqdn: {
+            [ inet:fqdn=$valu ]
+        }
+
+        "person name": {
+            [ ps:name=$valu ]
+        }
+
+        *: {
+            // default case...
+        }
+
+    }
+
+    switch $info {
+        "known malware": { [+#cno.mal] }
+    }
+
+}
+'''
+
+async def runCsvExport(opts, outp, text, stormopts):
+    if not opts.cortex:
+        outp.printf('--export requires --cortex')
+        return 1
+
+    if len(opts.csvfiles) != 1:
+        outp.printf('--export requires exactly 1 csvfile')
+        return 1
+
+    path = s_common.genpath(opts.csvfiles[0])
+    outp.printf(f'Exporting CSV rows to: {path}')
+
+    async with await s_telepath.openurl(opts.cortex) as core:
+
+        try:
+            s_version.reqVersion(core._getSynVers(), reqver)
+        except s_exc.BadVersion as e:
+            valu = s_version.fmtVersion(*e.get('valu'))
+            outp.printf(f'Cortex version {valu} is outside of the {prog} supported range ({reqver}).')
+            outp.printf(f'Please use a version of Synapse which supports {valu}; '
+                        f'current version is {s_version.verstring}.')
+            return 1
+
+        with open(path, 'w') as fd:
+
+            wcsv = csv.writer(fd)
+            # prevent streaming nodes by limiting shown events
+            stormopts['show'] = ('csv:row', 'print', 'warn', 'err')
+            count = 0
+            async for name, info in core.storm(text, opts=stormopts):
+
+                if name == 'csv:row':
+                    count += 1
+                    wcsv.writerow(info['row'])
+                    continue
+
+                if name in ('init', 'fini'):
+                    continue
+
+                outp.printf('%s: %r' % (name, info))
+
+    outp.printf(f'exported {count} csv rows.')
+
+    return 0
+
+async def runCsvImport(opts, outp, text, stormopts):
+
+    def iterrows():
+        for path in opts.csvfiles:
+
+            with open(path, 'r', encoding='utf8') as fd:
+
+                if opts.csv_header:
+                    fd.readline()
+
+                def genr():
+
+                    for row in csv.reader(fd):
+                        yield row
+
+                for rows in s_common.chunks(genr(), 1000):
+                    yield rows
+
+    rowgenr = iterrows()
+
+    logfd = None
+    if opts.logfile is not None:
+        logfd = s_common.genfile(opts.logfile)
+        logfd.seek(0, 2)
+
+    async def addCsvData(core):
+
+        nodecount = 0
+
+        stormopts['editformat'] = 'nodeedits'
+
+        vars = stormopts.setdefault('vars', {})
+
+        for rows in rowgenr:
+
+            vars['rows'] = rows
+
+            async for mesg in core.storm(text, opts=stormopts):
+
+                if mesg[0] == 'node':
+                    nodecount += 1
+
+                elif mesg[0] == 'err' and not opts.debug:
+                    outp.printf(repr(mesg))
+
+                elif mesg[0] == 'print':
+                    outp.printf(mesg[1].get('mesg'))
+
+                if opts.debug:
+                    outp.printf(repr(mesg))
+
+                if logfd is not None:
+                    logfd.write(s_json.dumps(mesg, newline=True))
+
+        if opts.cli:
+            await s_cmdr.runItemCmdr(core, outp, True)
+
+        return nodecount
+
+    if opts.test:
+        async with s_cortex.getTempCortex() as core:
+            nodecount = await addCsvData(core)
+
+    else:
+        async with await s_telepath.openurl(opts.cortex) as core:
+
+            try:
+                s_version.reqVersion(core._getSynVers(), reqver)
+            except s_exc.BadVersion as e:
+                valu = s_version.fmtVersion(*e.get('valu'))
+                outp.printf(f'Cortex version {valu} is outside of the {prog} supported range ({reqver}).')
+                outp.printf(f'Please use a version of Synapse which supports {valu}; '
+                            f'current version is {s_version.verstring}.')
+                return 1
+
+            nodecount = await addCsvData(core)
+
+    if logfd is not None:
+        logfd.close()
+
+    outp.printf('%d nodes.' % (nodecount, ))
+    return 0
+
+async def main(argv, outp=s_output.stdout):
+    pars = makeargparser(outp)
+
+    try:
+        opts = pars.parse_args(argv)
+    except s_exc.ParserExit as e:
+        return e.get('status')
+
+    with open(opts.stormfile, 'r', encoding='utf8') as fd:
+        text = fd.read()
+
+    stormopts = {}
+    if opts.optsfile:
+        stormopts = s_common.yamlload(opts.optsfile)
+
+    if opts.view:
+        if not s_common.isguid(opts.view):
+            outp.printf(f'View is not a guid {opts.view}')
+            return 1
+        stormopts['view'] = opts.view
+
+    async with s_telepath.withTeleEnv():
+
+        if opts.export:
+            return await runCsvExport(opts, outp, text, stormopts)
+        else:
+            return await runCsvImport(opts, outp, text, stormopts)
+
+def makeargparser(outp):
+    pars = s_cmd.Parser(prog=prog, description=desc, outp=outp)
+    pars.add_argument('--logfile', help='Set a log file to get JSON lines from the server events.')
+    pars.add_argument('--csv-header', default=False, action='store_true',
+                      help='Skip the first line from each CSV file.')
+    pars.add_argument('--cli', default=False, action='store_true',
+                      help='Drop into a cli session after loading data.')
+    pars.add_argument('--debug', default=False, action='store_true', help='Enable verbose debug output.')
+    muxp = pars.add_mutually_exclusive_group(required=True)
+    muxp.add_argument('--cortex', '-c', type=str,
+                      help='The telepath URL for the cortex ( or alias from ~/.syn/aliases ).')
+    muxp.add_argument('--test', '-t', default=False, action='store_true',
+                      help='Perform a local CSV ingest against a temporary cortex.')
+    pars.add_argument('--export', default=False, action='store_true',
+                      help='Export CSV data to file from storm using $lib.csv.emit(...) events.')
+    pars.add_argument('--view', default=None, action='store',
+                      help='Optional view to work in.')
+    pars.add_argument('--optsfile', default=None, action='store',
+                      help='Path to an opts file (.yaml) on disk.')
+    pars.add_argument('stormfile', help='A Storm script describing how to create nodes from rows.')
+    pars.add_argument('csvfiles', nargs='+', help='CSV files to load.')
+    return pars
+
+if __name__ == '__main__':  # pragma: no cover
+    s_cmd.exitmain(main)
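For orientation, here is a minimal sketch of driving the new csv tool programmatically instead of via python -m synapse.tools.cortex.csv. The file names ingest.storm and rows.csv are hypothetical placeholders; --test runs the ingest against a temporary local Cortex, per the argument parser above.

    import asyncio

    import synapse.tools.cortex.csv as s_csvtool

    # Hypothetical inputs: ingest.storm iterates over $rows to create nodes,
    # rows.csv holds the data. --test avoids needing a telepath URL.
    retn = asyncio.run(s_csvtool.main(['--test', 'ingest.storm', 'rows.csv']))
    print(f'exit code: {retn}')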
synapse/tools/cortex/feed.py
ADDED
@@ -0,0 +1,151 @@
+import os
+import time
+import logging
+
+import synapse.exc as s_exc
+import synapse.common as s_common
+import synapse.cortex as s_cortex
+import synapse.telepath as s_telepath
+
+import synapse.lib.cmd as s_cmd
+import synapse.lib.cmdr as s_cmdr
+import synapse.lib.json as s_json
+import synapse.lib.output as s_output
+import synapse.lib.msgpack as s_msgpack
+import synapse.lib.version as s_version
+import synapse.lib.encoding as s_encoding
+
+logger = logging.getLogger(__name__)
+
+reqver = '>=0.2.0,<3.0.0'
+
+prog = 'synapse.tools.cortex.feed'
+
+def getItems(*paths):
+    items = []
+    for path in paths:
+        if path.endswith('.json'):
+            item = s_json.jsload(path)
+            if not isinstance(item, list):
+                item = [item]
+            items.append((path, item))
+        elif path.endswith('.jsonl'):
+            with s_common.genfile(path) as fd:
+                item = list(s_encoding.iterdata(fd, False, format='jsonl'))
+            items.append((path, item))
+        elif path.endswith(('.yaml', '.yml')):
+            item = s_common.yamlload(path)
+            if not isinstance(item, list):
+                item = [item]
+            items.append((path, item))
+        elif path.endswith('.mpk') or path.endswith('.nodes'):
+            genr = s_msgpack.iterfile(path)
+            items.append((path, genr))
+        else:  # pragma: no cover
+            logger.warning('Unsupported file path: [%s]', path)
+    return items
+
+async def addFeedData(core, outp, feedformat, debug=False, *paths, chunksize=1000, offset=0, viewiden=None):
+
+    items = getItems(*paths)
+    for path, item in items:
+
+        bname = os.path.basename(path)
+
+        tick = time.time()
+        outp.printf(f'Adding items from [{path}]')
+
+        foff = 0
+        for chunk in s_common.chunks(item, chunksize):
+
+            clen = len(chunk)
+            if offset and foff + clen < offset:
+                # We have not yet encountered a chunk which
+                # will include the offset size.
+                foff += clen
+                continue
+
+            await core.addFeedData(feedformat, chunk, viewiden=viewiden)
+
+            foff += clen
+            outp.printf(f'Added [{clen}] items from [{bname}] - offset [{foff}]')
+
+        tock = time.time()
+
+        outp.printf(f'Done consuming from [{bname}]')
+        outp.printf(f'Took [{tock - tick}] seconds.')
+
+    if debug:
+        await s_cmdr.runItemCmdr(core, outp, True)
+
+async def main(argv, outp=s_output.stdout):
+
+    pars = getArgParser(outp)
+    opts = pars.parse_args(argv)
+
+    if opts.offset:
+        if len(opts.files) > 1:
+            outp.printf('Cannot start from a arbitrary offset for more than 1 file.')
+            return 1
+
+        outp.printf(f'Starting from offset [{opts.offset}] - it may take a while'
+                    f' to get to that location in the input file.')
+
+    if opts.test:
+        async with s_cortex.getTempCortex(mods=opts.modules) as prox:
+            await addFeedData(prox, outp, opts.format, opts.debug,
+                              chunksize=opts.chunksize,
+                              offset=opts.offset,
+                              *opts.files)
+
+    elif opts.cortex:
+        async with s_telepath.withTeleEnv():
+            async with await s_telepath.openurl(opts.cortex) as core:
+                try:
+                    s_version.reqVersion(core._getSynVers(), reqver)
+                except s_exc.BadVersion as e:
+                    valu = s_version.fmtVersion(*e.get('valu'))
+                    outp.printf(f'Cortex version {valu} is outside of the {prog} supported range ({reqver}).')
+                    outp.printf(f'Please use a version of Synapse which supports {valu}; '
+                                f'current version is {s_version.verstring}.')
+                    return 1
+                await addFeedData(core, outp, opts.format, opts.debug,
+                                  chunksize=opts.chunksize,
+                                  offset=opts.offset, viewiden=opts.view,
+                                  *opts.files)
+
+    else:  # pragma: no cover
+        outp.printf('No valid options provided [%s]', opts)
+        return 1
+
+    return 0
+
+def getArgParser(outp):
+    desc = 'Command line tool for ingesting data into a cortex'
+    pars = s_cmd.Parser(prog=prog, outp=outp, description=desc)
+
+    muxp = pars.add_mutually_exclusive_group(required=True)
+    muxp.add_argument('--cortex', '-c', type=str,
+                      help='Cortex to connect and add nodes too.')
+    muxp.add_argument('--test', '-t', default=False, action='store_true',
+                      help='Perform a local ingest against a temporary cortex.')
+
+    pars.add_argument('--debug', '-d', default=False, action='store_true',
+                      help='Drop to interactive prompt to inspect cortex after loading data.')
+    pars.add_argument('--format', '-f', type=str, action='store', default='syn.nodes',
+                      help='Feed format to use for the ingested data.')
+    pars.add_argument('--modules', '-m', type=str, action='append', default=[],
+                      help='Additional modules to load locally with a test Cortex.')
+    pars.add_argument('--chunksize', type=int, action='store', default=1000,
+                      help='Default chunksize for iterating over items.')
+    pars.add_argument('--offset', type=int, action='store', default=0,
+                      help='Item offset to start consuming data from.')
+    pars.add_argument('--view', type=str, action='store', default=None,
+                      help='The View to ingest the data into.')
+    pars.add_argument('files', nargs='*', help='json/yaml/msgpack feed files')
+
+    return pars
+
+if __name__ == '__main__':  # pragma: no cover
+    s_common.setlogging(logger, 'DEBUG')
+    s_cmd.exitmain(main)
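Likewise, a minimal sketch of calling the new feed tool's entry point directly. The input file nodes.mpk is a hypothetical msgpack export; --format syn.nodes is the parser default and is passed here only for clarity.

    import asyncio

    import synapse.tools.cortex.feed as s_feedtool

    # Hypothetical input: nodes.mpk is a msgpack file of packed nodes, loaded
    # into a throwaway test Cortex by the --test option.
    retn = asyncio.run(s_feedtool.main(['--test', '--format', 'syn.nodes', 'nodes.mpk']))
    print(f'exit code: {retn}')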
synapse/tools/csvtool.py
CHANGED
@@ -1,235 +1,11 @@
-import csv
-
-import synapse.exc as s_exc
-import synapse.cortex as s_cortex
 import synapse.common as s_common
-import synapse.telepath as s_telepath
 
 import synapse.lib.cmd as s_cmd
-import synapse.lib.cmdr as s_cmdr
-import synapse.lib.coro as s_coro
-import synapse.lib.json as s_json
-import synapse.lib.output as s_output
-import synapse.lib.version as s_version
-
-reqver = '>=0.2.0,<3.0.0'
-desc = '''Command line tool for ingesting csv files into a cortex
-
-The storm file is run with the CSV rows specified in the variable "rows" so most
-storm files will use a variable based for loop to create edit nodes. For example:
-
-for ($fqdn, $ipv4, $tag) in $rows {
-
-    [ inet:dns:a=($fqdn, $ipv4) +#$tag ]
-
-}
-
-More advanced uses may include switch cases to provide different logic based on
-a column value.
-
-for ($type, $valu, $info) in $rows {
-
-    switch $type {
-
-        fqdn: {
-            [ inet:fqdn=$valu ]
-        }
-
-        "person name": {
-            [ ps:name=$valu ]
-        }
-
-        *: {
-            // default case...
-        }
-
-    }
-
-    switch $info {
-        "known malware": { [+#cno.mal] }
-    }
-
-}
-'''
-
-async def runCsvExport(opts, outp, text, stormopts):
-    if not opts.cortex:
-        outp.printf('--export requires --cortex')
-        return 1
-
-    if len(opts.csvfiles) != 1:
-        outp.printf('--export requires exactly 1 csvfile')
-        return 1
-
-    path = s_common.genpath(opts.csvfiles[0])
-    outp.printf(f'Exporting CSV rows to: {path}')
-
-    async with await s_telepath.openurl(opts.cortex) as core:
-
-        try:
-            s_version.reqVersion(core._getSynVers(), reqver)
-        except s_exc.BadVersion as e:
-            valu = s_version.fmtVersion(*e.get('valu'))
-            outp.printf(f'Cortex version {valu} is outside of the csvtool supported range ({reqver}).')
-            outp.printf(f'Please use a version of Synapse which supports {valu}; '
-                        f'current version is {s_version.verstring}.')
-            return 1
-
-        with open(path, 'w') as fd:
-
-            wcsv = csv.writer(fd)
-            # prevent streaming nodes by limiting shown events
-            stormopts['show'] = ('csv:row', 'print', 'warn', 'err')
-            count = 0
-            async for name, info in core.storm(text, opts=stormopts):
-
-                if name == 'csv:row':
-                    count += 1
-                    wcsv.writerow(info['row'])
-                    continue
-
-                if name in ('init', 'fini'):
-                    continue
-
-                outp.printf('%s: %r' % (name, info))
-
-    outp.printf(f'exported {count} csv rows.')
-
-    return 0
-
-async def runCsvImport(opts, outp, text, stormopts):
-
-    def iterrows():
-        for path in opts.csvfiles:
-
-            with open(path, 'r', encoding='utf8') as fd:
-
-                if opts.csv_header:
-                    fd.readline()
-
-                def genr():
-
-                    for row in csv.reader(fd):
-                        yield row
-
-                for rows in s_common.chunks(genr(), 1000):
-                    yield rows
-
-    rowgenr = iterrows()
-
-    logfd = None
-    if opts.logfile is not None:
-        logfd = s_common.genfile(opts.logfile)
-        logfd.seek(0, 2)
-
-    async def addCsvData(core):
-
-        nodecount = 0
-
-        stormopts['editformat'] = 'nodeedits'
-
-        vars = stormopts.setdefault('vars', {})
-
-        for rows in rowgenr:
-
-            vars['rows'] = rows
-
-            async for mesg in core.storm(text, opts=stormopts):
-
-                if mesg[0] == 'node':
-                    nodecount += 1
-
-                elif mesg[0] == 'err' and not opts.debug:
-                    outp.printf(repr(mesg))
-
-                elif mesg[0] == 'print':
-                    outp.printf(mesg[1].get('mesg'))
-
-                if opts.debug:
-                    outp.printf(repr(mesg))
-
-                if logfd is not None:
-                    logfd.write(s_json.dumps(mesg, newline=True))
-
-        if opts.cli:
-            await s_cmdr.runItemCmdr(core, outp, True)
-
-        return nodecount
-
-    if opts.test:
-        async with s_cortex.getTempCortex() as core:
-            nodecount = await addCsvData(core)
-
-    else:
-        async with await s_telepath.openurl(opts.cortex) as core:
-
-            try:
-                s_version.reqVersion(core._getSynVers(), reqver)
-            except s_exc.BadVersion as e:
-                valu = s_version.fmtVersion(*e.get('valu'))
-                outp.printf(f'Cortex version {valu} is outside of the csvtool supported range ({reqver}).')
-                outp.printf(f'Please use a version of Synapse which supports {valu}; '
-                            f'current version is {s_version.verstring}.')
-                return 1
-
-            nodecount = await addCsvData(core)
-
-    if logfd is not None:
-        logfd.close()
-
-    outp.printf('%d nodes.' % (nodecount, ))
-    return 0
-
-async def main(argv, outp=s_output.stdout):
-    pars = makeargparser(outp)
-
-    try:
-        opts = pars.parse_args(argv)
-    except s_exc.ParserExit as e:
-        return e.get('status')
-
-    with open(opts.stormfile, 'r', encoding='utf8') as fd:
-        text = fd.read()
-
-    stormopts = {}
-    if opts.optsfile:
-        stormopts = s_common.yamlload(opts.optsfile)
-
-    if opts.view:
-        if not s_common.isguid(opts.view):
-            outp.printf(f'View is not a guid {opts.view}')
-            return 1
-        stormopts['view'] = opts.view
-
-    async with s_telepath.withTeleEnv():
 
-        if opts.export:
-            return await runCsvExport(opts, outp, text, stormopts)
-        else:
-            return await runCsvImport(opts, outp, text, stormopts)
+from synapse.tools.cortex.csv import main
 
-def makeargparser(outp):
-
-    pars.add_argument('--logfile', help='Set a log file to get JSON lines from the server events.')
-    pars.add_argument('--csv-header', default=False, action='store_true',
-                      help='Skip the first line from each CSV file.')
-    pars.add_argument('--cli', default=False, action='store_true',
-                      help='Drop into a cli session after loading data.')
-    pars.add_argument('--debug', default=False, action='store_true', help='Enable verbose debug output.')
-    muxp = pars.add_mutually_exclusive_group(required=True)
-    muxp.add_argument('--cortex', '-c', type=str,
-                      help='The telepath URL for the cortex ( or alias from ~/.syn/aliases ).')
-    muxp.add_argument('--test', '-t', default=False, action='store_true',
-                      help='Perform a local CSV ingest against a temporary cortex.')
-    pars.add_argument('--export', default=False, action='store_true',
-                      help='Export CSV data to file from storm using $lib.csv.emit(...) events.')
-    pars.add_argument('--view', default=None, action='store',
-                      help='Optional view to work in.')
-    pars.add_argument('--optsfile', default=None, action='store',
-                      help='Path to an opts file (.yaml) on disk.')
-    pars.add_argument('stormfile', help='A Storm script describing how to create nodes from rows.')
-    pars.add_argument('csvfiles', nargs='+', help='CSV files to load.')
-    return pars
+s_common.deprecated('synapse.tools.csvtool is deprecated. Please use synapse.tools.cortex.csv instead.',
+                    curv='v2.225.0')
 
 if __name__ == '__main__':  # pragma: no cover
     s_cmd.exitmain(main)
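The shim keeps the legacy entry point importable while forwarding to the relocated module, so both import paths resolve to the same coroutine. A quick sanity check (a sketch, assuming synapse >= 2.225.0 is installed):

    import synapse.tools.csvtool as s_oldcsv
    import synapse.tools.cortex.csv as s_newcsv

    # The legacy module re-exports main() from its new location, so the two
    # names refer to the same function object.
    assert s_oldcsv.main is s_newcsv.main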
synapse/tools/demote.py
CHANGED
@@ -1,47 +1,11 @@
-import synapse.
-
-import synapse.telepath as s_telepath
+import synapse.common as s_common
 
 import synapse.lib.cmd as s_cmd
-import synapse.lib.output as s_output
-import synapse.lib.urlhelp as s_urlhelp
-
-descr = '''
-Automatically select a new leader and demote this service.
-
-Example:
-    python -m synapse.tools.demote
-'''
-
-async def main(argv, outp=s_output.stdout):
-
-    pars = s_cmd.Parser(prog='synapse.tools.demote', outp=outp, description=descr)
-
-    pars.add_argument('--url', default='cell:///vertex/storage',
-                      help='The telepath URL of the Synapse service.')
-
-    pars.add_argument('--timeout', type=int, default=60,
-                      help='The timeout to use awaiting network connections.')
-
-    opts = pars.parse_args(argv)
-
-    async with s_telepath.withTeleEnv():
-
-        try:
-
-            async with await s_telepath.openurl(opts.url) as cell:
-
-                outp.printf(f'Demoting leader: {opts.url}')
-
-                if await cell.demote(timeout=opts.timeout):
-                    return 0
 
-
-        outp.printf(f'Error while demoting service {s_urlhelp.sanitizeUrl(opts.url)}: {e}')
-        return 1
+from synapse.tools.service.demote import main
 
-
-
+s_common.deprecated('synapse.tools.demote is deprecated. Please use synapse.tools.service.demote instead.',
+                    curv='v2.225.0')
 
 if __name__ == '__main__':  # pragma: no cover
     s_cmd.exitmain(main)