synapse 2.180.1__py311-none-any.whl → 2.182.0__py311-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release. This version of synapse might be problematic.
- synapse/assets/__init__.py +35 -0
- synapse/assets/storm/migrations/model-0.2.28.storm +355 -0
- synapse/common.py +2 -1
- synapse/cortex.py +49 -35
- synapse/cryotank.py +1 -1
- synapse/datamodel.py +30 -0
- synapse/lib/ast.py +12 -7
- synapse/lib/auth.py +17 -0
- synapse/lib/cell.py +7 -9
- synapse/lib/chop.py +0 -1
- synapse/lib/drive.py +8 -8
- synapse/lib/layer.py +55 -13
- synapse/lib/lmdbslab.py +26 -5
- synapse/lib/modelrev.py +28 -1
- synapse/lib/modules.py +1 -0
- synapse/lib/nexus.py +1 -1
- synapse/lib/node.py +5 -0
- synapse/lib/parser.py +23 -16
- synapse/lib/scrape.py +1 -1
- synapse/lib/slabseqn.py +2 -2
- synapse/lib/snap.py +129 -0
- synapse/lib/storm.lark +16 -2
- synapse/lib/storm.py +20 -3
- synapse/lib/storm_format.py +1 -0
- synapse/lib/stormhttp.py +34 -1
- synapse/lib/stormlib/auth.py +5 -3
- synapse/lib/stormlib/cortex.py +5 -2
- synapse/lib/stormlib/easyperm.py +2 -2
- synapse/lib/stormlib/ipv6.py +2 -2
- synapse/lib/stormlib/model.py +114 -12
- synapse/lib/stormlib/project.py +1 -1
- synapse/lib/stormtypes.py +81 -7
- synapse/lib/types.py +7 -0
- synapse/lib/version.py +2 -2
- synapse/lib/view.py +47 -0
- synapse/models/inet.py +10 -3
- synapse/models/infotech.py +2 -1
- synapse/models/language.py +4 -0
- synapse/models/math.py +50 -0
- synapse/models/orgs.py +8 -0
- synapse/models/risk.py +9 -0
- synapse/tests/files/stormcov/pragma-nocov.storm +18 -0
- synapse/tests/test_assets.py +25 -0
- synapse/tests/test_cortex.py +129 -0
- synapse/tests/test_datamodel.py +6 -0
- synapse/tests/test_lib_cell.py +12 -0
- synapse/tests/test_lib_grammar.py +7 -1
- synapse/tests/test_lib_layer.py +35 -0
- synapse/tests/test_lib_lmdbslab.py +11 -9
- synapse/tests/test_lib_modelrev.py +655 -1
- synapse/tests/test_lib_slabseqn.py +5 -4
- synapse/tests/test_lib_snap.py +4 -0
- synapse/tests/test_lib_storm.py +110 -1
- synapse/tests/test_lib_stormhttp.py +99 -1
- synapse/tests/test_lib_stormlib_auth.py +15 -0
- synapse/tests/test_lib_stormlib_cortex.py +21 -4
- synapse/tests/test_lib_stormlib_iters.py +8 -5
- synapse/tests/test_lib_stormlib_model.py +45 -6
- synapse/tests/test_lib_stormtypes.py +158 -2
- synapse/tests/test_lib_types.py +6 -0
- synapse/tests/test_model_inet.py +10 -0
- synapse/tests/test_model_language.py +4 -0
- synapse/tests/test_model_math.py +22 -0
- synapse/tests/test_model_orgs.py +6 -2
- synapse/tests/test_model_risk.py +4 -0
- synapse/tests/test_tools_storm.py +1 -1
- synapse/tests/test_utils_stormcov.py +5 -0
- synapse/tests/utils.py +18 -5
- synapse/utils/stormcov/plugin.py +31 -1
- synapse/vendor/cpython/LICENSE +279 -0
- synapse/vendor/cpython/__init__.py +0 -0
- synapse/vendor/cpython/lib/__init__.py +0 -0
- synapse/vendor/cpython/lib/email/__init__.py +0 -0
- synapse/vendor/cpython/lib/email/_parseaddr.py +560 -0
- synapse/vendor/cpython/lib/email/utils.py +505 -0
- synapse/vendor/cpython/lib/ipaddress.py +2366 -0
- synapse/vendor/cpython/lib/test/__init__.py +0 -0
- synapse/vendor/cpython/lib/test/support/__init__.py +114 -0
- synapse/vendor/cpython/lib/test/test_email/__init__.py +0 -0
- synapse/vendor/cpython/lib/test/test_email/test_email.py +480 -0
- synapse/vendor/cpython/lib/test/test_email/test_utils.py +167 -0
- synapse/vendor/cpython/lib/test/test_ipaddress.py +2672 -0
- synapse/vendor/utils.py +4 -3
- {synapse-2.180.1.dist-info → synapse-2.182.0.dist-info}/METADATA +3 -3
- {synapse-2.180.1.dist-info → synapse-2.182.0.dist-info}/RECORD +88 -71
- {synapse-2.180.1.dist-info → synapse-2.182.0.dist-info}/WHEEL +1 -1
- synapse/lib/jupyter.py +0 -505
- synapse/tests/test_lib_jupyter.py +0 -224
- {synapse-2.180.1.dist-info → synapse-2.182.0.dist-info}/LICENSE +0 -0
- {synapse-2.180.1.dist-info → synapse-2.182.0.dist-info}/top_level.txt +0 -0
synapse/assets/__init__.py
ADDED
@@ -0,0 +1,35 @@
+import os
+import logging
+
+import synapse.common as s_common
+
+logger = logging.getLogger(__name__)
+dirname = os.path.dirname(__file__)
+
+def getStorm(*names):
+    '''
+    Return a storm file from the synapse storm folder.
+
+    Example:
+
+        text = storm.get('migrate.storm')
+        await core.callStorm(text)
+
+    Example #2:
+        text = storm.get('migrations', 'model-0.2.28.storm')
+        await core.callStorm(text)
+    '''
+    fp = getAssetPath('storm', *names)
+    with open(fp, 'rb') as fd:
+        text = fd.read()
+    return text.decode('utf8')
+
+def getAssetPath(*names):
+    fp = s_common.genpath(dirname, *names)
+    if not fp.startswith(dirname):
+        logger.error(f'{fp} is not in {dirname}')
+        raise ValueError(f'Path escaping detected for {names}')
+    if not os.path.isfile(fp):
+        logger.error('{} does not exist'.format(fp))
+        raise ValueError(f'Asset does not exist for {names}')
+    return fp
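Note: a minimal usage sketch (not part of the diff) for the new asset helpers, based on the getStorm() docstring above. The core variable is an assumed Cortex handle, and passing $layridens through the query vars is an assumption about how the migration script below expects its input.

import synapse.assets as s_assets

async def runCpeMigration(core, layridens):
    # Load the bundled Storm text from synapse/assets/storm/.
    text = s_assets.getStorm('migrations', 'model-0.2.28.storm')
    # The script reads $layridens, so provide it as a Storm variable (assumed calling convention).
    await core.callStorm(text, opts={'vars': {'layridens': layridens}})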
synapse/assets/storm/migrations/model-0.2.28.storm
ADDED
@@ -0,0 +1,355 @@
+$layers = $lib.spooled.set()
+$layers.adds($layridens)
+
+$refinfoCache = ({})
+
+function getRefInfo(formname) {
+    $cached = $refinfoCache.$formname
+    if $cached { return($cached) }
+
+    $refinfo = ([])
+
+    syn:prop:type*in=($formname, array, ndef)
+    // Don't include the form we're querying
+    -:form = $formname
+
+    $prop = $lib.model.prop($node.repr())
+    $proptype = $prop.type.name
+
+    // Don't include array types that don't have a base type of $formname or ndef
+    -$($proptype = 'array' and not ([$formname, 'ndef']).has($prop.type.opts.type))
+
+    $isarray = $lib.false
+
+    if ($proptype = 'array') {
+        $isarray = $lib.true
+        $proptype = $prop.type.opts.type
+    }
+
+    $refinfo.append((:form, $prop.name, $proptype, $isarray))
+
+    fini {
+        $refinfoCache.$formname = $refinfo
+        return($refinfo)
+    }
+}
+
+function removeNode(n, viewiden, layriden) {
+
+    $nodeq = $lib.queue.gen("model_0_2_28:nodes")
+    $refsq = $lib.queue.gen("model_0_2_28:nodes:refs")
+    $edgeq = $lib.queue.gen("model_0_2_28:nodes:edges")
+
+    $sources = ([])
+
+    $iden = $n.iden()
+    $repr = $n.repr()
+    $form = $n.form()
+    $ndef = $n.ndef()
+
+    $lib.log.debug(`Saving invalid {$form} node info: {$repr}`)
+
+    $refinfos = $getRefInfo($form)
+
+    for $view in $lib.view.list(deporder=$lib.true) {
+        if (not $layers.has($view.layers.0.iden)) { continue }
+
+        view.exec $view.iden {
+            $edges = ([])
+            $references = ([])
+
+            // Get references and store them in queue
+            for $refinfo in $refinfos {
+
+                ($formname, $propname, $proptype, $isarray) = $refinfo
+                if ($proptype = "ndef") {
+                    $oldvalu = $ndef
+                } else {
+                    $oldvalu = $repr
+                }
+
+                for $ref in $lib.model.migration.liftByPropValuNoNorm($formname, $propname, $oldvalu) {
+                    yield $ref
+
+                    if $isarray {
+
+                        // We can't just [ :$propname-=$oldvalu ] because the norm() function gets called on the array type
+                        // deep down in the AST. So, instead, we have to operate on the whole array.
+
+                        $list = $lib.copy(:$propname)
+                        $list.rem($oldvalu, all=$lib.true)
+
+                        if $list {
+                            $lib.model.migration.setNodePropValuNoNorm($node, $propname, $list)
+                        } else {
+                            [ -:$propname ]
+                        }
+
+                    } else {
+                        try {
+                            [ -:$propname ]
+                        } catch ReadOnlyProp as exc {
+                            // The property is readonly so we can only delete it
+                            $lib.log.warning(`{$formname}:{$propname} is readonly, cannot modify. Storing node: {$node.repr()}`)
+                            $removeNode($node, $view.iden, $view.layers.0.iden)
+                            | delnode |
+                            continue
+                        }
+                    }
+
+                    $ref = ({
+                        "iden": $node.iden(),
+                        "refinfo": $refinfo,
+                    })
+
+                    $references.append($ref)
+
+                    // Flush to the queue if the list grows too large
+                    if ($references.size() > 1000) { // pragma: no cover start
+                        $item = ({
+                            "iden": $iden,
+                            "view": $view.iden,
+                            "refs": $references,
+                        })
+
+                        $refsq.put($item)
+                        $references = ([])
+                    } // pragma: no cover stop
+                }
+            }
+
+            if $references {
+                $item = ({
+                    "iden": $iden,
+                    "view": $view.iden,
+                    "refs": $references,
+                })
+
+                $refsq.put($item)
+                $references = ([])
+            }
+
+            spin |
+
+            yield $iden |
+
+            // Get sources and store them in the queue
+            // Do this before we store/delete edges.
+            { <(seen)- meta:source
+                $sources.append($node.repr())
+            }
+
+            // Get N1 edges and store them in the queue
+            { for $edge in $node.edges() {
+                ($verb, $dst) = $edge
+                $edges.append($edge)
+
+                [ -($verb)> { yield $dst } ]
+
+                // Flush to the queue if the list grows too large
+                if ($edges.size() > 1000) { // pragma: no cover start
+                    $item = ({
+                        "iden": $iden,
+                        "view": $view.iden,
+                        "direction": "n1",
+                        "edges": $edges,
+                    })
+
+                    $edgeq.put($item)
+                    $edges = ([])
+                } // pragma: no cover stop
+            }}
+
+            if $edges {
+                $item = ({
+                    "iden": $iden,
+                    "view": $view.iden,
+                    "direction": "n1",
+                    "edges": $edges,
+                })
+
+                $edgeq.put($item)
+                $edges = ([])
+            }
+
+            // Get N2 edges and store them in the queue
+            { for $edge in $node.edges(reverse=$lib.true) {
+                ($verb, $src) = $edge
+                $edges.append($edge)
+
+                [ <($verb)- { yield $src } ]
+
+                // Flush to the queue if the list grows too large
+                if ($edges.size() > 1000) { // pragma: no cover start
+                    $item = ({
+                        "iden": $iden,
+                        "view": $view.iden,
+                        "direction": "n2",
+                        "edges": $edges,
+                    })
+
+                    $edgeq.put($item)
+                    $edges = ([])
+                } // pragma: no cover stop
+            }}
+
+            if $edges {
+                $item = ({
+                    "iden": $iden,
+                    "view": $view.iden,
+                    "direction": "n2",
+                    "edges": $edges,
+                })
+
+                $edgeq.put($item)
+                $edges = ([])
+            }
+        }
+    }
+
+    $item = ({
+        "iden": $iden,
+        "form": $form,
+        "repr": $repr,
+        "props": $n.props,
+        "view": $viewiden,
+        "layer": $layriden,
+        "tags": $n.tags(),
+        "data": $n.data.list(),
+        "sources": $sources,
+    })
+
+    $nodeq.put($item)
+
+    return()
+}
+
+function migrateCpeNode(oldcpe, newcpe) {
+
+    // migrate the edges, tags, node data, and extended props
+    $lib.model.migration.copyEdges($oldcpe, $newcpe)
+    $lib.model.migration.copyTags($oldcpe, $newcpe)
+    $lib.model.migration.copyData($oldcpe, $newcpe)
+    $lib.model.migration.copyExtProps($oldcpe, $newcpe)
+
+    $refinfos = $getRefInfo($oldcpe.form())
+
+    // Iterate through the views and fix up all the references
+    for $view in $lib.view.list(deporder=$lib.true) {
+        if (not $layers.has($view.layers.0.iden)) { continue }
+
+        view.exec $view.iden {
+
+            // Fix references that point to old node to now point to new node
+            for $refinfo in $refinfos {
+                ($form, $prop, $proptype, $isarray) = $refinfo
+                if ($proptype = "ndef") {
+                    $oldvalu = $oldcpe.ndef()
+                    $newvalu = $newcpe.ndef()
+                } else {
+                    $oldvalu = $oldcpe.repr()
+                    $newvalu = $newcpe.repr()
+                }
+
+                for $n in $lib.model.migration.liftByPropValuNoNorm($form, $prop, $oldvalu) {
+                    yield $n
+
+                    if $isarray {
+
+                        // We can't just [ :$prop-=$oldvalu :$prop+=$newvalu ] because the norm() function gets called
+                        // on the array type deep down in the AST. So, instead, we have to operate on the whole array.
+
+                        $list = $lib.copy(:$prop)
+                        $list.rem($oldvalu, all=$lib.true)
+
+                        if (not $list.has($newvalu)) {
+                            $list.append($newvalu)
+                        }
+
+                        $lib.model.migration.setNodePropValuNoNorm($node, $prop, $list)
+
+                    } else {
+                        try {
+                            [ -:$prop :$prop=$newvalu ]
+                        } catch ReadOnlyProp as exc {
+                            // The property is readonly so we can only delete it
+                            $lib.log.warning(`{$form}:{$prop} is readonly, cannot modify. Storing node: {$node.repr()}`)
+                            $removeNode($node, $view.iden, $view.layers.0.iden)
+                            | delnode
+
+                        }
+                    }
+                }
+            }
+        }
+    }
+
+    return()
+}
+
+// This is a separate function because I was getting weird bugs from scoping
+// issues. Mainly, the $view variable being overwritten because this code was
+// scoped to the whole script.
+function main() {
+
+    for $view in $lib.view.list(deporder=$lib.true) {
+
+        if (not $layers.has($view.layers.0.iden)) { continue }
+
+        view.exec $view.iden {
+
+            $layer = $lib.layer.get()
+
+            for $oldcpe in $layer.liftByProp(it:sec:cpe) {
+                $info = $lib.model.migration.s.itSecCpe_2_170_0_internal($oldcpe)
+
+                if ($info.status = "success") {
+                    // No primary property changes, nothing to do. Node has been fully migrated.
+                    if (not $info.valu) {
+                        continue
+                    }
+
+                    /*
+                     * At this point, we have a node that can be fixed but
+                     * needs to be migrated to a new node because the primary
+                     * property needs to be changed. We'll create a new
+                     * (correct) node, and copy everything from the old node.
+                     * Then we complete the migration by iterating through
+                     * all the views to fix the references.
+                     */
+
+                    $lib.log.debug(`Migrating invalid it:sec:cpe node: {$oldcpe.repr()} -> {$info.valu}`)
+
+                    [ it:sec:cpe=$info.valu
+                        .seen ?= $oldcpe.props.".seen"
+                    ]
+
+                    $migrateCpeNode($oldcpe, $node)
+
+                    spin |
+
+                    yield $oldcpe |
+                    delnode --deledges --force
+
+                } else {
+
+                    // Node could not be automatically migrated. Collect
+                    // critical information to eventually reconstruct this node
+                    // and store it in a queue.
+
+                    $removeNode($oldcpe, $view.iden, $view.layers.0.iden)
+
+                    yield $oldcpe |
+                    delnode --deledges --force
+                }
+            }
+        }
+    }
+
+    return()
+}
+
+// Pre-cache the it:sec:cpe references
+$getRefInfo(it:sec:cpe)
+
+$main()
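Note: nodes the script cannot migrate automatically are serialized into the model_0_2_28:* queues it creates above. A hedged sketch (not part of the diff, hypothetical helper name, assuming the standard Storm queue API) of how those saved records could be reviewed afterward from Python with a Cortex handle named core:

async def listUnmigratedCpes(core):
    # Read the saved node records without culling, so the queue keeps them for later repair.
    query = '''
        $items = ([])
        $q = $lib.queue.gen("model_0_2_28:nodes")
        for ($offs, $item) in $q.gets(wait=$lib.false) {
            $items.append($item)
        }
        return($items)
    '''
    return await core.callStorm(query)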
synapse/common.py
CHANGED
@@ -2,6 +2,7 @@ import io
 import os
 import ssl
 import sys
+import enum
 import json
 import http
 import stat
@@ -36,7 +37,7 @@ import synapse.lib.const as s_const
 import synapse.lib.msgpack as s_msgpack
 import synapse.lib.structlog as s_structlog

-import
+import synapse.vendor.cpython.lib.ipaddress as ipaddress

 try:
     from yaml import CSafeLoader as Loader
synapse/cortex.py
CHANGED
@@ -115,6 +115,8 @@ SYNC_NODEEDIT = 1 # A nodeedit: (<offs>, 0, <etyp>, (<etype args>))
 SYNC_LAYR_ADD = 3 # A layer was added
 SYNC_LAYR_DEL = 4 # A layer was deleted

+MAX_NEXUS_DELTA = 3_600
+
 reqValidTagModel = s_config.getJsValidator({
     'type': 'object',
     'properties': {
@@ -2278,31 +2280,6 @@ class Cortex(s_oauth.OAuthMixin, s_cell.Cell): # type: ignore
             if sodelist is not None:
                 yield sodelist

-    async def _mergeSodesUniq(self, layers, genrs, cmprkey, filtercmpr=None, reverse=False):
-        lastbuid = None
-        sodes = {}
-        async with await s_spooled.Set.anit(dirn=self.dirn) as uniqset:
-            async for layr, (_, buid), sode in s_common.merggenr2(genrs, cmprkey, reverse=reverse):
-                if buid in uniqset:
-                    continue
-
-                if not buid == lastbuid or layr in sodes:
-                    if lastbuid is not None:
-                        sodelist = await self._genSodeList(lastbuid, sodes, layers, filtercmpr)
-                        if sodelist is not None:
-                            yield sodelist
-                        sodes.clear()
-
-                    await uniqset.add(lastbuid)
-                    lastbuid = buid
-
-                sodes[layr] = sode
-
-            if lastbuid is not None:
-                sodelist = await self._genSodeList(lastbuid, sodes, layers, filtercmpr)
-                if sodelist is not None:
-                    yield sodelist
-
     async def _liftByDataName(self, name, layers):
         if len(layers) == 1:
             layr = layers[0].iden
@@ -2328,7 +2305,13 @@ class Cortex(s_oauth.OAuthMixin, s_cell.Cell): # type: ignore
         for layr in layers:
             genrs.append(wrap_liftgenr(layr.iden, layr.liftByProp(form, prop, reverse=reverse)))

-
+        def filtercmpr(sode):
+            if (props := sode.get('props')) is None:
+                return False
+
+            return props.get(prop) is not None
+
+        async for sodes in self._mergeSodes(layers, genrs, cmprkey_indx, filtercmpr, reverse=reverse):
             yield sodes

     async def _liftByPropValu(self, form, prop, cmprvals, layers, reverse=False):
@@ -2445,7 +2428,16 @@ class Cortex(s_oauth.OAuthMixin, s_cell.Cell): # type: ignore
         for layr in layers:
             genrs.append(wrap_liftgenr(layr.iden, layr.liftByTagProp(form, tag, prop, reverse=reverse)))

-
+        def filtercmpr(sode):
+            if (tagprops := sode.get('tagprops')) is None:
+                return False
+
+            if (props := tagprops.get(tag)) is None:
+                return False
+
+            return props.get(prop) is not None
+
+        async for sodes in self._mergeSodes(layers, genrs, cmprkey_indx, filtercmpr, reverse=reverse):
             yield sodes

     async def _liftByTagPropValu(self, form, tag, prop, cmprvals, layers, reverse=False):
@@ -4401,7 +4393,14 @@ class Cortex(s_oauth.OAuthMixin, s_cell.Cell): # type: ignore
         except Exception as e:
             mesg = f'Invalid path for Extended HTTP API - cannot compile regular expression for [{path}] : {e}'
             raise s_exc.BadArg(mesg=mesg) from None
-
+
+        if adef.get('iden') is None:
+            adef['iden'] = s_common.guid()
+
+        iden = adef['iden']
+        if self._exthttpapis.get(iden) is not None:
+            raise s_exc.DupIden(mesg=f'Duplicate iden specified for Extended HTTP API: {iden}', iden=iden)
+
         adef['created'] = s_common.now()
         adef['updated'] = adef['created']
         adef = s_schemas.reqValidHttpExtAPIConf(adef)
@@ -5714,7 +5713,7 @@ class Cortex(s_oauth.OAuthMixin, s_cell.Cell): # type: ignore

         if self.stormpool is not None and opts.get('mirror', True):
             extra = await self.getLogExtra(text=text)
-            proxy = await self._getMirrorProxy()
+            proxy = await self._getMirrorProxy(opts)

             if proxy is not None:
                 logger.info(f'Offloading Storm query {{{text}}} to mirror.', extra=extra)
@@ -5741,13 +5740,13 @@ class Cortex(s_oauth.OAuthMixin, s_cell.Cell): # type: ignore
                 return i

     async def _getMirrorOpts(self, opts):
+        assert 'nexsoffs' in opts
         mirropts = s_msgpack.deepcopy(opts)
         mirropts['mirror'] = False
-        mirropts['nexsoffs'] = (await self.getNexsIndx() - 1)
         mirropts['nexstimeout'] = self.stormpoolopts.get('timeout:sync')
         return mirropts

-    async def _getMirrorProxy(self):
+    async def _getMirrorProxy(self, opts):

         if self.stormpool is None: # pragma: no cover
             return None
@@ -5756,6 +5755,8 @@ class Cortex(s_oauth.OAuthMixin, s_cell.Cell): # type: ignore
             logger.warning('Storm query mirror pool is empty, running query locally.')
             return None

+        proxy = None
+
         try:
             timeout = self.stormpoolopts.get('timeout:connection')
             proxy = await self.stormpool.proxy(timeout=timeout)
@@ -5764,10 +5765,23 @@ class Cortex(s_oauth.OAuthMixin, s_cell.Cell): # type: ignore
                 # we are part of the pool and were selected. Convert to local use.
                 return None

+            curoffs = opts.setdefault('nexsoffs', await self.getNexsIndx() - 1)
+            miroffs = await s_common.wait_for(proxy.getNexsIndx(), timeout) - 1
+            if (delta := curoffs - miroffs) > MAX_NEXUS_DELTA:
+                mesg = (f'Pool mirror [{proxyname}] Nexus offset delta too large '
+                        f'({delta} > {MAX_NEXUS_DELTA}), running query locally.')
+                logger.warning(mesg, extra=await self.getLogExtra(delta=delta, mirror=proxyname, mirror_offset=miroffs))
+                return None
+
             return proxy

         except (TimeoutError, s_exc.IsFini):
-
+            if proxy is None:
+                logger.warning('Timeout waiting for pool mirror, running query locally.')
+            else:
+                mesg = f'Timeout waiting for pool mirror [{proxyname}] Nexus offset, running query locally.'
+                logger.warning(mesg, extra=await self.getLogExtra(mirror=proxyname))
+                await proxy.fini()
             return None

     async def storm(self, text, opts=None):
@@ -5776,7 +5790,7 @@ class Cortex(s_oauth.OAuthMixin, s_cell.Cell): # type: ignore

         if self.stormpool is not None and opts.get('mirror', True):
             extra = await self.getLogExtra(text=text)
-            proxy = await self._getMirrorProxy()
+            proxy = await self._getMirrorProxy(opts)

             if proxy is not None:
                 logger.info(f'Offloading Storm query {{{text}}} to mirror.', extra=extra)
@@ -5806,7 +5820,7 @@ class Cortex(s_oauth.OAuthMixin, s_cell.Cell): # type: ignore

         if self.stormpool is not None and opts.get('mirror', True):
             extra = await self.getLogExtra(text=text)
-            proxy = await self._getMirrorProxy()
+            proxy = await self._getMirrorProxy(opts)

             if proxy is not None:
                 logger.info(f'Offloading Storm query {{{text}}} to mirror.', extra=extra)
@@ -5831,7 +5845,7 @@ class Cortex(s_oauth.OAuthMixin, s_cell.Cell): # type: ignore

         if self.stormpool is not None and opts.get('mirror', True):
             extra = await self.getLogExtra(text=text)
-            proxy = await self._getMirrorProxy()
+            proxy = await self._getMirrorProxy(opts)

             if proxy is not None:
                 logger.info(f'Offloading Storm query {{{text}}} to mirror.', extra=extra)
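Note: with these changes, a Storm query is no longer offloaded to a pool mirror whose Nexus offset trails the caller by more than MAX_NEXUS_DELTA. Callers can also opt out of offloading per query, since the offload path only runs when opts.get('mirror', True) is true. A small sketch (assumed names, not part of the diff), where core is a Cortex handle and the query text is just an example:

async def runLocalOnly(core):
    # Run a query strictly on the local Cortex, never offloaded to a storm pool mirror.
    opts = {'mirror': False}
    async for mesg in core.storm('inet:ipv4 | limit 10', opts=opts):
        print(mesg)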
synapse/cryotank.py
CHANGED
@@ -81,7 +81,7 @@ class CryoTank(s_base.Base):
         size = 0

         for chunk in s_common.chunks(items, 1000):
-            metrics = self._items.save(chunk)
+            metrics = await self._items.save(chunk)
             self._metrics.add(metrics)
             await self.fire('cryotank:puts', numrecords=len(chunk))
             size += len(chunk)
synapse/datamodel.py
CHANGED
@@ -124,6 +124,7 @@ class Prop:
         self.type = None
         self.typedef = typedef

+        self.alts = None
         self.locked = False
         self.deprecated = self.info.get('deprecated', False)

@@ -250,6 +251,18 @@ class Prop:

         return (buid, {'props': pnorms, 'ndef': ndef})

+    def getAlts(self):
+        '''
+        Return a list of Prop instances that are considered
+        alternative locations for our property value, including
+        self.
+        '''
+        if self.alts is None:
+            self.alts = [self]
+            for name in self.info.get('alts', ()):
+                self.alts.append(self.form.reqProp(name))
+        return self.alts
+
 class Form:
     '''
     The Form class implements data model logic for a node form.
@@ -432,6 +445,15 @@ class Form:
         '''
         return self.props.get(name)

+    def reqProp(self, name):
+        prop = self.props.get(name)
+        if prop is not None:
+            return prop
+
+        full = f'{self.name}:{name}'
+        mesg = f'No property named {full}.'
+        raise s_exc.NoSuchProp(mesg=mesg, name=full)
+
     def pack(self):
         props = {p.name: p.pack() for p in self.props.values()}
         info = {
@@ -1160,6 +1182,14 @@ class Model:
     def form(self, name):
         return self.forms.get(name)

+    def reqForm(self, name):
+        form = self.forms.get(name)
+        if form is not None:
+            return form
+
+        mesg = f'No form named {name}.'
+        raise s_exc.NoSuchForm(mesg=mesg, name=name)
+
     def univ(self, name):
         return self.univs.get(name)

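Note: a short sketch (not part of the diff, hypothetical helper) of how the new require-style accessors behave, assuming a loaded data model object. reqForm() and reqProp() raise instead of returning None, which lets callers drop explicit None checks and handle the miss at the edge.

import synapse.exc as s_exc

def resolvePropOrNone(model, formname, propname):
    # Fall back to None only here, via the exceptions raised by the req* helpers.
    try:
        return model.reqForm(formname).reqProp(propname)
    except (s_exc.NoSuchForm, s_exc.NoSuchProp):
        return None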