synapse 2.201.0__py311-none-any.whl → 2.203.0__py311-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- synapse/axon.py +4 -4
- synapse/cmds/cortex.py +4 -6
- synapse/cmds/hive.py +10 -10
- synapse/common.py +17 -58
- synapse/cortex.py +36 -29
- synapse/data/__init__.py +3 -2
- synapse/data/iana.uris.mpk +1 -0
- synapse/lib/autodoc.py +3 -3
- synapse/lib/base.py +2 -12
- synapse/lib/cell.py +9 -13
- synapse/lib/cli.py +2 -2
- synapse/lib/config.py +2 -2
- synapse/lib/encoding.py +4 -3
- synapse/lib/httpapi.py +7 -11
- synapse/lib/json.py +224 -0
- synapse/lib/lmdbslab.py +1 -1
- synapse/lib/oauth.py +176 -54
- synapse/lib/parser.py +2 -1
- synapse/lib/rstorm.py +18 -14
- synapse/lib/schemas.py +87 -1
- synapse/lib/scrape.py +35 -13
- synapse/lib/snap.py +2 -1
- synapse/lib/storm.lark +5 -4
- synapse/lib/storm.py +2 -2
- synapse/lib/storm_format.py +2 -1
- synapse/lib/stormhttp.py +11 -13
- synapse/lib/stormlib/aha.py +4 -4
- synapse/lib/stormlib/auth.py +1 -1
- synapse/lib/stormlib/cache.py +2 -2
- synapse/lib/stormlib/cortex.py +5 -5
- synapse/lib/stormlib/graph.py +1 -1
- synapse/lib/stormlib/imap.py +1 -1
- synapse/lib/stormlib/json.py +8 -11
- synapse/lib/stormlib/model.py +1 -1
- synapse/lib/stormlib/notifications.py +2 -2
- synapse/lib/stormlib/oauth.py +105 -2
- synapse/lib/stormlib/stats.py +4 -0
- synapse/lib/stormlib/stix.py +3 -4
- synapse/lib/stormlib/vault.py +6 -6
- synapse/lib/stormlib/xml.py +2 -2
- synapse/lib/stormtypes.py +19 -28
- synapse/lib/structlog.py +3 -3
- synapse/lib/types.py +2 -1
- synapse/lib/version.py +2 -2
- synapse/lib/view.py +7 -3
- synapse/models/base.py +51 -2
- synapse/telepath.py +79 -18
- synapse/tests/files/__init__.py +0 -1
- synapse/tests/test_axon.py +1 -1
- synapse/tests/test_cmds_cortex.py +3 -2
- synapse/tests/test_cmds_hive.py +4 -4
- synapse/tests/test_common.py +29 -19
- synapse/tests/test_cortex.py +28 -8
- synapse/tests/test_lib_ast.py +3 -3
- synapse/tests/test_lib_autodoc.py +5 -5
- synapse/tests/test_lib_base.py +1 -1
- synapse/tests/test_lib_cell.py +24 -7
- synapse/tests/test_lib_config.py +2 -2
- synapse/tests/test_lib_encoding.py +2 -2
- synapse/tests/test_lib_grammar.py +68 -64
- synapse/tests/test_lib_httpapi.py +13 -13
- synapse/tests/test_lib_json.py +219 -0
- synapse/tests/test_lib_multislabseqn.py +2 -1
- synapse/tests/test_lib_node.py +2 -2
- synapse/tests/test_lib_scrape.py +50 -0
- synapse/tests/test_lib_storm.py +12 -6
- synapse/tests/test_lib_stormhttp.py +4 -4
- synapse/tests/test_lib_stormlib_auth.py +3 -2
- synapse/tests/test_lib_stormlib_cortex.py +10 -12
- synapse/tests/test_lib_stormlib_infosec.py +2 -3
- synapse/tests/test_lib_stormlib_json.py +18 -21
- synapse/tests/test_lib_stormlib_log.py +1 -1
- synapse/tests/test_lib_stormlib_oauth.py +603 -1
- synapse/tests/test_lib_stormlib_stats.py +13 -3
- synapse/tests/test_lib_stormlib_stix.py +5 -5
- synapse/tests/test_lib_stormtypes.py +4 -4
- synapse/tests/test_lib_structlog.py +5 -6
- synapse/tests/test_lib_view.py +8 -0
- synapse/tests/test_model_base.py +32 -0
- synapse/tests/test_model_infotech.py +2 -2
- synapse/tests/test_telepath.py +56 -35
- synapse/tests/test_tools_cryo_cat.py +4 -3
- synapse/tests/test_tools_docker_validate.py +4 -2
- synapse/tests/test_tools_feed.py +30 -2
- synapse/tests/test_tools_genpkg.py +1 -1
- synapse/tests/test_tools_healthcheck.py +8 -7
- synapse/tests/test_utils.py +2 -2
- synapse/tests/test_utils_getrefs.py +35 -28
- synapse/tests/utils.py +3 -3
- synapse/tools/autodoc.py +3 -3
- synapse/tools/changelog.py +2 -2
- synapse/tools/cryo/cat.py +3 -3
- synapse/tools/csvtool.py +2 -3
- synapse/tools/docker/validate.py +5 -5
- synapse/tools/feed.py +2 -1
- synapse/tools/genpkg.py +3 -2
- synapse/tools/healthcheck.py +2 -3
- synapse/tools/json2mpk.py +2 -2
- synapse/utils/getrefs.py +10 -8
- synapse/vendor/cpython/lib/json.py +35 -0
- synapse/vendor/cpython/lib/test/test_json.py +22 -0
- {synapse-2.201.0.dist-info → synapse-2.203.0.dist-info}/METADATA +2 -1
- {synapse-2.201.0.dist-info → synapse-2.203.0.dist-info}/RECORD +106 -101
- {synapse-2.201.0.dist-info → synapse-2.203.0.dist-info}/WHEEL +1 -1
- {synapse-2.201.0.dist-info → synapse-2.203.0.dist-info}/LICENSE +0 -0
- {synapse-2.201.0.dist-info → synapse-2.203.0.dist-info}/top_level.txt +0 -0
synapse/lib/json.py
ADDED
@@ -0,0 +1,224 @@
+import io
+import os
+import json
+import logging
+
+from typing import Any, BinaryIO, Callable, Iterator, Optional
+
+from synapse.vendor.cpython.lib.json import detect_encoding
+
+import orjson
+
+import synapse.exc as s_exc
+
+logger = logging.getLogger(__name__)
+
+def _fallback_loads(s: str | bytes) -> Any:
+
+    try:
+        return json.loads(s)
+    except json.JSONDecodeError as exc:
+        raise s_exc.BadJsonText(mesg=exc.args[0])
+
+def loads(s: str | bytes) -> Any:
+    '''
+    Deserialize a JSON string.
+
+    Similar to the standard library json.loads().
+
+    Arguments:
+        s (str | bytes): The JSON data to be deserialized.
+
+    Returns:
+        (object): The deserialized JSON data.
+
+    Raises:
+        synapse.exc.BadJsonText: This exception is raised when there is an error
+        deserializing the provided data.
+    '''
+    try:
+        return orjson.loads(s)
+    except orjson.JSONDecodeError as exc:
+        extra = {'synapse': {'fn': 'loads', 'reason': str(exc)}}
+        logger.warning('Using fallback JSON deserialization. Please report this to Vertex.', extra=extra)
+        return _fallback_loads(s)
+
+def load(fp: BinaryIO) -> Any:
+    '''
+    Deserialize JSON data from a file.
+
+    Similar to the standard library json.load().
+
+    Arguments:
+        fp (file): The python file pointer to read the data from.
+
+    Returns:
+        (object): The deserialized JSON data.
+
+    Raises:
+        synapse.exc.BadJsonText: This exception is raised when there is an error
+        deserializing the provided data.
+    '''
+    return loads(fp.read())
+
+def _fallback_dumps(obj: Any, sort_keys: bool = False, indent: bool = False, default: Optional[Callable] = None) -> bytes:
+    indent = 2 if indent else None
+
+    try:
+        ret = json.dumps(obj, sort_keys=sort_keys, indent=indent, default=default)
+        return ret.encode()
+    except TypeError as exc:
+        raise s_exc.MustBeJsonSafe(mesg=exc.args[0])
+
+def dumps(obj: Any, sort_keys: bool = False, indent: bool = False, default: Optional[Callable] = None, newline: bool = False) -> bytes:
+    '''
+    Serialize a python object to byte string.
+
+    Similar to the standard library json.dumps().
+
+    Arguments:
+        obj (object): The python object to serialize.
+        sort_keys (bool): Sort dictionary keys. Default: False.
+        indent (bool): Include 2 spaces of indentation. Default: False.
+        default (Optional[Callable]): Callback for serializing unknown object types. Default: None.
+        newline (bool): Append a newline to the end of the serialized data. Default: False.
+
+    Returns:
+        (bytes): The JSON serialized python object.
+
+    Raises:
+        synapse.exc.MustBeJsonSafe: This exception is raised when a python object cannot be serialized.
+    '''
+    opts = 0
+
+    if indent:
+        opts |= orjson.OPT_INDENT_2
+
+    if sort_keys:
+        opts |= orjson.OPT_SORT_KEYS
+
+    if newline:
+        opts |= orjson.OPT_APPEND_NEWLINE
+
+    try:
+        return orjson.dumps(obj, option=opts, default=default)
+
+    except orjson.JSONEncodeError as exc:
+        if not isinstance(exc.__cause__, UnicodeEncodeError):
+            raise s_exc.MustBeJsonSafe(mesg=exc.args[0])
+
+        extra = {'synapse': {'fn': 'dumps', 'reason': str(exc)}}
+        logger.warning('Using fallback JSON serialization. Please report this to Vertex.', extra=extra)
+
+        ret = _fallback_dumps(obj, sort_keys=sort_keys, indent=indent, default=default)
+
+        if newline:
+            ret += b'\n'
+
+        return ret
+
+def dump(obj: Any, fp: BinaryIO, sort_keys: bool = False, indent: bool = False, default: Optional[Callable] = None, newline: bool = False) -> None:
+    '''
+    Serialize a python object to a file-like object opened in binary mode.
+
+    Similar to the standard library json.dump().
+
+    Arguments:
+        obj (object): The python object to serialize.
+        fp (file): The python file pointer to write the serialized data to.
+        sort_keys (bool): Sort dictionary keys. Default: False.
+        indent (bool): Include 2 spaces of indentation. Default: False.
+        default (Optional[Callable]): Callback for serializing unknown object types. Default: None.
+        newline (bool): Append a newline to the end of the serialized data. Default: False.
+
+    Returns: None
+
+    Raises:
+        synapse.exc.MustBeJsonSafe: This exception is raised when a python object cannot be serialized.
+    '''
+    data = dumps(obj, sort_keys=sort_keys, indent=indent, default=default, newline=newline)
+    fp.write(data)
+
+def jsload(*paths: str) -> Any:
+    '''
+    Deserialize the JSON data at *paths.
+
+    Arguments:
+        *paths: The file path parts to load the data from.
+
+    Returns:
+        (object): The deserialized JSON data.
+
+    Raises:
+        synapse.exc.BadJsonText: This exception is raised when there is an error
+        deserializing the provided data.
+    '''
+    import synapse.common as s_common  # Avoid circular import
+    with s_common.genfile(*paths) as fd:
+        if os.fstat(fd.fileno()).st_size == 0:
+            return None
+
+        return load(fd)
+
+def jslines(*paths: list[str]) -> Iterator[Any]:
+    '''
+    Deserialize the JSON lines data at *paths.
+
+    Arguments:
+        *paths: The file path parts to load the data from.
+
+    Yields:
+        (object): The deserialized JSON data from each line.
+
+    Raises:
+        synapse.exc.BadJsonText: This exception is raised when there is an error
+        deserializing the provided data.
+    '''
+    import synapse.common as s_common  # Avoid circular import
+    with s_common.genfile(*paths) as fd:
+        for line in fd:
+            yield loads(line)
+
+def jssave(js: Any, *paths: list[str]) -> None:
+    '''
+    Serialize the python object to a file.
+
+    Arguments:
+        js: The python object to serialize.
+        *paths: The file path parts to save the data to.
+
+    Returns: None
+
+    Raises:
+        synapse.exc.MustBeJsonSafe: This exception is raised when a python
+        object cannot be serialized.
+    '''
+    import synapse.common as s_common  # Avoid circular import
+    path = s_common.genpath(*paths)
+    with io.open(path, 'wb') as fd:
+        dump(js, fd, sort_keys=True, indent=True)
+
+def reqjsonsafe(item: Any, strict: bool = False) -> None:
+    '''
+    Check if a python object is safe to be serialized to JSON.
+
+    Uses default type coercion from synapse.lib.json.dumps.
+
+    Arguments:
+        item (any): The python object to check.
+        strict (bool): If specified, do not fallback to python json library which is
+        more permissive of unicode strings. Default: False
+
+    Returns: None if item is json serializable, otherwise raises an exception.
+
+    Raises:
+        synapse.exc.MustBeJsonSafe: This exception is raised when the item
+        cannot be serialized.
+    '''
+    if strict:
+        try:
+            orjson.dumps(item)
+        except Exception as exc:
+            raise s_exc.MustBeJsonSafe(mesg=exc.args[0])
+    else:
+        dumps(item)
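Note: the new module wraps orjson and falls back to the stdlib json module, raising synapse exceptions instead of JSONDecodeError/TypeError. A minimal usage sketch based only on the signatures above (all sample values and paths are illustrative, not from the package):

import synapse.lib.json as s_json

byts = s_json.dumps({'foo': 'bar'}, sort_keys=True, indent=True)  # b'{\n  "foo": "bar"\n}'
data = s_json.loads(byts)                                         # {'foo': 'bar'}
s_json.jssave(data, '/tmp', 'foo.json')                           # writes sorted, indented JSON to /tmp/foo.json
s_json.reqjsonsafe({'nope': {1, 2}})                               # raises synapse.exc.MustBeJsonSafe (sets are not JSON safe)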
synapse/lib/lmdbslab.py
CHANGED
@@ -870,7 +870,7 @@ class Slab(s_base.Base):

         initial_mapsize = opts.get('map_size')
         if initial_mapsize is None:
-            raise s_exc.BadArg('Slab requires map_size')
+            raise s_exc.BadArg(mesg='Slab requires map_size')

         mdbpath = s_common.genpath(path, 'data.mdb')
         if os.path.isfile(mdbpath):
synapse/lib/oauth.py
CHANGED
@@ -1,3 +1,4 @@
+import os
 import copy
 import heapq
 import asyncio
@@ -10,7 +11,7 @@ import synapse.common as s_common

 import synapse.lib.coro as s_coro
 import synapse.lib.nexus as s_nexus
-import synapse.lib.
+import synapse.lib.schemas as s_schemas
 import synapse.lib.lmdbslab as s_lmdbslab

 logger = logging.getLogger(__name__)
@@ -19,51 +20,11 @@ KEY_LEN = 32 # length of a provider/user iden in a key
 REFRESH_WINDOW = 0.5  # refresh in REFRESH_WINDOW * expires_in
 DEFAULT_TIMEOUT = 10  # secs

-reqValidProvider = s_config.getJsValidator({
-    'type': 'object',
-    'properties': {
-        'iden': {'type': 'string', 'pattern': s_config.re_iden},
-        'name': {'type': 'string'},
-        'flow_type': {'type': 'string', 'default': 'authorization_code', 'enum': ['authorization_code']},
-        'auth_scheme': {'type': 'string', 'default': 'basic', 'enum': ['basic']},
-        'client_id': {'type': 'string'},
-        'client_secret': {'type': 'string'},
-        'scope': {'type': 'string'},
-        'ssl_verify': {'type': 'boolean', 'default': True},
-        'auth_uri': {'type': 'string'},
-        'token_uri': {'type': 'string'},
-        'redirect_uri': {'type': 'string'},
-        'extensions': {
-            'type': 'object',
-            'properties': {
-                'pkce': {'type': 'boolean'},
-            },
-            'additionalProperties': False,
-        },
-        'extra_auth_params': {
-            'type': 'object',
-            'additionalProperties': {'type': 'string'},
-        },
-    },
-    'additionalProperties': False,
-    'required': ['iden', 'name', 'client_id', 'client_secret', 'scope', 'auth_uri', 'token_uri', 'redirect_uri'],
-})
-
-reqValidTokenResponse = s_config.getJsValidator({
-    'type': 'object',
-    'properties': {
-        'access_token': {'type': 'string'},
-        'expires_in': {'type': 'number', 'exclusiveMinimum': 0},
-    },
-    'additionalProperties': True,
-    'required': ['access_token', 'expires_in'],
-})
-
 def normOAuthTokenData(issued_at, data):
     '''
     Normalize timestamps to be in epoch millis and set expires_at/refresh_at.
     '''
-
+    s_schemas.reqValidOauth2TokenResponse(data)
     expires_in = data['expires_in']
     return {
         'access_token': data['access_token'],
@@ -73,6 +34,29 @@ def normOAuthTokenData(issued_at, data):
         'refresh_token': data.get('refresh_token'),
     }

+az_tfile_envar = 'AZURE_FEDERATED_TOKEN_FILE'
+def _getAzureTokenFile() -> tuple[bool, str]:
+    fp = os.getenv(az_tfile_envar, None)
+    if fp is None:
+        return False, f'{az_tfile_envar} environment variable is not set.'
+    if os.path.exists(fp):
+        with open(fp, 'r') as fd:
+            assertion = fd.read()
+        return True, assertion
+    else:
+        return False, f'{az_tfile_envar} file does not exist {fp}'
+
+az_clientid_envar = 'AZURE_CLIENT_ID'
+def _getAzureClientId() -> tuple[bool, str]:
+    valu = os.getenv(az_clientid_envar, None)
+    if valu is None:
+        return False, f'{az_clientid_envar} environment variable is not set.'
+    if valu:
+        return True, valu
+    else:
+        return False, f'{az_clientid_envar} is set to an empty string.'
+
+
 class OAuthMixin(s_nexus.Pusher):
     '''
     Mixin for Cells to organize and execute OAuth token refreshes.
@@ -175,7 +159,7 @@ class OAuthMixin(s_nexus.Pusher):
                 logger.debug(f'OAuth V2 client does not exist for provider={provideriden} user={useriden}')
                 continue

-            ok, data = await self._refreshOAuthAccessToken(providerconf, clientconf)
+            ok, data = await self._refreshOAuthAccessToken(providerconf, clientconf, useriden)
             if not ok:
                 logger.warning(f'OAuth V2 token refresh failed provider={provideriden} user={useriden} data={data}')

@@ -185,12 +169,19 @@ class OAuthMixin(s_nexus.Pusher):
                 self._oauth_sched_empty.set()
                 await s_coro.event_wait(self._oauth_sched_wake)
                 self._oauth_sched_wake.clear()
+                self._oauth_sched_ran.clear()
+
+    async def _getOAuthAccessToken(self, providerconf, useriden, authcode, code_verifier=None):
+
+        ok, data = await self._getAuthData(providerconf, useriden)
+        if not ok:
+            return ok, data

-    async def _getOAuthAccessToken(self, providerconf, authcode, code_verifier=None):
         token_uri = providerconf['token_uri']
         ssl_verify = providerconf['ssl_verify']

-        formdata =
+        auth, formdata = self._unpackAuthData(data)
+
         formdata.add_field('grant_type', 'authorization_code')
         formdata.add_field('scope', providerconf['scope'])
         formdata.add_field('redirect_uri', providerconf['redirect_uri'])
@@ -198,19 +189,23 @@ class OAuthMixin(s_nexus.Pusher):
         if code_verifier is not None:
             formdata.add_field('code_verifier', code_verifier)

-        auth = aiohttp.BasicAuth(providerconf['client_id'], password=providerconf['client_secret'])
         return await self._fetchOAuthToken(token_uri, auth, formdata, ssl_verify=ssl_verify)

-    async def _refreshOAuthAccessToken(self, providerconf, clientconf):
+    async def _refreshOAuthAccessToken(self, providerconf, clientconf, useriden):
+
+        ok, data = await self._getAuthData(providerconf, useriden)
+        if not ok:
+            return ok, data
+
         token_uri = providerconf['token_uri']
         ssl_verify = providerconf['ssl_verify']
         refresh_token = clientconf['refresh_token']

-        formdata =
+        auth, formdata = self._unpackAuthData(data)
+
         formdata.add_field('grant_type', 'refresh_token')
         formdata.add_field('refresh_token', refresh_token)

-        auth = aiohttp.BasicAuth(providerconf['client_id'], password=providerconf['client_secret'])
         ok, data = await self._fetchOAuthToken(token_uri, auth, formdata, ssl_verify=ssl_verify, retries=3)
         if ok and not data.get('refresh_token'):
             # if a refresh_token is not provided in the response persist the existing token
@@ -218,6 +213,77 @@ class OAuthMixin(s_nexus.Pusher):

         return ok, data

+    async def _getAuthData(self, providerconf, useriden):
+        isok = False
+        ret = {}
+        auth_scheme = providerconf['auth_scheme']
+
+        if auth_scheme == 'basic':
+            ret['auth'] = {'login': providerconf['client_id'], 'password': providerconf['client_secret']}
+            ret['formdata'] = {}
+            isok = True
+
+        elif auth_scheme == 'client_assertion':
+            assertion = None
+            client_id = providerconf.get('client_id', None)
+            client_assertion = providerconf['client_assertion']
+
+            if (info := client_assertion.get('cortex:callstorm')):
+                opts = {
+                    'view': info['view'],
+                    'vars': info.get('vars', {}),
+                    'user': useriden,
+                }
+                try:
+                    ok, info = await self.callStorm(info['query'], opts=opts)
+                except Exception as e:
+                    isok = False
+                    ret['error'] = f'Error executing callStorm: {e}'
+                else:
+                    if not ok:
+                        return ok, info
+                    assertion = info.get('token')
+
+            elif (info := client_assertion.get('msft:azure:workloadidentity')):
+                ok, valu = _getAzureTokenFile()
+                if not ok:
+                    return ok, {'error': valu}
+                assertion = valu
+                if info.get('client_id'):
+                    ok, valu = _getAzureClientId()
+                    if not ok:
+                        return ok, {'error': valu}
+                    client_id = valu
+
+            else:
+                isok = False
+                ret['error'] = f'Unknown client_assertions data: {client_assertion}'
+
+            if assertion:
+                formdata = {
+                    'client_id': client_id,
+                    'client_assertion': assertion,
+                    'client_assertion_type': 'urn:ietf:params:oauth:client-assertion-type:jwt-bearer',
+                }
+                ret['formdata'] = formdata
+                isok = True
+
+        else:
+            isok = False
+            ret['error'] = f'Unknown authorization scheme: {auth_scheme}'
+
+        return isok, ret
+
+    @staticmethod
+    def _unpackAuthData(data: dict) -> tuple[aiohttp.BasicAuth | None, aiohttp.FormData]:
+        auth = data.get('auth', None)  # type: dict | None
+        if auth:
+            auth = aiohttp.BasicAuth(auth.get('login'), password=auth.get('password'))
+        formdata = aiohttp.FormData()
+        for k, v in data.get('formdata', {}).items():
+            formdata.add_field(k, v)
+        return auth, formdata
+
     async def _fetchOAuthToken(self, url, auth, formdata, ssl_verify=True, retries=1):

         headers = {
@@ -269,12 +335,69 @@ class OAuthMixin(s_nexus.Pusher):
             return retn

     async def addOAuthProvider(self, conf):
-        conf = reqValidProvider(conf)

+        conf = s_schemas.reqValidOauth2Provider(conf)
         iden = conf['iden']
         if self._getOAuthProvider(iden) is not None:
             raise s_exc.DupIden(mesg=f'Duplicate OAuth V2 client iden ({iden})', iden=iden)

+        # N.B. The schema ensures that the possible values in the conf are valid
+        # when they are provided. Since writing multi-path schemas in draft07 is
+        # overly complicated, some of the mutual exclusion values and logical
+        # "is this meaningful?" type checks are made here before pushing the
+        # nexus event to create the provider.
+
+        client_secret = conf.get('client_secret')
+        client_assertion = conf.get('client_assertion', {})
+
+        if client_assertion and client_secret:
+            mesg = 'client_assertion and client_secret provided. These are mutually exclusive options.'
+            raise s_exc.BadArg(mesg=mesg)
+        if not client_assertion and not client_secret:
+            mesg = 'client_assertion and client_secret missing. These are mutually exclusive options and one must be provided.'
+            raise s_exc.BadArg(mesg=mesg)
+
+        auth_scheme = conf.get('auth_scheme')
+        client_id = conf.get('client_id')
+        if auth_scheme == 'basic':
+            if not client_id:
+                raise s_exc.BadArg(mesg='Must provide client_id for auth_scheme=basic')
+            if not client_secret:
+                raise s_exc.BadArg(mesg='Must provide client_secret for auth_scheme=basic')
+
+        elif auth_scheme == 'client_assertion':
+            if (info := client_assertion.get('cortex:callstorm')) is not None:
+                if not hasattr(self, 'callStorm'):
+                    mesg = f'cortex:callstorm client assertion not supported by {self.__class__.__name__}'
+                    raise s_exc.BadArg(mesg=mesg)
+
+                if not client_id:
+                    raise s_exc.BadArg(mesg='Must provide client_id for with cortex:callstorm provider.')
+
+                text = info['query']
+                # Validate the query text
+                try:
+                    await self.reqValidStorm(text)
+                except s_exc.BadSyntax as e:
+                    raise s_exc.BadArg(mesg=f'Bad storm query: {e.get("mesg")}') from None
+                view = self.getView(info['view'])
+                if view is None:
+                    raise s_exc.BadArg(mesg=f'View {info["view"]} does not exist.')
+            elif (info := client_assertion.get('msft:azure:workloadidentity')) is not None:
+                if not info.get('token'):
+                    raise s_exc.BadArg(mesg='msft:azure:workloadidentity token key must be true')
+                ok, tknkvalu = _getAzureTokenFile()
+                if not ok:
+                    raise s_exc.BadArg(mesg=f'Failed to get the client_assertion data: {tknkvalu}')
+                if info.get('client_id'):
+                    if client_id:
+                        raise s_exc.BadArg(mesg='Cannot specify a fixed client_id and a dynamic client_id value.')
+                    ok, idvalu = _getAzureClientId()
+                    if not ok:
+                        raise s_exc.BadArg(mesg=f'Failed to get the client_id data: {idvalu}')
+        else:  # pragma: no cover
+            raise s_exc.BadArg(mesg=f'Unknown auth_scheme={auth_scheme}')
+
         await self._push('oauth:provider:add', conf)

     @s_nexus.Pusher.onPush('oauth:provider:add')
@@ -291,7 +414,7 @@ class OAuthMixin(s_nexus.Pusher):
     async def getOAuthProvider(self, iden):
         conf = self._getOAuthProvider(iden)
         if conf is not None:
-            conf.pop('client_secret')
+            conf.pop('client_secret', None)
         return conf

     async def listOAuthProviders(self):
@@ -309,7 +432,7 @@ class OAuthMixin(s_nexus.Pusher):

         conf = self._oauth_providers.pop(iden)
         if conf is not None:
-            conf.pop('client_secret')
+            conf.pop('client_secret', None)

         return conf

@@ -374,10 +497,9 @@ class OAuthMixin(s_nexus.Pusher):

         await self.clearOAuthAccessToken(provideriden, useriden)

-        ok, data = await self._getOAuthAccessToken(providerconf, authcode, code_verifier=code_verifier)
+        ok, data = await self._getOAuthAccessToken(providerconf, useriden, authcode, code_verifier=code_verifier)
         if not ok:
             raise s_exc.SynErr(mesg=f'Failed to get OAuth v2 token: {data["error"]}')
-
         await self._setOAuthTokenData(provideriden, useriden, data)

     @s_nexus.Pusher.onPushAuto('oauth:client:data:set')
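Note: the oauth.py changes add an auth_scheme of 'client_assertion' so a provider can authenticate with a signed JWT assertion instead of a client_secret. A hedged illustration of what such a provider conf might look like, based only on the checks visible in addOAuthProvider above; the authoritative field set is reqValidOauth2Provider in synapse/lib/schemas.py (not shown in this diff), and every value below is a placeholder:

provider = {
    'iden': 32 * 'a',
    'name': 'azure-workload-identity',
    'auth_scheme': 'client_assertion',
    'client_assertion': {
        # requires AZURE_FEDERATED_TOKEN_FILE to be readable when the provider is
        # added and again on each token request; 'client_id': True also pulls the
        # client id from AZURE_CLIENT_ID instead of a fixed top-level client_id.
        'msft:azure:workloadidentity': {'token': True, 'client_id': True},
    },
    'scope': 'https://graph.example.com/.default',
    'auth_uri': 'https://login.example.com/authorize',
    'token_uri': 'https://login.example.com/token',
    'redirect_uri': 'https://cortex.example.com/oauth2',
}
await cell.addOAuthProvider(provider)  # no client_secret: it is mutually exclusive with client_assertion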
synapse/lib/parser.py
CHANGED
@@ -75,7 +75,7 @@ terminalEnglishMap = {
     'MODSET': '+= or -=',
     'MODSETMULTI': '++= or --=',
     'NONQUOTEWORD': 'unquoted value',
-    '
+    'NOTOP': 'not',
     'NULL': 'null',
     'NUMBER': 'number',
     'OCTNUMBER': 'number',
@@ -134,6 +134,7 @@ terminalEnglishMap = {
     '_LPARNOSPACE': '(',
     '_MATCHHASH': '#',
     '_MATCHHASHWILD': '#',
+    '_NOT': 'not',
     '_RETURN': 'return',
     '_REVERSE': 'reverse',
     '_RIGHTJOIN': '-+>',
synapse/lib/rstorm.py
CHANGED
@@ -1,6 +1,5 @@
 import os
 import copy
-import json
 import pprint
 import logging
 import contextlib
@@ -15,6 +14,7 @@ import synapse.exc as s_exc
 import synapse.common as s_common

 import synapse.lib.base as s_base
+import synapse.lib.json as s_json
 import synapse.lib.output as s_output
 import synapse.lib.dyndeps as s_dyndeps
 import synapse.lib.stormhttp as s_stormhttp
@@ -104,11 +104,13 @@ class StormOutput(s_cmds_cortex.StormCmd):
         body = resp.get('body')

         if isinstance(body, (dict, list)):
-            body =
+            body = s_json.dumps(body)
+        elif isinstance(body, str):
+            body = body.encode()

         info = {
             'code': resp.get('code', 200),
-            'body': body
+            'body': body,
         }

         return s_stormhttp.HttpResp(info)
@@ -126,8 +128,8 @@ class StormOutput(s_cmds_cortex.StormCmd):
         # in any of those cases, default to using vcr
         try:
             with open(path, 'r') as fd:
-                byts =
-        except (FileNotFoundError,
+                byts = s_json.load(fd)
+        except (FileNotFoundError, s_exc.BadJsonText):
             byts = None

         if not byts:
@@ -247,11 +249,13 @@ class StormCliOutput(s_storm.StormCli):
         body = resp.get('body')

         if isinstance(body, (dict, list)):
-            body =
+            body = s_json.dumps(body)
+        elif isinstance(body, str):
+            body = body.encode()

         info = {
             'code': resp.get('code', 200),
-            'body': body
+            'body': body,
         }

         return s_stormhttp.HttpResp(info)
@@ -269,8 +273,8 @@ class StormCliOutput(s_storm.StormCli):
         # in any of those cases, default to using vcr
         try:
             with open(path, 'r') as fd:
-                byts =
-        except (FileNotFoundError,
+                byts = s_json.load(fd)
+        except (FileNotFoundError, s_exc.BadJsonText):
             byts = None

         if not byts:
@@ -459,7 +463,7 @@ class StormRst(s_base.Base):

         splts = text.split(' ', 2)
         ctor, svcname = splts[:2]
-        svcconf =
+        svcconf = s_json.loads(splts[2].strip()) if len(splts) == 3 else {}

         svc = await self._getCell(ctor, conf=svcconf)

@@ -480,7 +484,7 @@ class StormRst(s_base.Base):
             raise s_exc.SynErr(mesg=f'Package onload failed to run for service {svcname}')

     async def _handleStormFail(self, text):
-        valu =
+        valu = s_json.loads(text)
         assert valu in (True, False), f'storm-fail must be a boolean: {text}'
         self.context['storm-fail'] = valu

@@ -497,7 +501,7 @@ class StormRst(s_base.Base):
     async def _handleStormMultiline(self, text):
         key, valu = text.split('=', 1)
         assert key.isupper()
-        valu =
+        valu = s_json.loads(valu)
         assert isinstance(valu, str)
         multi = self.context.get('multiline', {})
         multi[key] = valu
@@ -510,7 +514,7 @@ class StormRst(s_base.Base):
        Args:
            text (str): JSON string, e.g. {"vars": {"foo": "bar"}}
        '''
-        item =
+        item = s_json.loads(text)
         self.context['storm-opts'] = item

     async def _handleStormClearHttp(self, text):
@@ -578,7 +582,7 @@ class StormRst(s_base.Base):
        Args:
            text (str): JSON string, e.g. {"filter_query_args": true}
        '''
-        item =
+        item = s_json.loads(text)
         self.context['storm-vcr-opts'] = item

     async def _handleStormVcrCallback(self, text):
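Note: the extra isinstance(body, str) branches above account for s_json.dumps() returning bytes (orjson) where the stdlib json module used by the previous code returned str. A quick illustration of that difference, using each library's defaults:

import json
import synapse.lib.json as s_json

json.dumps({'a': 1})    # '{"a": 1}'  (str, spaces after separators)
s_json.dumps({'a': 1})  # b'{"a":1}'  (bytes, compact)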
|