bsb-json 4.0.0rc2__py3-none-any.whl → 4.2.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.

Automated checks flagged this version of bsb-json as potentially problematic. Consult the registry's advisory page for this release for further details.

bsb_json/__init__.py CHANGED
@@ -4,4 +4,4 @@ JSON parser and utilities for the BSB.
4
4
 
5
5
  from .schema import get_json_schema, get_schema
6
6
 
7
- __version__ = "4.0.0-rc2"
7
+ __version__ = "4.2.0"
bsb_json/parser.py CHANGED
@@ -1,340 +1,49 @@
1
- """
2
- JSON parsing module. Built on top of the Python ``json`` module. Adds JSON imports and
3
- references.
4
- """
5
-
6
- import json
7
- import os
8
-
9
- import numpy as np
10
- from bsb import ConfigurationParser, ConfigurationWarning, ParserError, warn
11
-
12
-
13
- class JsonImportError(ParserError):
14
- pass
15
-
16
-
17
- class JsonReferenceError(ParserError):
18
- pass
19
-
20
-
21
- def _json_iter(obj): # pragma: nocover
22
- if isinstance(obj, dict):
23
- return obj.items()
24
- elif isinstance(obj, list):
25
- return iter(obj)
26
- else:
27
- return iter(())
28
-
29
-
30
- class parsed_node:
31
- def location(self):
32
- return "/" + "/".join(str(part) for part in self._location_parts([]))
33
-
34
- def _location_parts(self, carry):
35
- if hasattr(self, "_parent"):
36
- self._parent._location_parts(carry)
37
- carry.append(self._key)
38
- return carry
39
-
40
- def __str__(self):
41
- return f"<parsed json config '{super().__str__()}' at '{self.location()}'>"
42
-
43
- def __repr__(self):
44
- return super().__str__()
45
-
46
-
47
- def _traverse_wrap(node, iter):
48
- for key, value in iter:
49
- if type(value) in recurse_handlers:
50
- value, iter = recurse_handlers[type(value)](value, node)
51
- value._key = key
52
- value._parent = node
53
- node[key] = value
54
- _traverse_wrap(value, iter)
55
-
56
-
57
- class parsed_dict(dict, parsed_node):
58
- def merge(self, other):
59
- """
60
- Recursively merge the values of another dictionary into us
61
- """
62
- for key, value in other.items():
63
- if key in self and isinstance(self[key], dict) and isinstance(value, dict):
64
- if not isinstance(self[key], parsed_dict): # pragma: nocover
65
- self[key] = d = parsed_dict(self[key])
66
- d._key = key
67
- d._parent = self
68
- self[key].merge(value)
69
- elif isinstance(value, dict):
70
- self[key] = d = parsed_dict(value)
71
- d._key = key
72
- d._parent = self
73
- _traverse_wrap(d, d.items())
74
- else:
75
- if isinstance(value, list):
76
- value = parsed_list(value)
77
- value._key = key
78
- value._parent = self
79
- self[key] = value
80
-
81
- def rev_merge(self, other):
82
- """
83
- Recursively merge ourself onto another dictionary
84
- """
85
- m = parsed_dict(other)
86
- _traverse_wrap(m, m.items())
87
- m.merge(self)
88
- self.clear()
89
- self.update(m)
90
- for v in self.values():
91
- if hasattr(v, "_parent"):
92
- v._parent = self
93
-
94
-
95
- class parsed_list(list, parsed_node):
96
- pass
97
-
98
-
99
- class json_ref:
100
- def __init__(self, node, doc, ref):
101
- self.node = node
102
- self.doc = doc
103
- self.ref = ref
104
-
105
- def resolve(self, parser, target):
106
- del self.node["$ref"]
107
- self.node.rev_merge(target)
108
-
109
- def __str__(self):
110
- return "<json ref '{}'>".format(
111
- ((self.doc + "#") if self.doc else "") + self.ref
112
- )
113
-
114
-
115
- class json_imp(json_ref):
116
- def __init__(self, node, doc, ref, values):
117
- super().__init__(node, doc, ref)
118
- self.values = values
119
-
120
- def resolve(self, parser, target):
121
- del self.node["$import"]
122
- for key in self.values:
123
- if key not in target:
124
- raise JsonImportError(
125
- "'{}' does not exist in import node '{}'".format(key, self.ref)
126
- )
127
- if isinstance(target[key], dict):
128
- imported = parsed_dict()
129
- imported.merge(target[key])
130
- imported._key = key
131
- imported._parent = self.node
132
- if key in self.node:
133
- if isinstance(self.node[key], dict):
134
- imported.merge(self.node[key])
135
- else:
136
- warn(
137
- f"Importkey '{key}' of {self} is ignored because the parent"
138
- f" already contains a key '{key}'"
139
- f" with value '{self.node[key]}'.",
140
- ConfigurationWarning,
141
- stacklevel=3,
142
- )
143
- continue
144
- self.node[key] = imported
145
- elif isinstance(target[key], list):
146
- imported, iter = _prep_list(target[key], self.node)
147
- imported._key = key
148
- imported._parent = self.node
149
- self.node[key] = imported
150
- _traverse_wrap(imported, iter)
151
- else:
152
- self.node[key] = target[key]
153
-
154
-
155
- def _to_json(value):
156
- if isinstance(value, np.ndarray):
157
- return value.tolist()
158
- else:
159
- raise TypeError(f"Can't encode '{value}' ({type(value)})")
160
-
161
-
162
- class JsonParser(ConfigurationParser):
163
- """
164
- Parser plugin class to parse JSON configuration files.
165
- """
166
-
167
- data_description = "JSON"
168
- data_extensions = ("json",)
169
- data_syntax = "json"
170
-
171
- def parse(self, content, path=None):
172
- # Parses the content. If path is set it's used as the root for the multi-document
173
- # features. During parsing the references (refs & imps) are stored. After parsing
174
- # the other documents are parsed by the standard json module (so no recursion yet)
175
- # After loading all required documents the references are resolved and all values
176
- # copied over to their final destination.
177
- if isinstance(content, str):
178
- content = parsed_dict(json.loads(content))
179
- self.root = content
180
- self.path = path or os.getcwd()
181
- self.is_doc = path and not os.path.isdir(path)
182
- self.references = []
183
- self.documents = {}
184
- self._traverse(content, content.items())
185
- self.resolved_documents = {}
186
- self._resolve_documents()
187
- self._resolve_references()
188
- meta = {"path": path}
189
- return content, meta
190
-
191
- def generate(self, tree, pretty=False):
192
- if pretty:
193
- return json.dumps(tree, indent=4, default=_to_json)
194
- else:
195
- return json.dumps(tree, default=_to_json)
196
-
197
- def _traverse(self, node, iter):
198
- # Iterates over all values in `iter` and checks for import keys, recursion or refs
199
- # Also wraps all nodes in their `parsed_*` counterparts.
200
- for key, value in iter:
201
- if self._is_import(key):
202
- self._store_import(node)
203
- elif type(value) in recurse_handlers:
204
- # The recurse handlers wrap the dicts and lists and return appropriate
205
- # iterators for them.
206
- value, iter = recurse_handlers[type(value)](value, node)
207
- # Set some metadata on the wrapped recursable objects.
208
- value._key = key
209
- value._parent = node
210
- # Overwrite the reference to the original object with a reference to the
211
- # wrapped object.
212
- node[key] = value
213
- # Recurse a level deeper
214
- self._traverse(value, iter)
215
- elif self._is_reference(key):
216
- self._store_reference(node, value)
217
-
218
- def _is_reference(self, key):
219
- return key == "$ref"
220
-
221
- def _is_import(self, key):
222
- return key == "$import"
223
-
224
- def _store_reference(self, node, ref):
225
- # Analyzes the reference and creates a ref object from the given data
226
- doc = _get_ref_document(ref, self.path)
227
- ref = _get_absolute_ref(node, ref)
228
- if doc not in self.documents:
229
- self.documents[doc] = set()
230
- self.documents[doc].add(ref)
231
- self.references.append(json_ref(node, doc, ref))
232
-
233
- def _store_import(self, node):
234
- # Analyzes the import node and creates a ref object from the given data
235
- imp = node["$import"]
236
- ref = imp["ref"]
237
- doc = _get_ref_document(ref)
238
- ref = _get_absolute_ref(node, ref)
239
- if doc not in self.documents:
240
- self.documents[doc] = set()
241
- self.documents[doc].add(ref)
242
- if "values" not in imp:
243
- e = RuntimeError(f"Import node {node} is missing a 'values' list.")
244
- e._bsbparser_show_user = True
245
- raise e
246
- self.references.append(json_imp(node, doc, ref, imp["values"]))
247
-
248
- def _resolve_documents(self):
249
- # Iterates over the list of stored documents parses them and fetches the content
250
- # of each reference node.
251
- for file, refs in self.documents.items():
252
- if file is None:
253
- content = self.root
254
- else:
255
- # We could open another JsonParser to easily recurse.
256
- with open(file, "r") as f:
257
- content = json.load(f)
258
- try:
259
- self.resolved_documents[file] = self._resolve_document(content, refs)
260
- except JsonReferenceError as jre:
261
- if not file:
262
- raise
263
- raise JsonReferenceError(
264
- str(jre) + " in document '{}'".format(file)
265
- ) from None
266
-
267
- def _resolve_document(self, content, refs):
268
- resolved = {}
269
- for ref in refs:
270
- resolved[ref] = self._fetch_reference(content, ref)
271
- return resolved
272
-
273
- def _fetch_reference(self, content, ref):
274
- parts = [p for p in ref.split("/")[1:] if p]
275
- n = content
276
- loc = ""
277
- for part in parts:
278
- loc += "/" + part
279
- try:
280
- n = n[part]
281
- except KeyError:
282
- raise JsonReferenceError(
283
- "'{}' in JSON reference '{}' does not exist".format(loc, ref)
284
- ) from None
285
- if not isinstance(n, dict):
286
- raise JsonReferenceError(
287
- "JSON references can only point to dictionaries. '{}' is a {}".format(
288
- "{}' in '{}".format(loc, ref) if loc != ref else ref,
289
- type(n).__name__,
290
- )
291
- )
292
- return n
293
-
294
- def _resolve_references(self):
295
- for ref in self.references:
296
- target = self.resolved_documents[ref.doc][ref.ref]
297
- ref.resolve(self, target)
298
-
299
-
300
- def _prep_dict(node, parent):
301
- return parsed_dict(node), node.items()
302
-
303
-
304
- def _prep_list(node, parent):
305
- return parsed_list(node), enumerate(node)
306
-
307
-
308
- recurse_handlers = {
309
- dict: _prep_dict,
310
- parsed_dict: _prep_dict,
311
- list: _prep_list,
312
- parsed_list: _prep_list,
313
- }
314
-
315
-
316
- def _get_ref_document(ref, base=None):
317
- if "#" not in ref or ref.split("#")[0] == "":
318
- return None
319
- doc = ref.split("#")[0]
320
- if not os.path.isabs(doc):
321
- if not base:
322
- base = os.getcwd()
323
- elif not os.path.isdir(base):
324
- base = os.path.dirname(base)
325
- if not os.path.exists(base):
326
- raise IOError("Can't find reference directory '{}'".format(base))
327
- doc = os.path.abspath(os.path.join(base, doc))
328
- return doc
329
-
330
-
331
- def _get_absolute_ref(node, ref):
332
- ref = ref.split("#")[-1]
333
- if ref.startswith("/"):
334
- path = ref
335
- else:
336
- path = os.path.join(node.location(), ref)
337
- return os.path.normpath(path).replace(os.path.sep, "/")
338
-
339
-
340
- __plugin__ = JsonParser
1
+ """
2
+ JSON parsing module. Built on top of the Python ``json`` module. Adds JSON imports and
3
+ references.
4
+ """
5
+
6
+ import json
7
+
8
+ import numpy as np
9
+ from bsb.config.parsers import ConfigurationParser, ParsesReferences
10
+
11
+
12
+ def _json_iter(obj): # pragma: nocover
13
+ if isinstance(obj, dict):
14
+ return obj.items()
15
+ elif isinstance(obj, list):
16
+ return iter(obj)
17
+ else:
18
+ return iter(())
19
+
20
+
21
+ def _to_json(value):
22
+ if isinstance(value, np.ndarray):
23
+ return value.tolist()
24
+ else:
25
+ raise TypeError(f"Can't encode '{value}' ({type(value)})")
26
+
27
+
28
+ class JsonParser(ParsesReferences, ConfigurationParser):
29
+ """
30
+ Parser plugin class to parse JSON configuration files.
31
+ """
32
+
33
+ data_description = "JSON"
34
+ data_extensions = ("json",)
35
+ data_syntax = "json"
36
+
37
+ def parse(self, content, path=None):
38
+ if isinstance(content, str):
39
+ content = json.loads(content)
40
+ return content, {"meta": path}
41
+
42
+ def generate(self, tree, pretty=False):
43
+ if pretty:
44
+ return json.dumps(tree, indent=4, default=_to_json)
45
+ else:
46
+ return json.dumps(tree, default=_to_json)
47
+
48
+
49
+ __plugin__ = JsonParser
@@ -1,85 +1,85 @@
1
- import json
2
- import typing
3
-
4
- from bsb.config import get_config_attributes
5
-
6
-
7
- def get_json_schema(root):
8
- schema = get_schema(root)
9
- return json.dumps(schema)
10
-
11
-
12
- def get_schema(root):
13
- defs = {}
14
- schema = object_schema(root, defs)
15
- schema["title"] = "Configuration"
16
- schema["description"] = "Automated JSON schema of configuration object"
17
- schema["$defs"] = defs
18
- return schema
19
-
20
-
21
- def object_schema(obj, defs=None):
22
- # Import the scaffold object here to avoid circular imports when the JSON parser is
23
- # loaded.
24
- from bsb.core import Scaffold
25
-
26
- schema = {"type": "object", "properties": {}}
27
- cls = obj.__class__
28
- obj_hints = typing.get_type_hints(cls, localns={"Scaffold": Scaffold})
29
- obj_attrs = get_config_attributes(cls)
30
- for attr, descr in obj_attrs.items():
31
- hint = obj_hints.get(attr, str)
32
- schema["properties"][attr] = attr_schema(hint, defs)
33
-
34
- return schema
35
-
36
-
37
- def attr_schema(hint, defs=None):
38
- if defs is None:
39
- defs = {}
40
- schema = {}
41
- try:
42
- is_dict = issubclass(typing.get_origin(hint), dict)
43
- is_list = issubclass(typing.get_origin(hint), list)
44
- except TypeError:
45
- is_dict = False
46
- is_list = False
47
- if hint is str:
48
- schema["type"] = "string"
49
- elif hint is int:
50
- schema["type"] = "integer"
51
- elif hint is float:
52
- schema["type"] = "number"
53
- elif hint is bool:
54
- schema["type"] = "boolean"
55
- elif is_list:
56
- schema["type"] = "array"
57
- schema["items"] = attr_schema(typing.get_args(hint)[0], defs)
58
- elif is_dict:
59
- schema["type"] = "object"
60
- schema["properties"] = {}
61
- schema["additionalProperties"] = attr_schema(typing.get_args(hint)[1], defs)
62
- else:
63
- try:
64
- is_node = get_config_attributes(hint)
65
- except:
66
- is_node = False
67
- if is_node:
68
- key = defs_key(hint)
69
- if key not in defs:
70
- defs[key] = object_schema(hint)
71
- return schema_def_ref(hint)
72
- else:
73
- schema["type"] = "object"
74
- schema["properties"] = {}
75
- schema["description"] = f"Could not determine schema of type {hint}"
76
-
77
- return schema
78
-
79
-
80
- def defs_key(hint):
81
- return str(hint.__name__)
82
-
83
-
84
- def schema_def_ref(hint):
85
- return {"$ref": f"#/$defs/{defs_key(hint)}"}
1
+ import json
2
+ import typing
3
+
4
+ from bsb.config import get_config_attributes
5
+
6
+
7
+ def get_json_schema(root):
8
+ schema = get_schema(root)
9
+ return json.dumps(schema)
10
+
11
+
12
+ def get_schema(root):
13
+ defs = {}
14
+ schema = object_schema(root, defs)
15
+ schema["title"] = "Configuration"
16
+ schema["description"] = "Automated JSON schema of configuration object"
17
+ schema["$defs"] = defs
18
+ return schema
19
+
20
+
21
+ def object_schema(obj, defs=None):
22
+ # Import the scaffold object here to avoid circular imports when the JSON parser is
23
+ # loaded.
24
+ from bsb.core import Scaffold
25
+
26
+ schema = {"type": "object", "properties": {}}
27
+ cls = obj.__class__
28
+ obj_hints = typing.get_type_hints(cls, localns={"Scaffold": Scaffold})
29
+ obj_attrs = get_config_attributes(cls)
30
+ for attr, descr in obj_attrs.items():
31
+ hint = obj_hints.get(attr, str)
32
+ schema["properties"][attr] = attr_schema(hint, defs)
33
+
34
+ return schema
35
+
36
+
37
+ def attr_schema(hint, defs=None):
38
+ if defs is None:
39
+ defs = {}
40
+ schema = {}
41
+ try:
42
+ is_dict = issubclass(typing.get_origin(hint), dict)
43
+ is_list = issubclass(typing.get_origin(hint), list)
44
+ except TypeError:
45
+ is_dict = False
46
+ is_list = False
47
+ if hint is str:
48
+ schema["type"] = "string"
49
+ elif hint is int:
50
+ schema["type"] = "integer"
51
+ elif hint is float:
52
+ schema["type"] = "number"
53
+ elif hint is bool:
54
+ schema["type"] = "boolean"
55
+ elif is_list:
56
+ schema["type"] = "array"
57
+ schema["items"] = attr_schema(typing.get_args(hint)[0], defs)
58
+ elif is_dict:
59
+ schema["type"] = "object"
60
+ schema["properties"] = {}
61
+ schema["additionalProperties"] = attr_schema(typing.get_args(hint)[1], defs)
62
+ else:
63
+ try:
64
+ is_node = get_config_attributes(hint)
65
+ except:
66
+ is_node = False
67
+ if is_node:
68
+ key = defs_key(hint)
69
+ if key not in defs:
70
+ defs[key] = object_schema(hint)
71
+ return schema_def_ref(hint)
72
+ else:
73
+ schema["type"] = "object"
74
+ schema["properties"] = {}
75
+ schema["description"] = f"Could not determine schema of type {hint}"
76
+
77
+ return schema
78
+
79
+
80
+ def defs_key(hint):
81
+ return str(hint.__name__)
82
+
83
+
84
+ def schema_def_ref(hint):
85
+ return {"$ref": f"#/$defs/{defs_key(hint)}"}
@@ -1,7 +1,7 @@
1
- # Create a class whose instances can, unlike a list, be modified by the plugin system
2
- class _obj_list(list):
3
- pass
4
-
5
-
6
- def __plugin__():
7
- return _obj_list(__path__)
1
+ # Create a class whose instances can, unlike a list, be modified by the plugin system
2
+ class _obj_list(list):
3
+ pass
4
+
5
+
6
+ def __plugin__():
7
+ return _obj_list(__path__)
@@ -1,36 +1,36 @@
1
- {
2
- "name": "Skeleton configuration",
3
- "storage": {
4
- "engine": "hdf5",
5
- "root": "network.hdf5"
6
- },
7
- "network": {
8
- "x": 100.0,
9
- "y": 100.0,
10
- "z": 100.0
11
- },
12
- "regions": {
13
-
14
- },
15
- "partitions": {
16
-
17
- },
18
- "cell_types": {
19
-
20
- },
21
- "placement": {
22
-
23
- },
24
- "after_placement": {
25
-
26
- },
27
- "connectivity": {
28
-
29
- },
30
- "after_connectivity": {
31
-
32
- },
33
- "simulations": {
34
-
35
- }
36
- }
1
+ {
2
+ "name": "Skeleton configuration",
3
+ "storage": {
4
+ "engine": "hdf5",
5
+ "root": "network.hdf5"
6
+ },
7
+ "network": {
8
+ "x": 100.0,
9
+ "y": 100.0,
10
+ "z": 100.0
11
+ },
12
+ "regions": {
13
+
14
+ },
15
+ "partitions": {
16
+
17
+ },
18
+ "cell_types": {
19
+
20
+ },
21
+ "placement": {
22
+
23
+ },
24
+ "after_placement": {
25
+
26
+ },
27
+ "connectivity": {
28
+
29
+ },
30
+ "after_connectivity": {
31
+
32
+ },
33
+ "simulations": {
34
+
35
+ }
36
+ }