xradio 0.0.40__py3-none-any.whl → 0.0.42__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- xradio/_utils/coord_math.py +100 -0
- xradio/_utils/list_and_array.py +49 -4
- xradio/_utils/schema.py +36 -16
- xradio/image/_util/_casacore/xds_from_casacore.py +5 -5
- xradio/image/_util/_casacore/xds_to_casacore.py +12 -11
- xradio/image/_util/_fits/xds_from_fits.py +18 -17
- xradio/image/_util/_zarr/zarr_low_level.py +29 -12
- xradio/image/_util/common.py +1 -1
- xradio/image/_util/image_factory.py +1 -1
- xradio/measurement_set/__init__.py +18 -0
- xradio/measurement_set/_utils/__init__.py +5 -0
- xradio/{vis/_vis_utils/_ms → measurement_set/_utils/_msv2}/_tables/load_main_table.py +1 -1
- xradio/{vis/_vis_utils/_ms → measurement_set/_utils/_msv2}/_tables/read.py +15 -1
- xradio/{vis/_vis_utils/_ms → measurement_set/_utils/_msv2}/conversion.py +186 -84
- xradio/measurement_set/_utils/_msv2/create_antenna_xds.py +535 -0
- xradio/{vis/_vis_utils/_ms → measurement_set/_utils/_msv2}/create_field_and_source_xds.py +146 -58
- xradio/measurement_set/_utils/_msv2/msv4_info_dicts.py +203 -0
- xradio/measurement_set/_utils/_msv2/msv4_sub_xdss.py +550 -0
- xradio/{vis/_vis_utils/_ms → measurement_set/_utils/_msv2}/subtables.py +1 -1
- xradio/{vis/_vis_utils → measurement_set/_utils}/_utils/xds_helper.py +1 -1
- xradio/{vis/_vis_utils/ms.py → measurement_set/_utils/msv2.py} +4 -4
- xradio/{vis/_vis_utils → measurement_set/_utils}/zarr.py +3 -3
- xradio/{vis → measurement_set}/convert_msv2_to_processing_set.py +9 -2
- xradio/{vis → measurement_set}/load_processing_set.py +16 -20
- xradio/measurement_set/measurement_set_xds.py +83 -0
- xradio/{vis/read_processing_set.py → measurement_set/open_processing_set.py} +25 -34
- xradio/measurement_set/processing_set.py +777 -0
- xradio/measurement_set/schema.py +1979 -0
- xradio/schema/check.py +42 -22
- xradio/schema/dataclass.py +56 -6
- xradio/sphinx/__init__.py +12 -0
- xradio/sphinx/schema_table.py +351 -0
- {xradio-0.0.40.dist-info → xradio-0.0.42.dist-info}/METADATA +17 -15
- xradio-0.0.42.dist-info/RECORD +76 -0
- {xradio-0.0.40.dist-info → xradio-0.0.42.dist-info}/WHEEL +1 -1
- xradio/_utils/common.py +0 -101
- xradio/vis/__init__.py +0 -14
- xradio/vis/_processing_set.py +0 -302
- xradio/vis/_vis_utils/__init__.py +0 -5
- xradio/vis/_vis_utils/_ms/create_antenna_xds.py +0 -482
- xradio/vis/_vis_utils/_ms/msv4_infos.py +0 -0
- xradio/vis/_vis_utils/_ms/msv4_sub_xdss.py +0 -306
- xradio/vis/schema.py +0 -1102
- xradio-0.0.40.dist-info/RECORD +0 -73
- /xradio/{vis/_vis_utils/_ms → measurement_set/_utils/_msv2}/_tables/load.py +0 -0
- /xradio/{vis/_vis_utils/_ms → measurement_set/_utils/_msv2}/_tables/read_main_table.py +0 -0
- /xradio/{vis/_vis_utils/_ms → measurement_set/_utils/_msv2}/_tables/read_subtables.py +0 -0
- /xradio/{vis/_vis_utils/_ms → measurement_set/_utils/_msv2}/_tables/table_query.py +0 -0
- /xradio/{vis/_vis_utils/_ms → measurement_set/_utils/_msv2}/_tables/write.py +0 -0
- /xradio/{vis/_vis_utils/_ms → measurement_set/_utils/_msv2}/_tables/write_exp_api.py +0 -0
- /xradio/{vis/_vis_utils/_ms → measurement_set/_utils/_msv2}/chunks.py +0 -0
- /xradio/{vis/_vis_utils/_ms → measurement_set/_utils/_msv2}/descr.py +0 -0
- /xradio/{vis/_vis_utils/_ms → measurement_set/_utils/_msv2}/msv2_msv3.py +0 -0
- /xradio/{vis/_vis_utils/_ms → measurement_set/_utils/_msv2}/msv2_to_msv4_meta.py +0 -0
- /xradio/{vis/_vis_utils/_ms → measurement_set/_utils/_msv2}/optimised_functions.py +0 -0
- /xradio/{vis/_vis_utils/_ms → measurement_set/_utils/_msv2}/partition_queries.py +0 -0
- /xradio/{vis/_vis_utils/_ms → measurement_set/_utils/_msv2}/partitions.py +0 -0
- /xradio/{vis/_vis_utils → measurement_set/_utils}/_utils/cds.py +0 -0
- /xradio/{vis/_vis_utils → measurement_set/_utils}/_utils/partition_attrs.py +0 -0
- /xradio/{vis/_vis_utils → measurement_set/_utils}/_utils/stokes_types.py +0 -0
- /xradio/{vis/_vis_utils → measurement_set/_utils}/_zarr/encoding.py +0 -0
- /xradio/{vis/_vis_utils → measurement_set/_utils}/_zarr/read.py +0 -0
- /xradio/{vis/_vis_utils → measurement_set/_utils}/_zarr/write.py +0 -0
- {xradio-0.0.40.dist-info → xradio-0.0.42.dist-info}/LICENSE.txt +0 -0
- {xradio-0.0.40.dist-info → xradio-0.0.42.dist-info}/top_level.txt +0 -0
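Most of the listing is a package rename: everything under `xradio.vis` moves to `xradio.measurement_set` (with `_vis_utils/_ms` becoming `_utils/_msv2`), and `read_processing_set.py` is renamed to `open_processing_set.py`. A hypothetical before/after sketch of how calling code would follow the rename; only the module paths are taken from the listing above, while the function names are assumed to mirror the module names and are not confirmed by this diff:
```python
# 0.0.40 layout (old module path):
#   from xradio.vis.read_processing_set import read_processing_set
#   ps = read_processing_set("antennae.ps.zarr")

# 0.0.42 layout (new module path; the function name is assumed to match the module):
from xradio.measurement_set.open_processing_set import open_processing_set

ps = open_processing_set("antennae.ps.zarr")  # hypothetical call, path is a placeholder
```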
xradio/schema/check.py
CHANGED

@@ -374,30 +374,50 @@ def check_data_vars(
 
     issues = SchemaIssues()
     for data_var_schema in data_vars_schema:
-
-
-
-            if
-            if
-
-
-
-
-
-
-
-
-
-
-
+
+        allow_mutiple_versions = False
+        for attr in data_var_schema.attributes:
+            if hasattr(attr, "name"):
+                if attr.name == "allow_mutiple_versions":
+                    allow_mutiple_versions = attr.default
+
+        data_vars_names = []
+        if allow_mutiple_versions:
+            for data_var_name in data_vars:
+                if data_var_schema.name in data_var_name:
+                    data_vars_names.append(data_var_name)
+        else:
+            data_vars_names = [data_var_schema.name]
+
+        if (len(data_vars_names) == 0) and ~data_var_schema.optional:
+            data_vars_names = [data_var_schema.name]
+
+        for data_var_name in data_vars_names:
+            data_var = data_vars.get(data_var_name)
+
+            if data_var is None:
+                if not data_var_schema.optional:
+                    if data_var_kind == "coords":
+                        message = (
+                            f"Required coordinate '{data_var_schema.name}' is missing!"
+                        )
+                    else:
+                        message = (
+                            f"Required data variable '{data_var_schema.name}' is missing "
+                            f"(have {','.join(data_vars)})!"
+                        )
+                    issues.add(
+                        SchemaIssue(
+                            path=[(data_var_kind, data_var_schema.name)],
+                            message=message,
+                        )
                     )
-
-            continue
+                continue
 
-
-
-
-
+            # Check array schema
+            issues += check_array(data_var, data_var_schema).at_path(
+                data_var_kind, data_var_schema.name
+            )
 
     # Extra data_varinates / data variables are always okay
 
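The rewritten block lets a single schema entry accept several stored versions of a data variable: when the schema carries an `allow_mutiple_versions` attribute (spelled that way in the source), every variable whose name contains the schema name is checked against the array schema; otherwise only the exact name is looked up. A minimal standalone sketch of that selection rule, using plain strings in place of xradio's schema objects:
```python
# Standalone sketch of the candidate selection performed by the new check_data_vars
# code above; xradio's SchemaIssues/check_array machinery is left out.
def select_candidates(schema_name, dataset_vars, allow_multiple_versions, optional):
    if allow_multiple_versions:
        # Any variable whose name contains the schema name counts as a "version" of it.
        names = [name for name in dataset_vars if schema_name in name]
    else:
        names = [schema_name]
    # The released code writes this guard as "(len(...) == 0) and ~...optional";
    # the sketch uses "not optional" for plain Python bools.
    if not names and not optional:
        names = [schema_name]
    return names

print(select_candidates(
    "VISIBILITY",
    ["VISIBILITY", "VISIBILITY_CORRECTED", "FLAG"],
    allow_multiple_versions=True,
    optional=False,
))
# -> ['VISIBILITY', 'VISIBILITY_CORRECTED']
```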
xradio/schema/dataclass.py
CHANGED

@@ -67,18 +67,60 @@ def extract_field_docstrings(klass):
     return docstrings
 
 
-def extract_xarray_dataclass(klass):
+def _check_invalid_dims(
+    dims: list[list[str]], all_coord_names: list[str], klass_name: str, field_name: str
+):
+    """
+    Check dimension possibilities for undefined coordinates
+    """
+
+    # Filter out dimension possibilities with undefined coordinates
+    valid_dims = [ds for ds in dims if set(ds).issubset(all_coord_names)]
+    # print(f"{klass_name}.{field_name}", valid_dims, dims, all_coord_names)
+
+    # Raise an exception if this makes the dimension set impossible
+    if dims and not valid_dims:
+        required_dims = sorted(map(lambda ds: set(ds) - all_coord_names, dims), key=len)
+        raise ValueError(
+            f"In '{klass_name}', field '{field_name}' has"
+            f" undefined coordinates, consider defining {required_dims}!"
+        )
+    return valid_dims
+
+
+def extract_xarray_dataclass(klass, allow_undefined_coords: bool = False):
     """
     Go through dataclass fields and interpret them according to xarray-dataclass
 
     Returns a tuple of coordinates, data variables and attributes
+
+    :param allow_undefined_coords: Allow data variables with dimensions
+        that do not have associated coordinates (e.g. for data arrays).
     """
 
     field_docstrings = extract_field_docstrings(klass)
 
+    # Collect type hints, identify coordinates
+    type_hints = get_type_hints(klass, include_extras=True)
+    if allow_undefined_coords:
+
+        def check_invalid_dims(dims, field_name):
+            return dims
+
+    else:
+        all_coord_names = {
+            field.name
+            for field in dataclasses.fields(klass)
+            if get_role(type_hints[field.name]) == Role.COORD
+        }
+
+        def check_invalid_dims(dims, field_name):
+            return _check_invalid_dims(
+                dims, all_coord_names, klass.__name__, field_name
+            )
+
     # Go through attributes, collecting coordinates, data variables and
     # attributes
-    type_hints = get_type_hints(klass, include_extras=True)
     coordinates = []
     data_vars = []
     attributes = []

@@ -122,6 +164,11 @@ def extract_xarray_dataclass(klass):
             for f in dataclasses.fields(ArraySchema)
         }
 
+            # Check for undefined coordinates
+            arr_schema_fields["dimensions"] = check_invalid_dims(
+                arr_schema_fields["dimensions"], field.name
+            )
+
         # Repackage as reference
         schema_ref = ArraySchemaRef(
             name=field.name,

@@ -152,7 +199,10 @@ def extract_xarray_dataclass(klass):
             f.name: getattr(arr_schema, f.name)
             for f in dataclasses.fields(ArraySchema)
         }
-
+
+        arr_schema_fields["dimensions"] = check_invalid_dims(
+            combined_dimensions, field.name
+        )
         schema_ref = ArraySchemaRef(
             name=field.name,
             optional=is_optional(typ),

@@ -167,8 +217,8 @@ def extract_xarray_dataclass(klass):
             optional=is_optional(typ),
             default=field.default,
             docstring=field_docstrings.get(field.name),
-            schema_name=
-            dimensions=dims,
+            schema_name=None,
+            dimensions=check_invalid_dims(dims, field.name),
             dtypes=[numpy.dtype(typ) for typ in types],
             coordinates=[],
             attributes=[],

@@ -206,7 +256,7 @@ def xarray_dataclass_to_array_schema(klass):
         return klass.__xradio_array_schema
 
     # Extract from data class
-    coordinates, data_vars, attributes = extract_xarray_dataclass(klass)
+    coordinates, data_vars, attributes = extract_xarray_dataclass(klass, True)
 
     # For a dataclass there must be exactly one data variable
     if not data_vars:
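The new `_check_invalid_dims` helper prunes a field's dimension alternatives down to those whose dimensions are all backed by declared coordinates, and raises once no alternative survives; `extract_xarray_dataclass` gains an `allow_undefined_coords` switch that disables the check, and `xarray_dataclass_to_array_schema` now passes `True`, since standalone data arrays need not declare every coordinate. A standalone sketch of the same filtering rule, detached from the dataclass machinery:
```python
# Standalone sketch of the dimension filtering added in _check_invalid_dims.
def check_invalid_dims(dims, all_coord_names, klass_name, field_name):
    # Keep only dimension alternatives fully covered by declared coordinates.
    valid = [ds for ds in dims if set(ds).issubset(all_coord_names)]
    if dims and not valid:
        missing = sorted((set(ds) - all_coord_names for ds in dims), key=len)
        raise ValueError(
            f"In '{klass_name}', field '{field_name}' has undefined coordinates, "
            f"consider defining {missing}!"
        )
    return valid

coords = {"time", "baseline_id", "frequency"}
# The first alternative references an undeclared 'antenna' dimension and is dropped;
# the class and field names below are illustrative only.
print(check_invalid_dims(
    [["time", "antenna"], ["time", "baseline_id", "frequency"]],
    coords, "VisibilityXds", "VISIBILITY",
))
# -> [['time', 'baseline_id', 'frequency']]
```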
xradio/sphinx/__init__.py
ADDED

@@ -0,0 +1,12 @@
+from . import schema_table
+
+
+def setup(app):
+    app.add_directive(
+        "xradio_array_schema_table", schema_table.ArraySchemaTableDirective
+    )
+    app.add_directive(
+        "xradio_dataset_schema_table", schema_table.DatasetSchemaTableDirective
+    )
+    app.add_directive("xradio_dict_schema_table", schema_table.DictSchemaTableDirective)
+    return {"version": "0.1.0"}
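The extension is activated like any other Sphinx extension: list the package in `conf.py` and the three directives registered by `setup()` become available. A sketch of the docs configuration, assuming this is how the xradio documentation wires it up (the schema class referenced in the usage note is illustrative, not taken from this diff):
```python
# docs/conf.py sketch (assumed setup; directive names come from setup() above).
extensions = [
    "sphinx.ext.autodoc",
    "xradio.sphinx",  # registers the xradio_*_schema_table directives
]

# A page could then render a schema table with, e.g.:
#
#   .. xradio_dataset_schema_table:: xradio.measurement_set.schema.VisibilityXds
#
# The single argument must be the absolute dotted path of an xarray dataclass,
# which SchemaTableDirective.run() imports via importlib.
```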
xradio/sphinx/schema_table.py
ADDED

@@ -0,0 +1,351 @@
+import importlib
+import dataclasses
+import typing
+
+from docutils import nodes, utils
+from docutils.parsers.rst import Directive, DirectiveError
+from docutils.parsers.rst import directives
+from docutils.utils import SystemMessagePropagation
+from docutils.statemachine import StringList
+
+from sphinx.directives import ObjectDescription
+from sphinx.util.docutils import switch_source_input
+
+from xradio.schema import (
+    xarray_dataclass_to_array_schema,
+    xarray_dataclass_to_dataset_schema,
+    xarray_dataclass_to_dict_schema,
+)
+
+
+class SchemaTableDirective(ObjectDescription):
+    required_arguments = 1  # Xarray dataclass of schema
+    has_content = False
+
+    option_spec = {
+        # "headers": directives.unchanged,
+        # "widths": directives.unchanged,
+        "title": directives.unchanged,
+        # "columns": directives.unchanged,
+        "class": directives.unchanged,
+    }
+
+    def run(self):
+        # Import the referenced class
+        klass_path = self.arguments[0].rsplit(".", 1)
+        if len(klass_path) != 2:
+            raise ValueError(
+                f"Should be absolute Python name of xarray dataclass definition: {self.arguments[0]}"
+            )
+        klass_module = importlib.import_module(klass_path[0])
+        klass = getattr(klass_module, klass_path[1])
+
+        # Make table node
+        classes = ["tbl", "colwidths-given"]
+        if "class" in self.options:
+            classes.append(self.options["class"])
+        self._table = nodes.table(
+            classes=classes, ids=[f"schema-table-{self.arguments[0]}"]
+        )
+
+        # Add title, if requested
+        if "title" in self.options:
+            self._table += nodes.title(text=caption)
+
+        # Declare columns
+        column_widths = [10, 10, 5, 5, 40]
+        self._tgroup = nodes.tgroup(cols=len(column_widths))
+        self._table += self._tgroup
+        for colwidth in column_widths:
+            self._tgroup += nodes.colspec(colwidth=colwidth)
+
+        # Create head row
+        header_row = nodes.row()
+        header_row += nodes.entry("", nodes.paragraph(text=""))
+        header_row += nodes.entry("", nodes.strong(text="Dimensions"))
+        header_row += nodes.entry("", nodes.strong(text="Dtype"))
+        header_row += nodes.entry("", nodes.strong(text="Model"))
+        header_row += nodes.entry("", nodes.strong(text="Description"))
+        self._thead = nodes.thead("", header_row)
+        self._tgroup += self._thead
+
+        # Add body
+        self._tbody = nodes.tbody()
+        self._tgroup += self._tbody
+
+        # Add table contents (overridden in subclasses)
+        self._add_table_contents(klass)
+
+        # Register table
+        # tbl = self.env.get_domain("tbl")
+        # tbl.add_table(caption, table_id)
+
+        return [self._table]
+
+    def _add_section(self, name):
+        # Create row
+        row = nodes.row("", nodes.entry("", nodes.strong("", name), morecols=4))
+        self._tbody += row
+
+    def _add_row(
+        self,
+        name="",
+        dimss=[],
+        types=[],
+        meta=None,
+        descr="",
+        optional=False,
+        default=dataclasses.MISSING,
+    ):
+        # Create row
+        row = nodes.row()
+        self._tbody += row
+
+        # Add name
+        name_nds = [nodes.literal(text=name)]
+        if optional:
+            name_nds = [nodes.Text("(")] + name_nds + [nodes.Text(")")]
+        row += nodes.entry("", *name_nds)
+
+        # Add dimensions
+        def mk_multi_entry(lines):
+            if not lines:
+                return nodes.entry()
+            if len(lines) == 0:
+                return nodes.entry(lines[0])
+            else:
+                entry = nodes.entry()
+                for i, line in enumerate(lines):
+                    entry += nodes.line("", "" if i == 0 else "or\xa0", line)
+                return entry
+
+        row += mk_multi_entry(
+            [nodes.literal(text=f"[{','.join(dims)}]") for dims in dimss]
+        )
+
+        # Add types
+        row += mk_multi_entry([nodes.literal(text=typ) for typ in types])
+
+        # Add model link
+        entry = nodes.entry()
+        row += entry
+        if meta is not None:
+            # Preformatted? Just pass through
+            if isinstance(meta, nodes.line):
+                entry += meta
+            else:
+                vl = StringList()
+                vl.append(f":py:class:`~{meta}`", "")
+                with switch_source_input(self.state, vl):
+                    self.state.nested_parse(vl, 0, entry)
+
+        # Add description
+        entry = nodes.entry()
+        row += entry
+        if descr:
+            vl = StringList()
+            vl.append(descr, "")
+            with switch_source_input(self.state, vl):
+                self.state.nested_parse(vl, 0, entry)
+        if default is not dataclasses.MISSING:
+            vl = StringList()
+            vl.append(f"**Default:** ``{repr(default)}``", "")
+            with switch_source_input(self.state, vl):
+                self.state.nested_parse(vl, 0, entry)
+
+
+def format_literals(typ):
+
+    # a | b | c: Recurse and merge
+    if typing.get_origin(typ) == typing.Union:
+        type_args = typing.get_args(typ)
+        options = []
+        for arg in type_args:
+            options += format_literals(arg)
+        return options
+
+    # Literal['a', 'b', ...]: Wrap into individual "literal" nodes
+    if typing.get_origin(typ) == typing.Literal:
+        return list(map(lambda t: nodes.literal(text=repr(t)), typing.get_args(typ)))
+
+    # list[Literal['a'], Literal['b'], ...]: Format as one literal (compound) value
+    if typing.get_origin(typ) == list:
+        type_args = typing.get_args(typ)
+        if any([typing.get_origin(arg) != typing.Literal for arg in type_args]):
+            raise ValueError(f"List must contain only literals: {typ}")
+        values = [repr(typing.get_args(val)[0]) for val in typing.get_args(typ)]
+        return [nodes.literal(text=f"[{', '.join(values)}]")]
+
+    raise ValueError(f"Must be either a type or a literal: {typ}")
+
+
+def format_attr_model_text(state, attr) -> StringList:
+    """
+    Formats the text for the 'model' column in schema tables (arrays and datasets).
+    Doesn't aim at supporting any literal types or combinations of types in general,
+    but the following three ones specifically:
+
+    - Literals (with multiple options (implicit Union of literals))
+    - List of literals (e.g. ["rad","rad"]
+    - Union of list of literals (e.g. ["m","m","m"]/["rad","rad","m"]
+
+    This is meant to produce readable text listing literals as quoted text and
+    their combinations, in schema attributes (particularly quantities and measures).
+
+    Everything else than these expected literal based types would be printed as the
+    type name.
+    """
+
+    type_args = typing.get_args(attr.typ)
+    is_list_of_literals = typing.get_origin(attr.typ) is list and all(
+        [typing.get_origin(arg) is typing.Literal for arg in type_args]
+    )
+
+    line = nodes.line()
+
+    if not is_list_of_literals:
+        # A type?
+        if isinstance(attr.typ, type):
+            vl = StringList()
+            vl.append(f":py:class:`~{attr.typ.__module__}.{attr.typ.__name__}`", "")
+            with switch_source_input(state, vl):
+                state.nested_parse(vl, 0, line)
+            return line
+
+        # Derived type, e.g. list of types?
+        if typing.get_origin(attr.typ) == list and all(
+            [isinstance(arg, type) for arg in type_args]
+        ):
+            vl = StringList()
+            vl.append("[", "")
+            for i, arg in enumerate(typing.get_args(attr.typ)):
+                if i > 0:
+                    vl.append(", ", "")
+                vl.append(f":py:class:`~{arg.__module__}.{arg.__name__}`", "")
+            vl.append("]", "")
+            with switch_source_input(state, vl):
+                state.nested_parse(vl, 0, line)
+            return line
+
+    # Assume it's a literal of some kind - collect options
+    literals = format_literals(attr.typ)
+    for i, lit in enumerate(literals):
+        if i > 0:
+            if i + 1 >= len(literals):
+                line += nodes.Text(" or\xa0")
+            else:
+                line += nodes.Text(", ")
+        line += lit
+
+    return line
+
+
+class ArraySchemaTableDirective(SchemaTableDirective):
+    def _add_table_contents(self, klass):
+        # Extract schema
+        schema = xarray_dataclass_to_array_schema(klass)
+
+        # Add dataarray reference as first element
+        self._add_row(
+            "data",
+            schema.dimensions,
+            schema.dtypes,
+            schema.schema_name,
+            schema.data_docstring,
+        )
+
+        # Add coordinates
+        if schema.coordinates:
+            self._add_section("Coordinates:")
+            for coord in schema.coordinates:
+                self._add_row(
+                    coord.name,
+                    coord.dimensions,
+                    coord.dtypes,
+                    coord.schema_name,
+                    coord.docstring or coord.data_docstring,
+                    optional=coord.optional,
+                    default=coord.default,
+                )
+
+        # Add attributes
+        if schema.attributes:
+            self._add_section("Attributes:")
+            for attr in schema.attributes:
+                model_text = format_attr_model_text(self.state, attr)
+                self._add_row(
+                    attr.name,
+                    [],
+                    [],
+                    model_text,
+                    attr.docstring,
+                    optional=attr.optional,
+                    default=attr.default,
+                )
+
+
+class DatasetSchemaTableDirective(SchemaTableDirective):
+    def _add_table_contents(self, klass):
+        # Extract schema
+        schema = xarray_dataclass_to_dataset_schema(klass)
+
+        # Add coordinates
+        if schema.coordinates:
+            self._add_section("Coordinates:")
+            for coord in schema.coordinates:
+                self._add_row(
+                    coord.name,
+                    coord.dimensions,
+                    coord.dtypes,
+                    coord.schema_name,
+                    coord.docstring or coord.data_docstring,
+                    optional=coord.optional,
+                    default=coord.default,
+                )
+
+        # Add data variables
+        if schema.data_vars:
+            self._add_section("Data Variables:")
+            for data_var in schema.data_vars:
+                self._add_row(
+                    data_var.name,
+                    data_var.dimensions,
+                    data_var.dtypes,
+                    data_var.schema_name,
+                    data_var.docstring or data_var.data_docstring,
+                    optional=data_var.optional,
+                    default=data_var.default,
+                )
+
+        # Add attributes
+        if schema.attributes:
+            self._add_section("Attributes:")
+            for attr in schema.attributes:
+                model_text = format_attr_model_text(self.state, attr)
+                self._add_row(
+                    attr.name,
+                    [],
+                    [],
+                    model_text,
+                    attr.docstring,
+                    optional=attr.optional,
+                    default=attr.default,
+                )
+
+
+class DictSchemaTableDirective(SchemaTableDirective):
+    def _add_table_contents(self, klass):
+        # Extract schema
+        schema = xarray_dataclass_to_dict_schema(klass)
+
+        # Add attributes
+        if schema.attributes:
+            self._add_section("Fields:")
+            for attr in schema.attributes:
+                self._add_row(
+                    attr.name,
+                    types=[f"{attr.typ.__name__}"],
+                    optional=attr.optional,
+                    descr=attr.docstring,
+                    default=attr.default,
+                )
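`format_literals` and `format_attr_model_text` rely purely on `typing` introspection: `Literal` options become individual literal nodes, a `list` of `Literal`s becomes one compound value, and a `Union` merges the alternatives. A standalone sketch of that introspection which returns plain strings instead of docutils nodes, so it runs without Sphinx:
```python
# Standalone sketch of the typing introspection used by format_literals above;
# plain strings stand in for docutils literal nodes.
import typing
from typing import Literal, Union

def literal_options(typ):
    if typing.get_origin(typ) is Union:      # a | b | c: recurse and merge
        return [s for arg in typing.get_args(typ) for s in literal_options(arg)]
    if typing.get_origin(typ) is Literal:    # Literal["a", "b"]: one option per value
        return [repr(v) for v in typing.get_args(typ)]
    if typing.get_origin(typ) is list:       # list[Literal["a"], ...]: compound value
        values = [repr(typing.get_args(arg)[0]) for arg in typing.get_args(typ)]
        return ["[" + ", ".join(values) + "]"]
    raise ValueError(f"Must be either a type or a literal: {typ}")

# Union of lists of literals, as used for quantity/measure units like ["rad","rad"]:
print(literal_options(
    Union[list[Literal["m"], Literal["m"]], list[Literal["rad"], Literal["rad"]]]
))
# -> ["['m', 'm']", "['rad', 'rad']"]
```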
{xradio-0.0.40.dist-info → xradio-0.0.42.dist-info}/METADATA
CHANGED

@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: xradio
-Version: 0.0.40
+Version: 0.0.42
 Summary: Xarray Radio Astronomy Data IO
 Author-email: Jan-Willem Steeb <jsteeb@nrao.edu>
 License: BSD 3-Clause License

@@ -43,18 +43,14 @@ License-File: LICENSE.txt
 Requires-Dist: astropy
 Requires-Dist: dask
 Requires-Dist: distributed
-Requires-Dist: graphviper
 Requires-Dist: toolviper
-Requires-Dist: matplotlib
 Requires-Dist: numba >=0.57.0
 Requires-Dist: numpy
-Requires-Dist: prettytable
 Requires-Dist: pytest
 Requires-Dist: pytest-cov
 Requires-Dist: pytest-html
 Requires-Dist: s3fs
 Requires-Dist: scipy
-Requires-Dist: tqdm
 Requires-Dist: xarray
 Requires-Dist: zarr
 Requires-Dist: pyarrow

@@ -62,11 +58,6 @@ Requires-Dist: typeguard
 Requires-Dist: typing-extensions ; python_version < "3.10"
 Requires-Dist: python-casacore >=3.6.1 ; sys_platform != "darwin"
 Provides-Extra: docs
-Requires-Dist: jupyterlab ; extra == 'docs'
-Requires-Dist: ipykernel ; extra == 'docs'
-Requires-Dist: ipympl ; extra == 'docs'
-Requires-Dist: ipython ; extra == 'docs'
-Requires-Dist: jupyter-client ; extra == 'docs'
 Requires-Dist: nbsphinx ; extra == 'docs'
 Requires-Dist: recommonmark ; extra == 'docs'
 Requires-Dist: scanpydoc ; extra == 'docs'

@@ -75,22 +66,33 @@ Requires-Dist: sphinx-autosummary-accessors ; extra == 'docs'
 Requires-Dist: sphinx-rtd-theme ; extra == 'docs'
 Requires-Dist: twine ; extra == 'docs'
 Requires-Dist: pandoc ; extra == 'docs'
+Provides-Extra: interactive
+Requires-Dist: matplotlib ; extra == 'interactive'
+Requires-Dist: prettytable ; extra == 'interactive'
+Requires-Dist: jupyterlab ; extra == 'interactive'
+Requires-Dist: ipykernel ; extra == 'interactive'
+Requires-Dist: ipympl ; extra == 'interactive'
+Requires-Dist: ipython ; extra == 'interactive'
+Requires-Dist: jupyter-client ; extra == 'interactive'
 
 # xradio
 Xarray Radio Astronomy Data IO is still in development.
 
-[](https://www.python.org/downloads/release/python-380/)
+[](https://www.python.org/downloads/release/python-380/)
 
 # Installing
-It is recommended to use the [
+It is recommended to use the conda environment manager from [miniforge](https://github.com/conda-forge/miniforge) to create a clean, self-contained runtime where XRADIO and all its dependencies can be installed:
 ```sh
-conda create --name xradio python=3.
+conda create --name xradio python=3.12 --no-default-packages
 conda activate xradio
-
 ```
 > 📝 On macOS it is required to pre-install `python-casacore` using `conda install -c conda-forge python-casacore`.
 
-
+XRADIO can now be installed using:
 ```sh
 pip install xradio
 ```
+This will also install the minimal dependencies for XRADIO. To install the minimal dependencies and the interactive components (JupyterLab) use:
+```sh
+pip install "xradio[interactive]"
+```