vortex-nwp 2.0.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- vortex/__init__.py +159 -0
- vortex/algo/__init__.py +13 -0
- vortex/algo/components.py +2462 -0
- vortex/algo/mpitools.py +1953 -0
- vortex/algo/mpitools_templates/__init__.py +1 -0
- vortex/algo/mpitools_templates/envelope_wrapper_default.tpl +27 -0
- vortex/algo/mpitools_templates/envelope_wrapper_mpiauto.tpl +29 -0
- vortex/algo/mpitools_templates/wrapstd_wrapper_default.tpl +18 -0
- vortex/algo/serversynctools.py +171 -0
- vortex/config.py +112 -0
- vortex/data/__init__.py +19 -0
- vortex/data/abstractstores.py +1510 -0
- vortex/data/containers.py +835 -0
- vortex/data/contents.py +622 -0
- vortex/data/executables.py +275 -0
- vortex/data/flow.py +119 -0
- vortex/data/geometries.ini +2689 -0
- vortex/data/geometries.py +799 -0
- vortex/data/handlers.py +1230 -0
- vortex/data/outflow.py +67 -0
- vortex/data/providers.py +487 -0
- vortex/data/resources.py +207 -0
- vortex/data/stores.py +1390 -0
- vortex/data/sync_templates/__init__.py +0 -0
- vortex/gloves.py +309 -0
- vortex/layout/__init__.py +20 -0
- vortex/layout/contexts.py +577 -0
- vortex/layout/dataflow.py +1220 -0
- vortex/layout/monitor.py +969 -0
- vortex/nwp/__init__.py +14 -0
- vortex/nwp/algo/__init__.py +21 -0
- vortex/nwp/algo/assim.py +537 -0
- vortex/nwp/algo/clim.py +1086 -0
- vortex/nwp/algo/coupling.py +831 -0
- vortex/nwp/algo/eda.py +840 -0
- vortex/nwp/algo/eps.py +785 -0
- vortex/nwp/algo/forecasts.py +886 -0
- vortex/nwp/algo/fpserver.py +1303 -0
- vortex/nwp/algo/ifsnaming.py +463 -0
- vortex/nwp/algo/ifsroot.py +404 -0
- vortex/nwp/algo/monitoring.py +263 -0
- vortex/nwp/algo/mpitools.py +694 -0
- vortex/nwp/algo/odbtools.py +1258 -0
- vortex/nwp/algo/oopsroot.py +916 -0
- vortex/nwp/algo/oopstests.py +220 -0
- vortex/nwp/algo/request.py +660 -0
- vortex/nwp/algo/stdpost.py +1641 -0
- vortex/nwp/data/__init__.py +30 -0
- vortex/nwp/data/assim.py +380 -0
- vortex/nwp/data/boundaries.py +314 -0
- vortex/nwp/data/climfiles.py +521 -0
- vortex/nwp/data/configfiles.py +153 -0
- vortex/nwp/data/consts.py +954 -0
- vortex/nwp/data/ctpini.py +149 -0
- vortex/nwp/data/diagnostics.py +209 -0
- vortex/nwp/data/eda.py +147 -0
- vortex/nwp/data/eps.py +432 -0
- vortex/nwp/data/executables.py +1045 -0
- vortex/nwp/data/fields.py +111 -0
- vortex/nwp/data/gridfiles.py +380 -0
- vortex/nwp/data/logs.py +584 -0
- vortex/nwp/data/modelstates.py +363 -0
- vortex/nwp/data/monitoring.py +193 -0
- vortex/nwp/data/namelists.py +696 -0
- vortex/nwp/data/obs.py +840 -0
- vortex/nwp/data/oopsexec.py +74 -0
- vortex/nwp/data/providers.py +207 -0
- vortex/nwp/data/query.py +206 -0
- vortex/nwp/data/stores.py +160 -0
- vortex/nwp/data/surfex.py +337 -0
- vortex/nwp/syntax/__init__.py +9 -0
- vortex/nwp/syntax/stdattrs.py +437 -0
- vortex/nwp/tools/__init__.py +10 -0
- vortex/nwp/tools/addons.py +40 -0
- vortex/nwp/tools/agt.py +67 -0
- vortex/nwp/tools/bdap.py +59 -0
- vortex/nwp/tools/bdcp.py +41 -0
- vortex/nwp/tools/bdm.py +24 -0
- vortex/nwp/tools/bdmp.py +54 -0
- vortex/nwp/tools/conftools.py +1661 -0
- vortex/nwp/tools/drhook.py +66 -0
- vortex/nwp/tools/grib.py +294 -0
- vortex/nwp/tools/gribdiff.py +104 -0
- vortex/nwp/tools/ifstools.py +203 -0
- vortex/nwp/tools/igastuff.py +273 -0
- vortex/nwp/tools/mars.py +68 -0
- vortex/nwp/tools/odb.py +657 -0
- vortex/nwp/tools/partitioning.py +258 -0
- vortex/nwp/tools/satrad.py +71 -0
- vortex/nwp/util/__init__.py +6 -0
- vortex/nwp/util/async.py +212 -0
- vortex/nwp/util/beacon.py +40 -0
- vortex/nwp/util/diffpygram.py +447 -0
- vortex/nwp/util/ens.py +279 -0
- vortex/nwp/util/hooks.py +139 -0
- vortex/nwp/util/taskdeco.py +85 -0
- vortex/nwp/util/usepygram.py +697 -0
- vortex/nwp/util/usetnt.py +101 -0
- vortex/proxy.py +6 -0
- vortex/sessions.py +374 -0
- vortex/syntax/__init__.py +9 -0
- vortex/syntax/stdattrs.py +867 -0
- vortex/syntax/stddeco.py +185 -0
- vortex/toolbox.py +1117 -0
- vortex/tools/__init__.py +20 -0
- vortex/tools/actions.py +523 -0
- vortex/tools/addons.py +316 -0
- vortex/tools/arm.py +96 -0
- vortex/tools/compression.py +325 -0
- vortex/tools/date.py +27 -0
- vortex/tools/ddhpack.py +10 -0
- vortex/tools/delayedactions.py +782 -0
- vortex/tools/env.py +541 -0
- vortex/tools/folder.py +834 -0
- vortex/tools/grib.py +738 -0
- vortex/tools/lfi.py +953 -0
- vortex/tools/listings.py +423 -0
- vortex/tools/names.py +637 -0
- vortex/tools/net.py +2124 -0
- vortex/tools/odb.py +10 -0
- vortex/tools/parallelism.py +368 -0
- vortex/tools/prestaging.py +210 -0
- vortex/tools/rawfiles.py +10 -0
- vortex/tools/schedulers.py +480 -0
- vortex/tools/services.py +940 -0
- vortex/tools/storage.py +996 -0
- vortex/tools/surfex.py +61 -0
- vortex/tools/systems.py +3976 -0
- vortex/tools/targets.py +440 -0
- vortex/util/__init__.py +9 -0
- vortex/util/config.py +1122 -0
- vortex/util/empty.py +24 -0
- vortex/util/helpers.py +216 -0
- vortex/util/introspection.py +69 -0
- vortex/util/iosponge.py +80 -0
- vortex/util/roles.py +49 -0
- vortex/util/storefunctions.py +129 -0
- vortex/util/structs.py +26 -0
- vortex/util/worker.py +162 -0
- vortex_nwp-2.0.0.dist-info/METADATA +67 -0
- vortex_nwp-2.0.0.dist-info/RECORD +144 -0
- vortex_nwp-2.0.0.dist-info/WHEEL +5 -0
- vortex_nwp-2.0.0.dist-info/licenses/LICENSE +517 -0
- vortex_nwp-2.0.0.dist-info/top_level.txt +1 -0
vortex/util/empty.py
ADDED
|
@@ -0,0 +1,24 @@
|
|
|
1
|
+
"""
|
|
2
|
+
An empty module to be filled with some kind of blackholes objects.
|
|
3
|
+
"""
|
|
4
|
+
|
|
5
|
+
from bronx.fancies import loggers
|
|
6
|
+
|
|
7
|
+
#: No automatic export
|
|
8
|
+
__all__ = []
|
|
9
|
+
|
|
10
|
+
logger = loggers.getLogger(__name__)
|
|
11
|
+
|
|
12
|
+
|
|
13
|
+
class DataConst:
    """Constants stored as raw attributes.

    Every keyword argument given at construction time becomes a plain
    attribute of the instance.
    """

    def __init__(self, **kw):
        self.__dict__.update(kw)
        logger.debug("DataConst init %s", self)

    def __str__(self):
        # Append the sorted attribute names to the default representation
        attrnames = sorted(self.__dict__.keys())
        return "{:s} : {:s}".format(super().__str__(), str(attrnames))

    def __contains__(self, item):
        # "name in const" tells whether the constant *name* is defined
        return item in self.__dict__
|
vortex/util/helpers.py
ADDED
|
@@ -0,0 +1,216 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Some convenient functions that may simplify scripts
|
|
3
|
+
"""
|
|
4
|
+
|
|
5
|
+
from collections import defaultdict
|
|
6
|
+
|
|
7
|
+
from bronx.compat import random
|
|
8
|
+
from bronx.fancies import loggers
|
|
9
|
+
from bronx.stdtypes.date import Date
|
|
10
|
+
import footprints as fp
|
|
11
|
+
|
|
12
|
+
from vortex.data.handlers import Handler
|
|
13
|
+
from vortex.layout.dataflow import Section
|
|
14
|
+
from vortex import sessions
|
|
15
|
+
|
|
16
|
+
logger = loggers.getLogger(__name__)
|
|
17
|
+
|
|
18
|
+
|
|
19
|
+
class InputCheckerError(Exception):
    """Exception raised when the Input checking process fails."""
|
|
23
|
+
|
|
24
|
+
|
|
25
|
+
def generic_input_checker(grouping_keys, min_items, *rhandlers, **kwargs):
    """
    Check which input resources are present.

    First, the resource handlers (*rhandlers* attribute) are split
    into groups based on the values of their properties (only the properties
    specified in the *grouping_keys* attribute are considered).

    Then, for each group, the **check** method is called upon the resource
    handlers. The group description is returned only if the **check** call
    succeed for all the members of the group.

    If the number of groups successfully checked is lower than *min_items*,
    an :class:`InputCheckerError` exception is raised.

    :param grouping_keys: sequence of attribute names used to group the handlers
    :param min_items: minimum number of successfully checked groups
    :param rhandlers: resource handlers (or lists/tuples of resource handlers)
    :param kwargs: ``mandatory`` (handlers that must all check successfully)
        and ``fakecheck`` (if true, checks are assumed to succeed) are
        consumed; any other keyword is silently ignored
    :return: a tuple ``(checked_groups, candidate_groups)`` of FPList objects
    :raises ValueError: if no resource handler at all is provided
    :raises InputCheckerError: if a mandatory resource is missing or if
        fewer than *min_items* groups pass their checks
    """

    if len(rhandlers) == 0:
        raise ValueError("At least one resource handler have to be provided")
    # Just in case min_items is not an int...
    min_items = int(min_items)

    # Create a flat ResourceHandlers list (rhandlers may consists of lists)
    flat_rhlist = []
    flat_rhmandatory = []
    for inlist, outlist in (
        (rhandlers, flat_rhlist),
        (kwargs.pop("mandatory", []), flat_rhmandatory),
    ):
        for rh in inlist:
            if isinstance(rh, list) or isinstance(rh, tuple):
                outlist.extend(rh)
            else:
                outlist.append(rh)

    # Extract the group informations for each of the resource handlers
    rhgroups = defaultdict(list)
    for rh in flat_rhlist:
        keylist = list()
        for key in grouping_keys:
            # fatal=False: a missing key yields None instead of raising
            value = rh.wide_key_lookup(key, exports=True, fatal=False)
            keylist.append(value)
        rhgroups[tuple(keylist)].append(rh)

    # All the groups found, regardless of any check (None values dropped)
    candidateslist = [
        fp.stdtypes.FPDict(
            {k: v for k, v in zip(grouping_keys, group) if v is not None}
        )
        for group in rhgroups.keys()
    ]

    # Activate FTP connections pooling (for enhanced performances)
    t = sessions.current()
    with t.sh.ftppool():
        # Check mandatory stuff
        mychecks = [(rh, rh.check()) for rh in flat_rhmandatory]
        if not all([acheck[1] for acheck in mychecks]):
            for rh in [acheck[0] for acheck in mychecks if not acheck[1]]:
                logger.error(" Missing location: %s", str(rh.locate()))
            raise InputCheckerError(
                "Some of the mandatory resources are missing."
            )

        # Check call for non-mandatory stuff
        outputlist = list()
        # Is the check real or a delusion ?
        fakecheck = kwargs.pop("fakecheck", False)
        # The keys are sorted so that results remains reproducible
        for grouping_values in sorted(rhgroups.keys()):
            mychecks = [
                (rh, fakecheck or rh.check())
                for rh in rhgroups[grouping_values]
            ]
            groupid = fp.stdtypes.FPDict(
                {
                    k: v
                    for k, v in zip(grouping_keys, grouping_values)
                    if v is not None
                }
            )
            if all([acheck[1] for acheck in mychecks]):
                outputlist.append(groupid)
                logger.info(
                    "Group (%s): All the input files are accounted for.",
                    str(groupid),
                )
            else:
                logger.warning(
                    "Group (%s): Discarded because some of the input files are missing (see below).",
                    str(groupid),
                )
                for rh in [acheck[0] for acheck in mychecks if not acheck[1]]:
                    logger.warning(" Missing location: %s", str(rh.locate()))

    # Enforce min_items
    if len(outputlist) < min_items:
        raise InputCheckerError(
            "The number of input groups is too small "
            + "({:d} < {:d})".format(len(outputlist), min_items)
        )

    return fp.stdtypes.FPList(outputlist), fp.stdtypes.FPList(candidateslist)
|
|
126
|
+
|
|
127
|
+
|
|
128
|
+
def members_input_checker(min_items, *rhandlers, **kwargs):
    """
    Shortcut for :func:`generic_input_checker`: only the member number is
    considered and the return value is a sorted list of members.
    """
    checked_groups, _ = generic_input_checker(
        ("member",), min_items, *rhandlers, **kwargs
    )
    members = sorted(desc["member"] for desc in checked_groups)
    return fp.stdtypes.FPList(members)
|
|
140
|
+
|
|
141
|
+
|
|
142
|
+
def colorfull_input_checker(min_items, *rhandlers, **kwargs):
    """
    Shortcut for :func:`generic_input_checker`: returns a list of
    dictionaries that describe the available data.
    """
    grouping = ("vapp", "vconf", "experiment", "cutoff", "date", "member")
    return generic_input_checker(grouping, min_items, *rhandlers, **kwargs)
|
|
153
|
+
|
|
154
|
+
|
|
155
|
+
def merge_contents(*kargs):
    """Automatically merge several DataContents.

    Example:
    .. code-block:: python

        mergedcontent = merge_contents(content1, content2, content3)
        # With a list
        mergedcontent = merge_contents([content1, content2, content3])
        # With a list of ResourceHandlers (e.g. as returned by toolbox.input)
        mergedcontent = merge_contents([rh1, rh2, rh3])
        # With a list of Sections (e.g. as returned by effective_inputs)
        mergedcontent = merge_contents([section1, section2, section3])

    :return: a new DataContent object (of the same class as the first input)
    :raises ValueError: if no content at all is provided
    """
    # Expand list or tuple elements
    ctlist = list()
    for elt in kargs:
        if isinstance(elt, (list, tuple)):
            ctlist.extend(elt)
        else:
            ctlist.append(elt)
    if not ctlist:
        # Fail early with a clear message (previously this fell through to
        # an obscure IndexError on ctlist[0])
        raise ValueError("At least one DataContent object must be provided")
    # kargs may be a list of resource handlers (as returned by the toolbox)
    if all(isinstance(obj, Handler) for obj in ctlist):
        ctlist = [obj.contents for obj in ctlist]
    # kargs may be a list of sections
    elif all(isinstance(obj, Section) for obj in ctlist):
        ctlist = [obj.rh.contents for obj in ctlist]
    # Take the first content as a model for the new object
    newcontent = ctlist[0].__class__()
    newcontent.merge(*ctlist)
    return newcontent
|
|
187
|
+
|
|
188
|
+
|
|
189
|
+
def mix_list(list_elements, date=None, member=None):
    """Return a shuffled copy of *list_elements*, using a deterministic seed
    when *member* and/or *date* are present.

    When a seed can be derived, repeated calls with the same arguments yield
    the same ordering; otherwise the shuffle is left non-deterministic.

    :param list_elements: elements to shuffle (the input list is NOT modified)
    :param date: anything accepted by :class:`bronx.stdtypes.date.Date`, or None
    :param member: an integer-convertible member number, or None
    :return: a new, shuffled, list
    """
    dateinfo = date if date is None else Date(date)
    memberinfo = member if member is None else int(member)
    rgen = random.Random()
    if (dateinfo is not None) or (memberinfo is not None):
        if dateinfo is not None:
            seed = dateinfo.epoch * 100
        else:
            seed = 9999999
        if memberinfo:
            # NOTE: member=0 is falsy and therefore leaves the seed untouched
            seed = seed // memberinfo
        logger.debug("The random seed is %s.", seed)
        rgen.seed(seed)
    else:
        logger.info("The random seed not initialised")
    logger.debug(
        "The list of elements is %s.",
        " ".join([str(x) for x in list_elements]),
    )
    # Work on a sorted copy: the previous implementation sorted and shuffled
    # *list_elements* in place, silently mutating the caller's list.
    result_list_elements = sorted(list_elements)
    rgen.shuffle(result_list_elements)
    logger.debug(
        "The mixed list of elements is %s.",
        " ".join([str(x) for x in result_list_elements]),
    )
    return result_list_elements
|
|
@@ -0,0 +1,69 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Some convenient functions to explore the source code or its documentation.
|
|
3
|
+
"""
|
|
4
|
+
|
|
5
|
+
import re
|
|
6
|
+
import inspect
|
|
7
|
+
|
|
8
|
+
from bronx.fancies import loggers
|
|
9
|
+
|
|
10
|
+
from vortex import sessions
|
|
11
|
+
|
|
12
|
+
#: No automatic export
|
|
13
|
+
__all__ = []
|
|
14
|
+
|
|
15
|
+
logger = loggers.getLogger(__name__)
|
|
16
|
+
|
|
17
|
+
|
|
18
|
+
class Sherlock:
    """Centralized interface to introspection functions."""

    def __init__(self, **kw):
        """
        :param kw: ``ticket`` may be provided (defaults to the current
            session); any other keyword is stored as a raw attribute.
        """
        self.verbose = False
        self.ticket = kw.pop("ticket", sessions.current())
        self.glove = self.ticket.glove
        self.__dict__.update(kw)
        logger.debug("Sherlock init %s", self)

    def rstfile(self, modpath):
        """Return the sphinx documentation file associated with the module
        reference or module path given."""
        if not isinstance(modpath, str):
            modpath = modpath.__file__
        relpath = modpath
        # Strip any known source-installation prefix
        for prefix in self.glove.sitesrc:
            relpath = re.sub(prefix, "", relpath)
        # Drop the .py / .pyc extension
        relpath = re.sub(r"\.pyc?", "", relpath)
        parts = relpath.split("/")
        # Package __init__ files are documented under the package name
        if parts[-1] == "__init__":
            parts[-1] = parts[-2]
        parts[-1] += ".rst"
        # The documentation tree lives below a top-level "library" directory
        parts[1:1] = ["library"]
        return self.glove.sitedoc + "/".join(parts)

    def rstshort(self, filename):
        """Return relative path name of ``filename`` according to :meth:`siteroot`."""
        return re.sub(self.glove.siteroot, "", filename)[1:]

    def getlocalmembers(self, obj, topmodule=None):
        """Return members of the module ``obj`` which are defined in the
        source file of the module (``topmodule`` defaults to ``obj``)."""
        found = dict()
        if topmodule is None:
            topmodule = obj
        # Map a possible ".pyc" path back to the ".py" source file
        modfile = topmodule.__file__.rstrip("c")
        for name, member in inspect.getmembers(obj):
            if not (
                inspect.isclass(member)
                or inspect.isfunction(member)
                or inspect.ismethod(member)
            ):
                continue
            try:
                located = inspect.getsourcefile(member)
            except TypeError:
                # Builtins / C-implemented objects have no source file
                continue
            if located == modfile:
                if self.verbose:
                    print(name, member)
                found[name] = member
        return found
|
vortex/util/iosponge.py
ADDED
|
@@ -0,0 +1,80 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Provide a File-Like object that reads in the first N bytes in order to count
|
|
3
|
+
them precisely.
|
|
4
|
+
"""
|
|
5
|
+
|
|
6
|
+
import io
|
|
7
|
+
|
|
8
|
+
#: No automatic export
|
|
9
|
+
__all__ = []
|
|
10
|
+
|
|
11
|
+
IOSPONGE_DEFAULT_SIZECHECK = 33 * 1024 * 1024  # i.e. 33 MiB


class IoSponge(io.BufferedIOBase):
    """Buffer the first bytes in order to compute an accurate size for the
    underlying stream.

    This class just acts as a buffer. It looks like a file object and should
    be used as such.

    If the size of the underlying stream is <= *size_check* bytes : the **size**
    property will return an exact estimate of the file-like object size. Passed
    that limit the maximum of *size_check* and *guessed_size* is returned.
    """

    def __init__(
        self, rawio, size_check=IOSPONGE_DEFAULT_SIZECHECK, guessed_size=0
    ):
        """
        :param file rawio: Any kind of file-like object
        :param int size_check: The first size_check bytes will be buffered in
            order to be properly accounted for.
        :param int guessed_size: An estimate of the file-like object size (in
            bytes)
        """
        self._rawio = rawio
        self._size_check = size_check
        self._guessed_size = int(guessed_size)
        # Pre-read up to size_check bytes; a short read means EOF was reached
        self._first_bytes = self._rawio.read(size_check)
        self._seek = 0

    @property
    def size(self):
        """The (exact or estimated) size of the underlying file-like object."""
        buffered = len(self._first_bytes)
        if buffered < self._size_check:
            # The whole stream fitted in the buffer: the size is exact
            return buffered
        return max(buffered, self._guessed_size)

    def tell(self):
        """The amount of bytes read in this stream."""
        return self._seek

    def _generic_read(self, size, raw_read_cb):
        # Serve from the pre-read buffer first, then fall back on the raw
        # stream through raw_read_cb.
        pos = self._seek
        chunk = b""
        if pos < len(self._first_bytes):
            chunk = (
                self._first_bytes[pos:]
                if size is None
                else self._first_bytes[pos:pos + size]
            )
        if size is None:
            chunk += raw_read_cb(None)
        elif len(chunk) < size:
            chunk += raw_read_cb(size - len(chunk))
        self._seek += len(chunk)
        return chunk

    def read(self, size=None):
        """Read *size* bytes from the file."""
        return self._generic_read(size, self._rawio.read)

    def read1(self, size=None):
        """Read *size* bytes from the file (at once)."""
        return self._generic_read(size, self._rawio.read1)

    def readable(self):
        """Is this file-like object readable ?"""
        return True
|
vortex/util/roles.py
ADDED
|
@@ -0,0 +1,49 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Factory for named roles.
|
|
3
|
+
"""
|
|
4
|
+
|
|
5
|
+
#: No automatic export
|
|
6
|
+
__all__ = []
|
|
7
|
+
|
|
8
|
+
_activetag = "default"
|
|
9
|
+
|
|
10
|
+
|
|
11
|
+
def stdfactoryrole(role):
    """Standard processing for role names: capitalise the first letter of
    each whitespace-separated word and join them (whitespace removed)."""
    words = role.split()
    return "".join(w[0].upper() + w[1:] for w in words)
|
|
14
|
+
|
|
15
|
+
|
|
16
|
+
def switchfactory(tag="default"):
    """Switch the current active factory to the existing one identified through its ``tag``."""
    global _activetag
    # Unknown tags are silently ignored
    if tag in _rolesgateway:
        _activetag = tag
|
|
21
|
+
|
|
22
|
+
|
|
23
|
+
def setfactoryrole(factory=None, tag=None):
    """
    Defines the specified ``factory`` function as the current processing role
    translator associated with ``tag`` (defaults to the active tag).
    """
    # Fall back on the currently active tag when none is given
    effective_tag = tag or _activetag
    if factory and effective_tag:
        _rolesgateway[effective_tag] = factory
|
|
33
|
+
|
|
34
|
+
|
|
35
|
+
def setrole(role, tag=None):
    """
    Entry point for handling strings ``role``.
    Returns the processed string according to the current active factory name
    or using the one associated with ``tag``.
    """
    # Empty/None roles translate to None
    if not role:
        return None
    lookup_tag = tag or _activetag
    return _rolesgateway[lookup_tag](role)
|
|
47
|
+
|
|
48
|
+
|
|
49
|
+
_rolesgateway = dict(default=stdfactoryrole)
|
|
@@ -0,0 +1,129 @@
|
|
|
1
|
+
"""
|
|
2
|
+
General purpose functions that can be used in conjunction with the
|
|
3
|
+
:class:`~vortex.data.stores.FunctionStore`.
|
|
4
|
+
"""
|
|
5
|
+
|
|
6
|
+
import io
|
|
7
|
+
|
|
8
|
+
from footprints import proxy as fpx
|
|
9
|
+
|
|
10
|
+
from vortex.data.stores import FunctionStoreCallbackError
|
|
11
|
+
from vortex.tools.env import vartrue
|
|
12
|
+
from vortex import sessions
|
|
13
|
+
from . import helpers
|
|
14
|
+
|
|
15
|
+
#: No automatic export
|
|
16
|
+
__all__ = []
|
|
17
|
+
|
|
18
|
+
|
|
19
|
+
def mergecontents(options):
    """
    Merge the DataContent's of the Section objects designated by the
    *role* option.

    An additional *sort* option may be provided if the resulting merged file
    like object needs to be sorted.

    :param options: The only argument is a dictionary that contains all the options
        passed to the store plus anything from the query part of the URI.

    :return: Content of the desired local file/container

    :rtype: A file like object

    :raises FunctionStoreCallbackError: if no *role* option is given, or if
        no effective input matches the requested role(s)
    """
    todo = options.get("role", None)
    # NOTE(review): .pop() consumes the last value of the "sort" option list
    # (it mutates options["sort"] when present) -- presumably intended since
    # URI query options are lists of strings; confirm against callers.
    sort = vartrue.match(
        options.get(
            "sort",
            [
                "false",
            ],
        ).pop()
    )
    if todo is not None:
        ctx = sessions.current().context
        # Collect the effective input sections matching any of the roles
        sections = list()
        for a_role in todo:
            sections.extend(ctx.sequence.effective_inputs(role=a_role))
        if len(sections) == 0:
            raise FunctionStoreCallbackError(
                "Nothing to store: the effective inputs sequence is void."
            )
        newcontent = helpers.merge_contents(sections)
        if sort:
            newcontent.sort()
    else:
        raise FunctionStoreCallbackError(
            "At least one *role* option must be provided"
        )
    # Create a Virtual container and dump the new content inside it
    virtualcont = fpx.container(incore=True)
    newcontent.rewrite(virtualcont)
    virtualcont.rewind()
    # Force the new container to be in bytes mode
    if virtualcont.actualmode and "b" not in virtualcont.actualmode:
        virtualcont_b = fpx.container(incore=True, mode="w+b")
        virtualcont_b.write(virtualcont.read().encode(encoding="utf-8"))
        virtualcont = virtualcont_b
    return virtualcont
|
|
69
|
+
|
|
70
|
+
|
|
71
|
+
def dumpinputs(options):
    """
    Dump the content of the sequence's effective inputs into a JSON file

    :note: the effective=False option can be provided. If so, all input sections
        are dumped.

    :return: a file like object

    :raises FunctionStoreCallbackError: if the selected input sequence is empty
    """
    t = sessions.current()
    ctx = t.context
    # NOTE(review): .pop() consumes the last value of the "effective" option
    # list (it mutates options["effective"] when present).
    if vartrue.match(
        options.get(
            "effective",
            [
                "true",
            ],
        ).pop()
    ):
        sequence = ctx.sequence.effective_inputs()
    else:
        sequence = list(ctx.sequence.inputs())
    if len(sequence) == 0:
        raise FunctionStoreCallbackError(
            "Nothing to store: the effective inputs sequence is void."
        )
    fileout = io.StringIO()
    t.sh.json_dump([s.as_dict() for s in sequence], fileout, indent=4)
    return fileout
|
|
100
|
+
|
|
101
|
+
|
|
102
|
+
def defaultinput(options):
    """
    Dump the content of a fake section into a JSON file.

    Every option whose key starts with ``d_input_`` contributes (with that
    prefix stripped) to the dumped dictionary.

    :return: a file like object
    """
    prefix = "d_input_"

    def _exportable(value):
        # Prefer footprint_export, then export_dict, else the raw value
        if hasattr(value, "footprint_export"):
            return value.footprint_export()
        if hasattr(value, "export_dict"):
            return value.export_dict()
        return value

    content = {
        key[len(prefix):]: _exportable(value)
        for key, value in options.items()
        if isinstance(key, str) and key.startswith(prefix)
    }
    t = sessions.current()
    fileout = io.StringIO()
    t.sh.json_dump([content], fileout, indent=4)
    return fileout
|
vortex/util/structs.py
ADDED
|
@@ -0,0 +1,26 @@
|
|
|
1
|
+
"""
|
|
2
|
+
This module defines common base classes for miscellaneous purposes.
|
|
3
|
+
"""
|
|
4
|
+
|
|
5
|
+
import json
|
|
6
|
+
|
|
7
|
+
from bronx.fancies import loggers
|
|
8
|
+
|
|
9
|
+
#: No automatic export
|
|
10
|
+
__all__ = []
|
|
11
|
+
|
|
12
|
+
logger = loggers.getLogger(__name__)
|
|
13
|
+
|
|
14
|
+
|
|
15
|
+
class ShellEncoder(json.JSONEncoder):
    """Encoder for :mod:`json` dumps method."""

    def default(self, obj):
        """Serialise via ``export_dict``, then ``footprint_export``, then the
        instance ``__dict__``; otherwise defer to the base encoder."""
        for exporter in ("export_dict", "footprint_export"):
            if hasattr(obj, exporter):
                return getattr(obj, exporter)()
        if hasattr(obj, "__dict__"):
            return vars(obj)
        # Unknown object: let JSONEncoder raise the usual TypeError
        return super().default(obj)
|