vortex-nwp 2.0.0b1__py3-none-any.whl → 2.1.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- vortex/__init__.py +75 -47
- vortex/algo/__init__.py +3 -2
- vortex/algo/components.py +944 -618
- vortex/algo/mpitools.py +802 -497
- vortex/algo/mpitools_templates/__init__.py +1 -0
- vortex/algo/serversynctools.py +34 -33
- vortex/config.py +19 -22
- vortex/data/__init__.py +9 -3
- vortex/data/abstractstores.py +593 -655
- vortex/data/containers.py +217 -162
- vortex/data/contents.py +65 -39
- vortex/data/executables.py +93 -102
- vortex/data/flow.py +40 -34
- vortex/data/geometries.py +228 -132
- vortex/data/handlers.py +436 -227
- vortex/data/outflow.py +15 -15
- vortex/data/providers.py +185 -163
- vortex/data/resources.py +48 -42
- vortex/data/stores.py +540 -417
- vortex/data/sync_templates/__init__.py +0 -0
- vortex/gloves.py +114 -87
- vortex/layout/__init__.py +1 -8
- vortex/layout/contexts.py +150 -84
- vortex/layout/dataflow.py +353 -202
- vortex/layout/monitor.py +264 -128
- vortex/nwp/__init__.py +5 -2
- vortex/nwp/algo/__init__.py +14 -5
- vortex/nwp/algo/assim.py +205 -151
- vortex/nwp/algo/clim.py +683 -517
- vortex/nwp/algo/coupling.py +447 -225
- vortex/nwp/algo/eda.py +437 -229
- vortex/nwp/algo/eps.py +403 -231
- vortex/nwp/algo/forecasts.py +416 -275
- vortex/nwp/algo/fpserver.py +683 -307
- vortex/nwp/algo/ifsnaming.py +205 -145
- vortex/nwp/algo/ifsroot.py +215 -122
- vortex/nwp/algo/monitoring.py +137 -76
- vortex/nwp/algo/mpitools.py +330 -190
- vortex/nwp/algo/odbtools.py +637 -353
- vortex/nwp/algo/oopsroot.py +454 -273
- vortex/nwp/algo/oopstests.py +90 -56
- vortex/nwp/algo/request.py +287 -206
- vortex/nwp/algo/stdpost.py +878 -522
- vortex/nwp/data/__init__.py +22 -4
- vortex/nwp/data/assim.py +125 -137
- vortex/nwp/data/boundaries.py +121 -68
- vortex/nwp/data/climfiles.py +193 -211
- vortex/nwp/data/configfiles.py +73 -69
- vortex/nwp/data/consts.py +426 -401
- vortex/nwp/data/ctpini.py +59 -43
- vortex/nwp/data/diagnostics.py +94 -66
- vortex/nwp/data/eda.py +50 -51
- vortex/nwp/data/eps.py +195 -146
- vortex/nwp/data/executables.py +440 -434
- vortex/nwp/data/fields.py +63 -48
- vortex/nwp/data/gridfiles.py +183 -111
- vortex/nwp/data/logs.py +250 -217
- vortex/nwp/data/modelstates.py +180 -151
- vortex/nwp/data/monitoring.py +72 -99
- vortex/nwp/data/namelists.py +254 -202
- vortex/nwp/data/obs.py +400 -308
- vortex/nwp/data/oopsexec.py +22 -20
- vortex/nwp/data/providers.py +90 -65
- vortex/nwp/data/query.py +71 -82
- vortex/nwp/data/stores.py +49 -36
- vortex/nwp/data/surfex.py +136 -137
- vortex/nwp/syntax/__init__.py +1 -1
- vortex/nwp/syntax/stdattrs.py +173 -111
- vortex/nwp/tools/__init__.py +2 -2
- vortex/nwp/tools/addons.py +22 -17
- vortex/nwp/tools/agt.py +24 -12
- vortex/nwp/tools/bdap.py +16 -5
- vortex/nwp/tools/bdcp.py +4 -1
- vortex/nwp/tools/bdm.py +3 -0
- vortex/nwp/tools/bdmp.py +14 -9
- vortex/nwp/tools/conftools.py +728 -378
- vortex/nwp/tools/drhook.py +12 -8
- vortex/nwp/tools/grib.py +65 -39
- vortex/nwp/tools/gribdiff.py +22 -17
- vortex/nwp/tools/ifstools.py +82 -42
- vortex/nwp/tools/igastuff.py +167 -143
- vortex/nwp/tools/mars.py +14 -2
- vortex/nwp/tools/odb.py +234 -125
- vortex/nwp/tools/partitioning.py +61 -37
- vortex/nwp/tools/satrad.py +27 -12
- vortex/nwp/util/async.py +83 -55
- vortex/nwp/util/beacon.py +10 -10
- vortex/nwp/util/diffpygram.py +174 -86
- vortex/nwp/util/ens.py +144 -63
- vortex/nwp/util/hooks.py +30 -19
- vortex/nwp/util/taskdeco.py +28 -24
- vortex/nwp/util/usepygram.py +278 -172
- vortex/nwp/util/usetnt.py +31 -17
- vortex/sessions.py +72 -39
- vortex/syntax/__init__.py +1 -1
- vortex/syntax/stdattrs.py +410 -171
- vortex/syntax/stddeco.py +31 -22
- vortex/toolbox.py +327 -192
- vortex/tools/__init__.py +11 -2
- vortex/tools/actions.py +110 -121
- vortex/tools/addons.py +111 -92
- vortex/tools/arm.py +42 -22
- vortex/tools/compression.py +72 -69
- vortex/tools/date.py +11 -4
- vortex/tools/delayedactions.py +242 -132
- vortex/tools/env.py +75 -47
- vortex/tools/folder.py +342 -171
- vortex/tools/grib.py +341 -162
- vortex/tools/lfi.py +423 -216
- vortex/tools/listings.py +109 -40
- vortex/tools/names.py +218 -156
- vortex/tools/net.py +655 -299
- vortex/tools/parallelism.py +93 -61
- vortex/tools/prestaging.py +55 -31
- vortex/tools/schedulers.py +172 -105
- vortex/tools/services.py +403 -334
- vortex/tools/storage.py +293 -358
- vortex/tools/surfex.py +24 -24
- vortex/tools/systems.py +1234 -643
- vortex/tools/targets.py +156 -100
- vortex/util/__init__.py +1 -1
- vortex/util/config.py +378 -327
- vortex/util/empty.py +2 -2
- vortex/util/helpers.py +56 -24
- vortex/util/introspection.py +18 -12
- vortex/util/iosponge.py +8 -4
- vortex/util/roles.py +4 -6
- vortex/util/storefunctions.py +39 -13
- vortex/util/structs.py +3 -3
- vortex/util/worker.py +29 -17
- vortex_nwp-2.1.0.dist-info/METADATA +67 -0
- vortex_nwp-2.1.0.dist-info/RECORD +144 -0
- {vortex_nwp-2.0.0b1.dist-info → vortex_nwp-2.1.0.dist-info}/WHEEL +1 -1
- vortex/layout/appconf.py +0 -109
- vortex/layout/jobs.py +0 -1276
- vortex/layout/nodes.py +0 -1424
- vortex/layout/subjobs.py +0 -464
- vortex_nwp-2.0.0b1.dist-info/METADATA +0 -50
- vortex_nwp-2.0.0b1.dist-info/RECORD +0 -146
- {vortex_nwp-2.0.0b1.dist-info → vortex_nwp-2.1.0.dist-info/licenses}/LICENSE +0 -0
- {vortex_nwp-2.0.0b1.dist-info → vortex_nwp-2.1.0.dist-info}/top_level.txt +0 -0
vortex/nwp/algo/odbtools.py
CHANGED
@@ -32,14 +32,14 @@ logger = loggers.getLogger(__name__)
 
 
 class Raw2OdbExecutionError(ExecutionError):
-
     def __init__(self, odb_database):
         self.odb_database = odb_database
-        super().__init__(
+        super().__init__("Raw2odb execution failed.")
 
     def __str__(self):
-        return
-
+        return "Error while running bator for ODB database < {:s} >".format(
+            self.odb_database
+        )
 
 
 class Bateur(VortexWorkerBlindRun):
@@ -54,18 +54,18 @@ class Bateur(VortexWorkerBlindRun):
             info="Bateur: launches a single bator execution in a parallel context",
             attr=dict(
                 base=dict(
-                    info
+                    info="name of the odb database to process",
                 ),
                 workdir=dict(
-                    info
+                    info="working directory of the run",
                 ),
-                inputsize
-                    info
-                    type
-                    default
-                )
-            )
-        )
+                inputsize=dict(
+                    info="input files total size in bytes",
+                    type=int,
+                    default=0,
+                ),
+            ),
+        ),
     ]
 
     @property
@@ -73,135 +73,168 @@ class Bateur(VortexWorkerBlindRun):
         return self.memory * 1024 * 1024
 
     def vortex_task(self, **kwargs):
-        odb_drv = odb.OdbDriver(self.cycle,
-
-        self.system.cd('wkdir_' + self.base)
+        odb_drv = odb.OdbDriver(self.cycle, self.system, self.system.env)
+        self.system.cd("wkdir_" + self.base)
 
-        dbpath = self.system.path.join(self.workdir,
+        dbpath = self.system.path.join(self.workdir, "ECMA." + self.base)
         listpath = self.system.path.join(self.workdir, "listing." + self.base)
 
-        odb_drv.fix_db_path(
+        odb_drv.fix_db_path("ecma", dbpath)
 
-        real_time = -
+        real_time = -time.time()
         start_time = utcnow().isoformat()
         rdict = dict(rc=True)
         try:
             self.local_spawn(listpath)
         except ExecutionError:
-            rdict[
+            rdict["rc"] = Raw2OdbExecutionError(self.base)
         real_time += time.time()
 
         if self.system.memory_info is not None:
-            realMem = self.system.memory_info.children_maxRSS(
-            memRatio = (
+            realMem = self.system.memory_info.children_maxRSS("B")
+            memRatio = (
+                (realMem / float(self.memory_in_bytes))
+                if self.memory_in_bytes > 0
+                else None
+            )
         else:
             realMem = None
             memRatio = None
 
-        rdict[
-
-
-
-
-
-
-
-
-
-
-
+        rdict["synthesis"] = dict(
+            base=self.base,
+            inputsize=self.inputsize,
+            mem_expected=self.memory_in_bytes,
+            mem_real=realMem,
+            mem_ratio=memRatio,
+            time_expected=self.expected_time,
+            time_start=start_time,
+            time_real=real_time,
+            time_ratio=(
+                real_time / float(self.expected_time)
+                if self.expected_time > 0
+                else None
+            ),
+            sched_id=self.scheduler_ticket,
+        )
 
         # Save a copy of io assign map in the new database
         if self.system.path.isdir(dbpath):
-            self.system.cp(
-
+            self.system.cp(
+                self.system.path.join(
+                    self.workdir, "odb_db_template", "IOASSIGN"
+                ),
+                self.system.path.join(dbpath, "IOASSIGN"),
+            )
         else:
-            logger.warning(
+            logger.warning("ODB database not created: " + self.base)
 
         return rdict
 
 
-class Raw2ODBparallel(
+class Raw2ODBparallel(
+    ParaBlindRun, odb.OdbComponentDecoMixin, drhook.DrHookDecoMixin
+):
     """Convert raw observations files to ODB using taylorism."""
 
     _footprint = dict(
-        attr
-            kind
-                values
-                remap
-                    bufr2odb
-                    obsoul2odb
-                )
+        attr=dict(
+            kind=dict(
+                values=["raw2odb", "bufr2odb", "obsoul2odb"],
+                remap=dict(
+                    bufr2odb="raw2odb",
+                    obsoul2odb="raw2odb",
+                ),
             ),
-            engine
-                values
+            engine=dict(
+                values=[
+                    "blind",
+                    "parallel",
+                ]  # parallel -> for backward compatibility
             ),
-            ioassign
-                optional
+            ioassign=dict(
+                optional=False,
             ),
-            lamflag
-                info
-                type
-                optional
-                default
+            lamflag=dict(
+                info="Activate LAMFLAG (i.e work for Limited Area Model)",
+                type=bool,
+                optional=True,
+                default=False,
             ),
-            ontime
-                info
-                type
-                optional
-                default
+            ontime=dict(
+                info="Check observation's resources date vs own data attribute.",
+                type=bool,
+                optional=True,
+                default=True,
            ),
-            mapall
-                info
-                type
-                optional
-                default
+            mapall=dict(
+                info="All observation files must be accounted for in an obsmap file. ",
+                type=bool,
+                optional=True,
+                default=False,
             ),
-            maponly
-                info
-
-
-
-
+            maponly=dict(
+                info=(
+                    "Work only with observation files listed in the obsmap files. "
+                    + "(if False, obsmap entries may be automatically generated)."
+                ),
+                type=bool,
+                optional=True,
+                default=False,
+            ),
+            member=dict(
+                info=(
+                    "The current member's number "
+                    + "(may be omitted in deterministic configurations)."
+                ),
+                optional=True,
+                type=int,
             ),
-
-                info
-
-
-
+            dataid=dict(
+                info=(
+                    "The ODB databases created by Bator contain an identifier "
+                    + "that is specified as a command-line argument. This "
+                    + "switch tweaks the way the command-line argument is "
+                    + "generated."
+                ),
+                values=["empty", "hh"],
+                default="hh",
+                optional=True,
             ),
-
-                info
-
-
-
-
-
-                optional = True
+            ntasks=dict(
+                info=(
+                    "The maximum number of allowed concurrent task for "
+                    "parallel execution."
+                ),
+                default=1,
+                optional=True,
             ),
-
-                info
-
-
-                optional = True,
+            maxmemory=dict(
+                info="The maximum amount of usable memory (in GiB)",
+                type=int,
+                optional=True,
             ),
-
-                info
-
-
+            parallel_const=dict(
+                info=(
+                    "Constant that are used to predict execution time and "
+                    + "memory consumption for a given ODB database."
+                ),
+                type=footprints.FPDict,
+                optional=True,
             ),
-        parallel_const = dict(
-            info = ("Constant that are used to predict execution time and " +
-                    "memory consumption for a given ODB database."),
-            type = footprints.FPDict,
-            optional = True,
-        )
         )
     )
 
-    _donot_link_roles = [
-
-
+    _donot_link_roles = [
+        "Observations",
+        "Obsmap",
+        "IOPoll",
+        "LFIScripts",
+        "LFITOOLS",
+        "Binary",
+        "Bator",
+        "Batodb",
+    ]
 
     def __init__(self, *kargs, **kwargs):
         super().__init__(*kargs, **kwargs)
@@ -215,16 +248,25 @@ class Raw2ODBparallel(ParaBlindRun, odb.OdbComponentDecoMixin, drhook.DrHookDeco
         """Return the maximum amount of usable memory (in MiB)."""
         if self._effective_maxmem is None:
             if self.maxmemory:
-                self._effective_maxmem =
+                self._effective_maxmem = (
+                    self.maxmemory * 1024
+                )  # maxmemory in GB
             else:
-                sys_maxmem = self.system.memory_info.system_RAM(
+                sys_maxmem = self.system.memory_info.system_RAM("MiB")
                 # System memory minus 20% or minus 4GB
-                self._effective_maxmem = max(
+                self._effective_maxmem = max(
+                    sys_maxmem * 0.8, sys_maxmem - 4 * 1024
+                )
         return self._effective_maxmem
 
     def input_obs(self):
         """Find out which are the usable observations."""
-        obsall = [
+        obsall = [
+            x
+            for x in self.context.sequence.effective_inputs(
+                kind="observations"
+            )
+        ]
         obsall.sort(key=lambda s: s.rh.resource.part)
 
         # Looking for valid raw observations
@@ -232,24 +274,34 @@ class Raw2ODBparallel(ParaBlindRun, odb.OdbComponentDecoMixin, drhook.DrHookDeco
         obsok = list()
         for secobs in obsall:
             rhobs = secobs.rh
-            if rhobs.resource.nativefmt ==
-                logger.warning(
-
+            if rhobs.resource.nativefmt == "odb":
+                logger.warning(
+                    "Observations set [%s] is ODB ready", rhobs.resource.part
+                )
                 continue
             if rhobs.container.totalsize < sizemin:
-                logger.warning(
-
+                logger.warning(
+                    "Observations set [%s] is far too small: %d",
+                    rhobs.resource.part,
+                    rhobs.container.totalsize,
+                )
             else:
-                logger.info(
-
+                logger.info(
+                    "Observations set [%s] has size: %d",
+                    rhobs.resource.part,
+                    int(rhobs.container.totalsize),
+                )
                 obsok.append(Foo(rh=rhobs, refdata=list(), mapped=False))
 
         # Check the observations dates
         for obs in [obs for obs in obsok if obs.rh.resource.date != self.date]:
-            logger.warning(
-
-
-
+            logger.warning(
+                "Observation [%s] %s [time mismatch: %s / %s]",
+                "discarded" if self.ontime else "is questionable",
+                obs.rh.resource.part,
+                obs.rh.resource.date.isoformat(),
+                self.date.isoformat(),
+            )
         if self.ontime:
             obsok = [obs for obs in obsok if obs.rh.resource.date == self.date]
 
@@ -258,24 +310,34 @@ class Raw2ODBparallel(ParaBlindRun, odb.OdbComponentDecoMixin, drhook.DrHookDeco
     def _retrieve_refdatainfo(self, obslist):
         """Look for refdata resources and link their content with the obslist."""
         refmap = dict()
-        refall = list(self.context.sequence.effective_inputs(kind=
+        refall = list(self.context.sequence.effective_inputs(kind="refdata"))
         for rdata in refall:
-            logger.info(
+            logger.info("Inspect refdata " + rdata.rh.container.localpath())
             self.system.subtitle(rdata.role)
             rdata.rh.container.cat()
             for item in rdata.rh.contents:
-                refmap[(item.fmt.lower(), item.data, item.instr)] = (
+                refmap[(item.fmt.lower(), item.data, item.instr)] = (
+                    rdata.rh,
+                    item,
+                )
 
         # Build actual refdata
         for obs in obslist:
             thispart = obs.rh.resource.part
             thisfmt = obs.rh.container.actualfmt.lower()
-            logger.info(
+            logger.info(
+                " ".join(
+                    ("Building information for [", thisfmt, "/", thispart, "]")
+                )
+            )
 
             # Gather equivalent refdata lines
-            if not self.system.path.exists(
-
-
+            if not self.system.path.exists("norefdata." + thispart) and (
+                not self.env.VORTEX_OBSDB_NOREF
+                or not re.search(
+                    thispart, self.env.VORTEX_OBSDB_NOREF, re.IGNORECASE
+                )
+            ):
                 for k, v in refmap.items():
                     x_fmt, x_data = k[:2]
                     if x_fmt == thisfmt and x_data == thispart:
@@ -290,18 +352,26 @@ class Raw2ODBparallel(ParaBlindRun, odb.OdbComponentDecoMixin, drhook.DrHookDeco
             rdata, item = refmap[thiskey]
             thismap.refdata.append(rdata.contents.formatted_data(item))
         else:
-            logger.warning(
-
+            logger.warning(
+                "Creating automatic refdata entry for " + str(thiskey)
+            )
+            item = ObsRefItem(
+                imap.data, imap.fmt, imap.instr, self.date.ymd, self.date.hh
+            )
             if refall:
-                thismap.refdata.append(
+                thismap.refdata.append(
+                    refall[0].rh.contents.formatted_data(item)
+                )
             else:
-                logger.error(
+                logger.error("No default for formatting data %s", item)
                 thismap.refdata.append(ObsRefContent.formatted_data(item))
 
     @staticmethod
     def _new_obspack_item():
         """Create a now entry in obspack."""
-        return Foo(
+        return Foo(
+            mapping=list(), standalone=False, refdata=list(), obsfile=dict()
+        )
 
     def prepare(self, rh, opts):
         """Get a look at raw observations input files."""
@@ -310,22 +380,30 @@ class Raw2ODBparallel(ParaBlindRun, odb.OdbComponentDecoMixin, drhook.DrHookDeco
         cycle = rh.resource.cycle
 
         # First create the proper IO assign table for any of the resulting ECMA databases
-        self.odb_create_db(
-        self.env.IOASSIGN = sh.path.abspath(
+        self.odb_create_db("ECMA", "odb_db_template")
+        self.env.IOASSIGN = sh.path.abspath(
+            sh.path.join("odb_db_template", "IOASSIGN")
+        )
 
         # Looking for input observations
         obsok = self.input_obs()
 
         # Building refdata map for direct access to (fmt, data, instr) entries
-        if cycle <
+        if cycle < "cy42_op1":
             # Refdata information is not needed anymore with cy42_op1
             refmap, refall = self._retrieve_refdatainfo(obsok)
 
         # Looking for obs maps
         mapitems = list()
-        for omsec in self.context.sequence.effective_inputs(kind=
-            logger.info(
-
+        for omsec in self.context.sequence.effective_inputs(kind="obsmap"):
+            logger.info(
+                " ".join(
+                    (
+                        "Gathering information from map",
+                        omsec.rh.container.localpath(),
+                    )
+                )
+            )
             sh.subtitle(omsec.role)
             omsec.rh.container.cat()
             mapitems.extend(omsec.rh.contents)
@@ -333,12 +411,21 @@ class Raw2ODBparallel(ParaBlindRun, odb.OdbComponentDecoMixin, drhook.DrHookDeco
         self.obspack = defaultdict(self._new_obspack_item)  # Reset the obspack
         for imap in mapitems:
             # Match observation files and obsmap entries + Various checks
-            logger.info(
-            candidates = [
-
-
+            logger.info("Inspect " + str(imap))
+            candidates = [
+                obs
+                for obs in obsok
+                if (
+                    obs.rh.resource.part == imap.data
+                    and obs.rh.container.actualfmt.lower() == imap.fmt.lower()
+                )
+            ]
             if not candidates:
-                errmsg =
+                errmsg = (
+                    "No input obsfile could match [data:{:s}/fmt:{:s}]".format(
+                        imap.data, imap.fmt
+                    )
+                )
                 if self.mapall:
                     raise ValueError(errmsg)
                 else:
@@ -348,38 +435,44 @@ class Raw2ODBparallel(ParaBlindRun, odb.OdbComponentDecoMixin, drhook.DrHookDeco
             # Build the obspack entry
             thismap = self.obspack[imap.odb]
             thismap.mapping.append(imap)
-            thismap.obsfile[imap.fmt.upper() +
+            thismap.obsfile[imap.fmt.upper() + "." + imap.data] = candidates[
+                -1
+            ]
             # Map refdata and obsmap entries
-            if cycle <
+            if cycle < "cy42_op1":
                 # Refdata information is not needed anymore with cy42_op1
                 self._map_refdatainfo(refmap, refall, imap, thismap)
 
         # Deal with observations that are not described in the obsmap
         for notmap in [obs for obs in obsok if not obs.mapped]:
             thispart = notmap.rh.resource.part
-            logger.info(
+            logger.info("Inspect not mapped obs " + thispart)
            if thispart not in self.obspack:
                 thisfmt = notmap.rh.container.actualfmt.upper()
-                thismsg =
+                thismsg = "standalone obs entry [data:{:s} / fmt:{:s}]".format(
+                    thispart, thisfmt
+                )
                 if self.maponly:
-                    logger.warning(
+                    logger.warning("Ignore " + thismsg)
                 else:
-                    logger.warning(
+                    logger.warning("Active " + thismsg)
                     thismap = self.obspack[thispart]
                     thismap.standalone = thisfmt
-                    thismap.mapping.append(
+                    thismap.mapping.append(
+                        ObsMapItem(thispart, thispart, thisfmt, thispart)
+                    )
                     thismap.refdata = notmap.refdata
-                    thismap.obsfile[thisfmt.upper() +
+                    thismap.obsfile[thisfmt.upper() + "." + thispart] = notmap
 
         # Informations about timeslots
         logger.info("The timeslot definition is: %s", str(self.slots))
-        if cycle <
+        if cycle < "cy42_op1":
             # ficdate is not needed anymore with cy42_op1...
-            self.slots.as_file(self.date,
+            self.slots.as_file(self.date, "ficdate")
         else:
             # From cy42_op1 onward, we only need environment variables
             for var, value in self.slots.as_environment().items():
-                logger.info(
+                logger.info("Setting env %s = %s", var, str(value))
                 self.env[var] = value
 
         # Let ancestors handling most of the env setting
@@ -388,37 +481,48 @@ class Raw2ODBparallel(ParaBlindRun, odb.OdbComponentDecoMixin, drhook.DrHookDeco
             BATOR_NBPOOL=self.npool,
             BATODB_NBPOOL=self.npool,
             BATOR_NBSLOT=self.slots.nslot,
-            BATODB_NBSLOT=self.slots.nslot,
+            BATODB_NBSLOT=self.slots.nslot,
+        )
         self.env.default(
             TIME_INIT_YYYYMMDD=self.date.ymd,
-            TIME_INIT_HHMMSS=self.date.hm +
+            TIME_INIT_HHMMSS=self.date.hm + "00",
+        )
         if self.lamflag:
-            for lamvar in (
-                logger.info(
+            for lamvar in ("BATOR_LAMFLAG", "BATODB_LAMFLAG"):
+                logger.info("Setting env %s = %d", lamvar, 1)
                 self.env[lamvar] = 1
 
         if self.member is not None:
-            for nam in self.context.sequence.effective_inputs(
-
-
+            for nam in self.context.sequence.effective_inputs(
+                kind=("namelist", "namelistfp")
+            ):
+                nam.rh.contents.setmacro("MEMBER", self.member)
+                logger.info(
+                    "Setup macro MEMBER=%s in %s",
+                    self.member,
+                    nam.rh.container.actualpath(),
+                )
                 if nam.rh.contents.dumps_needs_update:
                     nam.rh.save()
 
     def spawn_command_options(self):
         """Any data useful to build the command line."""
         opts_dict = super().spawn_command_options()
-        opts_dict[
-        opts_dict[
+        opts_dict["dataid"] = self.dataid
+        opts_dict["date"] = self.date
         return opts_dict
 
     def _default_pre_execute(self, rh, opts):
         """Change default initialisation to use LongerFirstScheduler"""
         # Start the task scheduler
-        self._boss = Boss(
-
-
-
-
+        self._boss = Boss(
+            verbose=self.verbose,
+            scheduler=footprints.proxy.scheduler(
+                limit="threads+memory",
+                max_threads=self.ntasks,
+                max_memory=self.effective_maxmem,
+            ),
+        )
         self._boss.make_them_work()
 
     def execute(self, rh, opts):
@@ -430,10 +534,15 @@ class Raw2ODBparallel(ParaBlindRun, odb.OdbComponentDecoMixin, drhook.DrHookDeco
         sh = self.system
         cycle = rh.resource.cycle
 
-        batnam = [
+        batnam = [
+            x.rh
+            for x in self.context.sequence.effective_inputs(
+                role="NamelistBatodb"
+            )
+        ]
         # Give a glance to the actual namelist
         if batnam:
-            sh.subtitle(
+            sh.subtitle("Namelist Raw2ODB")
             batnam[0].container.cat()
 
         self.obsmapout = list()  # Reset the obsmapout
@@ -442,167 +551,277 @@ class Raw2ODBparallel(ParaBlindRun, odb.OdbComponentDecoMixin, drhook.DrHookDeco
         workdir = sh.pwd()
 
         for odbset, thispack in self.obspack.items():
-            odbname = self.virtualdb.upper() +
-            sh.title(
-            with sh.cdcontext(
-
-
+            odbname = self.virtualdb.upper() + "." + odbset
+            sh.title("Cocooning ODB set: " + odbname)
+            with sh.cdcontext("wkdir_" + odbset, create=True):
+                for inpt in [
+                    s
+                    for s in self.context.sequence.inputs()
+                    if s.stage == "get"
+                ]:
                     if inpt.role not in self._donot_link_roles:
-                        logger.info(
-
-
-
-
-
+                        logger.info(
+                            "creating softlink: %s -> %s",
+                            inpt.rh.container.localpath(),
+                            sh.path.join(
+                                workdir, inpt.rh.container.localpath()
+                            ),
+                        )
+                        sh.softlink(
+                            sh.path.join(
+                                workdir, inpt.rh.container.localpath()
+                            ),
+                            inpt.rh.container.localpath(),
+                        )
+
+                if cycle < "cy42_op1":
                     # Special stuff for cy < 42
-                    logger.info(
-                    sh.softlink(sh.path.join(workdir,
+                    logger.info("creating softlink for ficdate.")
+                    sh.softlink(sh.path.join(workdir, "ficdate"), "ficdate")
 
                 odb_input_size = 0
                 for obsname, obsinfo in thispack.obsfile.items():
-                    logger.info(
-
-
-
-
-
-
-
-
+                    logger.info(
+                        "creating softlink: %s -> %s",
+                        obsname,
+                        sh.path.join(
+                            workdir, obsinfo.rh.container.localpath()
+                        ),
+                    )
+                    sh.softlink(
+                        sh.path.join(
+                            workdir, obsinfo.rh.container.localpath()
+                        ),
+                        obsname,
+                    )
+                    if thispack.standalone and cycle < "cy42_op1":
+                        logger.info(
+                            "creating softlink: %s -> %s",
+                            thispack.standalone,
+                            sh.path.join(
+                                workdir, obsinfo.rh.container.localpath()
+                            ),
+                        )
+                        sh.softlink(
+                            sh.path.join(
+                                workdir, obsinfo.rh.container.localpath()
+                            ),
+                            thispack.standalone,
+                        )
 
                     odb_input_size += obsinfo.rh.container.totalsize
 
                 # Fill the actual refdata according to information gathered in prepare stage
-                if cycle <
+                if cycle < "cy42_op1":
                     if thispack.refdata:
-                        with open(
+                        with open("refdata", "w") as fd:
                             for rdentry in thispack.refdata:
                                 fd.write(str(rdentry + "\n"))
-                        sh.subtitle(
-                        sh.cat(
+                        sh.subtitle("Local refdata for: {:s}".format(odbname))
+                        sh.cat("refdata", output=False)
                 # Drive bator with a batormap file (from cy42_op1 onward)
                 else:
-                    with open(
+                    with open("batormap", "w") as fd:
                         for mapentry in sorted(thispack.mapping):
-                            fd.write(
-
-
+                            fd.write(
+                                str(
+                                    ObsMapContent.formatted_data(mapentry)
+                                    + "\n"
+                                )
+                            )
+                    sh.subtitle("Local batormap for: {:s}".format(odbname))
+                    sh.cat("batormap", output=False)
 
                 self.obsmapout.extend(thispack.mapping)
 
                 # Compute the expected memory and time
                 if isinstance(self.parallel_const, dict):
-                    pconst = self.parallel_const.get(
-
-
+                    pconst = self.parallel_const.get(
+                        odbset,
+                        self.parallel_const.get("default", (999999.0, 1.0)),
+                    )
+                    offsets = self.parallel_const.get(
+                        "offset", (0.0, 0.0)
+                    )  # In MiB for the memory
                 else:
-                    pconst = (999999
-                    offsets = (0
+                    pconst = (999999.0, 1.0)
+                    offsets = (0.0, 0.0)
                 bTime = (odb_input_size * pconst[1] / 1048576) + offsets[1]
-                bMemory = odb_input_size * pconst[0] + (
-
+                bMemory = odb_input_size * pconst[0] + (
+                    offsets[0] * 1024 * 1024
+                )
+                bMemory = bMemory / 1024.0 / 1024.0
                 if bMemory > self.effective_maxmem:
-                    logger.info(
-
-
+                    logger.info(
+                        "For %s, the computed memory needs exceed the node limit.",
+                        odbset,
+                    )
+                    logger.info(
+                        "Memory requirement reseted to %d (originally %d.)",
+                        int(self.effective_maxmem),
+                        int(bMemory),
+                    )
                     bMemory = self.effective_maxmem
-                scheduler_instructions[
-
-
-                scheduler_instructions[
-                scheduler_instructions[
+                scheduler_instructions["name"].append(
+                    "ODB_database_{:s}".format(odbset)
+                )
+                scheduler_instructions["base"].append(odbset)
+                scheduler_instructions["memory"].append(bMemory)
+                scheduler_instructions["expected_time"].append(bTime)
+                scheduler_instructions["inputsize"].append(odb_input_size)
 
-        sh.title(
+        sh.title("Launching Bator using taylorism...")
         self._default_pre_execute(rh, opts)
         common_i = self._default_common_instructions(rh, opts)
         # Update the common instructions
-        common_i.update(
+        common_i.update(
+            dict(
+                workdir=workdir,
+                cycle=cycle,
+            )
+        )
 
         self._add_instructions(common_i, scheduler_instructions)
 
         post_opts = copy.copy(opts)
-        post_opts[
+        post_opts["synthesis"] = self.para_synthesis
         self._default_post_execute(rh, post_opts)
 
     def _default_rc_action(self, rh, opts, report, rc):
         super()._default_rc_action(rh, opts, report, rc)
-        my_report = report[
+        my_report = report["report"].get("synthesis", None)
         if my_report:
-            opts[
+            opts["synthesis"][my_report.pop("base")] = my_report
 
     def postfix(self, rh, opts):
         """Post conversion cleaning."""
         sh = self.system
 
         # Remove empty ECMA databases from the output obsmap
-        self.obsmapout = [
-
-
+        self.obsmapout = [
+            x
+            for x in self.obsmapout
+            if (
+                sh.path.isdir("ECMA." + x.odb)
+                and sh.path.isdir("ECMA." + x.odb + "/1")
+            )
+        ]
 
         # At least one non-empty database is needed...
-        self.algoassert(
+        self.algoassert(
+            self.obsmapout, "At least one non-empty ODB database is expected"
+        )
 
         # Generate the output bator_map
-        with open(
+        with open("batodb_map.out", "w") as fd:
             for x in sorted(self.obsmapout):
-                fd.write(str(ObsMapContent.formatted_data(x) +
+                fd.write(str(ObsMapContent.formatted_data(x) + "\n"))
 
         # Generate a global refdata (if cycle allows it and if possible)
-        if rh.resource.cycle <
-            rdrh_dict = {
-
-
-
+        if rh.resource.cycle < "cy42_op1":
+            rdrh_dict = {
+                y.rh.resource.part: y.rh
+                for y in self.context.sequence.effective_inputs(kind="refdata")
+                if y.rh.resource.part != "all"
+            }
+            with open("refdata_global", "w") as rdg:
                 for x in sorted(self.obsmapout):
-                    if (
-
-
+                    if (
+                        x.data in rdrh_dict
+                        and sh.path.getsize(
+                            rdrh_dict[x.data].container.localpath()
+                        )
+                        > 0
+                    ):
+                        with open(
+                            rdrh_dict[x.data].container.localpath()
+                        ) as rdl:
                             rdg.write(rdl.readline())
-                    elif (
-
-
+                    elif (
+                        sh.path.exists("refdata." + x.data)
+                        and sh.path.getsize("refdata." + x.data) > 0
+                    ):
+                        with open("refdata." + x.data) as rdl:
                             rdg.write(rdl.readline())
                     else:
-                        logger.info(
+                        logger.info(
+                            "Unable to create a global refdata entry for data="
+                            + x.data
+                        )
 
-        sh.json_dump(self.para_synthesis,
+        sh.json_dump(self.para_synthesis, "parallel_exec_synthesis.json")
 
         # Print the parallel execution summary
-        sh.subtitle(
-        header =
-        rfmt =
+        sh.subtitle("Here is the parallel execution synthesis: memory aspects")
+        header = "Database InputSize(MiB) PredMem(GiB) RealMem(GiB) Real/Pred Ratio"
+        rfmt = "{:8s} {:>15.0f} {:>12.1f} {:>12.1f} {:>15.2f}"
         print(header)
         for row in sorted(self.para_synthesis.keys()):
             srep = self.para_synthesis[row]
-            print(
-
-
-
-
-
-
-
-
-
+            print(
+                rfmt.format(
+                    row,
+                    convert_bytes_in_unit(srep["inputsize"], "MiB"),
+                    convert_bytes_in_unit(srep["mem_expected"], "GiB"),
+                    (
+                        99.99
+                        if srep["mem_real"] is None
+                        else convert_bytes_in_unit(srep["mem_real"], "GiB")
+                    ),
+                    (
+                        99.99
+                        if srep["mem_ratio"] is None
+                        else srep["mem_ratio"]
+                    ),
+                )
+            )
+
+        sh.subtitle(
+            "Here is the parallel execution synthesis: elapsed time aspects"
+        )
+        header = (
+            "Database InputSize(MiB) PredTime(s) RealTime(s) Real/Pred Ratio"
+        )
+        rfmt = "{:8s} {:>15.0f} {:>11.1f} {:>11.1f} {:>15.2f}"
         print(header)
         for row in sorted(self.para_synthesis.keys()):
             srep = self.para_synthesis[row]
-            print(
-
-
-
-
-
-
+            print(
+                rfmt.format(
+                    row,
+                    convert_bytes_in_unit(srep["inputsize"], "MiB"),
+                    srep["time_expected"],
+                    srep["time_real"],
+                    (
+                        99.99
+                        if srep["time_ratio"] is None
+                        else srep["time_ratio"]
+                    ),
+                )
+            )
+
+        sh.subtitle("Here is the parallel execution synthesis: timeline")
+        header = "Database StartTime(UTC) PredMem(GiB) RealTime(s) ExecSlot"
+        rfmt = "{:8s} {:>40s} {:>11.1f} {:>12.1f} {:>8s}"
         print(header)
-        for
-
-
-
+        for row, srep in sorted(
+            self.para_synthesis.items(), key=lambda x: x[1]["time_start"]
+        ):
+            print(
+                rfmt.format(
+                    row,
+                    srep["time_start"],
+                    convert_bytes_in_unit(srep["mem_expected"], "GiB"),
+                    srep["time_real"],
+                    str(srep["sched_id"]),
+                )
+            )
 
-        print(
+        print(
+            "\nThe memory limit was set to: {:.1f} GiB".format(
+                self.effective_maxmem / 1024.0
+            )
+        )
 
         super().postfix(rh, opts)
 
@@ -611,36 +830,45 @@ class OdbAverage(Parallel, odb.OdbComponentDecoMixin, drhook.DrHookDecoMixin):
     """TODO the father of this component is very much welcome."""
 
     _footprint = dict(
-        attr
-            kind
-                values
+        attr=dict(
+            kind=dict(
+                values=["average"],
             ),
-            binarysingle
-                default
+            binarysingle=dict(
+                default="basicobsort",
             ),
-            ioassign
-            outdb
-                optional
-                default
-                value
+            ioassign=dict(),
+            outdb=dict(
+                optional=True,
+                default="ccma",
+                value=["ecma", "ccma"],
            ),
-            maskname
-                optional
-                default
+            maskname=dict(
+                optional=True,
+                default="mask4x4.txt",
             ),
         )
     )
 
+    def _mpitool_attributes(self, opts):
+        conf_dict = super()._mpitool_attributes(opts)
+        conf_dict.update({"mplbased": True})
+        return conf_dict
+
     def prepare(self, rh, opts):
         """Find any ODB candidate in input files."""
 
         sh = self.system
 
         # Looking for input observations
-        obsall = [
+        obsall = [
+            x
+            for x in self.lookupodb()
+            if x.rh.resource.layout.lower() == "ecma"
+        ]
         # One database at a time
         if len(obsall) != 1:
-            raise ValueError(
+            raise ValueError("One and only one ECMA input should be here")
         self.bingo = ecma = obsall[0]
 
         # First create a fake CCMA
@@ -654,10 +882,10 @@ class OdbAverage(Parallel, odb.OdbComponentDecoMixin, drhook.DrHookDecoMixin):
 
         self.odb.ioassign_gather(ecma_path, ccma_path)
 
-        ecma_pool = sh.path.join(ecma_path,
+        ecma_pool = sh.path.join(ecma_path, "1")
         if not sh.path.isdir(ecma_pool):
-            logger.error(
-            self.abort(
+            logger.error("The input ECMA base is empty")
+            self.abort("No ECMA input")
             return
 
         self.odb.create_poolmask(self.layout_new, ccma_path)
@@ -688,15 +916,18 @@ class OdbAverage(Parallel, odb.OdbComponentDecoMixin, drhook.DrHookDecoMixin):
 
         sh = self.system
 
-        mask = [
+        mask = [
+            x.rh
+            for x in self.context.sequence.effective_inputs(kind="atmsmask")
+        ]
         if not mask:
-            raise ValueError(
+            raise ValueError("Could not find any MASK input")
 
         # Have a look to mask file
         if mask[0].container.localpath() != self.maskname:
             sh.softlink(mask[0].container.localpath(), self.maskname)
 
-        sh.subtitle(
+        sh.subtitle("Mask")
         mask[0].container.cat()
 
         # Standard execution
@@ -707,13 +938,19 @@ class OdbAverage(Parallel, odb.OdbComponentDecoMixin, drhook.DrHookDecoMixin):
         sh = self.system
 
         with sh.cdcontext(self.layout_new):
-            for ccma in sh.glob(
-                slurp = sh.cat(ccma, outsplit=False).replace(
-
+            for ccma in sh.glob("{:s}.*".format(self.layout_new)):
+                slurp = sh.cat(ccma, outsplit=False).replace(
+                    self.layout_new, self.layout_in
+                )
+                with open(
+                    ccma.replace(self.layout_new, self.layout_in), "w"
+                ) as fd:
                     fd.write(str(slurp))
                 sh.rm(ccma)
 
-        sh.mv(
+        sh.mv(
+            self.layout_new, self.layout_in + "." + self.bingo.rh.resource.part
+        )
 
         super().postfix(rh, opts)
 
@@ -722,30 +959,41 @@ class OdbCompress(Parallel, odb.OdbComponentDecoMixin, drhook.DrHookDecoMixin):
     """Take a screening ODB ECMA database and create the compressed CCMA database."""
 
     _footprint = dict(
-        attr
-            kind
-                values
+        attr=dict(
+            kind=dict(
+                values=["odbcompress"],
             ),
-            ioassign
+            ioassign=dict(),
         )
     )
 
+    def _mpitool_attributes(self, opts):
+        conf_dict = super()._mpitool_attributes(opts)
+        conf_dict.update({"mplbased": True})
+        return conf_dict
+
     def prepare(self, rh, opts):
         """Find any ODB candidate in input files and fox ODB env accordingly."""
 
-        obsall = [
+        obsall = [
+            x
+            for x in self.lookupodb()
+            if x.rh.resource.layout.lower() == "ecma"
+        ]
         if len(obsall) > 1:
-            obsvirtual = [o for o in obsall if o.rh.resource.part ==
+            obsvirtual = [o for o in obsall if o.rh.resource.part == "virtual"]
             if len(obsvirtual) != 1:
-                raise ValueError(
+                raise ValueError(
+                    "One and only one virtual database must be provided"
+                )
             ecma = obsvirtual[0]
         elif len(obsall) == 1:
             ecma = obsall[0]
         else:
-            raise ValueError(
+            raise ValueError("No ECMA database provided")
 
         # First create a fake CCMA
-        self.layout_new =
+        self.layout_new = "ccma"
         ccma_path = self.odb_create_db(self.layout_new)
         self.odb.fix_db_path(self.layout_new, ccma_path)
 
@@ -757,7 +1005,7 @@ class OdbCompress(Parallel, odb.OdbComponentDecoMixin, drhook.DrHookDecoMixin):
 
         self.odb.create_poolmask(self.layout_new, ccma_path)
 
-        self.odb_rw_or_overwrite_method(*
+        self.odb_rw_or_overwrite_method(*obsall)
 
         # Let ancesters handling most of the env setting
         super().prepare(rh, opts)
@@ -777,18 +1025,23 @@ class OdbMatchup(Parallel, odb.OdbComponentDecoMixin, drhook.DrHookDecoMixin):
     """Report some information from post-minim CCMA to post-screening ECMA base."""
 
     _footprint = dict(
-        attr
-            kind
-                values
+        attr=dict(
+            kind=dict(
+                values=["matchup"],
             ),
-            fcmalayout
-                optional
-                value
-                remap
+            fcmalayout=dict(
+                optional=True,
+                value=["ecma", "ccma", "CCMA", "ECMA"],
+                remap=dict(CCMA="ccma", ECMA="ecma"),
             ),
         )
     )
 
+    def _mpitool_attributes(self, opts):
+        conf_dict = super()._mpitool_attributes(opts)
+        conf_dict.update({"mplbased": True})
+        return conf_dict
+
     def prepare(self, rh, opts):
         """Find ODB candidates in input files."""
 
@@ -796,31 +1049,40 @@ class OdbMatchup(Parallel, odb.OdbComponentDecoMixin, drhook.DrHookDecoMixin):
 
         # Looking for input observations
         obsscr_virtual = [
-            x
-
+            x
+            for x in self.lookupodb()
+            if x.rh.resource.stage.startswith("screen")
+            and x.rh.resource.part == "virtual"
         ]
         obsscr_parts = [
-            x
-
+            x
+            for x in self.lookupodb()
+            if x.rh.resource.stage.startswith("screen")
+            and x.rh.resource.part != "virtual"
         ]
         obscompressed = [
-            x
-
+            x
+            for x in self.lookupodb()
+            if x.rh.resource.stage.startswith("min")
+            or x.rh.resource.stage.startswith("traj")
         ]
 
         # One database at a time
         if not obsscr_virtual:
-            raise ValueError(
+            raise ValueError("Could not find any ODB screening input")
         if not obscompressed:
-            raise ValueError(
+            raise ValueError("Could not find any ODB minim input")
 
         # Set actual layout and path
         ecma = obsscr_virtual.pop(0)
         ccma = obscompressed.pop(0)
         self.layout_screening = ecma.rh.resource.layout
         self.layout_compressed = ccma.rh.resource.layout
-        self.layout_fcma = (
-
+        self.layout_fcma = (
+            self.layout_compressed
+            if self.fcmalayout is None
+            else self.fcmalayout
+        )
         ecma_path = sh.path.abspath(ecma.rh.container.localpath())
         ccma_path = sh.path.abspath(ccma.rh.container.localpath())
 
@@ -829,14 +1091,17 @@ class OdbMatchup(Parallel, odb.OdbComponentDecoMixin, drhook.DrHookDecoMixin):
         self.odb.ioassign_gather(ccma_path, ecma_path)
 
         # Ok, but why ???
-        sh.cp(
+        sh.cp(
+            sh.path.join(ecma_path, "ECMA.dd"),
+            sh.path.join(ccma_path, "ECMA.dd"),
+        )
 
         # Let ancesters handling most of the env setting
         super().prepare(rh, opts)
 
         # Fix the input database intent
         self.odb_rw_or_overwrite_method(ecma)
-        self.odb_rw_or_overwrite_method(*
+        self.odb_rw_or_overwrite_method(*obsscr_parts)
 
     def spawn_command_options(self):
         """Prepare command line options to binary."""
@@ -850,45 +1115,50 @@ class OdbMatchup(Parallel, odb.OdbComponentDecoMixin, drhook.DrHookDecoMixin):
         )
 
 
-class OdbReshuffle(
+class OdbReshuffle(
+    Parallel, odb.OdbComponentDecoMixin, drhook.DrHookDecoMixin
+):
     """Take a bunch of ECMA databases and create new ones with an updated number of pools."""
 
     _footprint = dict(
-        attr
-            kind
-                values
+        attr=dict(
+            kind=dict(
+                values=["reshuffle"],
             ),
         )
     )
 
-    _OUT_DIRECTORY =
-    _BARE_OUT_LAYOUT =
+    _OUT_DIRECTORY = "reshuffled"
+    _BARE_OUT_LAYOUT = "ccma"
+
+    def _mpitool_attributes(self, opts):
+        conf_dict = super()._mpitool_attributes(opts)
+        conf_dict.update({"mplbased": True})
+        return conf_dict
 
     def prepare(self, rh, opts):
         """Find ODB candidates in input files."""
 
         # Looking for input observations
         obs_in_virtual = [
-            x for x in self.lookupodb()
-            if x.rh.resource.part == 'virtual'
+            x for x in self.lookupodb() if x.rh.resource.part == "virtual"
         ]
         if obs_in_virtual:
-            raise ValueError(
+            raise ValueError("Do not input a Virtual database")
         self.obs_in_parts = [
-            x for x in self.lookupodb()
-            if x.rh.resource.part != 'virtual'
+            x for x in self.lookupodb() if x.rh.resource.part != "virtual"
         ]
 
         # Find the input layout
         in_layout = {x.rh.resource.layout for x in self.obs_in_parts}
         if len(in_layout) != 1:
-            raise ValueError(
+            raise ValueError(
+                "Incoherent layout in input databases or no input databases"
+            )
         self.layout_in = in_layout.pop()
 
         # Some extra settings
-        self.env.update(
-            TO_ODB_FULL=1
-        )
+        self.env.update(TO_ODB_FULL=1)
 
         # prepare the ouputs' directory
         self.system.mkdir(self._OUT_DIRECTORY)
@@ -899,12 +1169,17 @@ class OdbReshuffle(Parallel, odb.OdbComponentDecoMixin, drhook.DrHookDecoMixin):
         """Loop on available databases."""
         sh = self.system
         for a_db in self.obs_in_parts:
-            sh.subtitle(
+            sh.subtitle(
+                "Dealing with {:s}".format(a_db.rh.container.localpath())
+            )
 
             ecma_path = sh.path.abspath(a_db.rh.container.localpath())
-            ccma_path = sh.path.abspath(
-
-
+            ccma_path = sh.path.abspath(
+                sh.path.join(
+                    self._OUT_DIRECTORY,
+                    ".".join([self.layout_in.upper(), a_db.rh.resource.part]),
+                )
+            )
             self.odb_create_db(self._BARE_OUT_LAYOUT, dbpath=ccma_path)
             self.odb.fix_db_path(self.layout_in, ecma_path)
             self.odb.fix_db_path(self._BARE_OUT_LAYOUT, ccma_path)
@@ -917,15 +1192,20 @@ class OdbReshuffle(Parallel, odb.OdbComponentDecoMixin, drhook.DrHookDecoMixin):
             super().execute(rh, opts)
 
             # CCMA -> ECMA
-            self.odb.change_layout(
+            self.odb.change_layout(
+                self._BARE_OUT_LAYOUT, self.layout_in, ccma_path
+            )
 
     def postfix(self, rh, opts):
         """Create a virtual database for output data."""
-        self.system.subtitle(
-        virtual_db = self.odb_merge_if_needed(
-
-
-
+        self.system.subtitle("Creating the virtual database")
+        virtual_db = self.odb_merge_if_needed(
+            self.obs_in_parts, subdir=self._OUT_DIRECTORY
+        )
+        logger.info(
+            "The output virtual DB was created: %s",
+            self.system.path.join(self._OUT_DIRECTORY, virtual_db),
+        )
 
     def spawn_command_options(self):
         """Prepare command line options to binary."""
@@ -936,14 +1216,16 @@ class OdbReshuffle(Parallel, odb.OdbComponentDecoMixin, drhook.DrHookDecoMixin):
         )
 
 
-class FlagsCompute(
+class FlagsCompute(
+    Parallel, odb.OdbComponentDecoMixin, drhook.DrHookDecoMixin
+):
     """Compute observations flags."""
 
     _footprint = dict(
-        info
-        attr
-            kind
-                values
+        info="Computation of observations flags.",
+        attr=dict(
+            kind=dict(
+                values=["flagscomp"],
             ),
         ),
     )
@@ -952,23 +1234,25 @@ class FlagsCompute(Parallel, odb.OdbComponentDecoMixin, drhook.DrHookDecoMixin):
         """Spawn the binary for each of the input databases."""
         # Look for the input databases
         input_databases = self.context.sequence.effective_inputs(
-            role=
-            kind=
+            role="ECMA",
+            kind="observations",
         )
         # Check that there is at least one database
         if len(input_databases) < 1:
-            raise AttributeError(
+            raise AttributeError("No database in input. Stop.")
 
         for input_database in input_databases:
             ecma = input_database.rh
             ecma_filename = ecma.container.filename
             # Environment variable to set DB path
             self.odb.fix_db_path(ecma.resource.layout, ecma.container.abspath)
-            self.env.setvar(
-            logger.info(
+            self.env.setvar("ODB_ECMA", ecma_filename)
+            logger.info("Variable %s set to %s.", "ODB_ECMA", ecma_filename)
             # Path to the IOASSIGN file
-            self.env.IOASSIGN = self.system.path.join(
+            self.env.IOASSIGN = self.system.path.join(
+                ecma.container.abspath, "IOASSIGN"
+            )
             # Let ancesters handling most of the env setting
             super().execute(rh, opts)
             # Rename the output file according to the name of the part of the observations treated
-            self.system.mv(
+            self.system.mv("BDM_CQ", "_".join(["BDM_CQ", ecma.resource.part]))