vortex-nwp 2.0.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (144)
  1. vortex/__init__.py +159 -0
  2. vortex/algo/__init__.py +13 -0
  3. vortex/algo/components.py +2462 -0
  4. vortex/algo/mpitools.py +1953 -0
  5. vortex/algo/mpitools_templates/__init__.py +1 -0
  6. vortex/algo/mpitools_templates/envelope_wrapper_default.tpl +27 -0
  7. vortex/algo/mpitools_templates/envelope_wrapper_mpiauto.tpl +29 -0
  8. vortex/algo/mpitools_templates/wrapstd_wrapper_default.tpl +18 -0
  9. vortex/algo/serversynctools.py +171 -0
  10. vortex/config.py +112 -0
  11. vortex/data/__init__.py +19 -0
  12. vortex/data/abstractstores.py +1510 -0
  13. vortex/data/containers.py +835 -0
  14. vortex/data/contents.py +622 -0
  15. vortex/data/executables.py +275 -0
  16. vortex/data/flow.py +119 -0
  17. vortex/data/geometries.ini +2689 -0
  18. vortex/data/geometries.py +799 -0
  19. vortex/data/handlers.py +1230 -0
  20. vortex/data/outflow.py +67 -0
  21. vortex/data/providers.py +487 -0
  22. vortex/data/resources.py +207 -0
  23. vortex/data/stores.py +1390 -0
  24. vortex/data/sync_templates/__init__.py +0 -0
  25. vortex/gloves.py +309 -0
  26. vortex/layout/__init__.py +20 -0
  27. vortex/layout/contexts.py +577 -0
  28. vortex/layout/dataflow.py +1220 -0
  29. vortex/layout/monitor.py +969 -0
  30. vortex/nwp/__init__.py +14 -0
  31. vortex/nwp/algo/__init__.py +21 -0
  32. vortex/nwp/algo/assim.py +537 -0
  33. vortex/nwp/algo/clim.py +1086 -0
  34. vortex/nwp/algo/coupling.py +831 -0
  35. vortex/nwp/algo/eda.py +840 -0
  36. vortex/nwp/algo/eps.py +785 -0
  37. vortex/nwp/algo/forecasts.py +886 -0
  38. vortex/nwp/algo/fpserver.py +1303 -0
  39. vortex/nwp/algo/ifsnaming.py +463 -0
  40. vortex/nwp/algo/ifsroot.py +404 -0
  41. vortex/nwp/algo/monitoring.py +263 -0
  42. vortex/nwp/algo/mpitools.py +694 -0
  43. vortex/nwp/algo/odbtools.py +1258 -0
  44. vortex/nwp/algo/oopsroot.py +916 -0
  45. vortex/nwp/algo/oopstests.py +220 -0
  46. vortex/nwp/algo/request.py +660 -0
  47. vortex/nwp/algo/stdpost.py +1641 -0
  48. vortex/nwp/data/__init__.py +30 -0
  49. vortex/nwp/data/assim.py +380 -0
  50. vortex/nwp/data/boundaries.py +314 -0
  51. vortex/nwp/data/climfiles.py +521 -0
  52. vortex/nwp/data/configfiles.py +153 -0
  53. vortex/nwp/data/consts.py +954 -0
  54. vortex/nwp/data/ctpini.py +149 -0
  55. vortex/nwp/data/diagnostics.py +209 -0
  56. vortex/nwp/data/eda.py +147 -0
  57. vortex/nwp/data/eps.py +432 -0
  58. vortex/nwp/data/executables.py +1045 -0
  59. vortex/nwp/data/fields.py +111 -0
  60. vortex/nwp/data/gridfiles.py +380 -0
  61. vortex/nwp/data/logs.py +584 -0
  62. vortex/nwp/data/modelstates.py +363 -0
  63. vortex/nwp/data/monitoring.py +193 -0
  64. vortex/nwp/data/namelists.py +696 -0
  65. vortex/nwp/data/obs.py +840 -0
  66. vortex/nwp/data/oopsexec.py +74 -0
  67. vortex/nwp/data/providers.py +207 -0
  68. vortex/nwp/data/query.py +206 -0
  69. vortex/nwp/data/stores.py +160 -0
  70. vortex/nwp/data/surfex.py +337 -0
  71. vortex/nwp/syntax/__init__.py +9 -0
  72. vortex/nwp/syntax/stdattrs.py +437 -0
  73. vortex/nwp/tools/__init__.py +10 -0
  74. vortex/nwp/tools/addons.py +40 -0
  75. vortex/nwp/tools/agt.py +67 -0
  76. vortex/nwp/tools/bdap.py +59 -0
  77. vortex/nwp/tools/bdcp.py +41 -0
  78. vortex/nwp/tools/bdm.py +24 -0
  79. vortex/nwp/tools/bdmp.py +54 -0
  80. vortex/nwp/tools/conftools.py +1661 -0
  81. vortex/nwp/tools/drhook.py +66 -0
  82. vortex/nwp/tools/grib.py +294 -0
  83. vortex/nwp/tools/gribdiff.py +104 -0
  84. vortex/nwp/tools/ifstools.py +203 -0
  85. vortex/nwp/tools/igastuff.py +273 -0
  86. vortex/nwp/tools/mars.py +68 -0
  87. vortex/nwp/tools/odb.py +657 -0
  88. vortex/nwp/tools/partitioning.py +258 -0
  89. vortex/nwp/tools/satrad.py +71 -0
  90. vortex/nwp/util/__init__.py +6 -0
  91. vortex/nwp/util/async.py +212 -0
  92. vortex/nwp/util/beacon.py +40 -0
  93. vortex/nwp/util/diffpygram.py +447 -0
  94. vortex/nwp/util/ens.py +279 -0
  95. vortex/nwp/util/hooks.py +139 -0
  96. vortex/nwp/util/taskdeco.py +85 -0
  97. vortex/nwp/util/usepygram.py +697 -0
  98. vortex/nwp/util/usetnt.py +101 -0
  99. vortex/proxy.py +6 -0
  100. vortex/sessions.py +374 -0
  101. vortex/syntax/__init__.py +9 -0
  102. vortex/syntax/stdattrs.py +867 -0
  103. vortex/syntax/stddeco.py +185 -0
  104. vortex/toolbox.py +1117 -0
  105. vortex/tools/__init__.py +20 -0
  106. vortex/tools/actions.py +523 -0
  107. vortex/tools/addons.py +316 -0
  108. vortex/tools/arm.py +96 -0
  109. vortex/tools/compression.py +325 -0
  110. vortex/tools/date.py +27 -0
  111. vortex/tools/ddhpack.py +10 -0
  112. vortex/tools/delayedactions.py +782 -0
  113. vortex/tools/env.py +541 -0
  114. vortex/tools/folder.py +834 -0
  115. vortex/tools/grib.py +738 -0
  116. vortex/tools/lfi.py +953 -0
  117. vortex/tools/listings.py +423 -0
  118. vortex/tools/names.py +637 -0
  119. vortex/tools/net.py +2124 -0
  120. vortex/tools/odb.py +10 -0
  121. vortex/tools/parallelism.py +368 -0
  122. vortex/tools/prestaging.py +210 -0
  123. vortex/tools/rawfiles.py +10 -0
  124. vortex/tools/schedulers.py +480 -0
  125. vortex/tools/services.py +940 -0
  126. vortex/tools/storage.py +996 -0
  127. vortex/tools/surfex.py +61 -0
  128. vortex/tools/systems.py +3976 -0
  129. vortex/tools/targets.py +440 -0
  130. vortex/util/__init__.py +9 -0
  131. vortex/util/config.py +1122 -0
  132. vortex/util/empty.py +24 -0
  133. vortex/util/helpers.py +216 -0
  134. vortex/util/introspection.py +69 -0
  135. vortex/util/iosponge.py +80 -0
  136. vortex/util/roles.py +49 -0
  137. vortex/util/storefunctions.py +129 -0
  138. vortex/util/structs.py +26 -0
  139. vortex/util/worker.py +162 -0
  140. vortex_nwp-2.0.0.dist-info/METADATA +67 -0
  141. vortex_nwp-2.0.0.dist-info/RECORD +144 -0
  142. vortex_nwp-2.0.0.dist-info/WHEEL +5 -0
  143. vortex_nwp-2.0.0.dist-info/licenses/LICENSE +517 -0
  144. vortex_nwp-2.0.0.dist-info/top_level.txt +1 -0
vortex/nwp/__init__.py ADDED
@@ -0,0 +1,14 @@
1
+ """
2
+ The NWP VORTEX extension package.
3
+ """
4
+
5
+ # Recursive inclusion of packages with potential FootprintBase classes
6
+ from . import algo as algo
7
+ from . import data as data
8
+ from . import tools as tools
9
+ from . import syntax as syntax
10
+
11
+ #: No automatic export
12
+ __all__ = []
13
+
14
+ __tocinfoline__ = "The NWP VORTEX extension"
@@ -0,0 +1,21 @@
1
+ """
2
+ AlgoComponents for NWP
3
+ """
4
+
5
+ # Recursive inclusion of packages with potential FootprintBase classes
6
+ from . import forecasts as forecasts
7
+ from . import fpserver as fpserver
8
+ from . import coupling as coupling
9
+ from . import mpitools as mpitools
10
+ from . import odbtools as odbtools
11
+ from . import stdpost as stdpost
12
+ from . import assim as assim
13
+ from . import eps as eps
14
+ from . import eda as eda
15
+ from . import request as request
16
+ from . import monitoring as monitoring
17
+ from . import clim as clim
18
+ from . import oopsroot as oopsroot
19
+ from . import oopstests as oopstests
20
+
21
+ __all__ = []
@@ -0,0 +1,537 @@
1
+ """
2
+ AlgoComponents dedicated to computations related to Data Assimilation systems.
3
+ """
4
+
5
+ from bronx.fancies import loggers
6
+ from bronx.stdtypes.date import Date
7
+
8
+ from vortex.algo.components import BlindRun, Parallel
9
+ from vortex.syntax.stdattrs import a_date
10
+ from .ifsroot import IFSParallel
11
+ from ..tools import odb, drhook
12
+
13
+ #: No automatic export
14
+ __all__ = []
15
+
16
+ logger = loggers.getLogger(__name__)
17
+
18
+
19
class MergeVarBC(Parallel):
    """Merge two VarBC files.

    The VarBC file resulting from the MergeVarBC contains all the items of the
    first VarBC file plus any new item that would be present in the second file.
    """

    _footprint = dict(
        attr=dict(
            kind=dict(
                values=["mergevarbc"],
            ),
            # Name of the merged VarBC file produced by the binary.
            varbcout=dict(
                optional=True,
                default="VARBC.cycle_out",
            ),
        )
    )

    def prepare(self, rh, opts):
        """Find any ODB candidate in input files."""
        # Make sure the output VarBC file exists before the binary starts.
        self.system.touch(self.varbcout)
        # Let ancesters doing real stuff
        super().prepare(rh, opts)
47
+
48
+
49
class Anamix(IFSParallel):
    """Merge the surface and atmospheric analyses into a single file"""

    _footprint = dict(
        info="Merge surface and atmospheric analyses",
        attr=dict(
            kind=dict(
                values=["anamix"],
            ),
            # IFS configuration number used for this merge step.
            conf=dict(
                default=701,
            ),
            # Experiment name handed over to the IFS binary.
            xpname=dict(
                default="CANS",
            ),
            # NOTE(review): given as the integer 1 here, while SstAnalysis
            # uses the string "1." -- presumably both are accepted by the
            # parent class; confirm before harmonising.
            timestep=dict(
                default=1,
            ),
        ),
    )
69
+
70
+
71
class SstAnalysis(IFSParallel):
    """SST (Sea Surface Temperature) Analysis"""

    _footprint = dict(
        attr=dict(
            kind=dict(
                # Several aliases are accepted; footprints' autoremap maps
                # them all onto the first value ("sstana").
                values=["sstana", "sst_ana", "sst_analysis", "c931"],
                remap=dict(autoremap="first"),
            ),
            # IFS configuration number (the "c931" kind alias matches it).
            conf=dict(
                default=931,
            ),
            # Experiment name handed over to the IFS binary.
            xpname=dict(
                default="ANAL",
            ),
            # Timestep given as a string here -- see the note on Anamix.
            timestep=dict(
                default="1.",
            ),
        )
    )
91
+
92
+
93
class SeaIceAnalysis(IFSParallel):
    """Sea Ice Analysis"""

    _footprint = dict(
        attr=dict(
            kind=dict(
                values=["seaiceana", "seaice_ana", "seaice_analysis", "c932"],
                remap=dict(autoremap="first"),
            ),
            conf=dict(
                default=932,
            ),
            xpname=dict(
                default="ANAL",
            ),
            timestep=dict(
                default="1.",
            ),
            # Analysis date (injected into the namelist, see below).
            date=dict(
                type=Date,
            ),
        )
    )

    def find_namelists(self, opts=None):
        """Return the namelists found by the parent class, failing loudly if none."""
        found = super().find_namelists(opts)
        if not found:
            logger.critical("No namelist was found.")
            raise ValueError("No namelist was found for seaice analysis")
        return found

    def prepare_namelist_delta(self, rh, namcontents, namlocal):
        """Inject the analysis date into the namelist (IDAT macro)."""
        super().prepare_namelist_delta(rh, namcontents, namlocal)
        self._set_nam_macro(namcontents, namlocal, "IDAT", int(self.date.ymd))
        return True
128
+
129
+
130
class Canari(IFSParallel, odb.OdbComponentDecoMixin):
    """Surface analysis."""

    _footprint = dict(
        info="Surface assimilation based on optimal interpolation",
        attr=dict(
            kind=dict(
                values=["canari"],
            ),
            binarysingle=dict(
                default="basicnwpobsort",
            ),
            # IFS configuration number for the surface analysis.
            conf=dict(
                default=701,
            ),
            # Experiment name handed over to the IFS binary.
            xpname=dict(
                default="CANS",
            ),
        ),
    )

    def prepare(self, rh, opts):
        """Get a look at raw observations input files.

        Selects the unique surface ODB database among the inputs, sets up
        the ODB paths/IOASSIGN file and environment, and fixes the input
        database intent.

        :raises ValueError: when no surface observation database is provided.
        """
        super().prepare(rh, opts)

        # Looking for input observations: only "surf*" ODB parts are relevant
        obsodb = [
            x
            for x in self.lookupodb()
            if x.rh.resource.part.startswith("surf")
        ]
        if not obsodb:
            raise ValueError("No surface obsdata for canari")
        self.odb_date_and_layout_from_sections(obsodb)

        # Find the unique input ODb database.
        # NOTE(review): pop() keeps the *last* database when several are
        # provided; the remaining ones are only logged as skipped.
        ssurf = obsodb.pop()
        if obsodb:
            logger.error("More than one surface obsdata provided")
            logger.error(
                "Using : %s / %s",
                ssurf.rh.resource.layout,
                ssurf.rh.resource.part,
            )
            for sobs in obsodb:
                logger.error(
                    "Skip : %s / %s",
                    sobs.rh.resource.layout,
                    sobs.rh.resource.part,
                )

        # Fix paths + generate a global IOASSING file
        cma_path = self.system.path.abspath(ssurf.rh.container.localpath())
        self.odb.fix_db_path(self.virtualdb, cma_path)
        self.odb.ioassign_gather(cma_path)

        # Some extra settings (poolmask, shuffle, ODB environment)
        self.odb.create_poolmask(self.virtualdb, cma_path)
        self.odb.shuffle_setup(self.slots, mergedirect=True, ccmadirect=False)
        self.env.update(
            ODB_POOLMASKING=1,
            ODB_PACKING=-1,
            BASETIME=self.date.ymdh,
        )

        # Fix the input DB intent
        self.odb_rw_or_overwrite_method(ssurf)
197
+
198
+
199
class Screening(IFSParallel, odb.OdbComponentDecoMixin):
    """Observation screening."""

    _footprint = dict(
        info="Observations screening.",
        attr=dict(
            kind=dict(
                values=["screening", "screen", "thinning"],
                remap=dict(autoremap="first"),
            ),
            binarysingle=dict(
                default="basicnwpobsort",
            ),
            # The IOASSIGN file is mandatory for the screening step.
            ioassign=dict(
                optional=False,
            ),
            conf=dict(
                default=2,
            ),
            xpname=dict(
                default="SCRE",
            ),
        ),
    )

    def prepare(self, rh, opts):
        """Get a look at raw observations input files.

        Merges the input databases into a virtual ECMA DB if needed, creates
        the output CCMA DB, sets up ODB paths/IOASSIGN, fixes the database
        intents and links the channels namelists.
        """
        super().prepare(rh, opts)

        # Looking for input observations
        allodb = self.lookupodb()
        self.odb_date_and_layout_from_sections(allodb)

        # Perform the pre-merging stuff (this will create the ECMA virtual DB)
        virtualdb_path = self.odb_merge_if_needed(allodb)
        # Prepare the CCMA DB
        ccma_path = self.odb_create_db(layout="CCMA")

        # Fix paths + generate a global IOASSING file
        self.odb.fix_db_path(self.virtualdb, virtualdb_path)
        self.odb.fix_db_path("CCMA", ccma_path)
        self.odb.ioassign_gather(virtualdb_path, ccma_path)

        # Some extra settings
        self.odb.create_poolmask(self.virtualdb, virtualdb_path)
        self.odb.shuffle_setup(self.slots, mergedirect=True, ccmadirect=True)

        # Look for extras ODB raw
        self.odb_handle_raw_dbs()

        # Fix the input databases intent
        self.odb_rw_or_overwrite_method(*allodb)

        # Look for channels namelists and set appropriate links
        self.setchannels()
254
+
255
+
256
class IFSODBCCMA(IFSParallel, odb.OdbComponentDecoMixin):
    """Specialised IFSODB for CCMA processing"""

    _abstract = True
    _footprint = dict(
        attr=dict(
            virtualdb=dict(
                default="ccma",
            ),
            binarysingle=dict(
                default="basicnwpobsort",
            ),
        )
    )

    def prepare(self, rh, opts):
        """Get a look at raw observations input files.

        Locates the unique CCMA input database among the input observations,
        sets up the ODB environment (paths, IOASSIGN), fixes the database
        intent and links the channels namelists.

        :raises ValueError: when no CCMA database is provided.
        """
        super().prepare(rh, opts)

        sh = self.system

        # Looking for input observations
        allodb = self.lookupodb()
        allccma = [x for x in allodb if x.rh.resource.layout.lower() == "ccma"]
        if not allccma:
            raise ValueError("Missing CCMA input data for " + self.kind)
        if len(allccma) > 1:
            logger.error(
                "Multiple CCMA databases detected: only the first one is taken into account"
            )

        # Set env and IOASSIGN.
        # Bugfix: honour the log message above and take the *first* database
        # (the previous ``allccma.pop()`` silently selected the last one).
        ccma = allccma[0]
        ccma_path = sh.path.abspath(ccma.rh.container.localpath())
        self.odb_date_and_layout_from_sections([ccma])
        self.odb.fix_db_path(ccma.rh.resource.layout, ccma_path)
        self.odb.ioassign_gather(ccma_path)

        # Fix the input database intent
        self.odb_rw_or_overwrite_method(ccma)

        # Look for channels namelists and set appropriate links
        self.setchannels()
304
+
305
+
306
class Minim(IFSODBCCMA):
    """Observation minimisation."""

    _footprint = dict(
        info="Minimisation in the assimilation process.",
        attr=dict(
            kind=dict(
                values=["minim", "min", "minimisation"],
                remap=dict(autoremap="first"),
            ),
            # IFS configuration number for the minimisation.
            conf=dict(
                default=131,
            ),
            # Experiment name handed over to the IFS binary.
            xpname=dict(
                default="MINI",
            ),
        ),
    )

    def prepare(self, rh, opts):
        """Find out if preconditioning eigenvectors are here.

        When a preconditioning EV map is provided and reports available
        eigenvectors, NPCVECS is updated in every input namelist.
        """
        super().prepare(rh, opts)

        # Check if a preconditioning EV map is here
        evmaprh = self.context.sequence.effective_inputs(
            role=("PreconEVMap", "PreconditionningEVMap"), kind="precevmap"
        )
        if not evmaprh:
            logger.info("No preconditioning EV were found.")
            return
        if len(evmaprh) > 1:
            logger.warning(
                "Several preconditioning EV maps provided. Using the first one."
            )
        nprec_ev = evmaprh[0].rh.contents.data["evlen"]
        if nprec_ev > 0:
            # There are preconditioning EV: update every input namelist
            for namrh in [
                x.rh
                for x in self.context.sequence.effective_inputs(
                    role="Namelist",
                    kind="namelist",
                )
            ]:
                namc = namrh.contents
                try:
                    namc["NAMVAR"].NPCVECS = nprec_ev
                    namc.rewrite(namrh.container)
                except Exception:
                    logger.critical(
                        "Could not fix NAMVAR in %s",
                        namrh.container.actualpath(),
                    )
                    raise
            # Typo fix in the log message: "will by used" -> "will be used"
            logger.info(
                "%d preconditioning EV will be used (NPCVECS=%d).",
                nprec_ev,
                nprec_ev,
            )
        else:
            logger.warning(
                "A preconditioning EV map was found, "
                + "but no preconditioning EV are available."
            )

    def postfix(self, rh, opts):
        """Find out if any special resources have been produced.

        Preconditioning eigenvector files (MEMINI<num>) are summarised in a
        JSON map (precev_map.out) for downstream tasks.
        """
        sh = self.system

        # Look up for PREConditionning Eigen Vectors
        prec = sh.ls("MEMINI*")
        if prec:
            prec_info = dict(evlen=len(prec))
            # Keep the numeric suffix of each MEMINI file
            prec_info["evnum"] = [int(x[6:]) for x in prec]
            sh.json_dump(prec_info, "precev_map.out", indent=4)

        super().postfix(rh, opts)
383
+
384
+
385
class Trajectory(IFSODBCCMA):
    """Observation trajectory."""

    _footprint = dict(
        info="Trajectory in the assimilation process.",
        attr=dict(
            kind=dict(
                # Aliases are remapped onto the first value ("traj").
                values=["traj", "trajectory"],
                remap=dict(autoremap="first"),
            ),
            # IFS configuration number for the trajectory run.
            conf=dict(
                default=2,
            ),
            # Experiment name handed over to the IFS binary.
            xpname=dict(
                default="TRAJ",
            ),
        ),
    )
403
+
404
+
405
class PseudoTrajectory(BlindRun, drhook.DrHookDecoMixin):
    """Copy a few fields from the Guess file into the Analysis file"""

    # NOTE(review): the "traj"/"trajectory" values overlap with the
    # Trajectory class above; footprint resolution presumably discriminates
    # on the other attributes -- confirm this is intended.
    _footprint = dict(
        attr=dict(
            kind=dict(
                values=["pseudotraj", "traj", "trajectory"],
                remap=dict(autoremap="first"),
            ),
        )
    )
415
+ )
416
+
417
+
418
class SstGrb2Ascii(BlindRun):
    """Transform sst grib files from the BDAP into ascii files"""

    _footprint = dict(
        info="Binary to change the format of sst BDAP files.",
        attr=dict(
            kind=dict(
                values=["lect_bdap"],
            ),
            # Analysis date (year/month/day/hour are forwarded to the binary).
            date=a_date,
            nlat=dict(
                default=0,
            ),
            nlon=dict(
                default=0,
            ),
        ),
    )

    def prepare(self, rh, opts):
        """Add namelist delta, prepare the environment and build the arguments needed."""
        super().prepare(rh, opts)
        namelist_sections = self.context.sequence.effective_inputs(
            role="Namelist",
            kind="namelist",
        )
        for namrh in (sect.rh for sect in namelist_sections):
            namc = namrh.contents
            try:
                # Declare the single GRIB input file in a fresh NAMFILE block.
                namc.newblock("NAMFILE")
                namc["NAMFILE"].NBFICH = 1
                namc["NAMFILE"]["CCNFICH(1)"] = "GRIB_SST"
                namc.rewrite(namrh.container)
            except Exception:
                logger.critical(
                    "Could not fix NAMFILE in %s", namrh.container.actualpath()
                )
                raise

    def spawn_command_options(self):
        """Build the dictionnary to provide arguments to the binary."""
        return {
            "year": self.date.year,
            "month": self.date.month,
            "day": self.date.day,
            "hour": self.date.hour,
            "lon": self.nlon,
            "lat": self.nlat,
        }
469
+
470
+
471
class IceNetCDF2Ascii(BlindRun):
    """Transform ice NetCDF files from the BDPE into ascii files"""

    _footprint = dict(
        info="Binary to change the format of ice BDPE files.",
        attr=dict(
            kind=dict(
                values=["ice_nc2ascii"],
            ),
            output_file=dict(optional=True, default="ice_concent"),
            param=dict(
                optional=True,
                default="ice_conc",
            ),
        ),
    )

    def prepare(self, rh, opts):
        """Identify the input NetCDF files, one per hemisphere."""
        super().prepare(rh, opts)
        # Look for the input files (one slot per hemisphere; first one wins)
        hn_file = ""
        hs_file = ""
        netcdf_sections = self.context.sequence.effective_inputs(
            role="NetCDFfiles", kind="observations"
        )
        for sect in netcdf_sections:
            part = sect.rh.resource.part
            filename = sect.rh.container.filename
            if part not in ("ice_hn", "ice_hs"):
                logger.warning("The following file is not used: %s.", filename)
            elif part == "ice_hn":
                if hn_file:
                    logger.warning(
                        "There was already one file for the North hemisphere. "
                        "The following one, %s, is not used.",
                        filename,
                    )
                else:
                    hn_file = filename
                    logger.info(
                        "The input file for the North hemisphere is: %s.",
                        hn_file,
                    )
            else:
                if hs_file:
                    logger.warning(
                        "There was already one file for the South hemisphere. "
                        "The following one, %s, is not used.",
                        filename,
                    )
                else:
                    hs_file = filename
                    logger.info(
                        "The input file for the South hemisphere is: %s.",
                        hs_file,
                    )
        self.input_file_hn = hn_file
        self.input_file_hs = hs_file

    def spawn_command_options(self):
        """Build the dictionnary to provide arguments to the binary."""
        return {
            "file_in_hn": self.input_file_hn,
            "file_in_hs": self.input_file_hs,
            "param": self.param,
            "file_out": self.output_file,
        }