vortex-nwp 2.0.0b1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (146) hide show
  1. vortex/__init__.py +135 -0
  2. vortex/algo/__init__.py +12 -0
  3. vortex/algo/components.py +2136 -0
  4. vortex/algo/mpitools.py +1648 -0
  5. vortex/algo/mpitools_templates/envelope_wrapper_default.tpl +27 -0
  6. vortex/algo/mpitools_templates/envelope_wrapper_mpiauto.tpl +29 -0
  7. vortex/algo/mpitools_templates/wrapstd_wrapper_default.tpl +18 -0
  8. vortex/algo/serversynctools.py +170 -0
  9. vortex/config.py +115 -0
  10. vortex/data/__init__.py +13 -0
  11. vortex/data/abstractstores.py +1572 -0
  12. vortex/data/containers.py +780 -0
  13. vortex/data/contents.py +596 -0
  14. vortex/data/executables.py +284 -0
  15. vortex/data/flow.py +113 -0
  16. vortex/data/geometries.ini +2689 -0
  17. vortex/data/geometries.py +703 -0
  18. vortex/data/handlers.py +1021 -0
  19. vortex/data/outflow.py +67 -0
  20. vortex/data/providers.py +465 -0
  21. vortex/data/resources.py +201 -0
  22. vortex/data/stores.py +1271 -0
  23. vortex/gloves.py +282 -0
  24. vortex/layout/__init__.py +27 -0
  25. vortex/layout/appconf.py +109 -0
  26. vortex/layout/contexts.py +511 -0
  27. vortex/layout/dataflow.py +1069 -0
  28. vortex/layout/jobs.py +1276 -0
  29. vortex/layout/monitor.py +833 -0
  30. vortex/layout/nodes.py +1424 -0
  31. vortex/layout/subjobs.py +464 -0
  32. vortex/nwp/__init__.py +11 -0
  33. vortex/nwp/algo/__init__.py +12 -0
  34. vortex/nwp/algo/assim.py +483 -0
  35. vortex/nwp/algo/clim.py +920 -0
  36. vortex/nwp/algo/coupling.py +609 -0
  37. vortex/nwp/algo/eda.py +632 -0
  38. vortex/nwp/algo/eps.py +613 -0
  39. vortex/nwp/algo/forecasts.py +745 -0
  40. vortex/nwp/algo/fpserver.py +927 -0
  41. vortex/nwp/algo/ifsnaming.py +403 -0
  42. vortex/nwp/algo/ifsroot.py +311 -0
  43. vortex/nwp/algo/monitoring.py +202 -0
  44. vortex/nwp/algo/mpitools.py +554 -0
  45. vortex/nwp/algo/odbtools.py +974 -0
  46. vortex/nwp/algo/oopsroot.py +735 -0
  47. vortex/nwp/algo/oopstests.py +186 -0
  48. vortex/nwp/algo/request.py +579 -0
  49. vortex/nwp/algo/stdpost.py +1285 -0
  50. vortex/nwp/data/__init__.py +12 -0
  51. vortex/nwp/data/assim.py +392 -0
  52. vortex/nwp/data/boundaries.py +261 -0
  53. vortex/nwp/data/climfiles.py +539 -0
  54. vortex/nwp/data/configfiles.py +149 -0
  55. vortex/nwp/data/consts.py +929 -0
  56. vortex/nwp/data/ctpini.py +133 -0
  57. vortex/nwp/data/diagnostics.py +181 -0
  58. vortex/nwp/data/eda.py +148 -0
  59. vortex/nwp/data/eps.py +383 -0
  60. vortex/nwp/data/executables.py +1039 -0
  61. vortex/nwp/data/fields.py +96 -0
  62. vortex/nwp/data/gridfiles.py +308 -0
  63. vortex/nwp/data/logs.py +551 -0
  64. vortex/nwp/data/modelstates.py +334 -0
  65. vortex/nwp/data/monitoring.py +220 -0
  66. vortex/nwp/data/namelists.py +644 -0
  67. vortex/nwp/data/obs.py +748 -0
  68. vortex/nwp/data/oopsexec.py +72 -0
  69. vortex/nwp/data/providers.py +182 -0
  70. vortex/nwp/data/query.py +217 -0
  71. vortex/nwp/data/stores.py +147 -0
  72. vortex/nwp/data/surfex.py +338 -0
  73. vortex/nwp/syntax/__init__.py +9 -0
  74. vortex/nwp/syntax/stdattrs.py +375 -0
  75. vortex/nwp/tools/__init__.py +10 -0
  76. vortex/nwp/tools/addons.py +35 -0
  77. vortex/nwp/tools/agt.py +55 -0
  78. vortex/nwp/tools/bdap.py +48 -0
  79. vortex/nwp/tools/bdcp.py +38 -0
  80. vortex/nwp/tools/bdm.py +21 -0
  81. vortex/nwp/tools/bdmp.py +49 -0
  82. vortex/nwp/tools/conftools.py +1311 -0
  83. vortex/nwp/tools/drhook.py +62 -0
  84. vortex/nwp/tools/grib.py +268 -0
  85. vortex/nwp/tools/gribdiff.py +99 -0
  86. vortex/nwp/tools/ifstools.py +163 -0
  87. vortex/nwp/tools/igastuff.py +249 -0
  88. vortex/nwp/tools/mars.py +56 -0
  89. vortex/nwp/tools/odb.py +548 -0
  90. vortex/nwp/tools/partitioning.py +234 -0
  91. vortex/nwp/tools/satrad.py +56 -0
  92. vortex/nwp/util/__init__.py +6 -0
  93. vortex/nwp/util/async.py +184 -0
  94. vortex/nwp/util/beacon.py +40 -0
  95. vortex/nwp/util/diffpygram.py +359 -0
  96. vortex/nwp/util/ens.py +198 -0
  97. vortex/nwp/util/hooks.py +128 -0
  98. vortex/nwp/util/taskdeco.py +81 -0
  99. vortex/nwp/util/usepygram.py +591 -0
  100. vortex/nwp/util/usetnt.py +87 -0
  101. vortex/proxy.py +6 -0
  102. vortex/sessions.py +341 -0
  103. vortex/syntax/__init__.py +9 -0
  104. vortex/syntax/stdattrs.py +628 -0
  105. vortex/syntax/stddeco.py +176 -0
  106. vortex/toolbox.py +982 -0
  107. vortex/tools/__init__.py +11 -0
  108. vortex/tools/actions.py +457 -0
  109. vortex/tools/addons.py +297 -0
  110. vortex/tools/arm.py +76 -0
  111. vortex/tools/compression.py +322 -0
  112. vortex/tools/date.py +20 -0
  113. vortex/tools/ddhpack.py +10 -0
  114. vortex/tools/delayedactions.py +672 -0
  115. vortex/tools/env.py +513 -0
  116. vortex/tools/folder.py +663 -0
  117. vortex/tools/grib.py +559 -0
  118. vortex/tools/lfi.py +746 -0
  119. vortex/tools/listings.py +354 -0
  120. vortex/tools/names.py +575 -0
  121. vortex/tools/net.py +1790 -0
  122. vortex/tools/odb.py +10 -0
  123. vortex/tools/parallelism.py +336 -0
  124. vortex/tools/prestaging.py +186 -0
  125. vortex/tools/rawfiles.py +10 -0
  126. vortex/tools/schedulers.py +413 -0
  127. vortex/tools/services.py +871 -0
  128. vortex/tools/storage.py +1061 -0
  129. vortex/tools/surfex.py +61 -0
  130. vortex/tools/systems.py +3396 -0
  131. vortex/tools/targets.py +384 -0
  132. vortex/util/__init__.py +9 -0
  133. vortex/util/config.py +1071 -0
  134. vortex/util/empty.py +24 -0
  135. vortex/util/helpers.py +184 -0
  136. vortex/util/introspection.py +63 -0
  137. vortex/util/iosponge.py +76 -0
  138. vortex/util/roles.py +51 -0
  139. vortex/util/storefunctions.py +103 -0
  140. vortex/util/structs.py +26 -0
  141. vortex/util/worker.py +150 -0
  142. vortex_nwp-2.0.0b1.dist-info/LICENSE +517 -0
  143. vortex_nwp-2.0.0b1.dist-info/METADATA +50 -0
  144. vortex_nwp-2.0.0b1.dist-info/RECORD +146 -0
  145. vortex_nwp-2.0.0b1.dist-info/WHEEL +5 -0
  146. vortex_nwp-2.0.0b1.dist-info/top_level.txt +1 -0
@@ -0,0 +1,483 @@
1
+ """
2
+ AlgoComponents dedicated to computations related to Data Assimilation systems.
3
+ """
4
+
5
+ from bronx.fancies import loggers
6
+ from bronx.stdtypes.date import Date
7
+
8
+ from vortex.algo.components import BlindRun, Parallel
9
+ from vortex.syntax.stdattrs import a_date
10
+ from .ifsroot import IFSParallel
11
+ from ..tools import odb, drhook
12
+
13
+ #: No automatic export
14
+ __all__ = []
15
+
16
+ logger = loggers.getLogger(__name__)
17
+
18
+
19
class MergeVarBC(Parallel):
    """Merge two VarBC files.

    The resulting VarBC file contains every item of the first VarBC file,
    plus any new item only present in the second one.
    """

    _footprint = dict(
        attr=dict(
            kind=dict(
                values=['mergevarbc'],
            ),
            varbcout=dict(
                optional=True,
                default='VARBC.cycle_out',
            ),
        )
    )

    def prepare(self, rh, opts):
        """Find any ODB candidate in input files."""
        # Make sure the output VarBC file exists before the run starts
        self.system.touch(self.varbcout)
        # Delegate the rest of the setup to the parent classes
        super().prepare(rh, opts)
47
+
48
+
49
class Anamix(IFSParallel):
    """Merge the surface and atmospheric analyses into a single file."""

    _footprint = dict(
        info='Merge surface and atmospheric analyses',
        attr=dict(
            kind=dict(values=['anamix']),
            conf=dict(default=701),
            xpname=dict(default='CANS'),
            timestep=dict(default=1),
        )
    )
69
+
70
+
71
class SstAnalysis(IFSParallel):
    """SST (Sea Surface Temperature) Analysis."""

    _footprint = dict(
        attr=dict(
            kind=dict(
                values=['sstana', 'sst_ana', 'sst_analysis', 'c931'],
                remap=dict(autoremap='first'),
            ),
            conf=dict(default=931),
            xpname=dict(default='ANAL'),
            timestep=dict(default='1.'),
        )
    )
91
+
92
+
93
class SeaIceAnalysis(IFSParallel):
    """Sea Ice Analysis."""

    _footprint = dict(
        attr=dict(
            kind=dict(
                values=['seaiceana', 'seaice_ana', 'seaice_analysis', 'c932'],
                remap=dict(autoremap='first'),
            ),
            conf=dict(default=932),
            xpname=dict(default='ANAL'),
            timestep=dict(default='1.'),
            date=dict(type=Date),
        )
    )

    def find_namelists(self, opts=None):
        """Return the namelist handlers found by the parent class; fail loudly when none is found."""
        found = super().find_namelists(opts)
        if not found:
            logger.critical('No namelist was found.')
            raise ValueError('No namelist was found for seaice analysis')
        return found

    def prepare_namelist_delta(self, rh, namcontents, namlocal):
        """Inject the analysis date into the namelist via the IDAT macro."""
        super().prepare_namelist_delta(rh, namcontents, namlocal)
        self._set_nam_macro(namcontents, namlocal, 'IDAT', int(self.date.ymd))
        return True
128
+
129
+
130
class Canari(IFSParallel, odb.OdbComponentDecoMixin):
    """Surface analysis."""

    _footprint = dict(
        info='Surface assimilation based on optimal interpolation',
        attr=dict(
            kind=dict(values=['canari']),
            binarysingle=dict(default='basicnwpobsort'),
            conf=dict(default=701),
            xpname=dict(default='CANS'),
        )
    )

    def prepare(self, rh, opts):
        """Get a look at raw observations input files."""
        super().prepare(rh, opts)

        # Collect the surface observation databases among the ODB inputs
        surf_sections = [s for s in self.lookupodb()
                         if s.rh.resource.part.startswith('surf')]
        if not surf_sections:
            raise ValueError('No surface obsdata for canari')
        self.odb_date_and_layout_from_sections(surf_sections)

        # A single input ODB database is expected: keep the last one and
        # report any extra database as skipped
        ssurf = surf_sections.pop()
        if surf_sections:
            logger.error('More than one surface obsdata provided')
            logger.error('Using : %s / %s', ssurf.rh.resource.layout, ssurf.rh.resource.part)
            for sobs in surf_sections:
                logger.error('Skip : %s / %s', sobs.rh.resource.layout, sobs.rh.resource.part)

        # Fix paths + generate a global IOASSING file
        cma_path = self.system.path.abspath(ssurf.rh.container.localpath())
        self.odb.fix_db_path(self.virtualdb, cma_path)
        self.odb.ioassign_gather(cma_path)

        # Some extra ODB settings
        self.odb.create_poolmask(self.virtualdb, cma_path)
        self.odb.shuffle_setup(self.slots, mergedirect=True, ccmadirect=False)
        self.env.update(
            ODB_POOLMASKING=1,
            ODB_PACKING=-1,
            BASETIME=self.date.ymdh,
        )

        # Fix the input DB intent
        self.odb_rw_or_overwrite_method(ssurf)
185
+
186
+
187
class Screening(IFSParallel, odb.OdbComponentDecoMixin):
    """Observation screening."""

    _footprint = dict(
        info='Observations screening.',
        attr=dict(
            kind=dict(
                values=['screening', 'screen', 'thinning'],
                remap=dict(autoremap='first'),
            ),
            binarysingle=dict(default='basicnwpobsort'),
            ioassign=dict(optional=False),
            conf=dict(default=2),
            xpname=dict(default='SCRE'),
        )
    )

    def prepare(self, rh, opts):
        """Get a look at raw observations input files."""
        super().prepare(rh, opts)

        # Gather every input observation database
        allodb = self.lookupodb()
        self.odb_date_and_layout_from_sections(allodb)

        # Pre-merging step (this creates the ECMA virtual DB)
        virtualdb_path = self.odb_merge_if_needed(allodb)
        # Prepare the output CCMA DB
        ccma_path = self.odb_create_db(layout='CCMA')

        # Fix paths + generate a global IOASSING file
        self.odb.fix_db_path(self.virtualdb, virtualdb_path)
        self.odb.fix_db_path('CCMA', ccma_path)
        self.odb.ioassign_gather(virtualdb_path, ccma_path)

        # Some extra settings
        self.odb.create_poolmask(self.virtualdb, virtualdb_path)
        self.odb.shuffle_setup(self.slots, mergedirect=True, ccmadirect=True)

        # Look for extras ODB raw
        self.odb_handle_raw_dbs()

        # Fix the input databases intent
        self.odb_rw_or_overwrite_method(*allodb)

        # Look for channels namelists and set appropriate links
        self.setchannels()
242
+
243
+
244
class IFSODBCCMA(IFSParallel, odb.OdbComponentDecoMixin):
    """Specialised IFSODB for CCMA processing."""

    _abstract = True
    _footprint = dict(
        attr=dict(
            virtualdb=dict(default='ccma'),
            binarysingle=dict(default='basicnwpobsort'),
        )
    )

    def prepare(self, rh, opts):
        """Get a look at raw observations input files.

        Locates the CCMA input database among the ODB inputs, sets up the
        ODB environment (database path, IOASSIGN file, input intent) and
        links the channels namelists.

        :raises ValueError: when no CCMA input database is provided.
        """
        super().prepare(rh, opts)

        sh = self.system

        # Looking for input observations
        allodb = self.lookupodb()
        allccma = [x for x in allodb if x.rh.resource.layout.lower() == 'ccma']
        if not allccma:
            raise ValueError('Missing CCMA input data for ' + self.kind)
        if len(allccma) > 1:
            logger.error('Multiple CCMA databases detected: only the first one is taken into account')
        # BUGFIX: honour the log message above and use the *first* database
        # (the previous code popped the last element of the list).
        ccma = allccma[0]

        # Set env and IOASSIGN
        ccma_path = sh.path.abspath(ccma.rh.container.localpath())
        self.odb_date_and_layout_from_sections([ccma, ])
        self.odb.fix_db_path(ccma.rh.resource.layout, ccma_path)
        self.odb.ioassign_gather(ccma_path)

        # Fix the input database intent
        self.odb_rw_or_overwrite_method(ccma)

        # Look for channels namelists and set appropriate links
        self.setchannels()
286
+
287
+
288
class Minim(IFSODBCCMA):
    """Observation minimisation."""

    _footprint = dict(
        info='Minimisation in the assimilation process.',
        attr=dict(
            kind=dict(
                values=['minim', 'min', 'minimisation'],
                remap=dict(autoremap='first'),
            ),
            conf=dict(default=131),
            xpname=dict(default='MINI'),
        )
    )

    def prepare(self, rh, opts):
        """Find out if preconditioning eigenvectors are here.

        When a preconditioning eigenvectors map advertising a positive
        'evlen' is provided, NPCVECS is updated in the NAMVAR block of every
        input namelist.
        """
        super().prepare(rh, opts)

        # Check if a preconditioning EV map is here
        evmaprh = self.context.sequence.effective_inputs(role=('PreconEVMap',
                                                               'PreconditionningEVMap'),
                                                         kind='precevmap')
        if evmaprh:
            if len(evmaprh) > 1:
                logger.warning("Several preconditioning EV maps provided. Using the first one.")
            nprec_ev = evmaprh[0].rh.contents.data['evlen']
            # If there are preconditioning EV: update the namelist
            if nprec_ev > 0:
                for namrh in [x.rh for x in self.context.sequence.effective_inputs(role='Namelist',
                                                                                   kind='namelist',)]:
                    namc = namrh.contents
                    try:
                        namc['NAMVAR'].NPCVECS = nprec_ev
                        namc.rewrite(namrh.container)
                    except Exception:
                        logger.critical('Could not fix NAMVAR in %s', namrh.container.actualpath())
                        raise
                # BUGFIX: typo in the log message ("will by used" -> "will be used")
                logger.info("%d preconditioning EV will be used (NPCVECS=%d).", nprec_ev, nprec_ev)
            else:
                logger.warning("A preconditioning EV map was found, " +
                               "but no preconditioning EV are available.")
        else:
            logger.info("No preconditioning EV were found.")

    def postfix(self, rh, opts):
        """Find out if any special resources have been produced.

        Preconditioning eigenvectors (MEMINI* files) produced by the binary
        are summarised in a small JSON map ('precev_map.out').
        """
        sh = self.system

        # Look up for PREConditionning Eigen Vectors
        prec = sh.ls('MEMINI*')
        if prec:
            prec_info = dict(evlen=len(prec))
            # File names look like 'MEMINInnn': keep the trailing number
            prec_info['evnum'] = [int(x[6:]) for x in prec]
            sh.json_dump(prec_info, 'precev_map.out', indent=4)

        super().postfix(rh, opts)
349
+
350
+
351
class Trajectory(IFSODBCCMA):
    """Observation trajectory."""

    _footprint = dict(
        info='Trajectory in the assimilation process.',
        attr=dict(
            kind=dict(
                values=['traj', 'trajectory'],
                remap=dict(autoremap='first'),
            ),
            conf=dict(default=2),
            xpname=dict(default='TRAJ'),
        )
    )
369
+
370
+
371
class PseudoTrajectory(BlindRun, drhook.DrHookDecoMixin):
    """Copy a few fields from the Guess file into the Analysis file."""

    _footprint = dict(
        attr=dict(
            kind=dict(
                values=['pseudotraj', 'traj', 'trajectory'],
                remap=dict(autoremap='first'),
            ),
        )
    )
381
+
382
+
383
class SstGrb2Ascii(BlindRun):
    """Transform sst grib files from the BDAP into ascii files."""

    _footprint = dict(
        info='Binary to change the format of sst BDAP files.',
        attr=dict(
            kind=dict(values=['lect_bdap']),
            date=a_date,
            nlat=dict(default=0),
            nlon=dict(default=0),
        )
    )

    def prepare(self, rh, opts):
        """Add namelist delta, prepare the environment and build the arguments needed."""
        super().prepare(rh, opts)
        nam_sections = self.context.sequence.effective_inputs(role='Namelist',
                                                              kind='namelist', )
        for namrh in (sect.rh for sect in nam_sections):
            namc = namrh.contents
            try:
                # Declare the single GRIB input file in a NAMFILE block
                namc.newblock('NAMFILE')
                namc['NAMFILE'].NBFICH = 1
                namc['NAMFILE']['CCNFICH(1)'] = 'GRIB_SST'
                namc.rewrite(namrh.container)
            except Exception:
                logger.critical('Could not fix NAMFILE in %s', namrh.container.actualpath())
                raise

    def spawn_command_options(self):
        """Build the dictionary used to provide arguments to the binary."""
        return dict(
            year=self.date.year,
            month=self.date.month,
            day=self.date.day,
            hour=self.date.hour,
            lon=self.nlon,
            lat=self.nlat,
        )
426
+
427
+
428
class IceNetCDF2Ascii(BlindRun):
    """Transform ice NetCDF files from the BDPE into ascii files."""

    _footprint = dict(
        info='Binary to change the format of ice BDPE files.',
        attr=dict(
            kind=dict(values=['ice_nc2ascii']),
            output_file=dict(
                optional=True,
                default="ice_concent"
            ),
            param=dict(
                optional=True,
                default="ice_conc",
            ),
        )
    )

    def prepare(self, rh, opts):
        """Identify the North and South hemisphere NetCDF input files."""
        super().prepare(rh, opts)
        # Look for the input files
        netcdf_sections = self.context.sequence.effective_inputs(role='NetCDFfiles',
                                                                 kind='observations')
        north_file = ''
        south_file = ''
        for sect in netcdf_sections:
            part = sect.rh.resource.part
            filename = sect.rh.container.filename
            if part == "ice_hn":
                if north_file:
                    logger.warning('There was already one file for the North hemisphere. '
                                   'The following one, %s, is not used.', filename)
                else:
                    north_file = filename
                    logger.info('The input file for the North hemisphere is: %s.', north_file)
            elif part == "ice_hs":
                if south_file:
                    logger.warning('There was already one file for the South hemisphere. '
                                   'The following one, %s, is not used.', filename)
                else:
                    south_file = filename
                    logger.info('The input file for the South hemisphere is: %s.', south_file)
            else:
                logger.warning('The following file is not used: %s.', filename)
        self.input_file_hn = north_file
        self.input_file_hs = south_file

    def spawn_command_options(self):
        """Build the dictionary used to provide arguments to the binary."""
        return dict(
            file_in_hn=self.input_file_hn,
            file_in_hs=self.input_file_hs,
            param=self.param,
            file_out=self.output_file
        )