vortex_nwp-2.1.3-py3-none-any.whl → vortex_nwp-2.3.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- vortex/__init__.py +3 -2
- vortex/algo/components.py +20 -2
- vortex/algo/mpitools.py +5 -6
- vortex/data/abstractstores.py +43 -39
- vortex/data/executables.py +28 -0
- vortex/data/geometries.ini +89 -24
- vortex/data/geometries.py +24 -7
- vortex/data/handlers.py +1 -0
- vortex/data/providers.py +49 -10
- vortex/data/stores.py +152 -42
- vortex/nwp/algo/assim.py +1 -31
- vortex/nwp/algo/forecasts.py +30 -0
- vortex/nwp/algo/ifsroot.py +34 -20
- vortex/nwp/algo/monitoring.py +2 -2
- vortex/nwp/algo/odbtools.py +2 -2
- vortex/nwp/data/consts.py +61 -1
- vortex/nwp/data/diagnostics.py +2 -4
- vortex/nwp/tools/conftools.py +7 -6
- vortex/sessions.py +3 -2
- vortex/tools/storage.py +12 -10
- vortex/tools/systems.py +33 -3
- {vortex_nwp-2.1.3.dist-info → vortex_nwp-2.3.0.dist-info}/METADATA +4 -1
- {vortex_nwp-2.1.3.dist-info → vortex_nwp-2.3.0.dist-info}/RECORD +26 -26
- {vortex_nwp-2.1.3.dist-info → vortex_nwp-2.3.0.dist-info}/WHEEL +1 -1
- {vortex_nwp-2.1.3.dist-info → vortex_nwp-2.3.0.dist-info}/licenses/LICENSE +0 -0
- {vortex_nwp-2.1.3.dist-info → vortex_nwp-2.3.0.dist-info}/top_level.txt +0 -0
vortex/data/providers.py
CHANGED
@@ -14,6 +14,7 @@ from bronx.fancies import loggers
 import footprints
 from footprints import proxy as fpx
 
+import vortex
 from vortex import config
 from vortex.syntax.stdattrs import (
     xpid,
@@ -56,16 +57,13 @@ class Provider(footprints.FootprintBase):
                 info="The username that will be used whenever necessary.",
                 optional=True,
                 default=None,
+                access="rwx",
                 alias=("user", "logname"),
             ),
         ),
         fastkeys={"namespace"},
     )
 
-    def __init__(self, *args, **kw):
-        logger.debug("Abstract provider init %s", self.__class__)
-        super().__init__(*args, **kw)
-
     def _str_more(self):
         """Additional information to print representation."""
         try:
@@ -87,7 +85,9 @@ class Provider(footprints.FootprintBase):
 
     def netuser_name(self, resource): # @UnusedVariable
         """Abstract method."""
-
+        if self.username is not None:
+            return self.username
+        return vortex.ticket().glove.user
 
     def pathname(self, resource):
         """Abstract method."""
@@ -119,10 +119,11 @@
         The different operations of the algorithm can be redefined by subclasses.
         """
         username = self.netuser_name(resource)
+        netloc = self.netloc(resource)
         fullnetloc = (
             "{:s}@{:s}".format(username, self.netloc(resource))
-            if username
-            else
+            if (username and netloc)
+            else netloc
         )
         logger.debug(
             "scheme %s netloc %s normpath %s urlquery %s",
@@ -274,12 +275,50 @@ def set_namespace_from_cache_settings(usecache, usearchive):
     return ".".join(("vortex", domain, "fr"))
 
 
+class Git(Provider):
+    _footprint = dict(
+        info="Git provider",
+        attr=dict(
+            ref=dict(
+                type=str,
+                optional=True,
+                default=None,
+                info="The reference's SHA-1 hash id",
+            ),
+            repo=dict(
+                type=str,
+                info="Path to the Git repository",
+            ),
+            path=dict(
+                type=str,
+                info="File path within the repository",
+            ),
+        ),
+    )
+
+    def scheme(self, resource):
+        return "git"
+
+    def urlquery(self, resource):
+        return (
+            f"repo={self.repo}&ref={self.ref}"
+            if self.ref
+            else f"repo={self.repo}"
+        )
+
+    def basename(self, resource):
+        return self.path
+
+    def pathname(sef, resource):
+        return "."
+
+
 class Vortex(Provider):
     """Main provider of the toolbox, using a fix-size path and a dedicated name factory."""
 
     _DEFAULT_NAME_BUILDER = names.VortexNameBuilder()
     _CUSTOM_NAME_BUILDERS = dict()
-    _SPECIAL_EXPS = ("
+    _SPECIAL_EXPS = ("OPER", "DBLE", "TEST", "MIRR")
 
     _footprint = [
         block,
@@ -366,8 +405,8 @@ class Vortex(Provider):
             self._namebuilder = self._CUSTOM_NAME_BUILDERS[self.namebuild]
         else:
            self._namebuilder = self._DEFAULT_NAME_BUILDER
-        if self.experiment in (n.
-            self.experiment = self.experiment.
+        if self.experiment in (n.lower() for n in self._SPECIAL_EXPS):
+            self.experiment = self.experiment.upper()
 
         # Ensure compatibility with deprecated namespace attribute
         # Under the hood the namespace attribute is still used to
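
A note on the new `Git` provider above: it maps its footprint attributes onto a `git` scheme URI, with `repo` and the optional `ref` carried in the query string and the in-repository file path supplied by `basename()` (`pathname()` is a bare "."; the `sef` parameter name there ships as-is in 2.3.0). The matching `GitStore` in `vortex/data/stores.py` below reads those values back out of `remote["query"]`. A rough sketch of the URI shape this produces, assuming the base class assembles scheme, path and query in the usual way (the helper below is illustrative, not toolbox code):

    def git_uri(repo, path, ref=None):
        # Mirrors Git.scheme(), Git.basename() and Git.urlquery()
        query = f"repo={repo}&ref={ref}" if ref else f"repo={repo}"
        return f"git://{path.lstrip('/')}?{query}"

The other changes above are self-contained: `netuser_name()` now falls back to the session glove's user instead of being a stub, and `uri()` only prepends `username@` when both the username and the netloc are non-empty.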
vortex/data/stores.py
CHANGED
@@ -8,8 +8,11 @@ Store objects use the :mod:`footprints` mechanism.
 import copy
 import ftplib
 import io
+from pathlib import Path
 import os
 import re
+import shutil
+import subprocess
 
 from bronx.fancies import loggers
 import footprints
@@ -33,6 +36,13 @@ __all__ = []
 
 logger = loggers.getLogger(__name__)
 
+try:
+    import pygit2
+
+    NO_PYGIT2 = False
+except ImportError:
+    NO_PYGIT2 = True
+
 
 def get_cache_location():
     try:
@@ -335,7 +345,6 @@ class Finder(Store):
     def _ftpinfos(self, remote, **kwargs):
         args = kwargs.copy()
         args["hostname"] = self.hostname()
-        args["logname"] = remote["username"]
         port = self.hostname().netport
         if port is not None:
             args["port"] = port
@@ -422,6 +431,98 @@ class Finder(Store):
         return rc
 
 
+class GitStore(Store):
+    _footprint = dict(
+        info="A store to access Git repositories",
+        attr=dict(
+            scheme=dict(
+                values=["git"],
+            ),
+        ),
+    )
+
+    def gitget(self, remote, local, options):
+        if NO_PYGIT2:
+            raise ModuleNotFoundError(
+                "pygit2 is not installed in the current environment"
+            )
+        annex_cache_path = Path(remote["query"]["repo"][0])
+
+        # If no git reference is provided, only make a copy of the
+        # worktree
+        if "ref" not in remote["query"]:
+            shutil.copy(
+                src=Path(annex_cache_path) / remote["path"].lstrip("/"),
+                dst=local,
+            )
+            return local
+
+        assert "ref" in remote["query"]
+        path = remote["path"].lstrip("/")
+        repo = pygit2.Repository(annex_cache_path)
+        oid = repo.revparse_single(remote["query"]["ref"][0])
+        obj = oid.peel(pygit2.Tree) / path
+        if obj.filemode == pygit2.enums.FileMode.LINK:
+            # Here we need to discriminate between links that point
+            # to git-annex managed files and all other symlinks.
+
+            # If the file is a git-annex link, work out the location
+            # of the data within the .git/annex dir and copy this
+            # file into the cwd with the right name
+            if ".git/annex/objects" in obj.data.decode("ASCII"):
+                gitannex_key = Path(obj.data.decode("ASCII")).name
+                subprocess.run(
+                    args=["git-annex", "get", "--key", gitannex_key],
+                    cwd=str(annex_cache_path),
+                )
+                gitannex_content_location = subprocess.run(
+                    args=["git-annex", "contentlocation", gitannex_key],
+                    cwd=str(annex_cache_path),
+                    capture_output=True,
+                    encoding="ASCII",
+                ).stdout
+                shutil.copy(
+                    src=annex_cache_path / Path(gitannex_content_location),
+                    dst=local,
+                    follow_symlinks=True,
+                )
+                return local
+            os.symlink(
+                src=obj.data.decode("ASCII"),
+                dst=local,
+            )
+            return local
+
+        if obj.filemode == pygit2.enums.FileMode.BLOB:
+            with open(local, "wb") as dst:
+                # Could also use pygit2.BlobIO to stream content
+                # without having to load the entire blob data in
+                # memory:
+                #
+                # with pygit2.BlobIO(obj) as src:
+                #     shutil.copyfileobj(fsrc=src, fdst=dst)
+                dst.write(obj.data)
+            return local
+
+        if obj.filemode == pygit2.enums.FileMode.TREE:
+            if local.endswith("/"):
+                localpath = "."
+            else:
+                localpath = local
+            os.mkdir(local)
+            for subobj in obj:
+                r = {
+                    "query": {"ref": remote["query"]["ref"]},
+                    "path": str(Path(path) / subobj.name),
+                }
+                self.gitget(
+                    remote=r,
+                    local=str(Path(localpath) / subobj.name),
+                    options=None,
+                )
+            return local
+
+
 class _VortexStackedStorageMixin:
     """Mixin class that adds utility functions to work with stacked data."""
 
@@ -741,19 +842,6 @@ class VortexStdBaseArchiveStore(_VortexBaseArchiveStore):
     def remap_read(self, remote, options):
         """Reformulates the remote path to compatible vortex namespace."""
         remote = copy.copy(remote)
-        try:
-            remote["root"] = config.from_config(
-                section="storage",
-                key="rootdir",
-            )
-        except config.ConfigurationError as e:
-            msg = (
-                "Trying to write to archive but location is not configured. "
-                'Make sure key "rootdir" is defined in storage section of '
-                "the configuration.\n"
-                "See https://vortex-nwp.readthedocs.io/en/latest/user-guide/configuration.html#storage"
-            )
-            raise config.ConfigurationError(msg) from e
         return remote
 
     remap_write = remap_read
@@ -801,22 +889,13 @@ class VortexOpBaseArchiveStore(_VortexBaseArchiveStore):
         ),
     )
 
+    @property
+    def archive_entry(self):
+        return config.from_config(section="storage", key="op_rootdir")
+
     def remap_read(self, remote, options):
         """Reformulates the remote path to compatible vortex namespace."""
         remote = copy.copy(remote)
-        try:
-            remote["root"] = config.from_config(
-                section="storage",
-                key="op_rootdir",
-            )
-        except config.ConfigurationError as e:
-            msg = (
-                "Trying to write to operational data archive but location"
-                ' is not configured. Make sure key "op_rootdir" is defined in '
-                "the storage section of the configuration.\n"
-                "See https://vortex-nwp.readthedocs.io/en/latest/user-guide/configuration.html#storage"
-            )
-            raise config.ConfigurationError(msg) from e
         xpath = remote["path"].split("/")
         if len(xpath) >= 5 and re.match(r"^\d{8}T\d{2,4}", xpath[4]):
             # If a date is detected
@@ -915,7 +994,11 @@ class VortexArchiveStore(MultiStore):
 
     def alternates_fpextras(self):
         """Deal with some ArchiveStores' specific attributes."""
-        return dict(
+        return dict(
+            username=self.username,
+            storehead=self.storehead,
+            storesync=self.storesync,
+        )
 
 
 class _VortexCacheBaseStore(CacheStore, _VortexStackedStorageMixin):
@@ -993,9 +1076,23 @@ class VortexCacheMtStore(_VortexCacheBaseStore):
         ),
     )
 
-
-
-
+    @property
+    def cache_entry(self):
+        try:
+            cacheloc = config.from_config(
+                section="data-tree",
+                key="rootdir",
+            )
+        except config.ConfigurationError:
+            cacheloc = os.path.join(os.environ["HOME"], ".vortex.d")
+
+        current_vortex_user = self.system.glove.user
+        cacheloc = cacheloc.replace("%usr%", current_vortex_user)
+
+        if self.username != current_vortex_user:
+            return os.path.join(cacheloc, self.username)
+
+        return cacheloc
 
 
 class VortexCacheOp2ResearchStore(_VortexCacheBaseStore):
@@ -1016,19 +1113,17 @@ class VortexCacheOp2ResearchStore(_VortexCacheBaseStore):
         ),
     )
 
-
-
+    @property
+    def cache_entry(self):
         if not config.is_defined(section="data-tree", key="op_rootdir"):
-
+            msg = (
                 "Using special experiment but corresponding cache location "
                 'is not configured. Bet sure to set "op_rootdir" in configuration. '
                 "See https://vortex-nwp.readthedocs.io/en/latest/user-guide/oper-dble-data-trees"
             )
-
-
-
-            )
-        self.location = os.path.join(cachepath, "vortex")
+            raise config.ConfigurationError(msg)
+
+        return config.from_config(section="data-tree", key="op_rootdir")
 
 
 class _AbstractVortexCacheMultiStore(MultiStore):
@@ -1103,6 +1198,9 @@ class VortexCacheStore(_AbstractVortexCacheMultiStore):
             f"{self.netloc.firstname}.stacked-cache-mt.fr",
         ]
 
+    def alternates_fpextras(self):
+        return dict(username=self.username)
+
 
 class VortexVsopCacheStore(_AbstractVortexCacheMultiStore):
     """The go to store for data cached by VORTEX operational experiments.
@@ -1145,6 +1243,9 @@ class VortexVsopCacheStore(_AbstractVortexCacheMultiStore):
         ]
         return todo
 
+    def alternates_fpextras(self):
+        return dict(username=self.username)
+
 
 class _AbstractVortexStackMultiStore(MultiStore):
    """Any Cache based Vortex multi store."""
@@ -1208,6 +1309,9 @@ class VortexStackStore(_AbstractVortexStackMultiStore):
        """Go through the various stacked stores."""
        return [f"{self.netloc.firstname}.stacked-cache-mt.fr"]
 
+    def alternates_fpextras(self):
+        return dict(username=self.username)
+
 
 class VortexVsopStackStore(_AbstractVortexStackMultiStore):
     """Store intended to read and write data into VORTEX R&D stacks."""
@@ -1267,6 +1371,9 @@ class VortexStoreLegacy(MultiStore):
             for d in (".cache.fr", ".archive-legacy.fr")
         ]
 
+    def alternates_fpextras(self):
+        return dict(username=self.username)
+
 
 class VortexStore(MultiStore):
     """Combined cache and archive VORTEX stores.
@@ -1315,6 +1422,9 @@ class VortexStore(MultiStore):
             )
         ]
 
+    def alternates_fpextras(self):
+        return dict(username=self.username)
+
 
 class PromiseCacheStore(VortexCacheMtStore):
     """Some kind of vortex cache for EXPECTED resources."""
@@ -1332,9 +1442,9 @@ class PromiseCacheStore(VortexCacheMtStore):
         ),
     )
 
-
-
-
+    @property
+    def cache_entry(self):
+        return os.path.join(super().cache_entry, "promise")
 
     @staticmethod
     def _add_default_options(options):
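
`GitStore.gitget` resolves the requested path at the requested reference with pygit2, then dispatches on the entry's file mode: git-annex symlinks are fetched with `git-annex get` and copied, other symlinks are recreated, blobs are written out, and trees are walked recursively. A minimal standalone sketch of the blob case (the function name and arguments are hypothetical; assumes a pygit2 version recent enough to provide `pygit2.enums`):

    import pygit2

    def cat_at_ref(repo_path, ref, path):
        # Same lookup as GitStore.gitget: resolve the revision, peel it
        # to its tree, then index into the tree with the "/" operator.
        repo = pygit2.Repository(repo_path)
        obj = repo.revparse_single(ref).peel(pygit2.Tree) / path.lstrip("/")
        if obj.filemode != pygit2.enums.FileMode.BLOB:
            raise ValueError(f"{path!r} at {ref!r} is not a regular file")
        return obj.data  # loads the whole blob into memory

The archive and cache store changes follow one pattern: the `try/except config.ConfigurationError` blocks disappear from `remap_read`/`remap_write`, the root-directory lookup moves into the new `archive_entry`/`cache_entry` properties, and the added `alternates_fpextras` overrides thread the `username` attribute through to each MultiStore's delegate stores.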
vortex/nwp/algo/assim.py
CHANGED
@@ -5,7 +5,7 @@ AlgoComponents dedicated to computations related to Data Assimilation systems.
 from bronx.fancies import loggers
 from bronx.stdtypes.date import Date
 
-from vortex.algo.components import BlindRun
+from vortex.algo.components import BlindRun
 from vortex.syntax.stdattrs import a_date
 from .ifsroot import IFSParallel
 from ..tools import odb, drhook
@@ -16,36 +16,6 @@ __all__ = []
 logger = loggers.getLogger(__name__)
 
 
-class MergeVarBC(Parallel):
-    """Merge two VarBC files.
-
-    The VarBC file resulting from the MergeVarBC contains all the items of the
-    first VarBC file plus any new item that would be present in the second file.
-    """
-
-    _footprint = dict(
-        attr=dict(
-            kind=dict(
-                values=["mergevarbc"],
-            ),
-            varbcout=dict(
-                optional=True,
-                default="VARBC.cycle_out",
-            ),
-        )
-    )
-
-    def prepare(self, rh, opts):
-        """Find any ODB candidate in input files."""
-
-        sh = self.system
-
-        sh.touch(self.varbcout)
-
-        # Let ancesters doing real stuff
-        super().prepare(rh, opts)
-
-
 class Anamix(IFSParallel):
     """Merge the surface and atmospheric analyses into a single file"""
 
vortex/nwp/algo/forecasts.py
CHANGED
@@ -112,6 +112,8 @@ class Forecast(IFSParallel):
         # Possibly fix post-processing clim files
         self.all_localclim_fixer(rh, thismonth)
 
+        self.grab(analysis, comment="analysis")
+
         # File linking for IAU increments
         #
         # In the case of a forecast with IAU, the IFS executable
@@ -884,3 +886,31 @@ class OfflineSurfex(Parallel, DrHookDecoMixin):
         if namsec.rh.contents.dumps_needs_update:
             namsec.rh.save()
         logger.info("Namelist dump: \n%s", namsec.rh.container.read())
+
+
+class MUSCForecast(Forecast):
+    """Forecast for MUSC single-column model."""
+
+    _footprint = dict(
+        info="Run a forecast with a MUSC single-column model.",
+        attr=dict(
+            kind=dict(
+                values=["musc"],
+            ),
+        ),
+    )
+
+    def postfix(self, rh, opts):
+        """Post forecast information and cleaning."""
+        sh = self.system
+        # rename specific output files with hours term on 4 digits for compatibility for fmth formatting
+        fmt = re.compile(r"Out\.(?P<termh>\d{3})\.\d{4}\.lfa$")
+        for f in [
+            fmt.match(f)
+            for f in sh.listdir()
+            if f.startswith("Out.") and f.endswith(".lfa")
+        ]:
+            sh.rename(
+                f.string, "Out.{:>04}.0000.lfa".format(int(f.group("termh")))
+            )
+        super().postfix(rh, opts)
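
In `MUSCForecast.postfix`, the term field widening relies on the format spec `>04`, where the `0` before the width acts as the fill character, so an integer is right-aligned and zero-padded to four digits. A quick check of the rename (the sample filename is hypothetical):

    import re

    fmt = re.compile(r"Out\.(?P<termh>\d{3})\.\d{4}\.lfa$")
    m = fmt.match("Out.012.0030.lfa")
    # The three-digit term "012" widens to "0012"; the second field
    # is reset to 0000 in the new name whatever it was before.
    assert "Out.{:>04}.0000.lfa".format(int(m.group("termh"))) == "Out.0012.0000.lfa"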
vortex/nwp/algo/ifsroot.py
CHANGED
@@ -91,7 +91,7 @@ class IFSParallel(
         ),
         fcterm=dict(
             info="The forecast term of the Arpege/IFS model.",
-            type=
+            type=float,
             optional=True,
             default=0,
         ),
@@ -346,24 +346,6 @@
         # be done by an extra class ... and it could be generalized to mpi
         # setup by the way !
         nam_updated = False
-        # For cy41 onward, replace some namelist macros with the command line
-        # arguments
-        if rh.resource.cycle >= "cy41":
-            if "NAMARG" in namcontents:
-                opts_arg = self.spawn_command_options()
-                self._set_nam_macro(
-                    namcontents, namlocal, "CEXP", opts_arg["name"]
-                )
-                self._set_nam_macro(
-                    namcontents, namlocal, "TIMESTEP", opts_arg["timestep"]
-                )
-                fcstop = "{:s}{:d}".format(
-                    opts_arg["fcunit"], opts_arg["fcterm"]
-                )
-                self._set_nam_macro(namcontents, namlocal, "FCSTOP", fcstop)
-                nam_updated = True
-            else:
-                logger.info("No NAMARG block in %s", namlocal)
 
         if self.member is not None:
             for macro_name in ("MEMBER", "PERTURB"):
@@ -371,7 +353,39 @@
                     namcontents, namlocal, macro_name, self.member
                 )
                 nam_updated = True
-
+
+        if rh.resource.cycle < "cy41":
+            return nam_updated
+
+        if "NAMARG" not in namcontents:
+            logger.info("No NAMARG block in %s", namlocal)
+            return nam_updated
+
+        # For cy41 onward, replace some namelist macros with the command line
+        # arguments
+        opts_arg = self.spawn_command_options()
+        self._set_nam_macro(namcontents, namlocal, "CEXP", opts_arg["name"])
+        self._set_nam_macro(
+            namcontents, namlocal, "TIMESTEP", opts_arg["timestep"]
+        )
+
+        if self.fcunit == "t":
+            fcstop = "t{:d}".format(int(self.fcterm))
+        elif self.fcterm.is_integer():
+            # Round number of hours
+            fcstop = "h{:d}".format(int(self.fcterm))
+        else:
+            # IFS expects the forecast term to be given as an integer,
+            # whether this integer represents hours or timesteps. This
+            # means terms that are not round hours (e.g. 01:45) can only
+            # be expressed as a number of timesteps.
+            # See http://gitlab.meteo.fr/cnrm-gmap/vortex/-/issues/9
+            nsteps = int(self.fcterm * 3600 // self.timestep)
+            fcstop = "t{:d}".format(nsteps)
+            logger.info(f"Converting {self.fcterm} hours into {nsteps}")
+
+        self._set_nam_macro(namcontents, namlocal, "FCSTOP", fcstop)
+        return True
 
     def prepare_namelists(self, rh, opts=None):
         """Update each of the namelists."""
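
With `fcterm` now typed as a float, the FCSTOP namelist macro is derived case by case: terms already in timestep units keep the `t<n>` form, round hours keep the `h<n>` form, and fractional hours are converted to a whole number of timesteps from the model timestep. The arithmetic, with hypothetical values:

    # fcterm = 1.75 h with a 450 s timestep:
    # 1.75 * 3600 // 450 = 14, so the FCSTOP macro becomes "t14"
    fcterm, timestep = 1.75, 450.0
    assert int(fcterm * 3600 // timestep) == 14
    # A round term keeps the hour form: 6.0 h gives "h6"
    assert "h{:d}".format(int(6.0)) == "h6"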
vortex/nwp/algo/monitoring.py
CHANGED
@@ -4,7 +4,7 @@ AlgoComponents dedicated to computations related to observations monitoring.
 
 from bronx.fancies import loggers
 
-from vortex.algo.components import
+from vortex.algo.components import BlindRun
 from vortex.syntax.stdattrs import a_date, a_model, a_cutoff
 from ..tools import odb, drhook
 
@@ -15,7 +15,7 @@ logger = loggers.getLogger(__name__)
 
 
 class OdbMonitoring(
-
+    BlindRun, odb.OdbComponentDecoMixin, drhook.DrHookDecoMixin
 ):
     """Compute monitoring statistics."""
 
vortex/nwp/algo/odbtools.py
CHANGED
@@ -16,7 +16,7 @@ from taylorism import Boss
 
 from vortex.tools.systems import ExecutionError
 
-from vortex.algo.components import Parallel, ParaBlindRun
+from vortex.algo.components import Parallel, ParaBlindRun, BlindRun
 from vortex.tools.parallelism import VortexWorkerBlindRun
 
 from ..syntax.stdattrs import arpifs_cycle
@@ -1217,7 +1217,7 @@ class OdbReshuffle(
 
 
 class FlagsCompute(
-
+    BlindRun, odb.OdbComponentDecoMixin, drhook.DrHookDecoMixin
 ):
     """Compute observations flags."""
 
vortex/nwp/data/consts.py
CHANGED
@@ -2,12 +2,15 @@
 Various Resources for constant files used in NWP.
 """
 
+import random
+
 import footprints
+from footprints import FPList
 from ..syntax.stdattrs import gvar
 from vortex.data.contents import DataRaw, JsonDictContent, TextContent
 from vortex.data.geometries import GaussGeometry, LonlatGeometry
 from vortex.data.outflow import ModelGeoResource, ModelResource, StaticResource
-from vortex.syntax.stdattrs import month_deco
+from vortex.syntax.stdattrs import date_deco, member, month_deco
 from vortex.syntax.stddeco import (
     namebuilding_append,
     namebuilding_delete,
@@ -180,6 +183,63 @@ class RtCoef(GenvModelResource):
         return "rtcoef"
 
 
+class RtCoefMulti(GenvModelResource):
+    """
+    RtCoeff Satellite coefficients, randomly chosen depending on the member of an ensemble.
+
+    Reproducibility is ensured by the stability of the random generation for a given date (ymdh) and member.
+    With member=0 or None, or with choices=0, the choice is always 0, even if 0 is excluded.
+    """
+
+    _footprint = [
+        date_deco,
+        member,
+        dict(
+            info="Set of satellite coefficients",
+            attr=dict(
+                kind=dict(
+                    values=["rtcoef_multi", "mwave_rtcoef_multi"],
+                ),
+                choices=dict(
+                    info="Number of choices to choose from (0..choices-1)",
+                    type=int,
+                    optional=True,
+                ),
+                excluded=dict(
+                    info="List of values excluded from choice",
+                    type=footprints.stdtypes.FPList,
+                    optional=True,
+                    default=FPList([]),
+                ),
+                gvar=dict(
+                    info="Will be modified by the random choice",
+                    default="[kind]_0",
+                    access="rwx",
+                ),
+            ),
+        ),
+    ]
+
+    def __init__(self, *args, **kw):
+        super().__init__(*args, **kw)
+        if self.member is None or self.member == 0 or self.choices == 0:
+            choice = 0
+        else:
+            # a random generator entirely determined by the date
+            rgen = random.Random(int(self.date.ymdh))
+            # drawing must be reproducible for a given member:
+            # generate 'member' values, but only keep the last
+            choice = rgen.choices(
+                [n for n in range(self.choices) if n not in self.excluded],
+                k=self.member,
+            )[-1]
+        self.gvar = self.gvar[:-1] + str(choice)
+
+    @property
+    def realkind(self):
+        return "rtcoef"
+
+
 class RRTM(GenvModelResource):
     """
     Class of a tar-zip file of coefficients for radiative transfers computations.
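
`RtCoefMulti` keeps the per-member draw reproducible without any coordination between members: the generator is seeded with the date alone, `member` values are drawn with replacement, and only the last draw is kept, so a given (date, member) pair always lands on the same index. A standalone sketch of that selection logic (the function and its argument values are hypothetical):

    import random

    def pick(ymdh, member, choices, excluded=()):
        # Same logic as RtCoefMulti.__init__: date-seeded generator,
        # `member` draws with replacement, keep only the last one.
        if member in (None, 0) or choices == 0:
            return 0
        rgen = random.Random(int(ymdh))
        pool = [n for n in range(choices) if n not in excluded]
        return rgen.choices(pool, k=member)[-1]

    # Stable: the same date and member always give the same index.
    assert pick(2024061300, 3, choices=5) == pick(2024061300, 3, choices=5)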
|