wrfrun 0.1.8__py3-none-any.whl → 0.2.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (53)
  1. wrfrun/cli.py +131 -0
  2. wrfrun/core/base.py +52 -19
  3. wrfrun/core/config.py +257 -170
  4. wrfrun/core/error.py +8 -1
  5. wrfrun/core/replay.py +1 -1
  6. wrfrun/core/server.py +91 -71
  7. wrfrun/data.py +14 -16
  8. wrfrun/extension/goos_sst/__init__.py +5 -5
  9. wrfrun/extension/goos_sst/core.py +4 -1
  10. wrfrun/extension/goos_sst/res/Vtable.ERA_GOOS_SST +1 -1
  11. wrfrun/extension/goos_sst/res/__init__.py +17 -0
  12. wrfrun/extension/goos_sst/utils.py +21 -5
  13. wrfrun/extension/littler/__init__.py +57 -1
  14. wrfrun/extension/littler/{utils.py → core.py} +329 -43
  15. wrfrun/extension/utils.py +24 -22
  16. wrfrun/model/__init__.py +24 -1
  17. wrfrun/model/plot.py +259 -36
  18. wrfrun/model/utils.py +19 -9
  19. wrfrun/model/wrf/__init__.py +41 -0
  20. wrfrun/model/wrf/core.py +229 -101
  21. wrfrun/model/wrf/exec_wrap.py +49 -35
  22. wrfrun/model/wrf/geodata.py +2 -1
  23. wrfrun/model/wrf/namelist.py +78 -4
  24. wrfrun/model/wrf/{_metgrid.py → utils.py} +38 -3
  25. wrfrun/model/wrf/vtable.py +9 -5
  26. wrfrun/res/__init__.py +22 -7
  27. wrfrun/res/config/config.template.toml +57 -0
  28. wrfrun/res/{config.toml.template → config/wrf.template.toml} +7 -46
  29. wrfrun/res/run.template.sh +10 -0
  30. wrfrun/res/scheduler/lsf.template +5 -0
  31. wrfrun/res/{job_scheduler → scheduler}/pbs.template +1 -1
  32. wrfrun/res/{job_scheduler → scheduler}/slurm.template +2 -1
  33. wrfrun/run.py +39 -27
  34. wrfrun/scheduler/__init__.py +35 -0
  35. wrfrun/scheduler/env.py +44 -0
  36. wrfrun/scheduler/lsf.py +49 -0
  37. wrfrun/scheduler/pbs.py +50 -0
  38. wrfrun/scheduler/script.py +72 -0
  39. wrfrun/scheduler/slurm.py +50 -0
  40. wrfrun/scheduler/utils.py +14 -0
  41. wrfrun/utils.py +8 -3
  42. wrfrun/workspace/__init__.py +38 -0
  43. wrfrun/workspace/core.py +94 -0
  44. wrfrun/workspace/wrf.py +165 -0
  45. {wrfrun-0.1.8.dist-info → wrfrun-0.2.0.dist-info}/METADATA +3 -2
  46. wrfrun-0.2.0.dist-info/RECORD +62 -0
  47. wrfrun-0.2.0.dist-info/entry_points.txt +3 -0
  48. wrfrun/model/wrf/_ndown.py +0 -39
  49. wrfrun/pbs.py +0 -86
  50. wrfrun/res/run.sh.template +0 -16
  51. wrfrun/workspace.py +0 -88
  52. wrfrun-0.1.8.dist-info/RECORD +0 -51
  53. {wrfrun-0.1.8.dist-info → wrfrun-0.2.0.dist-info}/WHEEL +0 -0
wrfrun/core/server.py CHANGED
@@ -8,7 +8,7 @@ In order to report the progress to user, ``wrfrun`` provides :class:`WRFRunServe
  .. autosummary::
  :toctree: generated/

- get_wrf_simulated_seconds
+ set_log_parse_func
  WRFRunServer
  WRFRunServerHandler
  stop_server
@@ -16,11 +16,12 @@ In order to report the progress to user, ``wrfrun`` provides :class:`WRFRunServe

  import socket
  import socketserver
- import subprocess
+ import threading
+ from collections.abc import Callable
  from datetime import datetime
  from json import dumps
  from time import time
- from typing import Tuple, Optional
+ from typing import Tuple

  from .config import WRFRUNConfig
  from ..utils import logger
@@ -29,38 +30,22 @@ WRFRUN_SERVER_INSTANCE = None
  WRFRUN_SERVER_THREAD = None


- def get_wrf_simulated_seconds(start_datetime: datetime, log_file_path: Optional[str] = None) -> int:
- """
- Read the latest line of WRF's log file and calculate how many seconds WRF has integrated.
-
- :param start_datetime: WRF start datetime.
- :type start_datetime: datetime
- :param log_file_path: Absolute path of the log file to be parsed.
- :type log_file_path: str
- :return: Integrated seconds. If this method fails to calculate the time, the returned value is ``-1``.
- :rtype: int
- """
- # use linux cmd to get the latest line of wrf log files
- if log_file_path is None:
- log_file_path = WRFRUNConfig.parse_resource_uri(f"{WRFRUNConfig.WRF_WORK_PATH}/rsl.out.0000")
- res = subprocess.run(["tail", "-n", "1", log_file_path], capture_output=True)
- log_text = res.stdout.decode()
+ SET_LOG_PARSER_LOCK = threading.Lock()
+ LOG_PARSER: Callable[[datetime], int] | None = None

- if not (log_text.startswith("d01") or log_text.startswith("d02")):
- return -1

- time_string = log_text.split()[1]
-
- try:
- current_datetime = datetime.strptime(time_string, "%Y-%m-%d_%H:%M:%S")
- # remove timezone info so we can calculate.
- date_delta = current_datetime - start_datetime.replace(tzinfo=None)
- seconds = date_delta.days * 24 * 60 * 60 + date_delta.seconds
+ def set_log_parse_func(func: Callable[[datetime], int]):
+ """
+ Set log parse function used by socket server.

- except ValueError:
- seconds = -1
+ :param func: Function used to get simulated seconds from model's log file.
+ If the function can't parse the simulated seconds, it should return ``-1``.
+ :type func: Callable[[datetime], int]
+ """
+ global SET_LOG_PARSER_LOCK, LOG_PARSER

- return seconds
+ with SET_LOG_PARSER_LOCK:
+ LOG_PARSER = func


  class WRFRunServer(socketserver.ThreadingMixIn, socketserver.TCPServer):
@@ -98,13 +83,13 @@ class WRFRunServer(socketserver.ThreadingMixIn, socketserver.TCPServer):
  Path of the log file the server will read.
  """

- def __init__(self, start_date: datetime, wrf_simulate_seconds: int, *args, **kwargs) -> None:
+ def __init__(self, start_date: datetime, total_simulate_seconds: int, *args, **kwargs) -> None:
  """

  :param start_date: The simulation's start date.
  :type start_date: datetime
- :param wrf_simulate_seconds: The total seconds the simulation will integrate.
- :type wrf_simulate_seconds: int
+ :param total_simulate_seconds: The total seconds the simulation will integrate.
+ :type total_simulate_seconds: int
  :param args: Other positional arguments passed to parent class.
  :type args:
  :param kwargs: Other keyword arguments passed to parent class.
@@ -119,12 +104,7 @@ class WRFRunServer(socketserver.ThreadingMixIn, socketserver.TCPServer):
  self.start_date = start_date

  # record how many seconds the wrf will integral
- self.wrf_simulate_seconds = wrf_simulate_seconds
-
- # we need to parse the log file to track the simulation progress.
- self.wrf_log_path = WRFRUNConfig.parse_resource_uri(f"{WRFRUNConfig.WRF_WORK_PATH}/rsl.out.0000")
- logger.debug("WRFRun Server will try to track simulation progress with following log files:")
- logger.debug(f"WRF: {self.wrf_log_path}")
+ self.total_simulate_seconds = total_simulate_seconds

  def server_bind(self):
  """
@@ -144,33 +124,80 @@ class WRFRunServer(socketserver.ThreadingMixIn, socketserver.TCPServer):
  """
  return self.start_timestamp

- def get_wrf_simulate_settings(self) -> Tuple[datetime, int]:
+ def get_model_simulate_settings(self) -> Tuple[datetime, int]:
  """
  Get the start date of the case the NWP simulates and the total seconds of the simulation.

  :return: (start date, simulation seconds)
  :rtype: tuple
  """
- return self.start_date, self.wrf_simulate_seconds
+ return self.start_date, self.total_simulate_seconds


  class WRFRunServerHandler(socketserver.StreamRequestHandler):
  """
- Socket server handler.
+ :class:`WRFRunServer` handler.
+
+ This handler can report time usage and simulation progress of the running model, simulation settings, and stop the server:
+
+ 1. If this handler receives ``"stop"``, it stops the server.
+ 2. If this handler receives ``"debug"``, it returns the simulation settings in JSON.
+ 3. It returns time usage and simulation progress in JSON when receiving any other messages.
+
+ **On receiving "stop"**
+
+ This handler will stop the server, and return a plain message ``Server stop``.
+
+ **On receiving "debug"**
+
+ This handler will return simulation settings in a JSON string like:
+
+ .. code-block:: json
+
+ {
+ "start_date": "2021-03-25 00:00",
+ "total_simulate_seconds": 360000,
+ }
+
+ **On receiving any other messages**
+
+ This handler will return time usage and simulation progress in a JSON string like:
+
+ .. code-block:: json
+
+ {
+ "usage": 3600,
+ "status": "geogrid",
+ "progress": 35,
+ }
+
+ where ``usage`` represents the seconds ``wrfrun`` has spent running the NWP model,
+ ``status`` represents work status,
+ ``progress`` represents simulation progress of the status in percentage.
  """
  def __init__(self, request, client_address, server: WRFRunServer) -> None:
+ """
+ :class:`WRFRunServer` handler.
+
+ :param request:
+ :type request:
+ :param client_address:
+ :type client_address:
+ :param server: :class:`WRFRunServer` instance.
+ :type server: WRFRunServer
+ """
  super().__init__(request, client_address, server)

  # get server
  self.server: WRFRunServer = server

- def calculate_time_usage(self) -> str:
+ def calculate_time_usage(self) -> int:
  """
  Calculate the duration from the server's start time to the present,
  which represents the time ``wrfrun`` has spent running the NWP model.

- :return: A time string in ``%H:%M:%S`` format.
- :rtype: str
+ :return: Seconds.
+ :rtype: int
  """
  # get current timestamp
  current_timestamp = datetime.fromtimestamp(time())
@@ -179,42 +206,35 @@ class WRFRunServerHandler(socketserver.StreamRequestHandler):
  seconds_diff = current_timestamp - self.server.get_start_time()
  seconds_diff = seconds_diff.seconds

- # calculate hours, minutes and seconds
- seconds = seconds_diff % 60
- minutes = (seconds_diff % 3600) // 60
- hours = seconds_diff // 3600
+ return seconds_diff

- time_usage = ":".join([
- str(hours).rjust(2, '0'),
- str(minutes).rjust(2, '0'),
- str(seconds).rjust(2, '0')
- ])
-
- return time_usage
-
- def calculate_progress(self) -> str:
+ def calculate_progress(self) -> tuple[str, int]:
  """
  Read the log file and calculate the simulation progress.

- :return: A JSON string with two keys: ``status`` and ``progress``.
- :rtype: str
+ :return: ``(status, progress)``. ``status`` represents work status,
+ ``progress`` represents simulation progress of the status in percentage.
+ :rtype: tuple[str, int]
  """
- start_date, simulate_seconds = self.server.get_wrf_simulate_settings()
+ start_date, simulate_seconds = self.server.get_model_simulate_settings()

- simulated_seconds = get_wrf_simulated_seconds(start_date, self.server.wrf_log_path)
+ with SET_LOG_PARSER_LOCK:
+ if LOG_PARSER is None:
+ simulated_seconds = -1
+ else:
+ simulated_seconds = LOG_PARSER(start_date)

  if simulated_seconds > 0:
  progress = simulated_seconds * 100 // simulate_seconds
-
  else:
- progress = 0
+ progress = -1

  status = WRFRUNConfig.WRFRUN_WORK_STATUS

  if status == "":
  status = "*"

- return dumps({"status": status, "progress": progress})
+ return status, progress

  def handle(self) -> None:
  """
@@ -229,17 +249,17 @@ class WRFRunServerHandler(socketserver.StreamRequestHandler):
  self.wfile.write(f"Server stop\n".encode())

  elif msg == "debug":
- start_date, simulate_seconds = self.server.get_wrf_simulate_settings()
+ start_date, simulate_seconds = self.server.get_model_simulate_settings()
  start_date = start_date.strftime("%Y-%m-%d %H:%M")

- self.wfile.write(f"{start_date}\n{simulate_seconds}\n".encode())
+ self.wfile.write(dumps({"start_date": start_date, "total_seconds": simulate_seconds}).encode())

  else:
- progress = self.calculate_progress()
+ status, progress = self.calculate_progress()
  time_usage = self.calculate_time_usage()

  # send the message
- self.wfile.write(f"{progress}\n{time_usage}".encode())
+ self.wfile.write(dumps({"usage": time_usage, "status": status, "progress": progress}).encode())


  def stop_server(socket_ip: str, socket_port: int):
@@ -266,4 +286,4 @@ def stop_server(socket_ip: str, socket_port: int):
  logger.warning("Fail to stop WRFRunServer, maybe it doesn't start at all.")


- __all__ = ["WRFRunServer", "WRFRunServerHandler", "get_wrf_simulated_seconds", "stop_server"]
+ __all__ = ["WRFRunServer", "WRFRunServerHandler", "stop_server", "set_log_parse_func"]
wrfrun/data.py CHANGED
@@ -1,14 +1,15 @@
  from datetime import datetime
  from os import makedirs
- from os.path import exists, dirname
- from typing import Union, List, Tuple
+ from os.path import dirname, exists
+ from typing import List, Tuple, Union

  from pandas import date_range
- # from seafog import goos_sst_find_data

- from .core.config import WRFRUNConfig
+ from .core import get_wrfrun_config
  from .utils import logger

+ # from seafog import goos_sst_find_data
+
  # lazy initialize
  CDS_CLIENT = None

@@ -264,8 +265,7 @@ def find_era5_data(date: Union[List[str], List[datetime]], area: Tuple[int, int,
  # check if we need to add pressure_level to params dict
  if dataset == ERA5CONFIG.DATASET_ERA5_PRESSURE_LEVEL:
  if pressure_level is None:
- logger.error(
- f"You need to provide pressure levels to download data")
+ logger.error("You need to provide pressure levels to download data")
  exit(1)
  # convert value to str
  if not isinstance(pressure_level, list):
@@ -277,12 +277,11 @@
  if _check_pressure_level(pressure_level): # type: ignore
  params_dict["pressure_level"] = pressure_level
  else:
- logger.error(
- f"You have passed wrong pressure level to download data, check it")
+ logger.error("You have passed wrong pressure level to download data, check it")
  exit(1)

  # download data
- logger.info(f"Downloading data to {save_path}, it may take several tens of minutes, please wait...")
+ logger.info(f"Downloading data to '{save_path}', it may take several tens of minutes, please wait...")

  if CDS_CLIENT is None:
  import cdsapi
@@ -299,29 +298,28 @@ def prepare_wps_input_data(area: Tuple[int, int, int, int]):
  Args:
  area (Tuple[int, int, int, int]): Range of longitude and latitude, `[lon1, lon2, lat1, lat2]`.
  """
- wrf_config = WRFRUNConfig.get_model_config("wrf")
+ wrf_config = get_wrfrun_config().get_model_config("wrf")
  # get start and end date from config
  start_date = wrf_config["time"]["start_date"]
  end_date = wrf_config["time"]["end_date"]

  # remove second part
- start_date = start_date[:-3]
- end_date = end_date[:-3]
+ start_date = start_date.strftime("%Y-%m-%d %H:%M")
+ end_date = end_date.strftime("%Y-%m-%d %H:%M")

  # get hour step
  hour_step = wrf_config["time"]["input_data_interval"] // 3600

  # get data save path
- bg_save_path = wrf_config["wps_input_data_folder"]
- sst_save_path = wrf_config["near_goos_data_folder"]
+ data_save_path = get_wrfrun_config().get_input_data_path()

  # download data
  logger.info(f"Download background data of surface level...")
- download_data(start_date, end_date, hour_step, area, f"{bg_save_path}/surface.grib",
+ download_data(start_date, end_date, hour_step, area, f"{data_save_path}/surface.grib",
  data_format="grib", data_type="surface", overwrite=True)

  logger.info(f"Download background data of pressure level...")
- download_data(start_date, end_date, hour_step, area, f"{bg_save_path}/pressure.grib",
+ download_data(start_date, end_date, hour_step, area, f"{data_save_path}/pressure.grib",
  data_format="grib", data_type="pressure", overwrite=True)

  # logger.info(f"Download NearGOOS data...")
wrfrun/extension/goos_sst/__init__.py CHANGED
@@ -4,11 +4,11 @@ wrfrun.extension.goos_sst

  This extension can help you create a GRIB file from ERA5 skin temperature (SKT) data and NEAR-GOOS sea surface temperature (SST) data.

- ================================================== =============================================
- :doc:`goos_sst </api/extension.goos_sst.goos_sst>` Core functionality submodule.
- :doc:`res </api/extension.goos_sst.res>` Resource files provided by this extension.
- :doc:`utils </api/extension.goos_sst.utils>` Utility submodule used by the core submodule.
- ================================================== =============================================
+ ============================================= =============================================
+ :doc:`core </api/extension.goos_sst.core>` Core functionality submodule.
+ :doc:`res </api/extension.goos_sst.res>` Resource files provided by this extension.
+ :doc:`utils </api/extension.goos_sst.utils>` Utility submodule used by the core submodule.
+ ============================================= =============================================

  Important Note
  **************
wrfrun/extension/goos_sst/core.py CHANGED
@@ -16,7 +16,6 @@ from os.path import dirname
  import cfgrib as cf
  import numpy as np
  from pandas import to_datetime
- from seafog import goos_sst_find_data, goos_sst_parser
  from xarray import DataArray

  from .utils import create_sst_grib
@@ -40,6 +39,10 @@ def merge_era5_goos_sst_grib(surface_grib_path: str, save_path: str, sst_data_sa
  Please check ``seafog.goos_sst_find_data`` for more information.
  :type resolution: str
  """
+ # lazy import seafog to fix libcurl error in readthedocs
+ # T^T
+ from seafog import goos_sst_find_data, goos_sst_parser
+

  dataset_list = cf.open_datasets(surface_grib_path)
  dataset = None
wrfrun/extension/goos_sst/res/Vtable.ERA_GOOS_SST CHANGED
@@ -4,4 +4,4 @@ Code | Code | 1 | 2 | Name | Units | Description
  34 | 1 | 0 | | SST | K | Sea-Surface Temperature |
  -----+------+------+------+----------+----------+------------------------------------------+

- # Vtable setting for GRIB file created by wrfrun.extension.sst
+ # Vtable setting for GRIB file created by wrfrun.extension.goos_sst
wrfrun/extension/goos_sst/res/__init__.py CHANGED
@@ -1,3 +1,20 @@
+ """
+ wrfrun.extension.goos_sst.res
+ #############################
+
+ Resource files provided by :doc:`/api/extension.goos_sst`.
+
+ VTABLE_ERA_GOOS_SST
+ *******************
+
+ .. py:data:: VTABLE_ERA_GOOS_SST
+ :type: str
+ :value: Absolute file path.
+
+ Vtable file used to input the GRIB data created by :doc:`/api/extension.goos_sst` to WRF.
+
+ """
+
  from os.path import abspath, dirname


wrfrun/extension/goos_sst/utils.py CHANGED
@@ -1,3 +1,15 @@
+ """
+ wrfrun.extension.goos_sst.utils
+ ###############################
+
+ Functions that are used by :doc:`/api/extension.goos_sst`.
+
+ .. autosummary::
+ :toctree: generated/
+
+ create_sst_grib
+ """
+
  from cfgrib.xarray_to_grib import to_grib
  from numpy.dtypes import DateTime64DType
  from pandas import to_datetime
@@ -8,12 +20,16 @@ from wrfrun.utils import logger

  def create_sst_grib(data: DataArray, save_path: str):
  """
- Create a dataset and save it to a GRIB file.
-
- Args:
- data: DataArray data, it should at least contain three dimensions: ``["time", "latitude", "longitude"]``.
- save_path: GRIB file path.
+ Write SST data to a GRIB file.
+
+ This function creates GRIB file using ``cfgrib`` package.
+ While GRIB write support is experimental in ``cfgrib``,
+ this function may **FAIL TO CREATE GRIB FILE**.

+ :param data: ``xarray.DataArray``, which at least has three dimensions: ``["time", "latitude", "longitude"]``.
+ :type data: DataArray
+ :param save_path: Output GRIB file path.
+ :type save_path: str
  """
  # check the data's dimensions.
  for _dim in ["time", "longitude", "latitude"]:
wrfrun/extension/littler/__init__.py CHANGED
@@ -1 +1,57 @@
- from .utils import *
+ """
+ wrfrun.extension.littler
+ ########################
+
+ This extension can help you manage observation data, and create ``LITTLE_R`` file for data assimilation.
+
+ ========================================= =============================
+ :doc:`core </api/extension.littler.core>` Core functionality submodule.
+ ========================================= =============================
+
+ What Can This Extension Do?
+ ***************************
+
+ According to the `WRFDA Online Tutorial <https://www2.mmm.ucar.edu/wrf/users/wrfda/OnlineTutorial/Help/littler.html>`_,
+ ``LITTLE_R`` is an ASCII-based observation file format that is designed to be an intermediate format
+ so that WRFDA might be able to assimilate as many observation types as possible in a universal manner.
+
+ However, ``LITTLE_R`` is really hard to process elegantly from the point of view of Python.
+ To help users create ``LITTLE_R`` file easily, this extension introduces :class:`LittleR <core.LittleR>`,
+ and **Zipped Little R** file.
+
+ :class:`LittleR <core.LittleR>` accepts observation datas, and can generate observation reports in proper format.
+ Besides, it can save an observation report to a Zipped Little R file,
+ so you can read the report later or process the observation data with other program.
+ Please check :class:`LittleR <core.LittleR>` for more information.
+
+ How To Use This Extension?
+ **************************
+
+ The code snap below shows you how to use this extension.
+
+ .. code-block:: Python
+ :caption: main.py
+
+ from wrfrun.extension.littler import LittleR
+
+
+ if __name__ == '__main__':
+ littler = LittleR()
+ littler.set_header(
+ longitude=120, latitude=60, fm="FM-19", elevation=0,
+ is_bogus=True, date="20250902070000"
+ )
+ # write to LITTLE_R file
+ with open("data/test", "w") as f:
+ f.write(str(littler))
+ # write to zlr file
+ littler.to_zlr("data/test.zlr")
+
+ .. toctree::
+ :maxdepth: 1
+ :hidden:
+
+ core <extension.littler.core>
+ """
+
+ from .core import *