tdlpackio 2.0.0b1__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- tdlpackio-2.0.0b1/LICENSE +21 -0
- tdlpackio-2.0.0b1/MANIFEST.in +15 -0
- tdlpackio-2.0.0b1/PKG-INFO +99 -0
- tdlpackio-2.0.0b1/README.md +65 -0
- tdlpackio-2.0.0b1/TDLPACK.md +40 -0
- tdlpackio-2.0.0b1/TdlpackIO.py +384 -0
- tdlpackio-2.0.0b1/VERSION +1 -0
- tdlpackio-2.0.0b1/create_docs.sh +9 -0
- tdlpackio-2.0.0b1/docs/index.html +7 -0
- tdlpackio-2.0.0b1/docs/search.js +46 -0
- tdlpackio-2.0.0b1/docs/tdlpackio.html +3720 -0
- tdlpackio-2.0.0b1/pyproject.toml +72 -0
- tdlpackio-2.0.0b1/requirements.txt +6 -0
- tdlpackio-2.0.0b1/setup.cfg +4 -0
- tdlpackio-2.0.0b1/setup.py +323 -0
- tdlpackio-2.0.0b1/src/ext/tdlpacklib.pyx +432 -0
- tdlpackio-2.0.0b1/src/tdlpackio/__config__.py +6 -0
- tdlpackio-2.0.0b1/src/tdlpackio/__init__.py +39 -0
- tdlpackio-2.0.0b1/src/tdlpackio/_tdlpackio.py +2079 -0
- tdlpackio-2.0.0b1/src/tdlpackio/grids.py +136 -0
- tdlpackio-2.0.0b1/src/tdlpackio/lstdlp.py +132 -0
- tdlpackio-2.0.0b1/src/tdlpackio/templates.py +539 -0
- tdlpackio-2.0.0b1/src/tdlpackio/utils.py +192 -0
- tdlpackio-2.0.0b1/src/tdlpackio/version.py +2 -0
- tdlpackio-2.0.0b1/src/tdlpackio/xarray_backend.py +1173 -0
- tdlpackio-2.0.0b1/src/tdlpackio.egg-info/PKG-INFO +99 -0
- tdlpackio-2.0.0b1/src/tdlpackio.egg-info/SOURCES.txt +30 -0
- tdlpackio-2.0.0b1/src/tdlpackio.egg-info/dependency_links.txt +1 -0
- tdlpackio-2.0.0b1/src/tdlpackio.egg-info/entry_points.txt +5 -0
- tdlpackio-2.0.0b1/src/tdlpackio.egg-info/not-zip-safe +1 -0
- tdlpackio-2.0.0b1/src/tdlpackio.egg-info/requires.txt +4 -0
- tdlpackio-2.0.0b1/src/tdlpackio.egg-info/top_level.txt +1 -0
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
MIT License
|
|
2
|
+
|
|
3
|
+
Copyright (c) 2024 NOAA Meteorological Development Laboratory (MDL)
|
|
4
|
+
|
|
5
|
+
Permission is hereby granted, free of charge, to any person obtaining a copy
|
|
6
|
+
of this software and associated documentation files (the "Software"), to deal
|
|
7
|
+
in the Software without restriction, including without limitation the rights
|
|
8
|
+
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
|
9
|
+
copies of the Software, and to permit persons to whom the Software is
|
|
10
|
+
furnished to do so, subject to the following conditions:
|
|
11
|
+
|
|
12
|
+
The above copyright notice and this permission notice shall be included in all
|
|
13
|
+
copies or substantial portions of the Software.
|
|
14
|
+
|
|
15
|
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
|
16
|
+
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
|
17
|
+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
|
18
|
+
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
|
19
|
+
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
|
20
|
+
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
|
21
|
+
SOFTWARE.
|
|
@@ -0,0 +1,15 @@
|
|
|
1
|
+
include MANIFEST.in
|
|
2
|
+
include setup.py
|
|
3
|
+
include pyproject.toml
|
|
4
|
+
include README.md
|
|
5
|
+
include TDLPACK.md
|
|
6
|
+
include LICENSE
|
|
7
|
+
include VERSION
|
|
8
|
+
include create_docs.sh
|
|
9
|
+
include requirements.txt
|
|
10
|
+
include TdlpackIO.py
|
|
11
|
+
graft docs
|
|
12
|
+
graft src/ext
|
|
13
|
+
graft src/tdlpackio
|
|
14
|
+
prune tests
|
|
15
|
+
global-exclude *.py[cod] *.c *.so *.DS_Store
|
|
@@ -0,0 +1,99 @@
|
|
|
1
|
+
Metadata-Version: 2.4
|
|
2
|
+
Name: tdlpackio
|
|
3
|
+
Version: 2.0.0b1
|
|
4
|
+
Summary: Python interface for reading and writing TDLPACK data
|
|
5
|
+
Author-email: Eric Engle <eric.engle@noaa.gov>
|
|
6
|
+
License: MIT
|
|
7
|
+
Project-URL: Documentation, https://noaa-mdl.github.io/tdlpackio
|
|
8
|
+
Project-URL: Repository, https://github.com/NOAA-MDL/tdlpackio
|
|
9
|
+
Keywords: atmospheric-science,data-science,meteorology,mos,numpy,statistics,statpp,weather,weather-data
|
|
10
|
+
Classifier: Development Status :: 5 - Production/Stable
|
|
11
|
+
Classifier: Environment :: Console
|
|
12
|
+
Classifier: Programming Language :: Cython
|
|
13
|
+
Classifier: Programming Language :: Python :: 3
|
|
14
|
+
Classifier: Programming Language :: Python :: 3 :: Only
|
|
15
|
+
Classifier: Programming Language :: Python :: 3.10
|
|
16
|
+
Classifier: Programming Language :: Python :: 3.11
|
|
17
|
+
Classifier: Programming Language :: Python :: 3.12
|
|
18
|
+
Classifier: Programming Language :: Python :: 3.13
|
|
19
|
+
Classifier: Programming Language :: Python :: 3.14
|
|
20
|
+
Classifier: Intended Audience :: Science/Research
|
|
21
|
+
Classifier: License :: OSI Approved :: MIT License
|
|
22
|
+
Classifier: Topic :: Scientific/Engineering :: Atmospheric Science
|
|
23
|
+
Classifier: Topic :: Software Development :: Libraries :: Python Modules
|
|
24
|
+
Classifier: Operating System :: POSIX
|
|
25
|
+
Classifier: Operating System :: POSIX :: Linux
|
|
26
|
+
Classifier: Operating System :: MacOS
|
|
27
|
+
Requires-Python: <3.15,>=3.8
|
|
28
|
+
Description-Content-Type: text/markdown
|
|
29
|
+
License-File: LICENSE
|
|
30
|
+
Requires-Dist: numpy
|
|
31
|
+
Provides-Extra: xarray
|
|
32
|
+
Requires-Dist: xarray; extra == "xarray"
|
|
33
|
+
Dynamic: license-file
|
|
34
|
+
|
|
35
|
+
# tdlpackio
|
|
36
|
+
|
|
37
|
+
[](https://opensource.org/licenses/MIT)
|
|
38
|
+
|
|
39
|
+
[](https://www.python.org/downloads/release/python-3100/)
|
|
40
|
+
[](https://www.python.org/downloads/release/python-3110/)
|
|
41
|
+
[](https://www.python.org/downloads/release/python-3120/)
|
|
42
|
+
[](https://www.python.org/downloads/release/python-3130/)
|
|
43
|
+
[](https://www.python.org/downloads/release/python-3140/)
|
|
44
|
+
|
|
45
|
+

|
|
46
|
+

|
|
47
|
+
|
|
48
|
+

|
|
49
|
+

|
|
50
|
+
|
|
51
|
+
[](https://anaconda.org/conda-forge/tdlpackio)
|
|
52
|
+
[](https://anaconda.org/conda-forge/tdlpackio)
|
|
53
|
+
[](https://anaconda.org/conda-forge/tdlpackio)
|
|
54
|
+
|
|
55
|
+
## Introduction
|
|
56
|
+
|
|
57
|
+
tdlpackio provides a Python interface for reading and writing TDLPACK files. The NOAA/NWS Meteorological Development Lab ([MDL](https://www.weather.gov/mdl/)) produces [Model Output Statistics (MOS)](https://vlab.noaa.gov/web/mdl/mos) and the [National Blend of Models (NBM)](https://vlab.noaa.gov/web/mdl/nbm). These products are generated from the MDL's in-house MOS-2000 Software System (MOS2K). The MOS2K system defines a GRIB-like data format called TDLPACK. A brief introduction to TDLPACK files and data format can be found [here](TDLPACK.md). tdlpackio provides a Cython extension module, tdlpacklib, which provides an interface to [libtdlpack](https://github.com/NOAA-MDL/libtdlpack), a Fortran-based library containing a subset of MOS2K subroutines.
|
|
58
|
+
|
|
59
|
+
## Requirements
|
|
60
|
+
* Python 3.10, 3.11, 3.12, 3.13, 3.14
|
|
61
|
+
* [libtdlpack](https://github.com/NOAA-MDL/libtdlpack) 1.0.0+
|
|
62
|
+
* setuptools
|
|
63
|
+
* cython 3.0+
|
|
64
|
+
* numpy 1.26+
|
|
65
|
+
* pyproj 1.9.6+
|
|
66
|
+
* C compiler: GNU, Intel, and Apple Clang have been tested.
|
|
67
|
+
|
|
68
|
+
## Required External Libraries
|
|
69
|
+
|
|
70
|
+
### libtdlpack
|
|
71
|
+
The [libtdlpack](https://github.com/NOAA-MDL/libtdlpack) library is required for tdlpackio. You will have to build and install this yourself, but this is not difficult. For macOS users, libtdlpack can be installed via [this Homebrew Tap](https://github.com/NOAA-MDL/homebrew-mdllibs). If you use the *conda ecosystems, then you can install via `conda install -c conda-forge libtdlpack`.
|
|
72
|
+
|
|
73
|
+
## Install
|
|
74
|
+
|
|
75
|
+
Once again, this assumes that libtdlpack has been installed. If libtdlpack has been installed into a "common" installation path, then it will be found, otherwise define environment variable `TDLPACK_DIR` with the installation path.
|
|
76
|
+
|
|
77
|
+
* From [PyPI](https://pypi.python.org/pypi/tdlpackio) via pip:
|
|
78
|
+
|
|
79
|
+
```
|
|
80
|
+
pip install tdlpackio
|
|
81
|
+
```
|
|
82
|
+
* From [conda-forge](https://anaconda.org/conda-forge/tdlpackio) via conda:
|
|
83
|
+
|
|
84
|
+
```
|
|
85
|
+
conda install -c conda-forge tdlpackio
|
|
86
|
+
```
|
|
87
|
+
|
|
88
|
+
* From source:
|
|
89
|
+
```shell
|
|
90
|
+
pip install .
|
|
91
|
+
```
|
|
92
|
+
|
|
93
|
+
## Development
|
|
94
|
+
|
|
95
|
+
The development evolution of tdlpackio will mainly focus on how best to serve that purpose and its primary users -- mainly meteorologists, physical scientists, and software developers supporting the missions within NOAA's National Weather Service (NWS) and National Centers for Environmental Prediction (NCEP), and other NOAA organizations.
|
|
96
|
+
|
|
97
|
+
## Disclaimer
|
|
98
|
+
|
|
99
|
+
This repository is a scientific product and is not official communication of the National Oceanic and Atmospheric Administration, or the United States Department of Commerce. All NOAA GitHub project code is provided on an 'as is' basis and the user assumes responsibility for its use. Any claims against the Department of Commerce or Department of Commerce bureaus stemming from the use of this GitHub project will be governed by all applicable Federal law. Any reference to specific commercial products, processes, or services by service mark, trademark, manufacturer, or otherwise, does not constitute or imply their endorsement, recommendation or favoring by the Department of Commerce. The Department of Commerce seal and logo, or the seal and logo of a DOC bureau, shall not be used in any manner to imply endorsement of any commercial product or activity by DOC or the United States Government.
|
|
@@ -0,0 +1,65 @@
|
|
|
1
|
+
# tdlpackio
|
|
2
|
+
|
|
3
|
+
[](https://opensource.org/licenses/MIT)
|
|
4
|
+
|
|
5
|
+
[](https://www.python.org/downloads/release/python-3100/)
|
|
6
|
+
[](https://www.python.org/downloads/release/python-3110/)
|
|
7
|
+
[](https://www.python.org/downloads/release/python-3120/)
|
|
8
|
+
[](https://www.python.org/downloads/release/python-3130/)
|
|
9
|
+
[](https://www.python.org/downloads/release/python-3140/)
|
|
10
|
+
|
|
11
|
+

|
|
12
|
+

|
|
13
|
+
|
|
14
|
+

|
|
15
|
+

|
|
16
|
+
|
|
17
|
+
[](https://anaconda.org/conda-forge/tdlpackio)
|
|
18
|
+
[](https://anaconda.org/conda-forge/tdlpackio)
|
|
19
|
+
[](https://anaconda.org/conda-forge/tdlpackio)
|
|
20
|
+
|
|
21
|
+
## Introduction
|
|
22
|
+
|
|
23
|
+
tdlpackio provides a Python interface for reading and writing TDLPACK files. The NOAA/NWS Meteorological Development Lab ([MDL](https://www.weather.gov/mdl/)) produces [Model Output Statistics (MOS)](https://vlab.noaa.gov/web/mdl/mos) and the [National Blend of Models (NBM)](https://vlab.noaa.gov/web/mdl/nbm). These products are generated from the MDL's in-house MOS-2000 Software System (MOS2K). The MOS2K system defines a GRIB-like data format called TDLPACK. A brief introduction to TDLPACK files and data format can be found [here](TDLPACK.md). tdlpackio provides a Cython extension module, tdlpacklib, which provides an interface to [libtdlpack](https://github.com/NOAA-MDL/libtdlpack), a Fortran-based library containing a subset of MOS2K subroutines.
|
|
24
|
+
|
|
25
|
+
## Requirements
|
|
26
|
+
* Python 3.10, 3.11, 3.12, 3.13, 3.14
|
|
27
|
+
* [libtdlpack](https://github.com/NOAA-MDL/libtdlpack) 1.0.0+
|
|
28
|
+
* setuptools
|
|
29
|
+
* cython 3.0+
|
|
30
|
+
* numpy 1.26+
|
|
31
|
+
* pyproj 1.9.6+
|
|
32
|
+
* C compiler: GNU, Intel, and Apple Clang have been tested.
|
|
33
|
+
|
|
34
|
+
## Required External Libraries
|
|
35
|
+
|
|
36
|
+
### libtdlpack
|
|
37
|
+
The [libtdlpack](https://github.com/NOAA-MDL/libtdlpack) library is required for tdlpackio. You will have to build and install this yourself, but this is not difficult. For macOS users, libtdlpack can be installed via [this Homebrew Tap](https://github.com/NOAA-MDL/homebrew-mdllibs). If you use the *conda ecosystems, then you can install via `conda install -c conda-forge libtdlpack`.
|
|
38
|
+
|
|
39
|
+
## Install
|
|
40
|
+
|
|
41
|
+
Once again, this assumes that libtdlpack has been installed. If libtdlpack has been installed into a "common" installation path, then it will be found, otherwise define environment variable `TDLPACK_DIR` with the installation path.
|
|
42
|
+
|
|
43
|
+
* From [PyPI](https://pypi.python.org/pypi/tdlpackio) via pip:
|
|
44
|
+
|
|
45
|
+
```
|
|
46
|
+
pip install tdlpackio
|
|
47
|
+
```
|
|
48
|
+
* From [conda-forge](https://anaconda.org/conda-forge/tdlpackio) via conda:
|
|
49
|
+
|
|
50
|
+
```
|
|
51
|
+
conda install -c conda-forge tdlpackio
|
|
52
|
+
```
|
|
53
|
+
|
|
54
|
+
* From source:
|
|
55
|
+
```shell
|
|
56
|
+
pip install .
|
|
57
|
+
```
|
|
58
|
+
|
|
59
|
+
## Development
|
|
60
|
+
|
|
61
|
+
The development evolution of tdlpackio will mainly focus on how best to serve that purpose and its primary users -- mainly meteorologists, physical scientists, and software developers supporting the missions within NOAA's National Weather Service (NWS) and National Centers for Environmental Prediction (NCEP), and other NOAA organizations.
|
|
62
|
+
|
|
63
|
+
## Disclaimer
|
|
64
|
+
|
|
65
|
+
This repository is a scientific product and is not official communication of the National Oceanic and Atmospheric Administration, or the United States Department of Commerce. All NOAA GitHub project code is provided on an 'as is' basis and the user assumes responsibility for its use. Any claims against the Department of Commerce or Department of Commerce bureaus stemming from the use of this GitHub project will be governed by all applicable Federal law. Any reference to specific commercial products, processes, or services by service mark, trademark, manufacturer, or otherwise, does not constitute or imply their endorsement, recommendation or favoring by the Department of Commerce. The Department of Commerce seal and logo, or the seal and logo of a DOC bureau, shall not be used in any manner to imply endorsement of any commercial product or activity by DOC or the United States Government.
|
|
@@ -0,0 +1,40 @@
|
|
|
1
|
+
# TDLPACK
|
|
2
|
+
|
|
3
|
+
## TDLPACK File Types
|
|
4
|
+
|
|
5
|
+
Before we describe the TDLPACK data format, one first needs to know the different types of files in which a TDLPACK record can reside. There are two file types: Fortran unformatted (variable record length) and Fortran direct-access (fixed record length). In the MOS2K world, these are known as sequential and random access files, respectively.
|
|
6
|
+
|
|
7
|
+
### Sequential Files
|
|
8
|
+
|
|
9
|
+
This is a Fortran unformatted, variable record length file. The TDLPACK record is contained within a Fortran record and is preceded by 8 bytes that contain the record length in bytes of the TDLPACK record. A TDLPACK sequential file can also contain 2 other types of records: station call letter records and trailer records. Station call letters in MOS2K are of type CHARACTER*8.
|
|
10
|
+
|
|
11
|
+
Station call letter record is of the following format in bytes:
|
|
12
|
+
* 1 - 8: Size of the station call letters in bytes (number of stations * 8)
|
|
13
|
+
* 9 - _n_: Station Call Letters
|
|
14
|
+
|
|
15
|
+
Trailer record is of the following format in bytes:
|
|
16
|
+
* 1 - 8: Record length
|
|
17
|
+
* 9 - _n_: Trailer record consisting of date/time information
|
|
18
|
+
|
|
19
|
+
### Random Access Files
|
|
20
|
+
|
|
21
|
+
This is a Fortran direct access, fixed record length file. One can think of these files like a book in that there is a table of contents (i.e. key records) that point to where specific TDLPACK records (i.e. data records) exist within the file. Also, there exists a Master Key Record at the beginning of the file. In a Random Access File, a TDLPACK record is considered a logical record because it can span multiple (Fortran) physical records. A TDLPACK random access file can also contain a station call letter record and is the same format as discussed above, however, accessing this record is different.
|
|
22
|
+
|
|
23
|
+
## TDLPACK Data Format
|
|
24
|
+
|
|
25
|
+
The following will attempt to briefly explain the TDLPACK format. Please read the official documentation [here](https://www.weather.gov/media/mdl/TDL_OfficeNote00-1.pdf). TDLPACK is a big-endian binary data format and is GRIB-like in that it contains an initial 4-character string to identify the data format "TDLP"; identification sections (Indicator, Product, and Grid); Data section; and an End Section that contains "7777". A more detailed description of TDLPACK sections can be found in chapter 5 of the official documentation. TDLPACK contains the following sections:
|
|
26
|
+
|
|
27
|
+
* Section 0 - Indicator Section
|
|
28
|
+
* Section 1 - Product Definition Section
|
|
29
|
+
* Section 2 - Grid Definition Section (**NOTE**: Not present when data are vector)
|
|
30
|
+
* Section 3 - Bitmap Section (defined, but never supported in code)
|
|
31
|
+
* Section 4 - Data Section
|
|
32
|
+
* Section 5 - End Section
|
|
33
|
+
|
|
34
|
+
A TDLPACK record can contain 2 types of data: vector data (i.e. most likely data at "stations"); gridded data (i.e. data at regularly spaced projected gridpoints). TDLPACK only supports the following map projections:
|
|
35
|
+
|
|
36
|
+
* Lambert Conformal Conic
|
|
37
|
+
* Polar Stereographic
|
|
38
|
+
* Mercator
|
|
39
|
+
|
|
40
|
+
There is no support for geographic grids (i.e. latitude-longitude grids).
|
|
@@ -0,0 +1,384 @@
|
|
|
1
|
+
"""
|
|
2
|
+
TdlpackIO is a pure Python implementation for performing IO with TDLPACK sequential files
|
|
3
|
+
(i.e. Fortran unformatted files). Instead of using Fortran to perform IO, we are using
|
|
4
|
+
Python builtins.open() in binary mode. This allows us to perform stream-based IO for TDLPACK
|
|
5
|
+
files. When a file is opened for reading, its contents (TDLPACK records) are automatically
|
|
6
|
+
indexed and stored in a dictionary. The dictionary stores the byte offset of the data record;
|
|
7
|
+
the size of the data record; date and lead time; and MOS-2000 ID.
|
|
8
|
+
|
|
9
|
+
This indexing allows the user to access a TDLPACK sequential file in a random-access nature.
|
|
10
|
+
For example, if a user wants to read the 500th record in the file, the first 499 records in
|
|
11
|
+
their entirety do not need to be read.
|
|
12
|
+
"""
|
|
13
|
+
import logging
|
|
14
|
+
import numpy as np
|
|
15
|
+
import os
|
|
16
|
+
import pdb
|
|
17
|
+
import pytdlpack
|
|
18
|
+
import struct
|
|
19
|
+
import sys
|
|
20
|
+
import warnings
|
|
21
|
+
|
|
22
|
+
__version__ = pytdlpack.__version__ # Share the version number
|
|
23
|
+
|
|
24
|
+
_IS_PYTHON3 = sys.version_info.major >= 3
|
|
25
|
+
|
|
26
|
+
if _IS_PYTHON3:
|
|
27
|
+
import builtins
|
|
28
|
+
else:
|
|
29
|
+
import __builtin__ as builtins
|
|
30
|
+
|
|
31
|
+
ONE_MB = 1048576
|
|
32
|
+
|
|
33
|
+
class open(object):
    """
    Provide stream-based read/write access to a TDLPACK sequential file
    (i.e. a Fortran unformatted, variable record length file).

    When a file is opened for reading, its contents are indexed record by
    record (`_get_index`): for each record the byte offset, size, record
    type, date, lead time, and MOS-2000 ID words are stored so that any
    record can be read directly by its (1-based) record number.  The class
    supports iteration, slicing, and fetching records by date/ID/lead time.
    """

    def __init__(self, filename, mode='r'):
        """
        Class Constructor

        Parameters
        ----------
        filename : str
            File name.
        mode : str, optional, default = 'r'
            File handle mode: 'r' (read), 'w' (write), or 'a' (append).
        """
        if mode == 'r' or mode == 'w':
            mode = mode + 'b'
        elif mode == 'a':
            # BUGFIX: 'a' previously mapped to 'wb', which truncates an
            # existing file; true append mode requires 'ab'.
            mode = 'ab'
        # 1048576 bytes = 1 MB of buffering (value of module constant ONE_MB).
        self._filehandle = builtins.open(filename, mode=mode, buffering=1048576)
        self._hasindex = False
        self._index = {}
        self.mode = mode
        self.name = os.path.abspath(filename)
        self.records = 0
        self.recordnumber = 0
        self.size = os.path.getsize(self.name)
        # Perform indexing on read.
        if 'r' in self.mode:
            self._get_index()

    def __enter__(self):
        """Support context-manager entry."""
        return self

    def __exit__(self, atype, value, traceback):
        """Close the file handle on context-manager exit."""
        self.close()

    def __iter__(self):
        """Iterate over records starting at the current position."""
        return self

    def __next__(self):
        """Return the next record; raise StopIteration at end of file."""
        if self.recordnumber < self.records:
            return self.read(1)[0]
        else:
            raise StopIteration

    def __repr__(self):
        """Return a printable summary of the public attributes."""
        strings = []
        for k in self.__dict__.keys():
            if not k.startswith('_'):
                strings.append('%s = %s\n' % (k, self.__dict__[k]))
        return ''.join(strings)

    def __getitem__(self, key):
        """
        Return record(s) by 1-based record number (int) or by slice.

        Raises
        ------
        KeyError
            If key is neither an int nor a slice.
        """
        if isinstance(key, slice):
            beg, end, inc = key.indices(self.records)
            self.seek(beg)
            return [self.record(i+1) for i in range(beg, end, inc)]
        elif isinstance(key, int):
            if key == 0: return None
            self.seek(key)
            return self.record(key)
        else:
            raise KeyError('Key must be an integer record number or a slice')

    def _get_index(self):
        """
        Perform indexing of data records.

        For every Fortran record: read the 4-byte Fortran record header,
        then up to 132 bytes -- enough to classify the record as TDLPACK
        data ('PLDT' magic), a trailer, or a station call letter record --
        and append the record's metadata to ``self._index``.
        """
        # Initialize index dictionary.
        self._index['offset'] = []
        self._index['size'] = []
        self._index['type'] = []
        self._index['date'] = []
        self._index['lead'] = []
        self._index['id1'] = []
        self._index['id2'] = []
        self._index['id3'] = []
        self._index['id4'] = []
        self._index['dims'] = []
        self._index['linked_station_id_record'] = []
        _last_station_id_record = 0

        # Iterate over Fortran records until a short read (struct.error).
        while True:
            try:
                # First read the 4-byte Fortran record header, then read the
                # next (up to) 132 bytes which provides enough information to
                # catalog the data record.
                pos = self._filehandle.tell()
                fortran_header = struct.unpack('>i', self._filehandle.read(4))[0]
                if fortran_header >= 132:
                    bytes_to_read = 132
                else:
                    bytes_to_read = fortran_header
                temp = np.frombuffer(self._filehandle.read(bytes_to_read), dtype='>i4')
                # Third 32-bit word holds the 4-character format magic.
                _header = struct.unpack('>4s', temp.tobytes()[8:12])[0].decode()

                # Check the first 4 bytes of the data record to determine the
                # data record type.
                if _header == 'PLDT':
                    # TDLPACK data record.  Build a dimension dictionary per
                    # TDLPACK record and store it in the index.
                    _dimdict = {}
                    _pos = 16 + temp.tobytes()[16]
                    if bool(int(bin(temp.tobytes()[17])[-1])):
                        # Gridded data.
                        _dimdict['nx'] = struct.unpack('>h', temp.tobytes()[_pos+2:_pos+4])[0]
                        _dimdict['ny'] = struct.unpack('>h', temp.tobytes()[_pos+4:_pos+6])[0]
                    else:
                        # Vector (station) data.
                        _dimdict['nsta'] = struct.unpack('>i', temp.tobytes()[_pos+4:_pos+8])[0]
                    self._index['size'].append(temp[1])
                    self._index['type'].append('data')
                    self._index['date'].append(temp[6])
                    self._index['lead'].append(int(str(temp[9])[-3:]))
                    self._index['id1'].append(temp[7])
                    self._index['id2'].append(temp[8])
                    self._index['id3'].append(temp[9])
                    self._index['id4'].append(temp[10])
                    self._index['dims'].append(_dimdict)
                    self._index['linked_station_id_record'].append(_last_station_id_record)
                else:
                    if temp[1] == 24 and temp[6] == 9999:
                        # Trailer record.
                        self._index['size'].append(temp[1])
                        self._index['type'].append('trailer')
                        self._index['date'].append(None)
                        self._index['lead'].append(None)
                        self._index['id1'].append(None)
                        self._index['id2'].append(None)
                        self._index['id3'].append(None)
                        self._index['id4'].append(None)
                        self._index['dims'].append(None)
                        self._index['linked_station_id_record'].append(_last_station_id_record)
                    else:
                        # Station ID record.
                        self._index['size'].append(temp[1])
                        self._index['type'].append('station')
                        self._index['date'].append(None)
                        self._index['lead'].append(None)
                        self._index['id1'].append(400001000)
                        self._index['id2'].append(0)
                        self._index['id3'].append(0)
                        self._index['id4'].append(0)
                        self._index['dims'].append(None)
                        self._index['linked_station_id_record'].append(_last_station_id_record)

                # At this point we have successfully identified a TDLPACK
                # record.  Increment self.records (includes trailer records)
                # and position the file pointer to read the Fortran trailer.
                self.records += 1
                self._filehandle.seek(fortran_header-bytes_to_read, 1)
                fortran_trailer = struct.unpack('>i', self._filehandle.read(4))[0]

                # Fortran header and trailer for a record must agree.
                if fortran_header != fortran_trailer:
                    raise IOError('Bad Fortran record.')

                # NOTE: The 'offset' key contains the byte position in the
                # file where the data record begins.  A value of 12 accounts
                # for the 4-byte Fortran header, 4-byte "trash", and 4-byte
                # ioctet value (already stored in the index).
                self._index['offset'].append(pos+12)

                # Hold the record number of the last station ID record.
                if self._index['type'][-1] == 'station':
                    _last_station_id_record = self.records

            except struct.error:
                # Short read at end of file: rewind and stop indexing.
                self._filehandle.seek(0)
                break

        self._hasindex = True
        self.dates = tuple(sorted(set(list(filter(None, self._index['date'])))))
        self.leadtimes = tuple(sorted(set(list(filter(None, self._index['lead'])))))

    def close(self):
        """Close the file handle."""
        self._filehandle.close()

    def read(self, num=None, unpack=True):
        """
        Read num records from the current position.

        Parameters
        ----------
        num : int, optional
            Number of records to read.  If None (default), read all
            remaining records in the file.
        unpack : bool, optional, default = True
            If True, unpack data and station records after reading.

        Returns
        -------
        list of pytdlpack record objects (possibly empty).
        """
        recs = []
        if num is None:
            # BUGFIX: num=None previously left reclist undefined (NameError)
            # because "num > 1" cannot be evaluated against None.  Treat
            # None as "read the rest of the file".
            num = self.records - self.recordnumber
        if num <= 0:
            return recs
        reclist = list(range(self.recordnumber+1, self.recordnumber+1+num))
        for n in reclist:
            nn = n-1  # 0-based position for self._index referencing.
            kwargs = {}
            self.seek(n)
            kwargs['ioctet'] = self._index['size'][nn]
            kwargs['ipack'] = np.frombuffer(self._filehandle.read(self._index['size'][nn]), dtype='>i4')
            if self._index['type'][nn] == 'data':
                kwargs['reference_date'] = self._index['date'][nn]
                rec = pytdlpack.TdlpackRecord(**kwargs)
                if unpack: rec.unpack()
                recs.append(rec)
            elif self._index['type'][nn] == 'station':
                kwargs['ipack'] = kwargs['ipack'].byteswap()
                kwargs['number_of_stations'] = np.int32(kwargs['ioctet']//pytdlpack.NCHAR)
                rec = pytdlpack.TdlpackStationRecord(**kwargs)
                if unpack: rec.unpack()
                recs.append(rec)
            elif self._index['type'][nn] == 'trailer':
                recs.append(pytdlpack.TdlpackTrailerRecord(**kwargs))
            self.recordnumber = n
        return recs

    def record(self, rec, unpack=True):
        """
        Read the rec-th (1-based) record.

        Returns None (with a warning) for out-of-range record numbers.
        """
        if rec is None:
            return None
        if rec <= 0:
            warnings.warn("Record numbers begin at 1.")
            return None
        elif rec > self.records:
            warnings.warn("Not that many records in the file.")
            return None
        else:
            self.seek(rec)  # Use the actual record number here.
            return self.read(1, unpack=unpack)[0]

    def seek(self, offset):
        """
        Set the position within the file in units of data records.

        Note: negative offsets are silently ignored (no index entry).
        """
        if self._hasindex:
            if offset == 0:
                self._filehandle.seek(self._index['offset'][offset])
                self.recordnumber = offset
            elif offset > 0:
                self._filehandle.seek(self._index['offset'][offset-1])
                self.recordnumber = offset-1

    def fetch(self, date=None, id=None, lead=None, unpack=True):
        """
        Fetch TDLPACK data records by date, MOS-2000 ID, lead time, or any
        combination thereof.

        Parameters
        ----------
        date : int or list of int, optional
            Reference date(s) to match.
        id : str or list of int, optional
            MOS-2000 ID -- all 4 words are required; a word of -1 acts as
            a wildcard.
        lead : int or list of int, optional
            Lead time(s) to match.
        unpack : bool, optional, default = True
            If True, unpack matching records.

        Returns
        -------
        list of matching records.
        """
        recs = []
        idx = None
        match_count = 0

        # Match by date.
        if type(date) is not list:
            if date is None:
                date = []
            else:
                date = [date]
        if len(date) > 0: match_count += 1
        for d in date:
            if d is not None:
                if idx is None:
                    idx = np.where(np.array(self._index['date'])==d)[0]
                else:
                    idx = np.concatenate((idx, np.where(np.array(self._index['date'])==d)[0]))

        # Match by ID.
        if id is not None:
            if type(id) is str:
                # All 4 ID words are required for now.
                id = [int(i) for i in list(filter(None, id.split(' ')))]
            # Match by MOS ID (all 4 words); -1 is a wildcard per word.
            # (A stray debug print(id) was removed here.)
            match_count += 4
            allrecs = np.arange(self.records)
            # ID1
            if id[0] == -1:
                idx1 = allrecs
            elif id[0] >= 0:
                idx1 = np.where(np.array(self._index['id1'])==id[0])[0]
            # ID2
            if id[1] == -1:
                idx2 = allrecs
            elif id[1] >= 0:
                idx2 = np.where(np.array(self._index['id2'])==id[1])[0]
            # ID3
            if id[2] == -1:
                idx3 = allrecs
            elif id[2] >= 0:
                idx3 = np.where(np.array(self._index['id3'])==id[2])[0]
            # ID4
            if id[3] == -1:
                idx4 = allrecs
            elif id[3] >= 0:
                idx4 = np.where(np.array(self._index['id4'])==id[3])[0]

            if idx is not None:
                idx = np.concatenate((idx, idx1, idx2, idx3, idx4))
            else:
                idx = np.concatenate((idx1, idx2, idx3, idx4))

        # Match by lead time(s).
        if type(lead) is not list:
            if lead is None:
                lead = []
            else:
                lead = [lead]
        if len(lead) > 0: match_count += 1
        for l in lead:
            if l is not None:
                if idx is None:
                    idx = np.where(np.array(self._index['lead'])==l)[0]
                else:
                    idx = np.concatenate((idx, np.where(np.array(self._index['lead'])==l)[0]))

        # Determine the count of unique index values.  A record matches only
        # when its index value occurred once per criterion (match_count).
        vals, cnts = np.unique(idx, return_counts=True)
        idx = vals[np.where(cnts==match_count)[0]]

        # Iterate over the matching index values and build the list of records.
        for i in idx:
            recs.append(self.record(i+1, unpack=unpack))
        return recs

    def tell(self):
        """Return the position in units of records."""
        return self.recordnumber
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
2.0.0b1
|
|
@@ -0,0 +1,9 @@
|
|
|
1
|
+
#!/bin/sh
# ----------------------------------------------------------------------------------------
# Build the API documentation for tdlpackio.
#
# Uses real pdoc (https://github.com/mitmproxy/pdoc), not pdoc3.  Install with:
#
#     pip install pdoc
# ----------------------------------------------------------------------------------------
PYTHONPATH=src
export PYTHONPATH
VERSION=$(cat ./VERSION)
export VERSION
python -m pdoc -d numpy --footer-text "tdlpackio v${VERSION}" -o docs tdlpackio
|