pypromice 1.4.0__py3-none-any.whl → 1.4.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of pypromice might be problematic.
- pypromice/postprocess/bufr_to_csv.py +9 -2
- pypromice/process/L2toL3.py +1 -0
- pypromice/process/aws.py +10 -1
- pypromice/process/resample.py +21 -2
- pypromice/utilities/git.py +8 -7
- {pypromice-1.4.0.dist-info → pypromice-1.4.1.dist-info}/METADATA +2 -2
- {pypromice-1.4.0.dist-info → pypromice-1.4.1.dist-info}/RECORD +11 -11
- {pypromice-1.4.0.dist-info → pypromice-1.4.1.dist-info}/WHEEL +1 -1
- {pypromice-1.4.0.dist-info → pypromice-1.4.1.dist-info}/LICENSE.txt +0 -0
- {pypromice-1.4.0.dist-info → pypromice-1.4.1.dist-info}/entry_points.txt +0 -0
- {pypromice-1.4.0.dist-info → pypromice-1.4.1.dist-info}/top_level.txt +0 -0
pypromice/postprocess/bufr_to_csv.py
CHANGED

@@ -1,15 +1,22 @@
 import argparse
 from pathlib import Path
 
+import pandas as pd
+
 from pypromice.postprocess.bufr_utilities import read_bufr_file
 
 
 def main():
     parser = argparse.ArgumentParser("BUFR to CSV converter")
-    parser.add_argument("path", type=Path)
+    parser.add_argument("path", type=Path, nargs='+')
     args = parser.parse_args()
 
-    print(read_bufr_file(args.path).to_csv())
+    paths = []
+    for path in args.path:
+        paths += list(path.parent.glob(path.name))
+
+    df = pd.concat([read_bufr_file(path) for path in paths])
+    print(df.to_csv())
 
 
 if __name__ == "__main__":
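The path argument now takes one or more values (nargs='+') and each one is glob-expanded against its parent directory, so a quoted pattern such as 'bufr_out/*.bufr' still resolves even when the shell passes it through literally, and the resulting frames are concatenated before printing. A small runnable sketch of that expansion logic; the file names are invented and load_stub is a placeholder for read_bufr_file:

    import tempfile
    from pathlib import Path

    import pandas as pd

    def load_stub(path: Path) -> pd.DataFrame:
        # Placeholder for pypromice.postprocess.bufr_utilities.read_bufr_file,
        # which returns a DataFrame of decoded BUFR observations.
        return pd.DataFrame({"source_file": [path.name]})

    # Create two dummy files so the pattern below matches something.
    tmp = Path(tempfile.mkdtemp())
    for name in ("DY2_20240101.bufr", "DY2_20240102.bufr"):
        (tmp / name).touch()

    # One or more CLI path arguments; a quoted wildcard reaches the script unexpanded.
    args_path = [tmp / "*.bufr"]

    paths = []
    for path in args_path:
        # Expand each argument against its parent directory, so both literal
        # file names and wildcard patterns resolve to concrete paths.
        paths += list(path.parent.glob(path.name))

    df = pd.concat([load_stub(p) for p in paths])
    print(df.to_csv())
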
pypromice/process/L2toL3.py
CHANGED

@@ -254,6 +254,7 @@ def process_surface_height(ds, data_adjustments_dir, station_config={}):
 
         ds['z_surf_combined'] = np.maximum(ds['z_surf_combined'], ds['z_ice_surf'])
         ds['snow_height'] = np.maximum(0, ds['z_surf_combined'] - ds['z_ice_surf'])
+        ds['z_ice_surf'] = ds['z_ice_surf'].where(ds.snow_height.notnull())
     elif ds.attrs['site_type'] in ['accumulation', 'bedrock']:
         # Handle accumulation and bedrock site types
         ds['z_ice_surf'] = ('time', ds['z_surf_1'].data * np.nan)
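The added line masks z_ice_surf with xarray's .where() so it is only reported where snow_height could actually be computed. A minimal sketch of the pattern on synthetic data (time stamps and values are invented, variable names reused from the diff):

    import numpy as np
    import pandas as pd
    import xarray as xr

    time = pd.date_range("2024-05-01", periods=4, freq="D")
    ds = xr.Dataset(
        {
            "z_surf_combined": ("time", [1.2, 1.1, np.nan, 1.0]),
            "z_ice_surf": ("time", [0.8, 0.8, 0.8, 0.8]),
        },
        coords={"time": time},
    )

    # Same construction as in L2toL3.py: snow height is the non-negative
    # difference between the combined surface and the ice surface.
    ds["snow_height"] = np.maximum(0, ds["z_surf_combined"] - ds["z_ice_surf"])

    # .where(cond) keeps values where cond is True and inserts NaN elsewhere,
    # so z_ice_surf becomes undefined wherever snow_height is undefined.
    ds["z_ice_surf"] = ds["z_ice_surf"].where(ds.snow_height.notnull())
    print(ds["z_ice_surf"].values)  # [0.8 0.8 nan 0.8]
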
pypromice/process/aws.py
CHANGED

@@ -55,7 +55,15 @@ class AWS(object):
         """
         assert os.path.isfile(config_file), "cannot find " + config_file
         assert os.path.isdir(inpath), "cannot find " + inpath
-        logger.info(
+        logger.info(
+            "AWS("
+            f"config_file={config_file},"
+            f" inpath={inpath},"
+            f" data_issues_repository={data_issues_repository},"
+            f" var_file={var_file},"
+            f" meta_file={meta_file}"
+            ")"
+        )
 
         # Load config, variables CSF standards, and L0 files
         self.config = self.loadConfig(config_file, inpath)

@@ -73,6 +81,7 @@ class AWS(object):
             l0_data_root=inpath_hash,
             data_issues=data_issues_hash,
         )
+        logger.debug('Source information: %s', source_dict)
         self.meta["source"] = json.dumps(source_dict)
 
         # Load config file
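The new debug statement passes source_dict to the logger with a %s placeholder, so the dictionary is only formatted when DEBUG output is actually enabled, and the same dictionary is then serialised into the dataset metadata. A self-contained sketch of that pattern; the logger name and hash values here are invented:

    import json
    import logging

    logging.basicConfig(level=logging.DEBUG)
    logger = logging.getLogger("aws_example")

    # Invented stand-in for the provenance dict built in AWS.__init__.
    source_dict = {"l0_data_root": "abc123", "data_issues": "def456"}

    # Lazy %-style formatting: the string is only interpolated if a handler
    # accepts DEBUG records, unlike an f-string which formats unconditionally.
    logger.debug("Source information: %s", source_dict)

    # The same dict is what ends up in the dataset metadata.
    meta_source = json.dumps(source_dict)
    print(meta_source)
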
pypromice/process/resample.py
CHANGED

@@ -34,6 +34,15 @@ def resample_dataset(ds_h, t):
     '''
     df_d = ds_h.to_dataframe().resample(t).mean()
 
+    # taking the 10 min data and using it as instantaneous values:
+    if (t == '60min') and (ds_h.time.diff(dim='time').isel(time=0).dt.total_seconds() == 600):
+        cols_to_update = ['p_i', 't_i', 'rh_i', 'rh_i_cor', 'wspd_i', 'wdir_i','wspd_x_i','wspd_y_i']
+        for col in cols_to_update:
+            df_d[col] = ds_h.reindex(time=df_d.index)[col.replace('_i','_u')].values
+            if col == 'p_i':
+                df_d[col] = df_d[col].values-1000
+
+
     # recalculating wind direction from averaged directional wind speeds
     for var in ['wdir_u','wdir_l']:
         boom = var.split('_')[1]
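The hunk above treats 10-minute input specially when hourly resampling is requested: instead of averaging, the instantaneous columns are filled with the 10-minute sample that falls on each full hour, via a reindex onto the hourly index. A runnable sketch of that selection on synthetic data; only a wind-speed column is shown, following the _u/_i naming from the diff, and the values are invented:

    import numpy as np
    import pandas as pd
    import xarray as xr

    # Synthetic 10-minute record covering three hours.
    time = pd.date_range("2024-05-01", periods=18, freq="10min")
    ds_h = xr.Dataset(
        {"wspd_u": ("time", np.linspace(2.0, 10.0, time.size))},
        coords={"time": time},
    )

    # Hourly means, as resample_dataset computes them first.
    df_d = ds_h.to_dataframe().resample("60min").mean()

    # Detect 10-minute input from the first time step, as in the diff.
    step_s = ds_h.time.diff(dim="time").isel(time=0).dt.total_seconds()
    if step_s == 600:
        # Reindexing onto the hourly index picks the sample at each full hour,
        # which then serves as the "instantaneous" value wspd_i.
        df_d["wspd_i"] = ds_h.reindex(time=df_d.index)["wspd_u"].values

    print(df_d)
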
@@ -60,9 +69,19 @@ def resample_dataset(ds_h, t):
         if var+'_cor' in df_d.keys():
             df_d[var+'_cor'] = (p_vap.to_series().resample(t).mean() \
                                 / es_cor.to_series().resample(t).mean())*100
+
+    # passing each variable attribute to the ressample dataset
+    vals = []
+    for c in df_d.columns:
+        if c in ds_h.data_vars:
+            vals.append(xr.DataArray(
+                data=df_d[c], dims=['time'],
+                coords={'time':df_d.index}, attrs=ds_h[c].attrs))
+        else:
+            vals.append(xr.DataArray(
+                data=df_d[c], dims=['time'],
+                coords={'time':df_d.index}, attrs=None))
 
-    vals = [xr.DataArray(data=df_d[c], dims=['time'],
-                         coords={'time':df_d.index}, attrs=ds_h[c].attrs) for c in df_d.columns]
     ds_d = xr.Dataset(dict(zip(df_d.columns,vals)), attrs=ds_h.attrs)
     return ds_d
 
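The rebuilt loop exists because the to_dataframe()/resample() round trip drops variable attributes; columns that still correspond to a variable in the hourly dataset now get their attrs copied back, while derived columns get none. A compact sketch of that round trip, with an invented variable and attribute set:

    import numpy as np
    import pandas as pd
    import xarray as xr

    time = pd.date_range("2024-05-01", periods=12, freq="h")
    ds_h = xr.Dataset(
        {"t_u": ("time", np.random.default_rng(0).normal(-5, 2, time.size))},
        coords={"time": time},
    )
    ds_h["t_u"].attrs = {"units": "degrees_C", "long_name": "Air temperature (upper boom)"}

    # Pandas resampling strips xarray attributes entirely.
    df_d = ds_h.to_dataframe().resample("3h").mean()

    # Rebuild each column as a DataArray, restoring attrs for columns that
    # still correspond to a variable in the source dataset.
    vals = []
    for c in df_d.columns:
        attrs = ds_h[c].attrs if c in ds_h.data_vars else None
        vals.append(xr.DataArray(df_d[c], dims=["time"],
                                 coords={"time": df_d.index}, attrs=attrs))

    ds_d = xr.Dataset(dict(zip(df_d.columns, vals)), attrs=ds_h.attrs)
    print(ds_d["t_u"].attrs)  # attributes survive the resampling
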
pypromice/utilities/git.py
CHANGED

@@ -7,12 +7,16 @@ import logging
 logger = logging.getLogger(__name__)
 
 
-def get_commit_hash_and_check_dirty(file_path) -> str:
-    repo_path = file_path.parent
+def get_commit_hash_and_check_dirty(file_path: str | Path) -> str:
+    if isinstance(file_path, str):
+        file_path = Path(file_path)
+    if file_path.is_dir():
+        repo_path = file_path
+    else:
+        repo_path = file_path.parent
 
     try:
         # Ensure the file path is relative to the repository
-        relative_file_path = os.path.relpath(file_path, repo_path)
 
         # Get the latest commit hash for the file
         commit_hash = (

@@ -25,8 +29,6 @@ def get_commit_hash_and_check_dirty(file_path) -> str:
                     "-n",
                     "1",
                     "--pretty=format:%H",
-                    #"--",
-                    #relative_file_path,
                 ],
                 stderr=subprocess.STDOUT,
             )

@@ -49,12 +51,11 @@ def get_commit_hash_and_check_dirty(file_path) -> str:
 
         if is_dirty:
             logger.warning(f"Warning: The file {file_path} is dirty compared to the last commit. {commit_hash}")
-            return '
+            return f'{commit_hash} (dirty)'
         if commit_hash == "":
             logger.warning(f"Warning: The file {file_path} is not under version control.")
             return 'unknown'
 
-        print(f"Commit hash: {commit_hash}")
         return commit_hash
     except subprocess.CalledProcessError as e:
         logger.warning(f"Error: {e.output.decode('utf-8')}")
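With the widened signature the helper accepts a str or Path, and either a file inside the repository or the repository directory itself; it returns the commit hash, '<hash> (dirty)' for a modified working tree, or 'unknown' when the path is not under version control. A minimal usage sketch, assuming a git checkout in the current directory and git available on PATH:

    from pathlib import Path

    from pypromice.utilities.git import get_commit_hash_and_check_dirty

    # Either form works now: a directory (e.g. the data-issues repository root)
    # or a file inside it, passed as str or Path.
    repo_dir = Path(".")  # assumed to be a git checkout
    version = get_commit_hash_and_check_dirty(repo_dir)

    # Possible results: a plain commit hash, "<hash> (dirty)", or "unknown".
    print(version)
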
{pypromice-1.4.0.dist-info → pypromice-1.4.1.dist-info}/METADATA
CHANGED

@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: pypromice
-Version: 1.4.0
+Version: 1.4.1
 Summary: PROMICE/GC-Net data processing toolbox
 Home-page: https://github.com/GEUS-Glaciology-and-Climate/pypromice
 Author: GEUS Glaciology and Climate

@@ -15,7 +15,7 @@ Classifier: Intended Audience :: Science/Research
 Classifier: Natural Language :: English
 Classifier: Topic :: Scientific/Engineering
 Classifier: Operating System :: OS Independent
-Requires-Python: >=3.
+Requires-Python: >=3.10
 Description-Content-Type: text/markdown
 License-File: LICENSE.txt
 Requires-Dist: numpy ~=1.23

{pypromice-1.4.0.dist-info → pypromice-1.4.1.dist-info}/RECORD
CHANGED

@@ -4,7 +4,7 @@ pypromice/get/__init__.py,sha256=n2L6P9EeUsdjsHaeU7BEanBjlkCBX9csGseT8z-laew,32
 pypromice/get/get.py,sha256=8tdIbvdeXCpRWU7KmcKGIP9ZPdqIry3MjtJp9krumvo,7705
 pypromice/get/get_promice_data.py,sha256=bluNCaP50iRlWBzdEOXLrSPepOQdGB7SeQLkTWiqK4c,1806
 pypromice/postprocess/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-pypromice/postprocess/bufr_to_csv.py,sha256=
+pypromice/postprocess/bufr_to_csv.py,sha256=ntTJZhfBRYPbLn2u9GzO5PFCkg9DIypUZONYEs2XrqA,507
 pypromice/postprocess/bufr_utilities.py,sha256=bc9U7ZC_7jtPbzaLYNnsz-nq3xn_PHGkgk1bVWa3ltc,21176
 pypromice/postprocess/create_bufr_files.py,sha256=GizmNAW_v54MkvelPVzqIklBNHWH6nTRQC2qOo5amys,5657
 pypromice/postprocess/get_bufr.py,sha256=98BZ7tTbmvvrVAzhxJ8LgHib3w7Q3S_x757pCubHToU,16693

@@ -13,15 +13,15 @@ pypromice/postprocess/positions_seed.csv,sha256=0kVCQ8UfEALdeXNYCddmwxpseRqLRudb
 pypromice/postprocess/real_time_utilities.py,sha256=2GZUua5R8ocp-TnIjEA4ig8lC4TlI3SkiimzLxoHXFE,9476
 pypromice/process/L0toL1.py,sha256=aQmVEXhtRisHNrJuPGIg7NA4hjjmV9jUBu1DClRIGzs,23175
 pypromice/process/L1toL2.py,sha256=Hcelx8kYDq5ERtzdAO1YUuBu8_aRwgWJ9FEDHuAVZu8,30478
-pypromice/process/L2toL3.py,sha256=
+pypromice/process/L2toL3.py,sha256=xJXEHqfgWtKj5DxmuFq2F_Ynj0gMEpBxw3-RKpcnfUs,62311
 pypromice/process/__init__.py,sha256=xvd0I-9nIyVw4M4qjgkQ5vXYpNuKcVSkIVIROQsZDo0,147
-pypromice/process/aws.py,sha256=
+pypromice/process/aws.py,sha256=SLVfJ4VMY6F3DGzlAQXvgHqdQnqppYZofSU2VEUeRkQ,8960
 pypromice/process/get_l2.py,sha256=ALXJCMJ8qgg0_dEKx-dV5TQ9IAJnLLLGPUxlr5QVfpk,3076
 pypromice/process/get_l2tol3.py,sha256=4Qu2d5rT25H2dObyCc70ivtJg3vw6WA-hzI-kRD6ybQ,4544
 pypromice/process/join_l2.py,sha256=ifjuhFR9scVvZt3xuy-ELp-iRchxV1dEK9qJ4UNh5bE,4567
 pypromice/process/join_l3.py,sha256=nLLQbX0vuuvHET8r33ZAt5g1dtIk3foUJ9RZkwkEwE4,20158
 pypromice/process/load.py,sha256=iaFvJeaDanAA60caVj4BWupZpqgQNj3CiNU4csz9FdU,4585
-pypromice/process/resample.py,sha256=
+pypromice/process/resample.py,sha256=caOqcO3AQQ6ejEbVd2AcCKycQps7U0y6GKjLYzWfZnI,5714
 pypromice/process/utilities.py,sha256=1pqSaF3bIbvRNtOjb25mbegHfuW9MY4KpCBDVXWyML8,1773
 pypromice/process/value_clipping.py,sha256=FkBiDT_HK_BDFiVjB7NdWH-_nab7vONG9LOd2PpEBI8,1573
 pypromice/process/write.py,sha256=fRCCK4g_W07M4EEsJErdTSN2Pldr9SLgqM2w_rsp2ZQ,16257

@@ -44,10 +44,10 @@ pypromice/tx/payload_formats.csv,sha256=tzTTNuvmVwlwd7z3aF8A2dhjKNQ4lVumpnNBs3e3
 pypromice/tx/payload_types.csv,sha256=C1-xCmHytAqqAzgzPwBLWqabzWu6s6tKAd8AjVd935s,457
 pypromice/tx/tx.py,sha256=TE5lKYMJF4hAhHrKjMyx1LZWpOHlwGJI9EdlPJrQITs,34251
 pypromice/utilities/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-pypromice/utilities/git.py,sha256=
-pypromice-1.4.
-pypromice-1.4.
-pypromice-1.4.
-pypromice-1.4.
-pypromice-1.4.
-pypromice-1.4.
+pypromice/utilities/git.py,sha256=7EUGjDs_VZucrckakXKyZEclDAZ_mKIxhTWzhopCIxM,1785
+pypromice-1.4.1.dist-info/LICENSE.txt,sha256=gXf5dRMhNSbfLPYYTY_5hsZ1r7UU1OaKQEAQUhuIBkM,18092
+pypromice-1.4.1.dist-info/METADATA,sha256=znYJm-tDsyrWFwkvQ9xE2BEiN9ZofRxO5EtmBYeIhrQ,4762
+pypromice-1.4.1.dist-info/WHEEL,sha256=cVxcB9AmuTcXqmwrtPhNK88dr7IR_b6qagTj0UvIEbY,91
+pypromice-1.4.1.dist-info/entry_points.txt,sha256=ufX1npmY3nqMPtSVRKVxn3MhG9IyFHD5FjPZQcELVXo,618
+pypromice-1.4.1.dist-info/top_level.txt,sha256=cBdfwgSbWDQq3a07nKRjrfmLC7jdaYXs98GG58HpTks,10
+pypromice-1.4.1.dist-info/RECORD,,

{pypromice-1.4.0.dist-info → pypromice-1.4.1.dist-info}/LICENSE.txt
File without changes

{pypromice-1.4.0.dist-info → pypromice-1.4.1.dist-info}/entry_points.txt
File without changes

{pypromice-1.4.0.dist-info → pypromice-1.4.1.dist-info}/top_level.txt
File without changes