pypromice 1.3.6__py3-none-any.whl → 1.4.0__py3-none-any.whl

This diff shows the changes between publicly released versions of the package as they appear in their public registry, and is provided for informational purposes only.


Files changed (53)
  1. pypromice/postprocess/bufr_to_csv.py +6 -1
  2. pypromice/postprocess/bufr_utilities.py +91 -18
  3. pypromice/postprocess/create_bufr_files.py +178 -0
  4. pypromice/postprocess/get_bufr.py +248 -397
  5. pypromice/postprocess/make_metadata_csv.py +214 -0
  6. pypromice/postprocess/real_time_utilities.py +41 -11
  7. pypromice/process/L0toL1.py +12 -5
  8. pypromice/process/L1toL2.py +69 -14
  9. pypromice/process/L2toL3.py +1033 -186
  10. pypromice/process/aws.py +130 -808
  11. pypromice/process/get_l2.py +90 -0
  12. pypromice/process/get_l2tol3.py +111 -0
  13. pypromice/process/join_l2.py +112 -0
  14. pypromice/process/join_l3.py +551 -120
  15. pypromice/process/load.py +161 -0
  16. pypromice/process/resample.py +128 -0
  17. pypromice/process/utilities.py +68 -0
  18. pypromice/process/write.py +503 -0
  19. pypromice/qc/github_data_issues.py +10 -16
  20. pypromice/qc/persistence.py +52 -30
  21. pypromice/resources/__init__.py +28 -0
  22. pypromice/{process/metadata.csv → resources/file_attributes.csv} +0 -2
  23. pypromice/resources/variable_aliases_GC-Net.csv +78 -0
  24. pypromice/resources/variables.csv +106 -0
  25. pypromice/station_configuration.py +118 -0
  26. pypromice/tx/get_l0tx.py +7 -4
  27. pypromice/tx/payload_formats.csv +1 -0
  28. pypromice/tx/tx.py +27 -6
  29. pypromice/utilities/__init__.py +0 -0
  30. pypromice/utilities/git.py +61 -0
  31. {pypromice-1.3.6.dist-info → pypromice-1.4.0.dist-info}/METADATA +3 -3
  32. pypromice-1.4.0.dist-info/RECORD +53 -0
  33. {pypromice-1.3.6.dist-info → pypromice-1.4.0.dist-info}/WHEEL +1 -1
  34. pypromice-1.4.0.dist-info/entry_points.txt +13 -0
  35. pypromice/postprocess/station_configurations.toml +0 -762
  36. pypromice/process/get_l3.py +0 -46
  37. pypromice/process/variables.csv +0 -92
  38. pypromice/qc/persistence_test.py +0 -150
  39. pypromice/test/test_config1.toml +0 -69
  40. pypromice/test/test_config2.toml +0 -54
  41. pypromice/test/test_email +0 -75
  42. pypromice/test/test_payload_formats.csv +0 -4
  43. pypromice/test/test_payload_types.csv +0 -7
  44. pypromice/test/test_percentile.py +0 -229
  45. pypromice/test/test_raw1.txt +0 -4468
  46. pypromice/test/test_raw_DataTable2.txt +0 -11167
  47. pypromice/test/test_raw_SlimTableMem1.txt +0 -1155
  48. pypromice/test/test_raw_transmitted1.txt +0 -15411
  49. pypromice/test/test_raw_transmitted2.txt +0 -28
  50. pypromice-1.3.6.dist-info/RECORD +0 -53
  51. pypromice-1.3.6.dist-info/entry_points.txt +0 -8
  52. {pypromice-1.3.6.dist-info → pypromice-1.4.0.dist-info}/LICENSE.txt +0 -0
  53. {pypromice-1.3.6.dist-info → pypromice-1.4.0.dist-info}/top_level.txt +0 -0
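
Note on the resource files: the variables and metadata lookup tables move from pypromice/process into a new pypromice.resources package (items 21-24 above). Judging from the calls in get_l2tol3.py below and the CLI defaults of None, loading them likely works along these lines (a sketch; the None-falls-back-to-bundled-CSV behaviour is inferred from the CLI defaults, not a confirmed API):

    import pypromice.resources

    # Assumed behaviour: passing None falls back to the CSVs bundled with the package
    variables = pypromice.resources.load_variables(None)   # variables.csv look-up table
    metadata = pypromice.resources.load_metadata(None)     # file_attributes.csv metadata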
pypromice/process/get_l2.py (new file)
@@ -0,0 +1,90 @@
+ #!/usr/bin/env python
+ import logging
+ import os
+ import sys
+ from argparse import ArgumentParser
+ from pathlib import Path
+
+ from pypromice.process.aws import AWS
+ from pypromice.process.write import prepare_and_write
+
+
+ def parse_arguments_l2():
+     parser = ArgumentParser(description="AWS L2 processor")
+
+     parser.add_argument('-c', '--config_file', type=str, required=True,
+                         help='Path to config (TOML) file')
+     parser.add_argument('-i', '--inpath', type=str, required=True,
+                         help='Path to input data')
+     parser.add_argument('-o', '--outpath', default=None, type=str, required=False,
+                         help='Path where to write output')
+     parser.add_argument('-v', '--variables', default=None, type=str,
+                         required=False, help='File path to variables look-up table')
+     parser.add_argument('-m', '--metadata', default=None, type=str,
+                         required=False, help='File path to metadata')
+     parser.add_argument('--data_issues_path', '--issues', default=None, help="Path to data issues repository")
+     args = parser.parse_args()
+     return args
+
+
+ def get_l2(config_file, inpath, outpath, variables, metadata, data_issues_path: Path) -> AWS:
+     # Derive the station name and input path from the config file name
+     station_name = config_file.split('/')[-1].split('.')[0]
+     station_path = os.path.join(inpath, station_name)
+
+     # Check that data_issues_path is valid
+     if data_issues_path is None:
+         data_issues_path = Path("../PROMICE-AWS-data-issues")
+         if data_issues_path.exists():
+             logging.warning(f"No data_issues_path given. Using default data issues path: {data_issues_path}")
+         else:
+             raise ValueError("data_issues_path is missing. Please provide a valid path to the data issues repository")
+
+     if os.path.exists(station_path):
+         aws = AWS(config_file,
+                   station_path,
+                   data_issues_repository=data_issues_path,
+                   var_file=variables,
+                   meta_file=metadata)
+     else:
+         aws = AWS(config_file,
+                   inpath,
+                   data_issues_repository=data_issues_path,
+                   var_file=variables,
+                   meta_file=metadata)
+
+     # Perform Level 1 and Level 2 processing
+     aws.getL1()
+     aws.getL2()
+     # Write out Level 2; 10-minute output only applies to raw-format data
+     if outpath is not None:
+         if not os.path.isdir(outpath):
+             os.mkdir(outpath)
+         if aws.L2.attrs['format'] == 'raw':
+             prepare_and_write(aws.L2, outpath, aws.vars, aws.meta, '10min')
+         prepare_and_write(aws.L2, outpath, aws.vars, aws.meta, '60min')
+     return aws
+
+
+ def main():
+     args = parse_arguments_l2()
+
+     logging.basicConfig(
+         format="%(asctime)s; %(levelname)s; %(name)s; %(message)s",
+         level=logging.INFO,
+         stream=sys.stdout,
+     )
+
+     _ = get_l2(
+         args.config_file,
+         args.inpath,
+         args.outpath,
+         args.variables,
+         args.metadata,
+         args.data_issues_path,
+     )
+
+
+ if __name__ == "__main__":
+     main()
+
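As a usage illustration for the new L2 step (a sketch only; the station name, directory layout and data-issues checkout below are hypothetical):

    from pathlib import Path
    from pypromice.process.get_l2 import get_l2

    # Hypothetical paths: a station config TOML, an L0 input tree, an output
    # directory, and a local checkout of the PROMICE-AWS-data-issues repository
    aws = get_l2(
        config_file="aws-l0/raw/config/KPC_U.toml",
        inpath="aws-l0/raw",
        outpath="out/L2",
        variables=None,   # None: use the bundled variables look-up table
        metadata=None,    # None: use the bundled metadata
        data_issues_path=Path("PROMICE-AWS-data-issues"),
    )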
pypromice/process/get_l2tol3.py (new file)
@@ -0,0 +1,111 @@
+ #!/usr/bin/env python
+ import logging
+ import sys
+ from argparse import ArgumentParser
+ from pathlib import Path
+
+ import toml
+ import xarray as xr
+
+ import pypromice.resources
+ from pypromice.process.L2toL3 import toL3
+ from pypromice.process.write import prepare_and_write
+
+ logger = logging.getLogger(__name__)
+
+ def parse_arguments_l2tol3(debug_args=None):
+     parser = ArgumentParser(description="AWS L3 script for processing L3 "+
+                             "data from L2. Hourly, daily and monthly L3 "+
+                             "data products are written to the defined output path")
+     parser.add_argument('-c', '--config_folder', type=str, required=True,
+                         help='Path to folder with site configuration (TOML) files')
+     parser.add_argument('-i', '--inpath', type=str, required=True,
+                         help='Path to Level 2 .nc data file')
+     parser.add_argument('-o', '--outpath', default=None, type=str, required=False,
+                         help='Path where to write output')
+     parser.add_argument('-v', '--variables', default=None, type=str,
+                         required=False, help='File path to variables look-up table')
+     parser.add_argument('-m', '--metadata', default=None, type=str,
+                         required=False, help='File path to metadata')
+     parser.add_argument('--data_issues_path', '--issues', default=None, help="Path to data issues repository")
+
+     args = parser.parse_args(args=debug_args)
+     return args
+
+ def get_l2tol3(config_folder: Path | str, inpath, outpath, variables, metadata, data_issues_path: Path | str):
+     if isinstance(config_folder, str):
+         config_folder = Path(config_folder)
+
+     logging.basicConfig(
+         format="%(asctime)s; %(levelname)s; %(name)s; %(message)s",
+         level=logging.INFO,
+         stream=sys.stdout,
+     )
+
+     # Load the Level 2 dataset from file
+     with xr.open_dataset(inpath) as l2:
+         l2.load()
+
+     # Remove encoding attributes from NetCDF
+     for varname in l2.variables:
+         if l2[varname].encoding != {}:
+             l2[varname].encoding = {}
+
+     if 'bedrock' in l2.attrs.keys():
+         l2.attrs['bedrock'] = l2.attrs['bedrock'] == 'True'
+     if 'number_of_booms' in l2.attrs.keys():
+         l2.attrs['number_of_booms'] = int(l2.attrs['number_of_booms'])
+
+     # Import station_config (dict) from config_folder
+     config_file = config_folder / (l2.attrs['station_id'] + '.toml')
+
+     if config_file.exists():
+         # File exists, load the configuration
+         station_config = toml.load(config_file)
+     else:
+         # File does not exist, initialise with standard info
+         # (this was preferred by RSF over exiting with an error)
+         logger.error("\n***\nNo station_configuration file for %s.\nPlease create one in AWS-L0/metadata/station_configurations.\n***" % l2.attrs['station_id'])
+         station_config = {"stid": l2.attrs['station_id'],
+                           "station_site": l2.attrs['station_id'],
+                           "project": "PROMICE",
+                           "location_type": "ice sheet",
+                           }
+
+     # Check that the adjustments directory is properly given
+     if data_issues_path is None:
+         data_issues_path = Path("../PROMICE-AWS-data-issues")
+         if data_issues_path.exists():
+             logger.warning(f"No data_issues_path given. Using default data issues path: {data_issues_path}")
+         else:
+             raise ValueError("data_issues_path is missing. Please provide a valid path to the data issues repository")
+     else:
+         data_issues_path = Path(data_issues_path)
+
+     data_adjustments_dir = data_issues_path / "adjustments"
+
+     # Perform Level 3 processing
+     l3 = toL3(l2, data_adjustments_dir, station_config)
+
+     # Write Level 3 dataset to file if an output directory is given
+     v = pypromice.resources.load_variables(variables)
+     m = pypromice.resources.load_metadata(metadata)
+     if outpath is not None:
+         prepare_and_write(l3, outpath, v, m, '60min')
+         prepare_and_write(l3, outpath, v, m, '1D')
+         prepare_and_write(l3, outpath, v, m, 'M')
+     return l3
+
+ def main():
+     args = parse_arguments_l2tol3()
+     _ = get_l2tol3(args.config_folder,
+                    args.inpath,
+                    args.outpath,
+                    args.variables,
+                    args.metadata,
+                    args.data_issues_path)
+
+ if __name__ == "__main__":
+     main()
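
A corresponding sketch for the L2-to-L3 step (hypothetical paths; the station_id attribute stored in the L2 file selects the matching TOML in config_folder):

    from pypromice.process.get_l2tol3 import get_l2tol3

    l3 = get_l2tol3(
        config_folder="aws-l0/metadata/station_configurations",
        inpath="out/L2/KPC_U_hour.nc",    # hypothetical L2 NetCDF file
        outpath="out/L3",
        variables=None,                   # bundled look-up tables
        metadata=None,
        data_issues_path="PROMICE-AWS-data-issues",
    )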
pypromice/process/join_l2.py (new file)
@@ -0,0 +1,112 @@
+ #!/usr/bin/env python
+ import logging
+ import os
+ import sys
+ from argparse import ArgumentParser
+
+ import pandas as pd
+ import xarray as xr
+
+ from pypromice.process.L1toL2 import correctPrecip
+ from pypromice.process.write import prepare_and_write
+
+ logger = logging.getLogger(__name__)
+
+ def parse_arguments_join():
+     parser = ArgumentParser(description="AWS L2 joiner for merging two L2 products, "
+                             "for example an L2 RAW and an L2 TX data product. The merged "
+                             "L2 data product is written to the defined output path")
+     parser.add_argument('-s', '--file1', type=str, required=True,
+                         help='Path to source L2 file, which is preferred in the merge process')
+     parser.add_argument('-t', '--file2', type=str, required=True,
+                         help='Path to target L2 file, which is used to fill gaps in the merge process')
+     parser.add_argument('-o', '--outpath', type=str, required=True,
+                         help='Path where to write output')
+     parser.add_argument('-v', '--variables', default=None, type=str, required=False,
+                         help='Path to variables look-up table .csv file listing the variable names to retain')
+     parser.add_argument('-m', '--metadata', default=None, type=str, required=False,
+                         help='Path to metadata table .csv file')
+     args = parser.parse_args()
+     return args
+
+ def loadArr(infile):
+     if infile.split('.')[-1].lower() == 'csv':
+         df = pd.read_csv(infile, index_col=0, parse_dates=True)
+         ds = xr.Dataset.from_dataframe(df)
+     elif infile.split('.')[-1].lower() == 'nc':
+         with xr.open_dataset(infile) as ds:
+             ds.load()
+         # Remove encoding attributes from NetCDF
+         for varname in ds.variables:
+             if ds[varname].encoding != {}:
+                 ds[varname].encoding = {}
+
+     try:
+         name = ds.attrs['station_id']
+     except KeyError:
+         name = infile.split('/')[-1].split('.')[0].split('_hour')[0].split('_10min')[0]
+     ds.attrs['station_id'] = name
+     if 'bedrock' in ds.attrs.keys():
+         ds.attrs['bedrock'] = ds.attrs['bedrock'] == 'True'
+     if 'number_of_booms' in ds.attrs.keys():
+         ds.attrs['number_of_booms'] = int(ds.attrs['number_of_booms'])
+
+     logger.info(f'{name} array loaded from {infile}')
+     return ds, name
+
+
+ def join_l2(file1, file2, outpath, variables, metadata) -> xr.Dataset:
+     logging.basicConfig(
+         format="%(asctime)s; %(levelname)s; %(name)s; %(message)s",
+         level=logging.INFO,
+         stream=sys.stdout,
+     )
+     # Check files
+     if os.path.isfile(file1) and os.path.isfile(file2):
+
+         # Load data arrays
+         ds1, n1 = loadArr(file1)
+         ds2, n2 = loadArr(file2)
+
+         # Check that the stations match
+         if n1.lower() == n2.lower():
+
+             # Merge arrays, preferring ds1 where both have data
+             logger.info(f'Combining {file1} with {file2}...')
+             name = n1
+             all_ds = ds1.combine_first(ds2)
+
+             # Re-calculate corrected precipitation
+             if hasattr(all_ds, 'precip_u_cor'):
+                 if not all_ds['precip_u_cor'].isnull().all():
+                     all_ds['precip_u_cor'], _ = correctPrecip(all_ds['precip_u'],
+                                                               all_ds['wspd_u'])
+             if hasattr(all_ds, 'precip_l_cor'):
+                 if not all_ds['precip_l_cor'].isnull().all():
+                     all_ds['precip_l_cor'], _ = correctPrecip(all_ds['precip_l'],
+                                                               all_ds['wspd_l'])
+         else:
+             logger.info(f'Mismatched station names {n1}, {n2}')
+             sys.exit(1)
+
+     elif os.path.isfile(file1):
+         ds1, name = loadArr(file1)
+         logger.info(f'Only one file found {file1}...')
+         all_ds = ds1
+
+     elif os.path.isfile(file2):
+         ds2, name = loadArr(file2)
+         logger.info(f'Only one file found {file2}...')
+         all_ds = ds2
+
+     else:
+         logger.info(f'Invalid files {file1}, {file2}')
+         sys.exit(1)
+
+     all_ds.attrs['format'] = 'merged RAW and TX'
+
+     # Write the merged dataset to file (no resampling at this stage)
+     prepare_and_write(all_ds, outpath, variables, metadata, resample=False)
+
+     logger.info(f'Files saved to {os.path.join(outpath, name)}...')
+     return all_ds
+
+ def main():
+     args = parse_arguments_join()
+     _ = join_l2(args.file1, args.file2, args.outpath, args.variables, args.metadata)
+
+ if __name__ == "__main__":
+     main()
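
And for join_l2, which prefers file1 (typically the RAW product) and fills its gaps from file2 (typically TX); again a sketch with hypothetical file names:

    from pypromice.process.join_l2 import join_l2

    all_ds = join_l2(
        file1="out/L2/raw/KPC_U_hour.nc",   # preferred source
        file2="out/L2/tx/KPC_U_hour.nc",    # used to fill gaps
        outpath="out/L2/join",
        variables=None,                     # None mirrors the CLI defaults
        metadata=None,
    )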