pypromice 1.4.1__py3-none-any.whl → 1.4.3__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

This release has been flagged as potentially problematic.


This version of pypromice might be problematic. See the diff below for details of what changed between versions.

@@ -32,15 +32,44 @@ def resample_dataset(ds_h, t):
32
32
  ds_d : xarray.Dataset
33
33
  L3 AWS dataset resampled to the frequency defined by t
34
34
  '''
35
- df_d = ds_h.to_dataframe().resample(t).mean()
35
+ # Convert dataset to DataFrame
36
+ df_d = ds_h.to_dataframe()
36
37
 
38
+ # Identify non-numeric columns
39
+ non_numeric_cols = df_d.select_dtypes(exclude=['number']).columns
40
+
41
+ # Log a warning and drop non-numeric columns
42
+ if len(non_numeric_cols) > 0:
43
+ for col in non_numeric_cols:
44
+ unique_values = df_d[col].unique()
45
+ logger.warning(f"Dropping column '{col}' because it is of type '{df_d[col].dtype}' and contains unique values: {unique_values}")
46
+
47
+ df_d = df_d.drop(columns=non_numeric_cols)
48
+ # Resample the DataFrame
49
+ df_d = df_d.resample(t).mean()
50
+
37
51
  # taking the 10 min data and using it as instantaneous values:
38
- if (t == '60min') and (ds_h.time.diff(dim='time').isel(time=0).dt.total_seconds() == 600):
52
+ is_10_minutes_timestamp = (ds_h.time.diff(dim='time') / np.timedelta64(1, 's') == 600)
53
+ if (t == '60min') and is_10_minutes_timestamp.any():
39
54
  cols_to_update = ['p_i', 't_i', 'rh_i', 'rh_i_cor', 'wspd_i', 'wdir_i','wspd_x_i','wspd_y_i']
55
+ timestamp_10min = ds_h.time.where(is_10_minutes_timestamp, drop=True).to_index()
56
+ timestamp_round_hour = df_d.index
57
+ timestamp_to_update = timestamp_round_hour.intersection(timestamp_10min)
58
+
40
59
  for col in cols_to_update:
41
- df_d[col] = ds_h.reindex(time=df_d.index)[col.replace('_i','_u')].values
60
+ if col not in df_d.columns:
61
+ df_d[col] = np.nan
62
+ else:
63
+ # if there are already instantaneous values in the dataset
64
+ # we want to keep them as they are
65
+ # removing timestamps where there is already t_i filled from a TX file
66
+ missing_instantaneous = ds_h.reindex(time=timestamp_to_update)[col].isnull()
67
+ timestamp_to_update = timestamp_to_update[missing_instantaneous]
68
+ df_d.loc[timestamp_to_update, col] = ds_h.reindex(
69
+ time= timestamp_to_update
70
+ )[col.replace('_i','_u')].values
42
71
  if col == 'p_i':
43
- df_d[col] = df_d[col].values-1000
72
+ df_d.loc[timestamp_to_update, col] = df_d.loc[timestamp_to_update, col].values-1000
44
73
 
45
74
 
46
75
  # recalculating wind direction from averaged directional wind speeds
pypromice/tx/tx.py CHANGED
@@ -894,10 +894,10 @@ def sortLines(in_file, out_file, replace_unsorted=True): #
894
894
  # out_f.write(headers)
895
895
  out_f.writelines(unique_lines)
896
896
 
897
- # Replace input file with new sorted file
898
- if replace_unsorted:
899
- os.remove(in_file)
900
- os.rename(out_file, in_file)
897
+ # Replace input file with new sorted file
898
+ if replace_unsorted:
899
+ os.remove(in_file)
900
+ os.rename(out_file, in_file)
901
901
 
902
902
  def addTail(in_file, out_dir, aws_name, header_names='', lines_limit=100):
903
903
  '''Generate tails file from L0tx file
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: pypromice
3
- Version: 1.4.1
3
+ Version: 1.4.3
4
4
  Summary: PROMICE/GC-Net data processing toolbox
5
5
  Home-page: https://github.com/GEUS-Glaciology-and-Climate/pypromice
6
6
  Author: GEUS Glaciology and Climate
@@ -21,7 +21,7 @@ pypromice/process/get_l2tol3.py,sha256=4Qu2d5rT25H2dObyCc70ivtJg3vw6WA-hzI-kRD6y
21
21
  pypromice/process/join_l2.py,sha256=ifjuhFR9scVvZt3xuy-ELp-iRchxV1dEK9qJ4UNh5bE,4567
22
22
  pypromice/process/join_l3.py,sha256=nLLQbX0vuuvHET8r33ZAt5g1dtIk3foUJ9RZkwkEwE4,20158
23
23
  pypromice/process/load.py,sha256=iaFvJeaDanAA60caVj4BWupZpqgQNj3CiNU4csz9FdU,4585
24
- pypromice/process/resample.py,sha256=caOqcO3AQQ6ejEbVd2AcCKycQps7U0y6GKjLYzWfZnI,5714
24
+ pypromice/process/resample.py,sha256=x0t9CE4YnwYMrn6wDRArhrQHqHOIJpYpTrgEcARUppo,7138
25
25
  pypromice/process/utilities.py,sha256=1pqSaF3bIbvRNtOjb25mbegHfuW9MY4KpCBDVXWyML8,1773
26
26
  pypromice/process/value_clipping.py,sha256=FkBiDT_HK_BDFiVjB7NdWH-_nab7vONG9LOd2PpEBI8,1573
27
27
  pypromice/process/write.py,sha256=fRCCK4g_W07M4EEsJErdTSN2Pldr9SLgqM2w_rsp2ZQ,16257
@@ -39,15 +39,14 @@ pypromice/resources/variables.csv,sha256=YyYng6ZL2eA0EIDdNAT8ACp6kg49R_38mTD5dWz
39
39
  pypromice/tx/__init__.py,sha256=-62bhHWJGfzFh5JwHcLqRj2jcGzmqzYOLWByhO706YY,30
40
40
  pypromice/tx/get_l0tx.py,sha256=b34-96KGshTyTN2tBFaAIBl7oZZzbRB_JR7sXtDNfXA,6957
41
41
  pypromice/tx/get_msg.py,sha256=OGS60OHjy4Wf8JExTfOdK-9xhjFdjhuChxoTSPe_MjI,3417
42
- pypromice/tx/get_watsontx.py,sha256=vFSuDs_vvkATe_6WCF8OLVsx7Wa-MxLATZRfP9qUZqI,5436
43
42
  pypromice/tx/payload_formats.csv,sha256=tzTTNuvmVwlwd7z3aF8A2dhjKNQ4lVumpnNBs3e3YeQ,7816
44
43
  pypromice/tx/payload_types.csv,sha256=C1-xCmHytAqqAzgzPwBLWqabzWu6s6tKAd8AjVd935s,457
45
- pypromice/tx/tx.py,sha256=TE5lKYMJF4hAhHrKjMyx1LZWpOHlwGJI9EdlPJrQITs,34251
44
+ pypromice/tx/tx.py,sha256=mghUjwGqUKe_4JWuAEgWMyH4ME2QRufeMPPHoL72R08,34267
46
45
  pypromice/utilities/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
47
46
  pypromice/utilities/git.py,sha256=7EUGjDs_VZucrckakXKyZEclDAZ_mKIxhTWzhopCIxM,1785
48
- pypromice-1.4.1.dist-info/LICENSE.txt,sha256=gXf5dRMhNSbfLPYYTY_5hsZ1r7UU1OaKQEAQUhuIBkM,18092
49
- pypromice-1.4.1.dist-info/METADATA,sha256=znYJm-tDsyrWFwkvQ9xE2BEiN9ZofRxO5EtmBYeIhrQ,4762
50
- pypromice-1.4.1.dist-info/WHEEL,sha256=cVxcB9AmuTcXqmwrtPhNK88dr7IR_b6qagTj0UvIEbY,91
51
- pypromice-1.4.1.dist-info/entry_points.txt,sha256=ufX1npmY3nqMPtSVRKVxn3MhG9IyFHD5FjPZQcELVXo,618
52
- pypromice-1.4.1.dist-info/top_level.txt,sha256=cBdfwgSbWDQq3a07nKRjrfmLC7jdaYXs98GG58HpTks,10
53
- pypromice-1.4.1.dist-info/RECORD,,
47
+ pypromice-1.4.3.dist-info/LICENSE.txt,sha256=gXf5dRMhNSbfLPYYTY_5hsZ1r7UU1OaKQEAQUhuIBkM,18092
48
+ pypromice-1.4.3.dist-info/METADATA,sha256=mMnZyZwB5jdkzlnxlj-2vxptCrrM1vUHk85tfObm90U,4762
49
+ pypromice-1.4.3.dist-info/WHEEL,sha256=GV9aMThwP_4oNCtvEC2ec3qUYutgWeAzklro_0m4WJQ,91
50
+ pypromice-1.4.3.dist-info/entry_points.txt,sha256=ufX1npmY3nqMPtSVRKVxn3MhG9IyFHD5FjPZQcELVXo,618
51
+ pypromice-1.4.3.dist-info/top_level.txt,sha256=cBdfwgSbWDQq3a07nKRjrfmLC7jdaYXs98GG58HpTks,10
52
+ pypromice-1.4.3.dist-info/RECORD,,
@@ -1,5 +1,5 @@
1
1
  Wheel-Version: 1.0
2
- Generator: setuptools (74.1.2)
2
+ Generator: setuptools (75.1.0)
3
3
  Root-Is-Purelib: true
4
4
  Tag: py3-none-any
5
5
 
@@ -1,147 +0,0 @@
1
- #!/usr/bin/env python3
2
- # -*- coding: utf-8 -*-
3
- """
4
- Created on Fri Jul 22 16:20:09 2022
5
-
6
- Script to get L0tx transmission messages from the Watson River station using
7
- the tx module
8
-
9
- @author: Penelope How, pho@geus.dk
10
- """
11
- from argparse import ArgumentParser
12
-
13
- from configparser import ConfigParser
14
- import os, imaplib, email, re
15
- from glob import glob
16
- from datetime import datetime
17
-
18
- from pypromice.tx import getMail, L0tx, sortLines
19
-
20
-
21
- def parse_arguments_watson():
22
- parser = ArgumentParser(description="AWS L0 transmission fetcher for Watson River measurements")
23
- parser.add_argument('-a', '--account', default=None, type=str, required=True, help='Email account .ini file')
24
- parser.add_argument('-p', '--password', default=None, type=str, required=True, help='Email credentials .ini file')
25
- parser.add_argument('-o', '--outpath', default=None, type=str, required=False, help='Path where to write output (if given)')
26
- parser.add_argument('-f', '--formats', default=None, type=str, required=False, help='Path to Payload format .csv file')
27
- parser.add_argument('-t', '--types', default=None, type=str, required=False, help='Path to Payload type .csv file')
28
- parser.add_argument('-u', '--uid', default=None, type=str, required=True, help='Last AWS uid .ini file')
29
- args = parser.parse_args()
30
- return args
31
-
32
- #------------------------------------------------------------------------------
33
- def get_watsontx():
34
- """Executed from the command line"""
35
- args = parse_arguments_watson()
36
-
37
- # Set payload formatter paths
38
- formatter_file = args.formats
39
- type_file = args.types
40
-
41
- # Set credential paths
42
- accounts_file = args.account
43
- credentials_file = args.password
44
-
45
- # Set last aws uid path
46
- # last_uid = 1000000
47
- uid_file = args.uid
48
-
49
- # Set last aws uid path
50
- with open(uid_file, 'r') as last_uid_f:
51
- last_uid = int(last_uid_f.readline())
52
-
53
- # Set output file directory
54
- out_dir = args.outpath
55
- if not os.path.exists(out_dir):
56
- os.mkdir(out_dir)
57
-
58
- #------------------------------------------------------------------------------
59
-
60
- # Define accounts and credentials ini file paths
61
- accounts_ini = ConfigParser()
62
- accounts_ini.read_file(open(accounts_file))
63
- accounts_ini.read(credentials_file)
64
-
65
- # Get credentials
66
- account = accounts_ini.get('aws', 'account')
67
- server = accounts_ini.get('aws', 'server')
68
- port = accounts_ini.getint('aws', 'port')
69
- password = accounts_ini.get('aws', 'password')
70
- if not password:
71
- password = input('password for AWS email account: ')
72
- print('AWS data from server %s, account %s' %(server, account))
73
-
74
- #------------------------------------------------------------------------------
75
-
76
- # Log in to email server
77
- mail_server = imaplib.IMAP4_SSL(server, port)
78
- typ, accountDetails = mail_server.login(account, password)
79
- if typ != 'OK':
80
- print('Not able to sign in!')
81
- raise
82
-
83
- # Grab new emails
84
- result, data = mail_server.select(mailbox='"[Gmail]/All Mail"',
85
- readonly=True)
86
- print('mailbox contains %s messages' %data[0])
87
-
88
- #------------------------------------------------------------------------------
89
-
90
- # Get L0tx datalines from email transmissions
91
- for uid, mail in getMail(mail_server, last_uid=last_uid):
92
- message = email.message_from_string(mail)
93
- try:
94
- name = str(message.get_all('subject')[0])
95
- d = datetime.strptime(message.get_all('date')[0],
96
- '%a, %d %b %Y %H:%M:%S %z')
97
- except:
98
- name=None
99
- d=None
100
-
101
- if name and ('Watson' in name or 'GIOS' in name):
102
- print(f'Watson/GIOS station message, {d.strftime("%Y-%m-%d %H:%M:%S")}')
103
-
104
- l0 = L0tx(message, formatter_file, type_file,
105
- sender_name=['emailrelay@konectgds.com','sbdservice'])
106
-
107
- if l0.msg:
108
- content, attachment = l0.getEmailBody()
109
- attachment_name = str(attachment.get_filename())
110
- out_fn = re.sub(r'\d*\.dat$', '', attachment_name) + '.txt'
111
- out_path = os.sep.join((out_dir, out_fn))
112
-
113
- print(f'Writing to {out_fn}')
114
- print(l0.msg)
115
-
116
- with open(out_path, mode='a') as out_f:
117
- out_f.write(l0.msg + '\n')
118
-
119
- #------------------------------------------------------------------------------
120
-
121
- # Sort L0tx files and add tails
122
- for f in glob(out_dir+'/*.txt'):
123
-
124
- # Sort lines in L0tx file and remove duplicates
125
- in_dirn, in_fn = os.path.split(f)
126
- out_fn = 'sorted_' + in_fn
127
- out_pn = os.sep.join((in_dirn, out_fn))
128
- sortLines(f, out_pn)
129
-
130
- # Close mail server if open
131
- if 'mail_server' in locals():
132
- print(f'\nClosing {account}')
133
- mail_server.close()
134
- resp = mail_server.logout()
135
- assert resp[0].upper() == 'BYE'
136
-
137
- # Write last aws uid to ini file
138
- try:
139
- with open(uid_file, 'w') as last_uid_f:
140
- last_uid_f.write(uid)
141
- except:
142
- print(f'Could not write last uid {uid} to {uid_file}')
143
-
144
- print('Finished')
145
-
146
- if __name__ == "__main__":
147
- get_watsontx()