windborne 1.0.8__py3-none-any.whl → 1.1.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- windborne/__init__.py +6 -15
- windborne/api_request.py +227 -0
- windborne/cli.py +52 -60
- windborne/cyclone_formatting.py +210 -0
- windborne/data_api.py +390 -1028
- windborne/forecasts_api.py +186 -305
- windborne/observation_formatting.py +456 -0
- windborne/utils.py +15 -887
- {windborne-1.0.8.dist-info → windborne-1.1.0.dist-info}/METADATA +1 -2
- windborne-1.1.0.dist-info/RECORD +13 -0
- windborne/config.py +0 -42
- windborne-1.0.8.dist-info/RECORD +0 -11
- {windborne-1.0.8.dist-info → windborne-1.1.0.dist-info}/WHEEL +0 -0
- {windborne-1.0.8.dist-info → windborne-1.1.0.dist-info}/entry_points.txt +0 -0
- {windborne-1.0.8.dist-info → windborne-1.1.0.dist-info}/top_level.txt +0 -0
windborne/utils.py
CHANGED
@@ -1,186 +1,11 @@
|
|
1
|
-
from .config import CLIENT_ID, API_KEY
|
2
|
-
|
3
1
|
import os
|
4
|
-
import requests
|
5
|
-
import jwt
|
6
|
-
import time
|
7
2
|
import re
|
8
|
-
import uuid
|
9
3
|
from datetime import datetime, timezone
|
10
4
|
import dateutil.parser
|
11
|
-
import boto3
|
12
|
-
import io
|
13
5
|
import json
|
14
6
|
import csv
|
15
7
|
|
16
|
-
import numpy as np
|
17
|
-
|
18
|
-
# Check if input is uuid v4
|
19
|
-
def is_valid_uuid_v4(client_id):
|
20
|
-
try:
|
21
|
-
return str(uuid.UUID(client_id, version=4)) == client_id
|
22
|
-
except ValueError:
|
23
|
-
return False
|
24
|
-
|
25
|
-
# Check if client id input format
|
26
|
-
def is_valid_client_id_format(client_id):
|
27
|
-
return re.fullmatch(r"[a-z0-9_]+", client_id) is not None
|
28
|
-
|
29
|
-
# Authenticate requests using a JWT | no reveal of underlying key
|
30
|
-
def make_api_request(url, params=None, return_type=None):
|
31
|
-
# Check if credentials are set
|
32
|
-
if not CLIENT_ID and not API_KEY:
|
33
|
-
print("To access the WindBorne API, you need to set your Client ID and API key by setting the environment variables WB_CLIENT_ID and WB_API_KEY.")
|
34
|
-
print("--------------------------------------")
|
35
|
-
print("You may refer to https://windbornesystems.com/docs/api/cli#introduction\n"
|
36
|
-
"for instructions on how to set your credentials as environment variables for CLI and Code usage\n\n"
|
37
|
-
"and to https://windbornesystems.com/docs/api/pip_data#introduction\n"
|
38
|
-
"for instruction on how to set your credentials for code usage.")
|
39
|
-
print("--------------------------------------")
|
40
|
-
print("To get an API key, email data@windbornesystems.com.")
|
41
|
-
exit(80)
|
42
|
-
elif not CLIENT_ID:
|
43
|
-
print("To access the WindBorne API, you need to set your Client ID by setting the environment variable WB_CLIENT_ID.")
|
44
|
-
print("--------------------------------------")
|
45
|
-
print("You may refer to https://windbornesystems.com/docs/api/cli#introduction\n"
|
46
|
-
"for instructions on how to set your credentials as environment variables for CLI and Code usage\n\n"
|
47
|
-
"and to https://windbornesystems.com/docs/api/pip_data#introduction\n"
|
48
|
-
"for instruction on how to set your credentials for code usage.")
|
49
|
-
print("--------------------------------------")
|
50
|
-
print("To get an API key, email data@windbornesystems.com.")
|
51
|
-
exit(90)
|
52
|
-
elif not API_KEY:
|
53
|
-
print("To access the WindBorne API, you need to set your CAPI key by setting the environment variable WB_API_KEY.")
|
54
|
-
print("--------------------------------------")
|
55
|
-
print("You may refer to https://windbornesystems.com/docs/api/cli#introduction\n"
|
56
|
-
"for instructions on how to set your credentials as environment variables for CLI and Code usage\n\n"
|
57
|
-
"and to https://windbornesystems.com/docs/api/pip_data#introduction\n"
|
58
|
-
"for instruction on how to set your credentials for code usage.")
|
59
|
-
print("--------------------------------------")
|
60
|
-
print("To get an API key, email data@windbornesystems.com.")
|
61
|
-
exit(91)
|
62
|
-
# Check if credentials are swapped
|
63
|
-
elif len(CLIENT_ID) in [32, 35]:
|
64
|
-
print("Your Client ID and API Key are swapped.")
|
65
|
-
print("--------------------------------------")
|
66
|
-
print("Swap them or modify them accordingly to get access to WindBorne API.")
|
67
|
-
print("--------------------------------------")
|
68
|
-
print("You may refer to https://windbornesystems.com/docs/api/cli#introduction\n"
|
69
|
-
"for instructions on how to set your credentials as environment variables for CLI and Code usage\n\n"
|
70
|
-
"and to https://windbornesystems.com/docs/api/pip_data#introduction\n"
|
71
|
-
"for instruction on how to set your credentials for code usage.")
|
72
|
-
print("--------------------------------------")
|
73
|
-
print(f"Current Client ID: {CLIENT_ID}")
|
74
|
-
print(f"Current API Key: {API_KEY}")
|
75
|
-
exit(95)
|
76
|
-
|
77
|
-
# Validate WB_CLIENT_ID format
|
78
|
-
if not (is_valid_uuid_v4(CLIENT_ID) or is_valid_client_id_format(CLIENT_ID)):
|
79
|
-
print("Your Client ID is misformatted.")
|
80
|
-
print("--------------------------------------")
|
81
|
-
print("It should either be a valid UUID v4 or consist of only lowercase letters, digits, and underscores ([a-z0-9_]).")
|
82
|
-
print("--------------------------------------")
|
83
|
-
print("You may refer to https://windbornesystems.com/docs/api/cli#introduction\n"
|
84
|
-
"for instructions on how to set your credentials as environment variables for CLI and Code usage\n\n"
|
85
|
-
"and to https://windbornesystems.com/docs/api/pip_data#introduction\n"
|
86
|
-
"for instruction on how to set your credentials for code usage.")
|
87
|
-
print("--------------------------------------")
|
88
|
-
print(f"Current Client ID: {CLIENT_ID}")
|
89
|
-
exit(92)
|
90
|
-
|
91
|
-
# Validate WB_API_KEY for both newer and older formats
|
92
|
-
if API_KEY.startswith("wb_"):
|
93
|
-
if len(API_KEY) != 35:
|
94
|
-
print("Your API key is misformatted.")
|
95
|
-
print("--------------------------------------")
|
96
|
-
print("API keys starting with 'wb_' must be 35 characters long (including the 'wb_' prefix).")
|
97
|
-
print("--------------------------------------")
|
98
|
-
print("You may refer to https://windbornesystems.com/docs/api/cli#introduction\n"
|
99
|
-
"for instructions on how to set your credentials as environment variables for CLI and Code usage\n\n"
|
100
|
-
"and to https://windbornesystems.com/docs/api/pip_data#introduction\n"
|
101
|
-
"for instruction on how to set your credentials for code usage.")
|
102
|
-
print("--------------------------------------")
|
103
|
-
print(f"Current API key: {API_KEY}")
|
104
|
-
exit(93)
|
105
|
-
elif len(API_KEY) != 32: # For early tokens
|
106
|
-
print("Your API key is misformatted.")
|
107
|
-
print("--------------------------------------")
|
108
|
-
print("API keys created in 2023 or earlier must be exactly 32 characters long.")
|
109
|
-
print("--------------------------------------")
|
110
|
-
print("You may refer to https://windbornesystems.com/docs/api/cli#introduction\n"
|
111
|
-
"for instructions on how to set your credentials as environment variables for CLI and Code usage\n\n"
|
112
|
-
"and to https://windbornesystems.com/docs/api/pip_data#introduction\n"
|
113
|
-
"for instruction on how to set your credentials for code usage.")
|
114
|
-
print("--------------------------------------")
|
115
|
-
print(f"Current API key: {API_KEY}")
|
116
|
-
exit(94)
|
117
|
-
|
118
|
-
signed_token = jwt.encode({
|
119
|
-
'client_id': CLIENT_ID,
|
120
|
-
'iat': int(time.time()),
|
121
|
-
}, API_KEY, algorithm='HS256')
|
122
|
-
|
123
|
-
try:
|
124
|
-
if params:
|
125
|
-
response = requests.get(url, auth=(CLIENT_ID, signed_token), params=params)
|
126
|
-
else:
|
127
|
-
response = requests.get(url, auth=(CLIENT_ID, signed_token))
|
128
|
-
|
129
|
-
response.raise_for_status()
|
130
8
|
|
131
|
-
if return_type is None:
|
132
|
-
# For Data API
|
133
|
-
return response.json()
|
134
|
-
elif return_type == 'all':
|
135
|
-
# For Forecasts API (except tcs) --> return whole response not .json to obtain S3 url
|
136
|
-
return response
|
137
|
-
except requests.exceptions.HTTPError as http_err:
|
138
|
-
if http_err.response.status_code == 403:
|
139
|
-
print("--------------------------------------")
|
140
|
-
print("We couldn't authenticate your request.")
|
141
|
-
print("--------------------------------------")
|
142
|
-
print("Please make sure you have properly set your WB_CLIENT_ID and WB_API_KEY.\n")
|
143
|
-
print("You can verify this by running\necho $WB_CLIENT_ID and echo $WB_API_KEY in your terminal.\n")
|
144
|
-
print("To get an API key, email data@windbornesystems.com.")
|
145
|
-
elif http_err.response.status_code in [404, 400]:
|
146
|
-
print("-------------------------------------------------------")
|
147
|
-
print("Our server couldn't find the information you requested.")
|
148
|
-
print("-------------------------------------------------------")
|
149
|
-
print(f"URL: {url}")
|
150
|
-
print(f"Error: {http_err.response.status_code}")
|
151
|
-
print("-------------------------------------------------------")
|
152
|
-
if params:
|
153
|
-
print("\nParameters provided:")
|
154
|
-
for key, value in params.items():
|
155
|
-
print(f" {key}: {value}")
|
156
|
-
else:
|
157
|
-
if 'missions/' in url:
|
158
|
-
mission_id = url.split('/missions/')[1].split('/')[0]
|
159
|
-
print(f"Mission ID provided: {mission_id}")
|
160
|
-
print(f"We couldn't find a mission with id: {mission_id}")
|
161
|
-
elif http_err.response.status_code == 502:
|
162
|
-
retries = 1
|
163
|
-
while response.status_code == 502 and retries < 5:
|
164
|
-
print("502 Bad Gateway, sleeping and retrying")
|
165
|
-
time.sleep(2**retries)
|
166
|
-
response = requests.get(url, auth=(CLIENT_ID, signed_token))
|
167
|
-
retries += 1
|
168
|
-
else:
|
169
|
-
print(f"HTTP error occurred\n\n{http_err}")
|
170
|
-
if params:
|
171
|
-
print("\nParameters provided:")
|
172
|
-
for key, value in params.items():
|
173
|
-
print(f" {key}: {value}")
|
174
|
-
exit(http_err.response.status_code)
|
175
|
-
except requests.exceptions.ConnectionError as conn_err:
|
176
|
-
print(f"Connection error occurred\n\n{conn_err}")
|
177
|
-
except requests.exceptions.Timeout as timeout_err:
|
178
|
-
print(f"Timeout error occurred\n\n{timeout_err}")
|
179
|
-
except requests.exceptions.RequestException as req_err:
|
180
|
-
print(f"An error occurred\n\n{req_err}")
|
181
|
-
|
182
|
-
# Supported date formats
|
183
|
-
# YYYY-MM-DD HH:MM:SS, YYYY-MM-DD_HH:MM and ISO strings
|
184
9
|
def to_unix_timestamp(date_string):
|
185
10
|
"""
|
186
11
|
Converts a date string or integer to a UNIX timestamp.
|
@@ -202,15 +27,11 @@ def to_unix_timestamp(date_string):
|
|
202
27
|
"%Y-%m-%d %H:%M:%S", # e.g., 2024-12-05 14:48:00
|
203
28
|
"%Y-%m-%d_%H:%M", # e.g., 2024-12-05_14:48
|
204
29
|
"%Y-%m-%dT%H:%M:%S.%fZ", # e.g., 2024-12-05T14:48:00.000Z
|
30
|
+
"%Y%m%d%H", # e.g., 2024120514
|
205
31
|
]
|
206
|
-
current_time = datetime.now(timezone.utc)
|
207
32
|
for fmt in formats:
|
208
33
|
try:
|
209
34
|
dt = datetime.strptime(date_string, fmt).replace(tzinfo=timezone.utc)
|
210
|
-
if dt > current_time:
|
211
|
-
print(f"How would it be to live in {dt} ?\n")
|
212
|
-
print("Looks like you are coming from the future!\n")
|
213
|
-
exit(1111)
|
214
35
|
return int(dt.timestamp())
|
215
36
|
except ValueError:
|
216
37
|
continue
|
@@ -219,8 +40,9 @@ def to_unix_timestamp(date_string):
|
|
219
40
|
print("Invalid date format. Please use one of the supported formats:\n"
|
220
41
|
"- YYYY-MM-DD HH:MM:SS\n"
|
221
42
|
"- YYYY-MM-DD_HH:MM\n"
|
222
|
-
"- YYYY-MM-DDTHH:MM:SS.fffZ"
|
223
|
-
|
43
|
+
"- YYYY-MM-DDTHH:MM:SS.fffZ\n"
|
44
|
+
"- YYYYMMDDHH")
|
45
|
+
exit(1)
|
224
46
|
|
225
47
|
# Supported date format
|
226
48
|
# Compact format YYYYMMDDHH
|
@@ -268,22 +90,22 @@ def parse_time(time, init_time_flag=None):
|
|
268
90
|
print("Please check your input format and try again")
|
269
91
|
exit(2)
|
270
92
|
|
271
|
-
|
272
|
-
def
|
93
|
+
|
94
|
+
def save_arbitrary_response(output_file, response, csv_data_key=None):
|
273
95
|
"""
|
274
96
|
Save Data API response data to a file in either JSON or CSV format.
|
275
97
|
|
276
98
|
Args:
|
277
|
-
|
99
|
+
output_file (str): The file path where the response will be saved.
|
278
100
|
response (dict or list): The response data to save.
|
279
101
|
csv_data_key (str, optional): Key to extract data for CSV. Defaults to None.
|
280
102
|
"""
|
281
103
|
# Create directory path if it doesn't exist
|
282
|
-
directory = os.path.dirname(
|
104
|
+
directory = os.path.dirname(output_file)
|
283
105
|
if directory and not os.path.isdir(directory):
|
284
106
|
os.makedirs(directory, exist_ok=True)
|
285
107
|
|
286
|
-
if '.' not in
|
108
|
+
if '.' not in output_file:
|
287
109
|
print("You have to provide a file type for your filename.")
|
288
110
|
print("Supported formats:")
|
289
111
|
print(" - .csv")
|
@@ -292,11 +114,11 @@ def save_csv_json(save_to_file, response, csv_data_key=None):
|
|
292
114
|
elif not response:
|
293
115
|
print("There are no available data to save to file.")
|
294
116
|
exit(1)
|
295
|
-
elif
|
296
|
-
with open(
|
117
|
+
elif output_file.lower().endswith('.json'):
|
118
|
+
with open(output_file, 'w', encoding='utf-8') as f:
|
297
119
|
json.dump(response, f, indent=4)
|
298
|
-
print("Saved to",
|
299
|
-
elif
|
120
|
+
print("Saved to", output_file)
|
121
|
+
elif output_file.lower().endswith('.csv'):
|
300
122
|
# Extract data for CSV if a key is provided
|
301
123
|
data = response if not csv_data_key else response.get(csv_data_key, [])
|
302
124
|
if not data:
|
@@ -325,708 +147,14 @@ def save_csv_json(save_to_file, response, csv_data_key=None):
|
|
325
147
|
exit(5)
|
326
148
|
|
327
149
|
# Write data to CSV
|
328
|
-
with open(
|
150
|
+
with open(output_file, mode='w', newline='', encoding='utf-8') as file:
|
329
151
|
writer = csv.DictWriter(file, fieldnames=headers)
|
330
152
|
writer.writeheader()
|
331
153
|
for row in data:
|
332
154
|
# If no value available write 'None'
|
333
155
|
row_data = {k: 'None' if v is None or v == '' else v for k, v in row.items()}
|
334
156
|
writer.writerow(row_data)
|
335
|
-
print("Saved to",
|
157
|
+
print("Saved to", output_file)
|
336
158
|
else:
|
337
159
|
print("Unsupported file format. Please use either .json or .csv.")
|
338
160
|
exit(4)
|
339
|
-
|
340
|
-
def convert_to_netcdf(data, curtime, output_filename):
|
341
|
-
# This module outputs data in netcdf format for the WMO ISARRA program. The output format is netcdf
|
342
|
-
# and the style (variable names, file names, etc.) are described here:
|
343
|
-
# https://github.com/synoptic/wmo-uasdc/tree/main/raw_uas_to_netCDF
|
344
|
-
|
345
|
-
# Import necessary libraries
|
346
|
-
import xarray as xr
|
347
|
-
import pandas as pd
|
348
|
-
import numpy as np
|
349
|
-
|
350
|
-
# Mapping of WindBorne names to ISARRA names
|
351
|
-
rename_dict = {
|
352
|
-
'latitude': 'lat',
|
353
|
-
'longitude': 'lon',
|
354
|
-
'altitude': 'altitude',
|
355
|
-
'temperature': 'air_temperature',
|
356
|
-
'wind_direction': 'wind_direction',
|
357
|
-
'wind_speed': 'wind_speed',
|
358
|
-
'pressure': 'air_pressure',
|
359
|
-
'humidity_mixing_ratio': 'humidity_mixing_ratio',
|
360
|
-
'index': 'obs',
|
361
|
-
}
|
362
|
-
|
363
|
-
# Convert dictionary to list for DataFrame
|
364
|
-
data_list = []
|
365
|
-
if isinstance(data, dict):
|
366
|
-
# If input is dictionary, convert to list
|
367
|
-
for obs_id, obs_data in data.items():
|
368
|
-
clean_data = {k: None if v == 'None' else v for k, v in obs_data.items()}
|
369
|
-
data_list.append(clean_data)
|
370
|
-
else:
|
371
|
-
# If input is already a list
|
372
|
-
for obs_data in data:
|
373
|
-
clean_data = {k: None if v == 'None' else v for k, v in obs_data.items()}
|
374
|
-
data_list.append(clean_data)
|
375
|
-
|
376
|
-
# Put the data in a panda dataframe in order to easily push to xarray then netcdf output
|
377
|
-
df = pd.DataFrame(data_list)
|
378
|
-
|
379
|
-
# Convert numeric columns to float
|
380
|
-
numeric_columns = ['latitude', 'longitude', 'altitude', 'pressure', 'temperature',
|
381
|
-
'speed_u', 'speed_v', 'specific_humidity', 'timestamp']
|
382
|
-
for col in numeric_columns:
|
383
|
-
if col in df.columns:
|
384
|
-
df[col] = pd.to_numeric(df[col], errors='coerce')
|
385
|
-
|
386
|
-
ds = xr.Dataset.from_dataframe(df)
|
387
|
-
|
388
|
-
# Build the filename and save some variables for use later
|
389
|
-
mt = datetime.fromtimestamp(curtime, tz=timezone.utc)
|
390
|
-
|
391
|
-
# Handle dropsondes
|
392
|
-
mission_name = str(df['mission_name'].iloc[0]) if (not df.empty and not pd.isna(df['mission_name'].iloc[0])) else ' '
|
393
|
-
|
394
|
-
is_multi_mission = False
|
395
|
-
|
396
|
-
if len(df['mission_name'].unique()) > 1:
|
397
|
-
is_multi_mission = True
|
398
|
-
|
399
|
-
output_file = output_filename
|
400
|
-
|
401
|
-
# Derived quantities calculated here:
|
402
|
-
|
403
|
-
# convert from specific humidity to humidity_mixing_ratio
|
404
|
-
mg_to_kg = 1000000.
|
405
|
-
if not all(x is None for x in ds['specific_humidity'].data):
|
406
|
-
ds['humidity_mixing_ratio'] = (ds['specific_humidity'] / mg_to_kg) / (1 - (ds['specific_humidity'] / mg_to_kg))
|
407
|
-
else:
|
408
|
-
ds['humidity_mixing_ratio'] = ds['specific_humidity']
|
409
|
-
|
410
|
-
# Wind speed and direction from components
|
411
|
-
ds['wind_speed'] = np.sqrt(ds['speed_u']*ds['speed_u'] + ds['speed_v']*ds['speed_v'])
|
412
|
-
ds['wind_direction'] = np.mod(180 + (180 / np.pi) * np.arctan2(ds['speed_u'], ds['speed_v']), 360)
|
413
|
-
|
414
|
-
ds['time'] = ds['timestamp'].astype(float)
|
415
|
-
ds = ds.assign_coords(time=("time", ds['time'].data))
|
416
|
-
|
417
|
-
# Now that calculations are done, remove variables not needed in the netcdf output
|
418
|
-
variables_to_drop = ['humidity', 'speed_x', 'speed_y', 'timestamp']
|
419
|
-
if 'id' in ds and pd.isna(ds['id']).all():
|
420
|
-
variables_to_drop.append('id')
|
421
|
-
|
422
|
-
existing_vars = [var for var in variables_to_drop if var in ds]
|
423
|
-
ds = ds.drop_vars(existing_vars)
|
424
|
-
|
425
|
-
# Rename the variables
|
426
|
-
ds = ds.rename(rename_dict)
|
427
|
-
|
428
|
-
# Adding attributes to variables in the xarray dataset
|
429
|
-
ds['time'].attrs = {
|
430
|
-
'units': 'seconds since 1970-01-01T00:00:00',
|
431
|
-
'long_name': 'Time', '_FillValue': float('nan'),
|
432
|
-
'processing_level': ''
|
433
|
-
}
|
434
|
-
|
435
|
-
ds['lat'].attrs = {
|
436
|
-
'units': 'degrees_north',
|
437
|
-
'long_name': 'Latitude',
|
438
|
-
'_FillValue': float('nan'),
|
439
|
-
'processing_level': ''
|
440
|
-
}
|
441
|
-
ds['lon'].attrs = {
|
442
|
-
'units': 'degrees_east',
|
443
|
-
'long_name': 'Longitude',
|
444
|
-
'_FillValue': float('nan'),
|
445
|
-
'processing_level': ''
|
446
|
-
}
|
447
|
-
ds['altitude'].attrs = {
|
448
|
-
'units': 'meters_above_sea_level',
|
449
|
-
'long_name': 'Altitude',
|
450
|
-
'_FillValue': float('nan'),
|
451
|
-
'processing_level': ''
|
452
|
-
}
|
453
|
-
ds['air_temperature'].attrs = {
|
454
|
-
'units': 'Kelvin',
|
455
|
-
'long_name': 'Air Temperature',
|
456
|
-
'_FillValue': float('nan'),
|
457
|
-
'processing_level': ''
|
458
|
-
}
|
459
|
-
ds['wind_speed'].attrs = {
|
460
|
-
'units': 'm/s',
|
461
|
-
'long_name': 'Wind Speed',
|
462
|
-
'_FillValue': float('nan'),
|
463
|
-
'processing_level': ''
|
464
|
-
}
|
465
|
-
ds['wind_direction'].attrs = {
|
466
|
-
'units': 'degrees',
|
467
|
-
'long_name': 'Wind Direction',
|
468
|
-
'_FillValue': float('nan'),
|
469
|
-
'processing_level': ''
|
470
|
-
}
|
471
|
-
ds['humidity_mixing_ratio'].attrs = {
|
472
|
-
'units': 'kg/kg',
|
473
|
-
'long_name': 'Humidity Mixing Ratio',
|
474
|
-
'_FillValue': float('nan'),
|
475
|
-
'processing_level': ''
|
476
|
-
}
|
477
|
-
ds['air_pressure'].attrs = {
|
478
|
-
'units': 'Pa',
|
479
|
-
'long_name': 'Atmospheric Pressure',
|
480
|
-
'_FillValue': float('nan'),
|
481
|
-
'processing_level': ''
|
482
|
-
}
|
483
|
-
ds['speed_u'].attrs = {
|
484
|
-
'units': 'm/s',
|
485
|
-
'long_name': 'Wind speed in direction of increasing longitude',
|
486
|
-
'_FillValue': float('nan'),
|
487
|
-
'processing_level': ''
|
488
|
-
}
|
489
|
-
ds['speed_v'].attrs = {
|
490
|
-
'units': 'm/s',
|
491
|
-
'long_name': 'Wind speed in direction of increasing latitude',
|
492
|
-
'_FillValue': float('nan'),
|
493
|
-
'processing_level': ''
|
494
|
-
}
|
495
|
-
ds['specific_humidity'].attrs = {
|
496
|
-
'units': 'mg/kg',
|
497
|
-
'long_name': 'Specific Humidity',
|
498
|
-
'_FillValue': float('nan'),
|
499
|
-
'processing_level': '',
|
500
|
-
'Conventions': "CF-1.8, WMO-CF-1.0"
|
501
|
-
}
|
502
|
-
ds['mission_name'].attrs = {
|
503
|
-
'long_name': 'Mission name',
|
504
|
-
'description': 'Which balloon collected the data'
|
505
|
-
}
|
506
|
-
|
507
|
-
# Add Global Attributes synonymous across all UASDC providers
|
508
|
-
if not is_multi_mission:
|
509
|
-
ds.attrs['wmo__cf_profile'] = "FM 303-2024"
|
510
|
-
ds.attrs['featureType'] = "trajectory"
|
511
|
-
|
512
|
-
# Add Global Attributes unique to Provider
|
513
|
-
ds.attrs['platform_name'] = "WindBorne Global Sounding Balloon"
|
514
|
-
if not is_multi_mission:
|
515
|
-
ds.attrs['flight_id'] = mission_name
|
516
|
-
|
517
|
-
ds.attrs['site_terrain_elevation_height'] = 'not applicable'
|
518
|
-
ds.attrs['processing_level'] = "b1"
|
519
|
-
ds.to_netcdf(output_file)
|
520
|
-
|
521
|
-
def format_value(value, fortran_format, align=None):
|
522
|
-
if fortran_format[0] == 'F':
|
523
|
-
length, decimal_places = fortran_format[1:].split('.')
|
524
|
-
if value is None or value == '':
|
525
|
-
return ' ' * int(length)
|
526
|
-
|
527
|
-
# turn into a string of length characters, with decimal_places decimal places
|
528
|
-
return f"{value:>{length}.{decimal_places}f}"[:int(length)]
|
529
|
-
|
530
|
-
if fortran_format[0] == 'I':
|
531
|
-
length = int(fortran_format[1:])
|
532
|
-
if value is None or value == '':
|
533
|
-
return ' ' * length
|
534
|
-
|
535
|
-
return f"{value:>{length}d}"[:int(length)]
|
536
|
-
|
537
|
-
if fortran_format[0] == 'A':
|
538
|
-
length = int(fortran_format[1:])
|
539
|
-
if value is None:
|
540
|
-
return ' ' * length
|
541
|
-
|
542
|
-
if align == 'right':
|
543
|
-
return str(value)[:length].rjust(length, ' ')
|
544
|
-
|
545
|
-
return str(value)[:length].ljust(length, ' ')
|
546
|
-
|
547
|
-
if fortran_format[0] == 'L':
|
548
|
-
if value and value in ['T', 't', 'True', 'true', '1', True]:
|
549
|
-
value = 'T'
|
550
|
-
else:
|
551
|
-
value = 'F'
|
552
|
-
|
553
|
-
length = int(fortran_format[1:])
|
554
|
-
|
555
|
-
return value.rjust(length, ' ')
|
556
|
-
|
557
|
-
raise ValueError(f"Unknown format: {fortran_format}")
|
558
|
-
|
559
|
-
def safe_float(value, default=-888888.0):
|
560
|
-
"""
|
561
|
-
Convert a value to float. If the value is None, empty, or invalid, return the default.
|
562
|
-
"""
|
563
|
-
try:
|
564
|
-
return float(value) if value not in (None, '', 'None') else default
|
565
|
-
except (ValueError, TypeError):
|
566
|
-
return default
|
567
|
-
|
568
|
-
def format_little_r(observations):
|
569
|
-
"""
|
570
|
-
Convert observations to Little_R format.
|
571
|
-
|
572
|
-
Args:
|
573
|
-
observations (list): List of observation dictionaries
|
574
|
-
|
575
|
-
Returns:
|
576
|
-
list: Formatted Little_R records
|
577
|
-
"""
|
578
|
-
little_r_records = []
|
579
|
-
|
580
|
-
for point in observations:
|
581
|
-
# Observation time
|
582
|
-
observation_time = datetime.fromtimestamp(point['timestamp'], tz=timezone.utc)
|
583
|
-
|
584
|
-
# Convert and validate fields
|
585
|
-
pressure_hpa = safe_float(point.get('pressure'))
|
586
|
-
pressure_pa = pressure_hpa * 100.0
|
587
|
-
|
588
|
-
temperature_c = safe_float(point.get('temperature'))
|
589
|
-
temperature_k = temperature_c + 273.15
|
590
|
-
|
591
|
-
altitude = safe_float(point.get('altitude'))
|
592
|
-
humidity = safe_float(point.get('humidity'))
|
593
|
-
speed_u = safe_float(point.get('speed_u'))
|
594
|
-
speed_v = safe_float(point.get('speed_v'))
|
595
|
-
|
596
|
-
# Header formatting
|
597
|
-
header = ''.join([
|
598
|
-
# Latitude: F20.5
|
599
|
-
format_value(point.get('latitude'), 'F20.5'),
|
600
|
-
|
601
|
-
# Longitude: F20.5
|
602
|
-
format_value(point.get('longitude'), 'F20.5'),
|
603
|
-
|
604
|
-
# ID: A40
|
605
|
-
format_value(point.get('id'), 'A40'),
|
606
|
-
|
607
|
-
# Name: A40
|
608
|
-
format_value(point.get('mission_name'), 'A40'),
|
609
|
-
|
610
|
-
# Platform (FM‑Code): A40
|
611
|
-
format_value('FM-35 TEMP', 'A40'),
|
612
|
-
|
613
|
-
# Source: A40
|
614
|
-
format_value('WindBorne', 'A40'),
|
615
|
-
|
616
|
-
# Elevation: F20.5
|
617
|
-
format_value('', 'F20.5'),
|
618
|
-
|
619
|
-
# Valid fields: I10
|
620
|
-
format_value(-888888, 'I10'),
|
621
|
-
|
622
|
-
# Num. errors: I10
|
623
|
-
format_value(0, 'I10'),
|
624
|
-
|
625
|
-
# Num. warnings: I10
|
626
|
-
format_value(0, 'I10'),
|
627
|
-
|
628
|
-
# Sequence number: I10
|
629
|
-
format_value(0, 'I10'),
|
630
|
-
|
631
|
-
# Num. duplicates: I10
|
632
|
-
format_value(0, 'I10'),
|
633
|
-
|
634
|
-
# Is sounding?: L
|
635
|
-
format_value('T', 'L10'),
|
636
|
-
|
637
|
-
# Is bogus?: L
|
638
|
-
format_value('F', 'L10'),
|
639
|
-
|
640
|
-
# Discard?: L
|
641
|
-
format_value('F', 'L10'),
|
642
|
-
|
643
|
-
# Unix time: I10
|
644
|
-
# format_value(point['timestamp'], 'I10'),
|
645
|
-
format_value(-888888, 'I10'),
|
646
|
-
|
647
|
-
# Julian day: I10
|
648
|
-
format_value(-888888, 'I10'),
|
649
|
-
|
650
|
-
# Date: A20 YYYYMMDDhhmmss
|
651
|
-
format_value(observation_time.strftime('%Y%m%d%H%M%S'), 'A20', align='right'),
|
652
|
-
|
653
|
-
# SLP, QC: F13.5, I7
|
654
|
-
format_value(-888888.0, 'F13.5') + format_value(0, 'I7'),
|
655
|
-
|
656
|
-
# Ref Pressure, QC: F13.5, I7
|
657
|
-
format_value(-888888.0, 'F13.5') + format_value(0, 'I7'),
|
658
|
-
|
659
|
-
# Ground Temp, QC: F13.5, I7
|
660
|
-
format_value(-888888.0, 'F13.5') + format_value(0, 'I7'),
|
661
|
-
|
662
|
-
# SST, QC: F13.5, I7
|
663
|
-
format_value(-888888.0, 'F13.5') + format_value(0, 'I7'),
|
664
|
-
|
665
|
-
# SFC Pressure, QC: F13.5, I7
|
666
|
-
format_value(-888888.0, 'F13.5') + format_value(0, 'I7'),
|
667
|
-
|
668
|
-
# Precip, QC: F13.5, I7
|
669
|
-
format_value(-888888.0, 'F13.5') + format_value(0, 'I7'),
|
670
|
-
|
671
|
-
# Daily Max T, QC: F13.5, I7
|
672
|
-
format_value(-888888.0, 'F13.5') + format_value(0, 'I7'),
|
673
|
-
|
674
|
-
# Daily Min T, QC: F13.5, I7
|
675
|
-
format_value(-888888.0, 'F13.5') + format_value(0, 'I7'),
|
676
|
-
|
677
|
-
# Night Min T, QC: F13.5, I7
|
678
|
-
format_value(-888888.0, 'F13.5') + format_value(0, 'I7'),
|
679
|
-
|
680
|
-
# 3hr Pres Change, QC: F13.5, I7
|
681
|
-
format_value(-888888.0, 'F13.5') + format_value(0, 'I7'),
|
682
|
-
|
683
|
-
# 24hr Pres Change, QC: F13.5, I7
|
684
|
-
format_value(-888888.0, 'F13.5') + format_value(0, 'I7'),
|
685
|
-
|
686
|
-
# Cloud cover, QC: F13.5, I7
|
687
|
-
format_value(-888888.0, 'F13.5') + format_value(0, 'I7'),
|
688
|
-
|
689
|
-
# Ceiling, QC: F13.5, I7
|
690
|
-
format_value(-888888.0, 'F13.5') + format_value(0, 'I7'),
|
691
|
-
|
692
|
-
# Precipitable water, QC (see note): F13.5, I7
|
693
|
-
format_value(-888888.0, 'F13.5') + format_value(0, 'I7'),
|
694
|
-
])
|
695
|
-
|
696
|
-
# Data record formatting
|
697
|
-
data_record = ''.join([
|
698
|
-
# Pressure (Pa): F13.5
|
699
|
-
format_value(pressure_pa, 'F13.5'),
|
700
|
-
|
701
|
-
# QC: I7
|
702
|
-
format_value(0, 'I7'),
|
703
|
-
|
704
|
-
# Height (m): F13.5
|
705
|
-
format_value(altitude, 'F13.5'),
|
706
|
-
|
707
|
-
# QC: I7
|
708
|
-
format_value(0, 'I7'),
|
709
|
-
|
710
|
-
# Temperature (K): F13.5
|
711
|
-
format_value(temperature_k, 'F13.5'),
|
712
|
-
|
713
|
-
# QC: I7
|
714
|
-
format_value(0, 'I7'),
|
715
|
-
|
716
|
-
# Dew point (K): F13.5
|
717
|
-
format_value(-888888.0, 'F13.5'),
|
718
|
-
|
719
|
-
# QC: I7
|
720
|
-
format_value(0, 'I7'),
|
721
|
-
|
722
|
-
# Wind speed (m/s): F13.5
|
723
|
-
format_value(-888888.0, 'F13.5'),
|
724
|
-
|
725
|
-
# QC: I7
|
726
|
-
format_value(0, 'I7'),
|
727
|
-
|
728
|
-
# Wind direction (deg): F13.5
|
729
|
-
format_value(-888888.0, 'F13.5'),
|
730
|
-
|
731
|
-
# QC: I7
|
732
|
-
format_value(0, 'I7'),
|
733
|
-
|
734
|
-
# Wind U (m/s): F13.5
|
735
|
-
format_value(speed_u, 'F13.5'),
|
736
|
-
|
737
|
-
# QC: I7
|
738
|
-
format_value(0, 'I7'),
|
739
|
-
|
740
|
-
# Wind V (m/s): F13.5
|
741
|
-
format_value(speed_v, 'F13.5'),
|
742
|
-
|
743
|
-
# QC: I7
|
744
|
-
format_value(0, 'I7'),
|
745
|
-
|
746
|
-
# Relative humidity (%): F13.5
|
747
|
-
format_value(humidity, 'F13.5'),
|
748
|
-
|
749
|
-
# QC: I7
|
750
|
-
format_value(0, 'I7'),
|
751
|
-
|
752
|
-
# Thickness (m): F13.5
|
753
|
-
format_value(-888888.0, 'F13.5'),
|
754
|
-
|
755
|
-
# QC: I7
|
756
|
-
format_value(0, 'I7')
|
757
|
-
])
|
758
|
-
|
759
|
-
# End record and tail record
|
760
|
-
end_record = '-777777.00000 0-777777.00000 0-888888.00000 0-888888.00000 0-888888.00000 0-888888.00000 0-888888.00000 0-888888.00000 0-888888.00000 0-888888.00000 0'
|
761
|
-
tail_record = ' 39 0 0'
|
762
|
-
|
763
|
-
# Combine into a complete record
|
764
|
-
complete_record = '\n'.join([header, data_record, end_record, tail_record, ''])
|
765
|
-
little_r_records.append(complete_record)
|
766
|
-
|
767
|
-
return little_r_records
|
768
|
-
|
769
|
-
# Download and save a file in .npy upon provided an S3 link
|
770
|
-
def download_and_save_npy(save_to_file, response):
|
771
|
-
"""
|
772
|
-
Downloads data from a presigned S3 url contained in a response and saves it as a .npy file.
|
773
|
-
|
774
|
-
Args:
|
775
|
-
save_to_file (str): Path where to save the .npy file
|
776
|
-
response (str): Response that contains the S3 url to download the data from
|
777
|
-
|
778
|
-
Returns:
|
779
|
-
bool: True if successful, False otherwise
|
780
|
-
"""
|
781
|
-
try:
|
782
|
-
# Download the file
|
783
|
-
print(f"Downloading data")
|
784
|
-
# Load the data into memory
|
785
|
-
data = np.load(io.BytesIO(response.content))
|
786
|
-
|
787
|
-
# Save the data
|
788
|
-
np.save(save_to_file, data)
|
789
|
-
print(f"Data Successfully saved to {save_to_file}")
|
790
|
-
return True
|
791
|
-
|
792
|
-
except requests.exceptions.RequestException as e:
|
793
|
-
print(f"Error downloading the file: {e}")
|
794
|
-
return False
|
795
|
-
except Exception as e:
|
796
|
-
print(f"Error processing the file: {e}")
|
797
|
-
return False
|
798
|
-
|
799
|
-
# Download and save a file in .nc upon provided an S3 link
|
800
|
-
def download_and_save_nc(save_to_file, response):
|
801
|
-
"""
|
802
|
-
Downloads data from a presigned S3 url contained in a response and saves it as a .nc file.
|
803
|
-
|
804
|
-
Args:
|
805
|
-
save_to_file (str): Path where to save the .nc file
|
806
|
-
response (str): Response that contains the S3 url to download the data from
|
807
|
-
|
808
|
-
Returns:
|
809
|
-
bool: True if successful, False otherwise
|
810
|
-
"""
|
811
|
-
|
812
|
-
# Add .nc extension if not present
|
813
|
-
if not save_to_file.endswith('.nc'):
|
814
|
-
save_to_file = save_to_file + '.nc'
|
815
|
-
|
816
|
-
try:
|
817
|
-
# Save the content directly to file
|
818
|
-
with open(save_to_file, 'wb') as f:
|
819
|
-
f.write(response.content)
|
820
|
-
print(f"Data Successfully saved to {save_to_file}")
|
821
|
-
return True
|
822
|
-
|
823
|
-
except requests.exceptions.RequestException as e:
|
824
|
-
print(f"Error downloading the file: {e}")
|
825
|
-
return False
|
826
|
-
except Exception as e:
|
827
|
-
print(f"Error processing the file: {e}")
|
828
|
-
return False
|
829
|
-
|
830
|
-
def save_as_geojson(filename, cyclone_data):
    """Convert and save cyclone data as GeoJSON, handling meridian crossing.

    Each cyclone becomes one Feature with a MultiLineString geometry; when
    consecutive points jump more than 180 degrees in longitude the track is
    split at +/-180 so viewers do not draw a line across the whole map.
    """
    features = []
    for cyclone_id, points in cyclone_data.items():
        segments = []          # completed polyline pieces
        segment = []           # piece currently being built

        for point in points:
            lon = float(point['longitude'])
            lat = float(point['latitude'])

            if not segment:
                segment.append([lon, lat])
                continue

            last_lon = segment[-1][0]

            # A jump of more than 180 degrees means we wrapped around the antimeridian
            if abs(lon - last_lon) > 180:
                if last_lon > 0 and lon < 0:
                    # Eastward crossing: close at 180, reopen at -180
                    segment.append([180, lat])
                    segments.append(segment)
                    segment = [[-180, lat], [lon, lat]]
                elif last_lon < 0 and lon > 0:
                    # Westward crossing: close at -180, reopen at 180
                    segment.append([-180, lat])
                    segments.append(segment)
                    segment = [[180, lat], [lon, lat]]
            else:
                segment.append([lon, lat])

        # Flush the trailing piece
        if segment:
            segments.append(segment)

        features.append({
            "type": "Feature",
            "properties": {
                "cyclone_id": cyclone_id,
                "start_time": points[0]['time'],
                "end_time": points[-1]['time'],
            },
            "geometry": {
                "type": "MultiLineString",
                "coordinates": segments,
            },
        })

    collection = {
        "type": "FeatureCollection",
        "features": features,
    }

    with open(filename, 'w', encoding='utf-8') as f:
        json.dump(collection, f, indent=4)
    print("Saved to", filename)
|
894
|
-
|
895
|
-
def save_as_gpx(filename, cyclone_data):
    """Convert and save cyclone data as GPX, handling meridian crossing.

    Each cyclone becomes one <trk>; whenever consecutive points jump more
    than 180 degrees in longitude (an antimeridian crossing) the track is
    split into a new <trkseg> so viewers do not draw a line across the map.

    Args:
        filename (str): Path of the GPX file to write.
        cyclone_data (dict): Mapping of cyclone id -> list of track points,
            each a dict with 'latitude', 'longitude' and 'time' keys.
    """
    gpx = '<?xml version="1.0" encoding="UTF-8"?>\n'
    gpx += '<gpx version="1.1" creator="Windborne" xmlns="http://www.topografix.com/GPX/1/1">\n'

    for cyclone_id, tracks in cyclone_data.items():
        gpx += f' <trk>\n <name>{cyclone_id}</name>\n'

        current_segment = []

        for track in tracks:
            lon = float(track['longitude'])

            if not current_segment:
                current_segment.append(track)
                continue

            prev_lon = float(current_segment[-1]['longitude'])

            # A jump of more than 180 degrees means we wrapped around the antimeridian:
            # flush the running segment and start a new one
            if abs(lon - prev_lon) > 180:
                gpx += _gpx_segment(current_segment)
                current_segment = [track]
            else:
                current_segment.append(track)

        # Flush the trailing segment
        if current_segment:
            gpx += _gpx_segment(current_segment)

        gpx += ' </trk>\n'

    gpx += '</gpx>'

    with open(filename, 'w', encoding='utf-8') as f:
        f.write(gpx)
    # Report the actual output path (was previously a hard-coded "(unknown)" placeholder)
    print(f"Saved to {filename}")


def _gpx_segment(points):
    """Render one <trkseg> element for a list of track point dicts."""
    seg = ' <trkseg>\n'
    for point in points:
        seg += f' <trkpt lat="{point["latitude"]}" lon="{point["longitude"]}">\n'
        seg += f' <time>{point["time"]}</time>\n'
        seg += ' </trkpt>\n'
    seg += ' </trkseg>\n'
    return seg
|
948
|
-
|
949
|
-
def save_as_kml(filename, cyclone_data):
    """Convert and save cyclone data as KML, handling meridian crossing.

    Each cyclone becomes one <Placemark> holding a <MultiGeometry>; whenever
    consecutive points jump more than 180 degrees in longitude (an
    antimeridian crossing) a new <LineString> is started so viewers do not
    draw a line across the map.

    Args:
        filename (str): Path of the KML file to write.
        cyclone_data (dict): Mapping of cyclone id -> list of track points,
            each a dict with 'latitude', 'longitude' and 'time' keys.
    """
    kml = '<?xml version="1.0" encoding="UTF-8"?>\n'
    kml += '<kml xmlns="http://www.opengis.net/kml/2.2">\n<Document>\n'

    for cyclone_id, tracks in cyclone_data.items():
        kml += f' <Placemark>\n <name>{cyclone_id}</name>\n <MultiGeometry>\n'

        current_segment = []

        for track in tracks:
            lon = float(track['longitude'])

            if not current_segment:
                current_segment.append(track)
                continue

            prev_lon = float(current_segment[-1]['longitude'])

            # A jump of more than 180 degrees means we wrapped around the antimeridian:
            # flush the running segment and start a new one
            if abs(lon - prev_lon) > 180:
                kml += _kml_line_string(current_segment)
                current_segment = [track]
            else:
                current_segment.append(track)

        # Flush the trailing segment
        if current_segment:
            kml += _kml_line_string(current_segment)

        kml += ' </MultiGeometry>\n </Placemark>\n'

    kml += '</Document>\n</kml>'

    with open(filename, 'w', encoding='utf-8') as f:
        f.write(kml)
    # Report the actual output path (was previously a hard-coded "(unknown)" placeholder)
    print(f"Saved to {filename}")


def _kml_line_string(points):
    """Render one <LineString> element (lon,lat,alt triplets, altitude 0)."""
    xml = ' <LineString>\n <coordinates>\n'
    coordinates = [f' {track["longitude"]},{track["latitude"]},{0}'
                   for track in points]
    xml += '\n'.join(coordinates)
    xml += '\n </coordinates>\n </LineString>\n'
    return xml
|
998
|
-
|
999
|
-
def save_as_little_r(filename, cyclone_data):
    """Convert and save cyclone data in little_R format.

    Writes one 4-line record per track point: two fixed-width header lines,
    one data line carrying only the position, and a record terminator.
    """
    with open(filename, 'w', encoding='utf-8') as f:
        for cyclone_id, tracks in cyclone_data.items():
            for track in tracks:
                lat = float(track['latitude'])
                lon = float(track['longitude'])
                # Observation timestamp (ISO 8601; trailing 'Z' means UTC)
                dt = datetime.fromisoformat(track['time'].replace('Z', '+00:00'))

                # First header record: position, station name, ids, date fields, cyclone id
                header1 = (
                    f"{lat:20.5f}{lon:20.5f}{'HMS':40}"
                    f"{0:10d}{0:10d}{0:10d}"
                    f"{dt.year:10d}{dt.month:10d}{dt.day:10d}{dt.hour:10d}{0:10d}"
                    f"{0:10d}{0:10.3f}{cyclone_id:40}"
                )
                # Second header record (placeholder values only)
                header2 = f"{0:20.5f}{1:10d}{0:10.3f}"
                # Data record: p, z, t, d, s, d slots; -888888.0 marks missing,
                # only the position is carried here
                data_line = (
                    f"{-888888.0:13.5f}{lat:13.5f}{-888888.0:13.5f}"
                    f"{-888888.0:13.5f}{-888888.0:13.5f}{lon:13.5f}"
                    f"{0:7d}"
                )
                # -777777.0 terminates the record
                record = (header1, header2, data_line, f"{-777777.0:13.5f}")
                f.write('\n'.join(record) + '\n')

    print("Saved to", filename)
|
1029
|
-
|
1030
|
-
def sync_to_s3(data, bucket_name, object_name):
    """Upload ``data`` (stringified) to ``s3://bucket_name/object_name``.

    NOTE(review): assumes AWS credentials are resolvable through boto3's
    default credential chain — confirm against deployment environment.
    """
    boto3.client("s3").put_object(Body=str(data), Bucket=bucket_name, Key=object_name)
|