edmt 1.0.1.dev0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- edmt/__init__.py +78 -0
- edmt/analysis/__init__.py +7 -0
- edmt/analysis/analysis.py +2 -0
- edmt/base/__init__.py +5 -0
- edmt/base/base.py +2 -0
- edmt/contrib/__init__.py +16 -0
- edmt/contrib/utils.py +146 -0
- edmt/conversion/__init__.py +19 -0
- edmt/conversion/computational.py +2 -0
- edmt/conversion/conversion.py +302 -0
- edmt/mapping/__init__.py +7 -0
- edmt/mapping/mapping.py +207 -0
- edmt/mapping/maps.py +77 -0
- edmt/models/__init__.py +15 -0
- edmt/models/drones.py +533 -0
- edmt/plotting/__init__.py +0 -0
- edmt-1.0.1.dev0.dist-info/METADATA +26 -0
- edmt-1.0.1.dev0.dist-info/RECORD +22 -0
- edmt-1.0.1.dev0.dist-info/WHEEL +5 -0
- edmt-1.0.1.dev0.dist-info/entry_points.txt +2 -0
- edmt-1.0.1.dev0.dist-info/licenses/LICENSE +21 -0
- edmt-1.0.1.dev0.dist-info/top_level.txt +1 -0
edmt/models/drones.py
ADDED
|
@@ -0,0 +1,533 @@
|
|
|
1
|
+
from edmt.contrib.utils import (
|
|
2
|
+
format_iso_time,
|
|
3
|
+
append_cols,
|
|
4
|
+
norm_exp
|
|
5
|
+
)
|
|
6
|
+
import logging
|
|
7
|
+
logger = logging.getLogger(__name__)
|
|
8
|
+
|
|
9
|
+
from typing import Union
|
|
10
|
+
import base64
|
|
11
|
+
import http.client
|
|
12
|
+
import json
|
|
13
|
+
import requests
|
|
14
|
+
|
|
15
|
+
import pandas as pd
|
|
16
|
+
import geopandas as gpd
|
|
17
|
+
from shapely.geometry import LineString, Point
|
|
18
|
+
|
|
19
|
+
from io import StringIO
|
|
20
|
+
from tqdm import tqdm
|
|
21
|
+
from typing import Union, Optional
|
|
22
|
+
|
|
23
|
+
from pyproj import Geod
|
|
24
|
+
geod = Geod(ellps="WGS84")
|
|
25
|
+
|
|
26
|
+
|
|
27
|
+
class Airdata:
    """Client for the Airdata UAV API (api.airdata.com).

    Authenticates on construction using HTTP Basic auth -- the API key is
    sent as the username with an empty password -- and exposes helpers that
    return pandas DataFrames for flights, flight groups, drones, batteries
    and pilots.

    Parameters:
        api_key (str): Airdata API key.

    Raises:
        ValueError: If authentication fails during construction.
    """

    def __init__(self, api_key: str):
        self.api_key = api_key
        self.base_url = "api.airdata.com"
        self.authenticated = False
        self.auth_header = self._get_auth_header()

        self.authenticate(validate=True)

    def _get_auth_header(self) -> dict:
        """Build the HTTP Basic auth header from ``self.api_key``."""
        # Airdata expects "<api_key>:" (key as username, empty password),
        # base64-encoded per the Basic scheme.
        key_with_colon = self.api_key + ":"
        encoded_key = base64.b64encode(key_with_colon.encode()).decode("utf-8")
        return {
            "Authorization": f"Basic {encoded_key}"
        }

    def authenticate(self, validate=True):
        """
        Authenticates with the API by calling /version or /flights.

        Sets ``self.authenticated`` to True on success.

        Parameters:
            validate (bool): If True, raise on failure instead of only
                printing a diagnostic.

        Raises:
            ValueError: If the server rejects the credentials and
                ``validate`` is True.
        """
        conn = http.client.HTTPSConnection(self.base_url)
        payload = ''

        try:
            conn.request("GET", "/version", payload, self.auth_header)
            res = conn.getresponse()

            if res.status == 200:
                self.authenticated = True
                print("✅ Authentication successful.")
                return

            if res.status == 404:
                # /version is not exposed on every deployment; fall back to
                # a plain call against /flights.
                conn.close()
                conn = http.client.HTTPSConnection(self.base_url)
                conn.request("GET", "/flights", payload, self.auth_header)
                res = conn.getresponse()

                if res.status == 200:
                    self.authenticated = True
                    print("✅ Authentication successful.")
                    return

            # Bug fix: any remaining non-200 status (e.g. 401/403, not just a
            # failed 404 fallback) is now reported; previously those were
            # silently ignored and no error was raised even with validate=True.
            print(f"❌ Authentication failed. Status code: {res.status}")
            print(f"Response: {res.read().decode('utf-8')[:200]}")
            if validate:
                raise ValueError("Authentication failed: Invalid API key or permissions.")

        except (OSError, http.client.HTTPException) as e:
            # Narrowed from a bare ``except Exception`` so the ValueError
            # raised above is not swallowed and mislabelled a network error.
            print(f"⚠️ Network error during authentication: {e}")
            if validate:
                raise
        finally:
            # Bug fix: the connection previously leaked on every call.
            conn.close()

    def get_flights(
        self, since: str = None, until: str = None, limit: Union[int, None] = None,
        created_after: Optional[str] = None, battery_ids: Optional[Union[str, list]] = None,
        pilot_ids: Optional[Union[str, list]] = None, location: Optional[list] = None,
    ) -> pd.DataFrame:
        """
        Fetch flight data from the Airdata API based on the provided query parameters.

        Parameters:
            since (str, optional): ISO-8601 lower bound on flight time.
            until (str, optional): ISO-8601 upper bound on flight time.
            limit (int, optional): Maximum number of records to return.
            created_after (str, optional): Only flights created after this time.
            battery_ids (str | list, optional): A single battery id or a list of ids.
            pilot_ids (str | list, optional): A single pilot id or a list of ids.
            location (list, optional): ``[latitude, longitude]`` pair to filter by.

        Returns:
            pd.DataFrame: A DataFrame containing the retrieved flight data.
                Returns None when not authenticated or when the request fails.

        Raises:
            ValueError:
                If `location` is not a list of exactly two numeric values (latitude and longitude).
        """
        if location is not None:
            if not isinstance(location, list) or len(location) != 2 or not all(isinstance(x, (int, float)) for x in location):
                raise ValueError("Location must be a list of exactly two numbers: [latitude, longitude]")

        # Bug fix: a bare string used to be character-joined by ",".join()
        # (e.g. "abc" -> "a,b,c"); wrap single ids in a list first.
        if isinstance(battery_ids, str):
            battery_ids = [battery_ids]
        if isinstance(pilot_ids, str):
            pilot_ids = [pilot_ids]

        # NOTE(review): the "T" -> "+" substitution presumably matches the
        # server's expected timestamp format -- confirm against the API docs.
        formatted_since = format_iso_time(since).replace("T", "+") if since else None
        formatted_until = format_iso_time(until).replace("T", "+") if until else None
        formatted_created_after = format_iso_time(created_after).replace("T", "+") if created_after else None

        params = {
            "start": formatted_since,
            "end": formatted_until,
            "detail_level": "comprehensive",
            "created_after": formatted_created_after,
            "battery_ids": ",".join(battery_ids) if battery_ids else None,
            "pilot_ids": ",".join(pilot_ids) if pilot_ids else None,
            "latitude": location[0] if location else None,
            "longitude": location[1] if location else None,
            "limit": limit
        }

        # Drop unset parameters so they are omitted from the query string.
        params = {k: v for k, v in params.items() if v is not None}

        endpoint = "/flights?" + "&".join([f"{k}={v}" for k, v in params.items()])

        if not self.authenticated:
            print("Cannot fetch flights: Not authenticated.")
            return None

        conn = None
        try:
            conn = http.client.HTTPSConnection(self.base_url)
            conn.request("GET", endpoint, headers=self.auth_header)
            res = conn.getresponse()

            if res.status == 200:
                data = json.loads(res.read().decode("utf-8"))
                if "data" in data:
                    normalized_data = list(tqdm(data["data"], desc="📥 Downloading"))
                    df = pd.json_normalize(normalized_data)
                    # Drop link columns; errors='ignore' tolerates their absence.
                    df = df.drop(
                        columns=[
                            "displayLink", "kmlLink",
                            "gpxLink", "originalLink",
                            "participants.object"
                        ],
                        errors='ignore'
                    )
                else:
                    df = pd.DataFrame(data)
                return df
            else:
                print(f"Failed to fetch flights. Status code: {res.status}")
                print(f"Response: {res.read().decode('utf-8')[:500]}")
                return None
        except Exception as e:
            print(f"Error fetching flights: {e}")
            return None
        finally:
            # Bug fix: the connection previously leaked on every call.
            if conn is not None:
                conn.close()

    def AccessGroups(self, endpoint: str) -> Optional[pd.DataFrame]:
        """
        GET ``endpoint`` and return the normalized payload as a DataFrame.

        When the payload carries a "data" key it is normalized and its nested
        "flights.data" records are expanded via ``norm_exp``.

        Parameters:
            endpoint (str): Full API path including any query string.

        Returns:
            Optional[pd.DataFrame]: The result frame, or None on failure.
        """
        if not self.authenticated:
            logger.warning(f"Cannot fetch {endpoint}: Not authenticated.")
            return None

        conn = None
        try:
            conn = http.client.HTTPSConnection(self.base_url)
            # Consistency fix: reuse the header computed in __init__ instead
            # of re-encoding the key on every call.
            conn.request("GET", endpoint, headers=self.auth_header)
            res = conn.getresponse()

            if res.status == 200:
                data = json.loads(res.read().decode("utf-8"))
                if "data" in data:
                    normalized_data = list(tqdm(data["data"], desc="📥 Downloading"))
                    normalized = pd.json_normalize(normalized_data)
                    df = norm_exp(normalized, "flights.data")
                else:
                    df = pd.DataFrame(data)
                return df
            else:
                logger.warning(f"Failed to fetch flights. Status code: {res.status}")
                logger.warning(f"Response: {res.read().decode('utf-8')[:500]}")
                return None
        except Exception as e:
            logger.warning(f"Error fetching flights: {e}")
            return None
        finally:
            if conn is not None:
                conn.close()

    def get_flightgroups(
        self,
        sort_by: str = None,
        ascending: bool = True
    ) -> pd.DataFrame:
        """
        Fetch Flight Groups data from the Airdata API based on query parameters.

        Parameters:
            sort_by (str, optional): Field to sort by. Valid values are 'title' and 'created'.
                If None, no sorting is applied.
            ascending (bool): Whether to sort in ascending order. Defaults to True.
                Ignored unless ``sort_by`` is given.

        Returns:
            pd.DataFrame: DataFrame containing retrieved flight group data.
                Returns empty DataFrame if request fails or no data found.

        Raises:
            ValueError: If ``sort_by`` is given but not 'title' or 'created'.
        """
        params = {}
        if sort_by:
            if sort_by not in ["title", "created"]:
                raise ValueError("Invalid sort_by value. Must be 'title' or 'created'.")
            params["sort_by"] = sort_by
            params["sort_dir"] = "asc" if ascending else "desc"
        endpoint = "/flightgroups?" + "&".join([f"{k}={v}" for k, v in params.items()])

        df = self.AccessGroups(endpoint=endpoint)
        return df if df is not None else pd.DataFrame()

    def AccessItems(self, endpoint: str) -> Optional[pd.DataFrame]:
        """
        Sends a GET request to the specified API endpoint and returns normalized data as a DataFrame.

        Parameters:
            endpoint (str): API path relative to the host root (no leading slash).

        Returns:
            Optional[pd.DataFrame]: A DataFrame containing the retrieved data, or None if the request fails.
        """
        if not self.authenticated:
            logger.warning("Cannot fetch data: Not authenticated.")
            return None

        conn = None
        try:
            conn = http.client.HTTPSConnection(self.base_url)
            conn.request("GET", f"/{endpoint}", headers=self.auth_header)
            res = conn.getresponse()

            if res.status == 200:
                raw_data = res.read().decode("utf-8")
                try:
                    data = json.loads(raw_data)
                except json.JSONDecodeError as e:
                    logger.warning(f"Failed to decode JSON response: {e}")
                    return None

                if isinstance(data, list):
                    normalized_data = list(tqdm(data, desc="📥 Downloading"))
                else:
                    logger.info("Response data is not a list; returning raw.")
                    normalized_data = data

                if not isinstance(normalized_data, (list, dict)):
                    logger.warning("Data is not a valid type for json_normalize.")
                    return None

                df = pd.json_normalize(normalized_data)
                return df
            else:
                logger.warning(f"Failed to fetch '{endpoint}'.")
                return None
        except Exception as e:
            logger.warning(f"Network error while fetching '{endpoint}': {e}")
            return None
        finally:
            # Single close replaces the original's redundant nested
            # double-close (inner and outer finally both closed the socket).
            if conn is not None:
                conn.close()

    def get_drones(self) -> pd.DataFrame:
        """
        Fetch drone data from the Airdata API.

        Returns:
            pd.DataFrame: A DataFrame containing the retrieved drone data.
                If the request fails or no data is found, returns an empty DataFrame.
        """
        df = self.AccessItems(endpoint="drones")
        return df if df is not None else pd.DataFrame()

    def get_batteries(self) -> pd.DataFrame:
        """
        Fetch batteries data from the Airdata API.

        Returns:
            pd.DataFrame: A DataFrame containing the retrieved battery data.
                If the request fails or no data is found, returns an empty DataFrame.
        """
        df = self.AccessItems(endpoint="batteries")
        return df if df is not None else pd.DataFrame()

    def get_pilots(self) -> pd.DataFrame:
        """
        Fetch pilots data from the Airdata API.

        Returns:
            pd.DataFrame: A DataFrame containing the retrieved pilot data.
                If the request fails or no data is found, returns an empty DataFrame.
        """
        df = self.AccessItems(endpoint="pilots")
        return df if df is not None else pd.DataFrame()
|
|
301
|
+
|
|
302
|
+
|
|
303
|
+
def airPoint(df: pd.DataFrame, filter_ids: Optional[list] = None, log_errors: bool = True) -> gpd.GeoDataFrame:
    """
    Download each row's CSV telemetry and combine it with the row's metadata.

    Parameters:
        df (pd.DataFrame):
            A DataFrame containing at least two columns:
            - 'id': Unique identifier for each row.
            - 'csvLink': URL pointing to a CSV file.
            A 'time' column, when present, is parsed into a 'checktime' column.
        filter_ids (list or None):
            Optional list of IDs to restrict processing to specific rows.
        log_errors (bool):
            If True, prints errors encountered during CSV fetching or parsing. Defaults to True.

    Returns:
        pd.DataFrame: A DataFrame combining metadata with CSV content, with
            'participants.data' and 'batteries.data' expanded into separate
            columns when possible.
            Returns an empty DataFrame if no valid data was retrieved.

    Raises:
        ValueError:
            If required columns ('id', 'csvLink') are missing from the input DataFrame.
    """
    df = df.copy()
    # Bug fix: tolerate a missing 'time' column instead of raising KeyError
    # before the explicit column validation below.
    if 'time' in df.columns:
        df.loc[:, 'checktime'] = pd.to_datetime(df['time'], errors="coerce")
    else:
        df.loc[:, 'checktime'] = pd.NaT

    required_cols = {'id', 'csvLink'}
    if not required_cols.issubset(df.columns):
        raise ValueError(f"Input DataFrame must contain columns: {required_cols}")

    if filter_ids is not None:
        df = df[df['id'].isin(filter_ids)]

    all_combined_rows = []

    for _, row in tqdm(df.iterrows(), total=len(df), desc="🔄 Processing"):
        csv_url = row['csvLink']

        try:
            # Bug fix: a timeout prevents a single dead link from hanging
            # the whole batch forever.
            response = requests.get(csv_url, timeout=60)
            response.raise_for_status()
            csv_data = pd.read_csv(StringIO(response.text))
            # Repeat the metadata row once per CSV record, then join columns.
            metadata_repeated = pd.DataFrame([row] * len(csv_data), index=csv_data.index)
            combined = pd.concat([metadata_repeated, csv_data], axis=1)
            all_combined_rows.append(combined)

        except requests.RequestException as e:
            if log_errors:
                print(f"Network error for id {row['id']}: {e}")
        except pd.errors.ParserError as e:
            if log_errors:
                print(f"Parsing error for CSV at id {row['id']}: {e}")
        except Exception as e:
            if log_errors:
                print(f"Unexpected error for id {row['id']}: {e}")

    if not all_combined_rows:
        return pd.DataFrame()

    df_ = pd.concat(all_combined_rows, ignore_index=True)
    cols = ["participants.data", "batteries.data"]
    dfs_to_join = []
    for col in cols:
        try:
            expanded = pd.json_normalize(df_[col].explode(ignore_index=True))
            expanded.columns = [f"{col}_{subcol}" for subcol in expanded.columns]
            dfs_to_join.append(expanded)
        except Exception as e:
            if log_errors:
                print(f"Error expanding column '{col}': {e}")

    if dfs_to_join:
        expanded_df = pd.concat(dfs_to_join, axis=1)
        result = df_.join(expanded_df).drop(columns=cols)
    else:
        # Bug fix: the original fell off the end here and returned None
        # when neither nested column could be expanded, contradicting the
        # documented contract; return the combined frame instead.
        result = df_.drop(columns=cols, errors='ignore')
    return append_cols(result, cols="checktime")
|
|
376
|
+
|
|
377
|
+
|
|
378
|
+
def df_to_gdf(df: pd.DataFrame, lon_col: str = 'longitude', lat_col: str = 'latitude', crs: int = 4326) -> gpd.GeoDataFrame:
    """
    Convert a pandas DataFrame with latitude and longitude columns into a GeoDataFrame
    with point geometries.

    Parameters:
        df (pd.DataFrame):
            Input DataFrame containing geographic coordinates.
        lon_col (str):
            Name of the column in `df` that contains longitude values. Default is `'longitude'`.
        lat_col (str):
            Name of the column in `df` that contains latitude values. Default is `'latitude'`.
        crs (int):
            Coordinate Reference System (CRS) to assign to the resulting GeoDataFrame.
            Defaults to 4326 (WGS84 - standard latitude/longitude).

    Returns:
        gpd.GeoDataFrame:
            A GeoDataFrame with point geometries created from the latitude and longitude columns.
            The original DataFrame columns are preserved.

    Raises:
        KeyError:
            If either of the specified latitude or longitude columns is not present in the DataFrame.
        ValueError:
            If the CRS is invalid or the GeoDataFrame cannot be constructed.
    """
    if lat_col not in df.columns or lon_col not in df.columns:
        missing = [col for col in [lat_col, lon_col] if col not in df.columns]
        raise KeyError(f"Missing required column(s): {missing}")

    try:
        gdf = gpd.GeoDataFrame(
            df,
            geometry=gpd.points_from_xy(df[lon_col], df[lat_col]),
            crs=crs
        )
    except Exception as e:
        # Bug fix: chain the original exception (``from e``) so the
        # underlying cause is preserved in the traceback.
        raise ValueError(f"Failed to create GeoDataFrame: {e}") from e

    return gdf
|
|
419
|
+
|
|
420
|
+
|
|
421
|
+
def airLine(gdf: gpd.GeoDataFrame) -> gpd.GeoDataFrame:
    """
    Converts a GeoDataFrame with point geometries into a GeoDataFrame with
    LineString geometries for each unique 'id', ordered by 'time(millisecond)'.

    Adds 'airline_distance_m' (total geodesic length of the line, metres,
    on the module-level WGS84 geoid) and 'airline_time' (the flight's
    maximum 'time(millisecond)' value).

    Args:
        gdf: The input GeoDataFrame with 'id', 'time(millisecond)', and 'geometry'
            (Point) columns.

    Returns:
        A new GeoDataFrame where each row represents a unique 'id' and its
        corresponding LineString geometry and total distance in meters.
        Flights with fewer than two points are skipped; an empty input (or
        one with no multi-point flights) yields an empty GeoDataFrame.
    """
    # Drop placeholder (0, 0) fixes before building lines.
    gdf = gdf[gdf['geometry'] != Point(0, 0)]

    # Bug fix: pd.concat([]) raises "No objects to concatenate" on an empty
    # input; return an empty frame early instead.
    if gdf.empty:
        return gpd.GeoDataFrame(columns=['id', 'geometry', 'airline_distance_m'],
                                geometry='geometry', crs="EPSG:4326")

    grouped = []
    for flight_id in tqdm(gdf['id'].unique(), desc="🔄 Processing flights"):
        flight_data = gdf[gdf['id'] == flight_id].sort_values(by='time(millisecond)')
        grouped.append(flight_data)

    gdf_sorted = pd.concat(grouped)

    def compute_distance(group):
        # Sum geodesic leg lengths between consecutive points; (None, None)
        # signals a flight too short to form a line.
        coords = [(p.x, p.y) for p in group.geometry.values]
        if len(coords) < 2:
            return None, None
        linestring = LineString(coords)
        total_distance = 0
        for i in range(len(coords) - 1):
            lon1, lat1 = coords[i]
            lon2, lat2 = coords[i + 1]
            _, _, dist = geod.inv(lon1, lat1, lon2, lat2)
            total_distance += dist
        return linestring, total_distance

    results = []

    for flight_id, group in gdf_sorted.groupby('id'):
        linestring, distance = compute_distance(group)
        if linestring is not None:
            # First row's attributes stand in for the whole flight.
            metadata = group.iloc[0].drop(['geometry', 'time(millisecond)']).to_dict()
            metadata['airline_time'] = group['time(millisecond)'].max()
            results.append({
                'id': flight_id,
                'geometry': linestring,
                'airline_distance_m': distance,
                **metadata
            })

    # Bug fix: guard against every flight having fewer than two usable
    # points; the GeoDataFrame constructor cannot take geometry='geometry'
    # from an empty record list.
    if not results:
        return gpd.GeoDataFrame(columns=['id', 'geometry', 'airline_distance_m'],
                                geometry='geometry', crs="EPSG:4326")

    line_gdf = gpd.GeoDataFrame(results, geometry='geometry', crs="EPSG:4326")

    return append_cols(line_gdf, cols=['checktime', 'airline_time', 'airline_distance_m', 'geometry'])
|
|
475
|
+
|
|
476
|
+
|
|
477
|
+
def airSegment(gdf: gpd.GeoDataFrame) -> gpd.GeoDataFrame:
    """
    Break each flight's point track into per-leg LineString segments.

    Points are grouped by 'id' and ordered by 'time(millisecond)'; every
    pair of consecutive points becomes one row carrying the segment's start
    and end times, its duration in milliseconds, and its geodesic length in
    metres, alongside the starting point's remaining attributes.

    Args:
        gdf: Input GeoDataFrame with 'id', 'time(millisecond)', and
            'geometry' (Point) columns.

    Returns:
        A GeoDataFrame of consecutive-point segments; if no segment can be
        built, the (filtered) input is returned as a GeoDataFrame.
    """
    records = []

    # Placeholder (0, 0) fixes are excluded before segmenting.
    gdf = gdf[gdf['geometry'] != Point(0, 0)]

    for fid in tqdm(gdf['id'].unique(), desc="🔄 Processing segments"):
        track = gdf[gdf['id'] == fid].sort_values(by='time(millisecond)').reset_index(drop=True)

        for idx in range(len(track) - 1):
            start_pt = track.loc[idx, 'geometry']
            end_pt = track.loc[idx + 1, 'geometry']

            # Geodesic length of this leg on the module-level WGS84 geoid.
            _, _, length_m = geod.inv(start_pt.x, start_pt.y, end_pt.x, end_pt.y)

            t_start = track.loc[idx, 'time(millisecond)']
            t_end = track.loc[idx + 1, 'time(millisecond)']

            # Carry the starting point's remaining attributes onto the segment.
            extra = track.loc[idx].drop(['geometry', 'time(millisecond)'])

            records.append({
                'id': fid,
                'segment_start_time': t_start,
                'segment_end_time': t_end,
                'segment_duration_ms': t_end - t_start,
                'segment_distance_m': length_m,
                'geometry': LineString([start_pt, end_pt]),
                **extra.to_dict()
            })

    if not records:
        return gpd.GeoDataFrame(gdf, geometry='geometry')

    airSeg = gpd.GeoDataFrame(records, geometry='geometry')

    return append_cols(airSeg, cols=['checktime', 'segment_start_time', 'segment_end_time', 'segment_duration_ms', 'segment_distance_m', 'geometry'])
|
|
532
|
+
|
|
533
|
+
|
|
File without changes
|
|
@@ -0,0 +1,26 @@
|
|
|
1
|
+
Metadata-Version: 2.4
|
|
2
|
+
Name: edmt
|
|
3
|
+
Version: 1.0.1.dev0
|
|
4
|
+
Summary: Environmental Data Management Toolbox
|
|
5
|
+
Author-email: "Odero, Kuloba & musasia" <franodex10@gmail.com>
|
|
6
|
+
License-Expression: MIT
|
|
7
|
+
Project-URL: Homepage, https://github.com/envqwewdmt/EDMT
|
|
8
|
+
Classifier: Programming Language :: Python :: 3
|
|
9
|
+
Classifier: Operating System :: OS Independent
|
|
10
|
+
Requires-Python: >=3.9
|
|
11
|
+
Description-Content-Type: text/markdown
|
|
12
|
+
License-File: LICENSE
|
|
13
|
+
Requires-Dist: contextily>=1.4.0
|
|
14
|
+
Requires-Dist: contourpy>=1.2.1
|
|
15
|
+
Requires-Dist: fiona==1.9.6
|
|
16
|
+
Requires-Dist: folium>=0.18.0
|
|
17
|
+
Requires-Dist: geopandas>=0.12.2
|
|
18
|
+
Requires-Dist: mapclassify>=2.8.0
|
|
19
|
+
Requires-Dist: plotly>=5.24.1
|
|
20
|
+
Requires-Dist: seaborn>=0.13.2
|
|
21
|
+
Requires-Dist: tqdm>=4
|
|
22
|
+
Dynamic: license-file
|
|
23
|
+
|
|
24
|
+
# edmt
|
|
25
|
+
|
|
26
|
+
### Documentation
|
|
@@ -0,0 +1,22 @@
|
|
|
1
|
+
edmt/__init__.py,sha256=eECJ-QZP3Mr9jUT8ANR-ThdXac4TVSy6xLsnkU0Lo-0,1728
|
|
2
|
+
edmt/analysis/__init__.py,sha256=mhvfDwLSDa2ygCQOME3tmzWP9Rx_NN0ERge57o1iVio,68
|
|
3
|
+
edmt/analysis/analysis.py,sha256=3Extd39r_aBULtZX6yNdnRaKosEegfJ_oGdgAuJXkFY,58
|
|
4
|
+
edmt/base/__init__.py,sha256=-Y8RqTJSLpGTXu2HSeBkoTtvV0tO0Dc34WKqHtqXH8Y,51
|
|
5
|
+
edmt/base/base.py,sha256=QvxO5MF5iu-6ZE5bSAKubUGw16zqG6GEG1SRn4dLSPA,46
|
|
6
|
+
edmt/contrib/__init__.py,sha256=o7ht-5qEsZ-tJG--jBtRZ8tJHIpPpU2VyCQS6gnfpII,244
|
|
7
|
+
edmt/contrib/utils.py,sha256=6WqSHrge7wPsoX_-0tmkrA3fQLFs5ECN6SVnAdBHS30,4744
|
|
8
|
+
edmt/conversion/__init__.py,sha256=l3wsrd0EbNhSkvy0SK6xBt30ot6BGhwA3zn0jm0CAFU,309
|
|
9
|
+
edmt/conversion/computational.py,sha256=eye7FMyDF71CBN04INhnPPGNbhfL2WxvtqwdZAM8oFc,67
|
|
10
|
+
edmt/conversion/conversion.py,sha256=ro57NT11KKR-zzRBAkK_cU39EIbDRgf9UQBHiLPnmLE,9869
|
|
11
|
+
edmt/mapping/__init__.py,sha256=9CjbxIFYQQngvujiGwPFTqUwu8g8qDhMhqkUkAgWTE8,62
|
|
12
|
+
edmt/mapping/mapping.py,sha256=Y1RChk8WCbkWs_0bqGSPwoeTxnaC79CFgd4y3qe9Zxg,5541
|
|
13
|
+
edmt/mapping/maps.py,sha256=onVMqLL_OYgWzJeEl41dYTvLrgjhZ8kOYAbKb89Oh20,2049
|
|
14
|
+
edmt/models/__init__.py,sha256=QAETEifV8ZFiws7RkzwSRBqKa3EUJbeZuG0L8TVXYBM,188
|
|
15
|
+
edmt/models/drones.py,sha256=y8bD0272C-Wj_KnCgYjwi_jPwRDkWg2-UFEAtPwSpYg,20167
|
|
16
|
+
edmt/plotting/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
17
|
+
edmt-1.0.1.dev0.dist-info/licenses/LICENSE,sha256=3tzwDPQi5KNmLWejlW-s4kataXNRxw5Geynh3dIQn0U,1084
|
|
18
|
+
edmt-1.0.1.dev0.dist-info/METADATA,sha256=I0pH3OJTp2JPh3GLoZcopO5w0nLcc6vV_kG09p4gnio,748
|
|
19
|
+
edmt-1.0.1.dev0.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
|
|
20
|
+
edmt-1.0.1.dev0.dist-info/entry_points.txt,sha256=R7DEZ46NrnDge45HAPCdEIKCxN46b_g08W3olu_-gVo,60
|
|
21
|
+
edmt-1.0.1.dev0.dist-info/top_level.txt,sha256=zaFhzN9t8gAUWf8i7h9_5MfSY4En6x6JZUejUOWh6h8,5
|
|
22
|
+
edmt-1.0.1.dev0.dist-info/RECORD,,
|
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
MIT License
|
|
2
|
+
|
|
3
|
+
Copyright (c) 2024 envdmt
|
|
4
|
+
|
|
5
|
+
Permission is hereby granted, free of charge, to any person obtaining a copy
|
|
6
|
+
of this software and associated documentation files (the "Software"), to deal
|
|
7
|
+
in the Software without restriction, including without limitation the rights
|
|
8
|
+
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
|
9
|
+
copies of the Software, and to permit persons to whom the Software is
|
|
10
|
+
furnished to do so, subject to the following conditions:
|
|
11
|
+
|
|
12
|
+
The above copyright notice and this permission notice shall be included in all
|
|
13
|
+
copies or substantial portions of the Software.
|
|
14
|
+
|
|
15
|
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
|
16
|
+
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
|
17
|
+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
|
18
|
+
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
|
19
|
+
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
|
20
|
+
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
|
21
|
+
SOFTWARE.
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
edmt
|