geo-activity-playground 1.1.0__py3-none-any.whl → 1.3.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (44) hide show
  1. geo_activity_playground/alembic/versions/85fe0348e8a2_add_time_series_uuid_field.py +28 -0
  2. geo_activity_playground/alembic/versions/f2f50843be2d_make_all_fields_in_activity_nullable.py +34 -0
  3. geo_activity_playground/core/coordinates.py +12 -1
  4. geo_activity_playground/core/copernicus_dem.py +95 -0
  5. geo_activity_playground/core/datamodel.py +78 -22
  6. geo_activity_playground/core/enrichment.py +226 -164
  7. geo_activity_playground/core/paths.py +8 -0
  8. geo_activity_playground/core/test_pandas_timezone.py +36 -0
  9. geo_activity_playground/core/test_time_zone_from_location.py +7 -0
  10. geo_activity_playground/core/test_time_zone_import.py +93 -0
  11. geo_activity_playground/core/test_timezone_sqlalchemy.py +44 -0
  12. geo_activity_playground/core/tiles.py +4 -1
  13. geo_activity_playground/core/time_conversion.py +42 -14
  14. geo_activity_playground/explorer/tile_visits.py +7 -4
  15. geo_activity_playground/importers/activity_parsers.py +21 -22
  16. geo_activity_playground/importers/directory.py +62 -108
  17. geo_activity_playground/importers/strava_api.py +53 -36
  18. geo_activity_playground/importers/strava_checkout.py +30 -56
  19. geo_activity_playground/webui/app.py +40 -2
  20. geo_activity_playground/webui/blueprints/activity_blueprint.py +13 -11
  21. geo_activity_playground/webui/blueprints/entry_views.py +1 -1
  22. geo_activity_playground/webui/blueprints/explorer_blueprint.py +1 -7
  23. geo_activity_playground/webui/blueprints/heatmap_blueprint.py +2 -2
  24. geo_activity_playground/webui/blueprints/photo_blueprint.py +65 -56
  25. geo_activity_playground/webui/blueprints/settings_blueprint.py +20 -14
  26. geo_activity_playground/webui/blueprints/summary_blueprint.py +6 -6
  27. geo_activity_playground/webui/blueprints/time_zone_fixer_blueprint.py +69 -0
  28. geo_activity_playground/webui/blueprints/upload_blueprint.py +3 -16
  29. geo_activity_playground/webui/columns.py +9 -1
  30. geo_activity_playground/webui/templates/activity/show.html.j2 +3 -1
  31. geo_activity_playground/webui/templates/equipment/index.html.j2 +3 -3
  32. geo_activity_playground/webui/templates/hall_of_fame/index.html.j2 +2 -3
  33. geo_activity_playground/webui/templates/home.html.j2 +4 -10
  34. geo_activity_playground/webui/templates/page.html.j2 +2 -0
  35. geo_activity_playground/webui/templates/photo/new.html.j2 +1 -1
  36. geo_activity_playground/webui/templates/settings/index.html.j2 +9 -0
  37. geo_activity_playground/webui/templates/settings/tile-source.html.j2 +33 -0
  38. geo_activity_playground/webui/templates/time_zone_fixer/index.html.j2 +31 -0
  39. {geo_activity_playground-1.1.0.dist-info → geo_activity_playground-1.3.0.dist-info}/METADATA +7 -3
  40. {geo_activity_playground-1.1.0.dist-info → geo_activity_playground-1.3.0.dist-info}/RECORD +43 -34
  41. geo_activity_playground/core/test_time_conversion.py +0 -37
  42. {geo_activity_playground-1.1.0.dist-info → geo_activity_playground-1.3.0.dist-info}/LICENSE +0 -0
  43. {geo_activity_playground-1.1.0.dist-info → geo_activity_playground-1.3.0.dist-info}/WHEEL +0 -0
  44. {geo_activity_playground-1.1.0.dist-info → geo_activity_playground-1.3.0.dist-info}/entry_points.txt +0 -0
@@ -1,212 +1,274 @@
1
1
  import datetime
2
2
  import logging
3
- import pickle
4
- from typing import Optional
3
+ import zoneinfo
4
+ from typing import Callable
5
5
 
6
6
  import numpy as np
7
7
  import pandas as pd
8
- import sqlalchemy
9
- from tqdm import tqdm
10
8
 
11
9
  from .config import Config
12
10
  from .coordinates import get_distance
11
+ from .copernicus_dem import get_elevation
13
12
  from .datamodel import Activity
14
- from .datamodel import ActivityMeta
15
13
  from .datamodel import DB
16
- from .datamodel import get_or_make_equipment
17
- from .datamodel import get_or_make_kind
18
14
  from .missing_values import some
19
- from .paths import activity_extracted_meta_dir
20
- from .paths import activity_extracted_time_series_dir
21
- from .paths import TIME_SERIES_DIR
22
15
  from .tiles import compute_tile_float
23
- from .time_conversion import convert_to_datetime_ns
16
+ from .time_conversion import get_country_timezone
24
17
 
25
18
  logger = logging.getLogger(__name__)
26
19
 
27
20
 
28
- def populate_database_from_extracted(config: Config) -> None:
29
- available_ids = {
30
- int(path.stem) for path in activity_extracted_meta_dir().glob("*.pickle")
31
- }
32
- present_ids = {
33
- int(elem)
34
- for elem in DB.session.scalars(sqlalchemy.select(Activity.upstream_id)).all()
35
- if elem
36
- }
37
- new_ids = available_ids - present_ids
38
-
39
- for upstream_id in tqdm(new_ids, desc="Importing new activities into database"):
40
- extracted_metadata_path = (
41
- activity_extracted_meta_dir() / f"{upstream_id}.pickle"
42
- )
43
- with open(extracted_metadata_path, "rb") as f:
44
- extracted_metadata: ActivityMeta = pickle.load(f)
21
def enrichment_set_timezone(
    activity: "Activity", time_series: pd.DataFrame, config: "Config"
) -> bool:
    """Fill in the IANA time zone and start country from the first track point.

    Returns True when the activity was modified, False when both fields were
    already populated.
    """
    assert (
        len(time_series) > 0
    ), f"You cannot import an activity without points. {activity=}"
    # Both fields already set — nothing to do.
    if activity.iana_timezone is not None and activity.start_country is not None:
        return False
    first_point = time_series[["latitude", "longitude"]].iloc[0]
    country, tz_str = get_country_timezone(
        first_point["latitude"], first_point["longitude"]
    )
    activity.iana_timezone = tz_str
    activity.start_country = country
    return True
45
35
 
46
- extracted_time_series_path = (
47
- activity_extracted_time_series_dir() / f"{upstream_id}.parquet"
48
- )
49
- time_series = pd.read_parquet(extracted_time_series_path)
50
-
51
- # Skip activities that don't have geo information attached to them. This shouldn't happen, though.
52
- if "latitude" not in time_series.columns:
53
- logger.warning(
54
- f"Activity {upstream_id} doesn't have latitude/longitude information. Ignoring this one."
55
- )
56
- continue
57
-
58
- time_series = _embellish_single_time_series(
59
- time_series,
60
- extracted_metadata.get("start", None),
61
- config.time_diff_threshold_seconds,
62
- )
63
36
 
64
- kind_name = extracted_metadata.get("kind", None)
65
- if kind_name:
66
- # Rename kinds if needed.
67
- if kind_name in config.kind_renames:
68
- kind_name = config.kind_renames[kind_name]
69
- kind = get_or_make_kind(kind_name, config)
70
- else:
71
- kind = None
37
def enrichment_normalize_time(
    activity: "Activity", time_series: pd.DataFrame, config: "Config"
) -> bool:
    """Force the time column into tz-aware UTC and sync start/elapsed time.

    Returns True when the time series or the activity was modified.
    """
    # Routes (as opposed to tracks) don't have time information. We cannot do
    # anything with time here.
    if (
        "time" in time_series.columns
        and pd.isna(time_series["time"]).all()
        and not pd.api.types.is_datetime64_any_dtype(time_series["time"].dtype)
    ):
        time_series["time"] = pd.NaT
        return True

    changed = False
    tz_utc = zoneinfo.ZoneInfo("UTC")

    # Naive timestamps are assumed to already be in UTC.
    if time_series["time"].dt.tz is None:
        time_series["time"] = time_series["time"].dt.tz_localize(tz_utc)
        changed = True

    # NOTE(review): tzinfo.utcoffset(None) can return None for zones with a
    # variable offset — presumably inputs here are fixed-offset or UTC; confirm.
    if time_series["time"].dt.tz.utcoffset(None) != tz_utc.utcoffset(None):
        time_series["time"] = time_series["time"].dt.tz_convert(tz_utc)
        changed = True

    # Catch any remaining non-"datetime64[ns, UTC]" dtype (e.g. other units).
    if not pd.api.types.is_dtype_equal(
        time_series["time"].dtype, "datetime64[ns, UTC]"
    ):
        time_series["time"] = time_series["time"].dt.tz_convert(tz_utc)
        changed = True

    assert pd.api.types.is_dtype_equal(
        time_series["time"].dtype, "datetime64[ns, UTC]"
    ), (time_series["time"].dtype, time_series["time"].iloc[0])

    first_time = time_series["time"].iloc[0]
    last_time = time_series["time"].iloc[-1]

    new_start = some(first_time)
    if new_start != activity.start:
        activity.start = new_start
        changed = True

    new_elapsed_time = some(last_time - first_time)
    if new_elapsed_time != activity.elapsed_time:
        activity.elapsed_time = new_elapsed_time
        changed = True

    return changed
84
+
85
+
86
def enrichment_rename_altitude(
    activity: "Activity", time_series: pd.DataFrame, config: "Config"
) -> bool:
    """Rename a legacy "altitude" column to "elevation" in place."""
    if "altitude" not in time_series.columns:
        return False
    time_series.rename(columns={"altitude": "elevation"}, inplace=True)
    return True
94
+
95
+
96
def enrichment_compute_tile_xy(
    activity: "Activity", time_series: pd.DataFrame, config: "Config"
) -> bool:
    """Attach zoom-0 fractional tile coordinates as columns "x" and "y"."""
    if "x" in time_series.columns:
        return False
    x, y = compute_tile_float(time_series["latitude"], time_series["longitude"], 0)
    time_series["x"] = x
    time_series["y"] = y
    return True
106
+
107
+
108
def enrichment_copernicus_elevation(
    activity: "Activity", time_series: pd.DataFrame, config: "Config"
) -> bool:
    """Look up Copernicus DEM elevation for every point, once per activity."""
    if "copernicus_elevation" in time_series.columns:
        return False
    points = zip(time_series["latitude"], time_series["longitude"])
    time_series["copernicus_elevation"] = [
        get_elevation(lat, lon) for lat, lon in points
    ]
    return True
119
+
120
+
121
def enrichment_elevation_gain(
    activity: "Activity", time_series: pd.DataFrame, config: "Config"
) -> bool:
    """Derive a smoothed cumulative elevation gain and store the total.

    Prefers the recorded "elevation" column and falls back to the DEM-based
    "copernicus_elevation" column. Returns False when no source column exists
    or the gain was already computed.
    """
    has_source = (
        "elevation" in time_series.columns
        or "copernicus_elevation" in time_series.columns
    )
    if not has_source or "elevation_gain_cum" in time_series.columns:
        return False

    if "elevation" in time_series.columns:
        elevation = time_series["elevation"]
    else:
        elevation = time_series["copernicus_elevation"]

    # Smooth point-to-point differences, then zero out implausible jumps
    # (> 30 m between points) and all descents before accumulating.
    elevation_diff = elevation.diff().ewm(span=5, min_periods=5).mean()
    elevation_diff.loc[elevation_diff.abs() > 30] = 0
    elevation_diff.loc[elevation_diff < 0] = 0
    time_series["elevation_gain_cum"] = elevation_diff.cumsum().fillna(0)

    activity.elevation_gain = (
        time_series["elevation_gain_cum"].iloc[-1]
        - time_series["elevation_gain_cum"].iloc[0]
    )
    return True
133
146
 
134
147
 
135
- def _compute_moving_time(time_series: pd.DataFrame) -> datetime.timedelta:
136
- def moving_time(group) -> datetime.timedelta:
137
- selection = group["speed"] > 1.0
138
- time_diff = group["time"].diff().loc[selection]
139
- return time_diff.sum()
140
-
141
- return (
142
- time_series.groupby("segment_id").apply(moving_time, include_groups=False).sum()
143
- )
148
def enrichment_add_calories(
    activity: "Activity", time_series: pd.DataFrame, config: "Config"
) -> bool:
    """Derive total calories from the cumulative calories column, if missing."""
    if activity.calories is not None or "calories" not in time_series.columns:
        return False
    activity.calories = (
        time_series["calories"].iloc[-1] - time_series["calories"].iloc[0]
    )
    return True
144
158
 
145
159
 
146
- def _embellish_single_time_series(
147
- timeseries: pd.DataFrame,
148
- start: Optional[datetime.datetime],
149
- time_diff_threshold_seconds: int,
150
- ) -> pd.DataFrame:
151
- if start is not None and pd.api.types.is_dtype_equal(
152
- timeseries["time"].dtype, "int64"
153
- ):
154
- time = timeseries["time"]
155
- del timeseries["time"]
156
- timeseries["time"] = [
157
- convert_to_datetime_ns(start + datetime.timedelta(seconds=t)) for t in time
158
- ]
159
- timeseries["time"] = convert_to_datetime_ns(timeseries["time"])
160
- assert pd.api.types.is_dtype_equal(timeseries["time"].dtype, "datetime64[ns]"), (
161
- timeseries["time"].dtype,
162
- timeseries["time"].iloc[0],
163
- )
160
def enrichment_distance(
    activity: "Activity", time_series: pd.DataFrame, config: "Config"
) -> bool:
    """Compute per-point distance, speed and segment IDs; update total distance.

    Mutates ``time_series`` in place and sets ``activity.distance_km`` when it
    differs. Returns True when anything was changed.
    """
    changed = False

    distances = get_distance(
        time_series["latitude"].shift(1),
        time_series["longitude"].shift(1),
        time_series["latitude"],
        time_series["longitude"],
    ).fillna(0.0)

    # Points separated by a pause longer than the threshold start a new
    # segment and must not contribute to the distance.
    if config.time_diff_threshold_seconds:
        time_diff = (
            time_series["time"] - time_series["time"].shift(1)
        ).dt.total_seconds()
        jump_indices = time_diff >= config.time_diff_threshold_seconds
        distances.loc[jump_indices] = 0.0

    if "distance_km" not in time_series.columns:
        time_series["distance_km"] = pd.Series(np.cumsum(distances)) / 1000
        changed = True

    if "speed" not in time_series.columns:
        time_series["speed"] = (
            time_series["distance_km"].diff()
            / (time_series["time"].diff().dt.total_seconds() + 1e-3)
            * 3600
        )
        changed = True

    # Sudden implausible speed spikes are GPS glitches; drop those rows.
    # BUG FIX: the previous `time_series.replace(time_series.loc[~potential_jumps])`
    # was a no-op — DataFrame.replace is not in-place by default and does not
    # remove rows at all, so the glitch filter never took effect. Dropping in
    # place preserves the in-place-mutation contract of the enrichment steps.
    potential_jumps = (time_series["speed"] > 40) & (time_series["speed"].diff() > 10)
    if np.any(potential_jumps):
        time_series.drop(index=time_series.index[potential_jumps], inplace=True)
        changed = True

    if "segment_id" not in time_series.columns:
        if config.time_diff_threshold_seconds:
            # `jump_indices` keeps the pre-drop index; assignment aligns on index.
            time_series["segment_id"] = np.cumsum(jump_indices)
        else:
            time_series["segment_id"] = 0
        changed = True

    new_distance_km = (
        time_series["distance_km"].iloc[-1] - time_series["distance_km"].iloc[0]
    )
    if new_distance_km != activity.distance_km:
        activity.distance_km = new_distance_km
        changed = True

    return changed
211
+
212
+
213
+ def enrichment_moving_time(
214
+ activity: Activity, time_series: pd.DataFrame, config: Config
215
+ ) -> bool:
216
+ def moving_time(group) -> datetime.timedelta:
217
+ selection = group["speed"] > 1.0
218
+ time_diff = group["time"].diff().loc[selection]
219
+ return time_diff.sum()
220
+
221
+ new_moving_time = (
222
+ time_series.groupby("segment_id").apply(moving_time, include_groups=False).sum()
223
+ )
224
+ if new_moving_time != activity.moving_time:
225
+ activity.moving_time = new_moving_time
226
+ return True
227
+ else:
228
+ return False
229
+
230
+
231
def enrichment_copy_latlon(
    activity: "Activity", time_series: pd.DataFrame, config: "Config"
) -> bool:
    """Copy first/last coordinates onto the activity, only when unset."""
    if activity.start_latitude is not None:
        return False
    first = time_series.iloc[0]
    last = time_series.iloc[-1]
    activity.start_latitude = first["latitude"]
    activity.end_latitude = last["latitude"]
    activity.start_longitude = first["longitude"]
    activity.end_longitude = last["longitude"]
    return True
242
+
243
+
244
+ enrichments: list[Callable[[Activity, pd.DataFrame, Config], bool]] = [
245
+ enrichment_set_timezone,
246
+ enrichment_normalize_time,
247
+ enrichment_rename_altitude,
248
+ enrichment_compute_tile_xy,
249
+ enrichment_copernicus_elevation,
250
+ enrichment_elevation_gain,
251
+ enrichment_add_calories,
252
+ enrichment_distance,
253
+ enrichment_moving_time,
254
+ enrichment_copy_latlon,
255
+ ]
256
+
257
+
258
def apply_enrichments(
    activity: "Activity", time_series: pd.DataFrame, config: "Config"
) -> bool:
    """Run every registered enrichment step; report whether any changed data.

    Every step runs unconditionally (no short-circuiting), mirroring the
    accumulate-with-``|=`` behavior.
    """
    results = [step(activity, time_series, config) for step in enrichments]
    return any(results)
202
265
 
203
- if "altitude" in timeseries.columns:
204
- timeseries.rename(columns={"altitude": "elevation"}, inplace=True)
205
- if "elevation" in timeseries.columns:
206
- elevation_diff = timeseries["elevation"].diff()
207
- elevation_diff = elevation_diff.ewm(span=5, min_periods=5).mean()
208
- elevation_diff.loc[elevation_diff.abs() > 30] = 0
209
- elevation_diff.loc[elevation_diff < 0] = 0
210
- timeseries["elevation_gain_cum"] = elevation_diff.cumsum()
211
266
 
212
- return timeseries
267
def update_and_commit(
    activity: "Activity", time_series: pd.DataFrame, config: "Config"
) -> None:
    """Enrich the activity and persist it only if something actually changed."""
    if apply_enrichments(activity, time_series, config):
        activity.replace_time_series(time_series)
        DB.session.add(activity)
        DB.session.commit()
@@ -3,10 +3,18 @@ import functools
3
3
  import pathlib
4
4
  import typing
5
5
 
6
+ import appdirs
7
+
6
8
  """
7
9
  Paths within the playground and cache.
8
10
  """
9
11
 
12
+ APPDIRS = appdirs.AppDirs(appname="Geo Activity Playground", appauthor="Martin Ueding")
13
+
14
+ USER_CACHE_DIR = pathlib.Path(APPDIRS.user_cache_dir)
15
+ USER_CONFIG_DIR = pathlib.Path(APPDIRS.user_config_dir)
16
+ USER_DATA_DIR = pathlib.Path(APPDIRS.user_data_dir)
17
+
10
18
 
11
19
  def dir_wrapper(path: pathlib.Path) -> typing.Callable[[], pathlib.Path]:
12
20
  def wrapper() -> pathlib.Path:
@@ -0,0 +1,36 @@
1
+ import datetime
2
+ import tempfile
3
+ import zoneinfo
4
+
5
+ import altair as alt
6
+ import pandas as pd
7
+
8
+
9
+ def test_dataframe_timezone() -> None:
10
+ df = pd.DataFrame(
11
+ {
12
+ "time": [
13
+ datetime.datetime(
14
+ 2025, 1, 1, 1, 1, 1, tzinfo=zoneinfo.ZoneInfo("Europe/Berlin")
15
+ )
16
+ ]
17
+ }
18
+ )
19
+
20
+ with tempfile.TemporaryFile() as f:
21
+ df.to_parquet(f)
22
+
23
+
24
+ def test_altair_timezone() -> None:
25
+ df = pd.DataFrame(
26
+ {
27
+ "time": [
28
+ datetime.datetime(
29
+ 2025, 1, 1, 1, 1, 1, tzinfo=zoneinfo.ZoneInfo("Europe/Berlin")
30
+ )
31
+ ]
32
+ }
33
+ )
34
+
35
+ chart = alt.Chart(df).mark_tick().encode(alt.X("time"))
36
+ chart.to_json(format="vega")
@@ -0,0 +1,7 @@
1
+ from .time_conversion import get_country_timezone
2
+
3
+
4
+ def test_time_zone_from_location() -> None:
5
+ country, iana_timezone = get_country_timezone(50, 7)
6
+ assert country == "Germany"
7
+ assert iana_timezone == "Europe/Berlin"
@@ -0,0 +1,93 @@
1
+ import datetime
2
+ import pathlib
3
+ import zoneinfo
4
+
5
+ from ..importers.activity_parsers import read_activity
6
+ from .time_conversion import sanitize_datetime
7
+
8
+
9
+ def test_time_zone_from_string() -> None:
10
+ """
11
+ Understanding test for zoneinfo.
12
+
13
+ A user from Helsinki has recorded an activity. His device recorded 2025-06-21 14:41:06 in UTC. The local time was 17:41:06. How would we represent that properly? We need to import the time as UTC and then convert into the Helsinki time zone. At the end we drop the time zone information to make it a “naive” time but with the local time zone.
14
+ """
15
+ tz_helsinki = zoneinfo.ZoneInfo("Europe/Helsinki")
16
+ tz_utc = zoneinfo.ZoneInfo("UTC")
17
+ dt_utc = datetime.datetime(2025, 6, 21, 14, 41, 6, tzinfo=tz_utc)
18
+ dt_helsinki = dt_utc.astimezone(tz_helsinki)
19
+ assert dt_helsinki == datetime.datetime(2025, 6, 21, 17, 41, 6, tzinfo=tz_helsinki)
20
+ assert dt_helsinki.replace(tzinfo=None) == datetime.datetime(2025, 6, 21, 17, 41, 6)
21
+
22
+
23
+ def test_utc_to_helsinki() -> None:
24
+ assert sanitize_datetime(
25
+ datetime.datetime(2025, 6, 21, 14, 41, 6, tzinfo=zoneinfo.ZoneInfo("UTC")),
26
+ fallback_from="UTC",
27
+ fallback_to="Europe/Helsinki",
28
+ ) == datetime.datetime(
29
+ 2025, 6, 21, 17, 41, 6, tzinfo=zoneinfo.ZoneInfo("Europe/Helsinki")
30
+ )
31
+
32
+
33
+ def test_0200_to_helsinki() -> None:
34
+ assert sanitize_datetime(
35
+ datetime.datetime(
36
+ 2025,
37
+ 6,
38
+ 21,
39
+ 16,
40
+ 41,
41
+ 6,
42
+ tzinfo=datetime.timezone(datetime.timedelta(hours=2)),
43
+ ),
44
+ fallback_from="UTC",
45
+ fallback_to="Europe/Helsinki",
46
+ ) == datetime.datetime(
47
+ 2025, 6, 21, 17, 41, 6, tzinfo=zoneinfo.ZoneInfo("Europe/Helsinki")
48
+ )
49
+
50
+
51
+ def test_naive_utc_to_helsinki() -> None:
52
+ assert sanitize_datetime(
53
+ datetime.datetime(2025, 6, 21, 14, 41, 6),
54
+ fallback_from="UTC",
55
+ fallback_to="Europe/Helsinki",
56
+ ) == datetime.datetime(
57
+ 2025, 6, 21, 17, 41, 6, tzinfo=zoneinfo.ZoneInfo("Europe/Helsinki")
58
+ )
59
+
60
+
61
+ def test_naive_helsinki_to_helsinki() -> None:
62
+ assert sanitize_datetime(
63
+ datetime.datetime(2025, 6, 21, 17, 41, 6),
64
+ fallback_from="Europe/Helsinki",
65
+ fallback_to="Europe/Helsinki",
66
+ ) == datetime.datetime(
67
+ 2025, 6, 21, 17, 41, 6, tzinfo=zoneinfo.ZoneInfo("Europe/Helsinki")
68
+ )
69
+
70
+
71
+ def test_time_zone_from_abvio() -> None:
72
+ """
73
+ Apply local time zone from Abvio generated files.
74
+
75
+ As reported in https://github.com/martin-ueding/geo-activity-playground/issues/303, the GPX files from Abvio contain the time data as UTC, but there is a field `abvio:startTimeZone` which contains it.
76
+
77
+ ```
78
+ <desc>Cyclemeter Row 21. Jun 2025 at 17.41.06</desc>
79
+ <time>2025-06-21T15:10:41Z</time>
80
+ <trkpt lat="…" lon="…"><ele>137.7</ele><time>2025-06-21T14:41:06Z</time></trkpt>
81
+ ...
82
+ <abvio:startTime>2025-06-21 14:41:06.537</abvio:startTime>
83
+ <abvio:startTimeZone>Europe/Helsinki</abvio:startTimeZone>
84
+ ```
85
+ """
86
+ path = pathlib.Path(
87
+ "/home/mu/Dokumente/Geo Activity Playground/Test-Suite/b1b9ec9b-016a-4223-9218-12b97d7019f2.gpx"
88
+ )
89
+ meta, ts = read_activity(path)
90
+
91
+ assert ts["time"].iloc[0] == datetime.datetime(
92
+ 2025, 6, 21, 17, 41, 6, tzinfo=zoneinfo.ZoneInfo("Europe/Helsinki")
93
+ )
@@ -0,0 +1,44 @@
1
+ import datetime
2
+ import zoneinfo
3
+
4
+ import pytest
5
+ import sqlalchemy as sa
6
+ from sqlalchemy import Column
7
+ from sqlalchemy import ForeignKey
8
+ from sqlalchemy import String
9
+ from sqlalchemy import Table
10
+ from sqlalchemy.orm import DeclarativeBase
11
+ from sqlalchemy.orm import Mapped
12
+ from sqlalchemy.orm import mapped_column
13
+ from sqlalchemy.orm import relationship
14
+ from sqlalchemy.orm import Session
15
+
16
+
17
+ class MyTestBase(DeclarativeBase):
18
+ pass
19
+
20
+
21
+ class MyTestEvent(MyTestBase):
22
+ __tablename__ = "events"
23
+
24
+ # Housekeeping data:
25
+ id: Mapped[int] = mapped_column(primary_key=True)
26
+ time: Mapped[datetime.datetime] = mapped_column(
27
+ sa.DateTime(timezone=True), nullable=True
28
+ )
29
+
30
+
31
+ @pytest.mark.xfail(reason="SQLite cannot store time zones.")
32
+ def test_timezone_sqlalchemy() -> None:
33
+ engine = sa.create_engine("sqlite://", echo=False)
34
+ MyTestBase.metadata.create_all(engine)
35
+
36
+ dt_berlin = datetime.datetime(
37
+ 2025, 7, 1, 14, 0, 0, tzinfo=zoneinfo.ZoneInfo("Europe/Berlin")
38
+ )
39
+
40
+ with Session(engine) as session:
41
+ event = MyTestEvent(time=dt_berlin)
42
+ session.add(event)
43
+ session.commit()
44
+ assert event.time == dt_berlin
@@ -4,12 +4,15 @@ from typing import Iterator
4
4
  from typing import Optional
5
5
 
6
6
  import numpy as np
7
+ import pandas as pd
7
8
 
8
9
 
9
10
  logger = logging.getLogger(__name__)
10
11
 
11
12
 
12
- def compute_tile_float(lat: float, lon: float, zoom: int) -> tuple[float, float]:
13
+ def compute_tile_float(
14
+ lat: float | pd.Series, lon: float | pd.Series, zoom: int
15
+ ) -> tuple[float, float]:
13
16
  x = np.radians(lon)
14
17
  y = np.arcsinh(np.tan(np.radians(lat)))
15
18
  x = (1 + x / np.pi) / 2