geo-activity-playground 1.2.0__py3-none-any.whl → 1.3.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (39)
  1. geo_activity_playground/alembic/versions/85fe0348e8a2_add_time_series_uuid_field.py +28 -0
  2. geo_activity_playground/alembic/versions/f2f50843be2d_make_all_fields_in_activity_nullable.py +34 -0
  3. geo_activity_playground/core/coordinates.py +12 -1
  4. geo_activity_playground/core/copernicus_dem.py +95 -0
  5. geo_activity_playground/core/datamodel.py +43 -16
  6. geo_activity_playground/core/enrichment.py +229 -164
  7. geo_activity_playground/core/paths.py +8 -0
  8. geo_activity_playground/core/test_pandas_timezone.py +36 -0
  9. geo_activity_playground/core/test_time_zone_from_location.py +7 -0
  10. geo_activity_playground/core/test_time_zone_import.py +93 -0
  11. geo_activity_playground/core/test_timezone_sqlalchemy.py +44 -0
  12. geo_activity_playground/core/tiles.py +4 -1
  13. geo_activity_playground/core/time_conversion.py +42 -14
  14. geo_activity_playground/explorer/tile_visits.py +7 -4
  15. geo_activity_playground/importers/activity_parsers.py +31 -23
  16. geo_activity_playground/importers/directory.py +69 -108
  17. geo_activity_playground/importers/strava_api.py +55 -36
  18. geo_activity_playground/importers/strava_checkout.py +32 -57
  19. geo_activity_playground/webui/app.py +46 -2
  20. geo_activity_playground/webui/blueprints/activity_blueprint.py +13 -11
  21. geo_activity_playground/webui/blueprints/entry_views.py +1 -1
  22. geo_activity_playground/webui/blueprints/explorer_blueprint.py +1 -7
  23. geo_activity_playground/webui/blueprints/heatmap_blueprint.py +2 -2
  24. geo_activity_playground/webui/blueprints/settings_blueprint.py +3 -14
  25. geo_activity_playground/webui/blueprints/summary_blueprint.py +6 -6
  26. geo_activity_playground/webui/blueprints/time_zone_fixer_blueprint.py +69 -0
  27. geo_activity_playground/webui/blueprints/upload_blueprint.py +3 -16
  28. geo_activity_playground/webui/columns.py +9 -1
  29. geo_activity_playground/webui/templates/activity/show.html.j2 +5 -1
  30. geo_activity_playground/webui/templates/hall_of_fame/index.html.j2 +1 -1
  31. geo_activity_playground/webui/templates/home.html.j2 +3 -2
  32. geo_activity_playground/webui/templates/page.html.j2 +2 -0
  33. geo_activity_playground/webui/templates/time_zone_fixer/index.html.j2 +31 -0
  34. {geo_activity_playground-1.2.0.dist-info → geo_activity_playground-1.3.1.dist-info}/METADATA +8 -3
  35. {geo_activity_playground-1.2.0.dist-info → geo_activity_playground-1.3.1.dist-info}/RECORD +38 -30
  36. geo_activity_playground/core/test_time_conversion.py +0 -37
  37. {geo_activity_playground-1.2.0.dist-info → geo_activity_playground-1.3.1.dist-info}/LICENSE +0 -0
  38. {geo_activity_playground-1.2.0.dist-info → geo_activity_playground-1.3.1.dist-info}/WHEEL +0 -0
  39. {geo_activity_playground-1.2.0.dist-info → geo_activity_playground-1.3.1.dist-info}/entry_points.txt +0 -0
@@ -1,212 +1,277 @@
1
1
  import datetime
2
2
  import logging
3
- import pickle
4
- from typing import Optional
3
+ import uuid
4
+ import zoneinfo
5
+ from typing import Callable
5
6
 
6
7
  import numpy as np
7
8
  import pandas as pd
8
- import sqlalchemy
9
- from tqdm import tqdm
10
9
 
11
10
  from .config import Config
12
11
  from .coordinates import get_distance
12
+ from .copernicus_dem import get_elevation
13
13
  from .datamodel import Activity
14
- from .datamodel import ActivityMeta
15
14
  from .datamodel import DB
16
- from .datamodel import get_or_make_equipment
17
- from .datamodel import get_or_make_kind
18
15
  from .missing_values import some
19
- from .paths import activity_extracted_meta_dir
20
- from .paths import activity_extracted_time_series_dir
21
- from .paths import TIME_SERIES_DIR
22
16
  from .tiles import compute_tile_float
23
- from .time_conversion import convert_to_datetime_ns
17
+ from .time_conversion import get_country_timezone
24
18
 
25
19
  logger = logging.getLogger(__name__)
26
20
 
27
21
 
28
- def populate_database_from_extracted(config: Config) -> None:
29
- available_ids = {
30
- int(path.stem) for path in activity_extracted_meta_dir().glob("*.pickle")
31
- }
32
- present_ids = {
33
- int(elem)
34
- for elem in DB.session.scalars(sqlalchemy.select(Activity.upstream_id)).all()
35
- if elem
36
- }
37
- new_ids = available_ids - present_ids
38
-
39
- for upstream_id in tqdm(new_ids, desc="Importing new activities into database"):
40
- extracted_metadata_path = (
41
- activity_extracted_meta_dir() / f"{upstream_id}.pickle"
42
- )
43
- with open(extracted_metadata_path, "rb") as f:
44
- extracted_metadata: ActivityMeta = pickle.load(f)
22
+ def enrichment_set_timezone(
23
+ activity: Activity, time_series: pd.DataFrame, config: Config
24
+ ) -> bool:
25
+ assert (
26
+ len(time_series) > 0
27
+ ), f"You cannot import an activity without points. {activity=}"
28
+ latitude, longitude = time_series[["latitude", "longitude"]].iloc[0].to_list()
29
+ if activity.iana_timezone is None or activity.start_country is None:
30
+ country, tz_str = get_country_timezone(latitude, longitude)
31
+ activity.iana_timezone = tz_str
32
+ activity.start_country = country
33
+ return True
34
+ else:
35
+ return False
45
36
 
46
- extracted_time_series_path = (
47
- activity_extracted_time_series_dir() / f"{upstream_id}.parquet"
48
- )
49
- time_series = pd.read_parquet(extracted_time_series_path)
50
-
51
- # Skip activities that don't have geo information attached to them. This shouldn't happen, though.
52
- if "latitude" not in time_series.columns:
53
- logger.warning(
54
- f"Activity {upstream_id} doesn't have latitude/longitude information. Ignoring this one."
55
- )
56
- continue
57
-
58
- time_series = _embellish_single_time_series(
59
- time_series,
60
- extracted_metadata.get("start", None),
61
- config.time_diff_threshold_seconds,
62
- )
63
37
 
64
- kind_name = extracted_metadata.get("kind", None)
65
- if kind_name:
66
- # Rename kinds if needed.
67
- if kind_name in config.kind_renames:
68
- kind_name = config.kind_renames[kind_name]
69
- kind = get_or_make_kind(kind_name, config)
70
- else:
71
- kind = None
38
+ def enrichment_normalize_time(
39
+ activity: Activity, time_series: pd.DataFrame, config: Config
40
+ ) -> bool:
41
+ # Routes (as opposed to tracks) don't have time information. We cannot do anything with time here.
42
+ if (
43
+ "time" in time_series.columns
44
+ and pd.isna(time_series["time"]).all()
45
+ and not pd.api.types.is_datetime64_any_dtype(time_series["time"].dtype)
46
+ ):
47
+ time_series["time"] = pd.NaT
48
+ return True
72
49
 
73
- equipment_name = extracted_metadata.get("equipment", None)
74
- if equipment_name:
75
- equipment = get_or_make_equipment(equipment_name, config)
76
- elif kind:
77
- equipment = kind.default_equipment
78
- else:
79
- equipment = None
80
-
81
- activity = Activity(
82
- name=extracted_metadata.get("name", "Name Placeholder"),
83
- distance_km=0,
84
- equipment=equipment,
85
- kind=kind,
86
- calories=some(extracted_metadata.get("calories", None)),
87
- elevation_gain=some(extracted_metadata.get("elevation_gain", None)),
88
- steps=some(extracted_metadata.get("steps", None)),
89
- path=extracted_metadata.get("path", None),
90
- upstream_id=upstream_id,
91
- )
50
+ changed = False
51
+ tz_utc = zoneinfo.ZoneInfo("UTC")
52
+ # If the time is naive, assume that it is UTC.
53
+ if time_series["time"].dt.tz is None:
54
+ time_series["time"] = time_series["time"].dt.tz_localize(tz_utc)
55
+ changed = True
56
+
57
+ if time_series["time"].dt.tz.utcoffset(None) != tz_utc.utcoffset(None):
58
+ time_series["time"] = time_series["time"].dt.tz_convert(tz_utc)
59
+ changed = True
92
60
 
93
- update_via_time_series(activity, time_series)
61
+ if not pd.api.types.is_dtype_equal(
62
+ time_series["time"].dtype, "datetime64[ns, UTC]"
63
+ ):
64
+ time_series["time"] = time_series["time"].dt.tz_convert(tz_utc)
65
+ changed = True
94
66
 
95
- DB.session.add(activity)
96
- try:
97
- DB.session.commit()
98
- except sqlalchemy.exc.StatementError:
99
- logger.error(
100
- f"Could not insert the following activity into the database: {vars(activity)=}"
101
- )
102
- raise
67
+ assert pd.api.types.is_dtype_equal(
68
+ time_series["time"].dtype, "datetime64[ns, UTC]"
69
+ ), (
70
+ time_series["time"].dtype,
71
+ time_series["time"].iloc[0],
72
+ )
103
73
 
104
- enriched_time_series_path = TIME_SERIES_DIR() / f"{activity.id}.parquet"
105
- time_series.to_parquet(enriched_time_series_path)
74
+ new_start = some(time_series["time"].iloc[0])
75
+ if new_start != activity.start:
76
+ activity.start = new_start
77
+ changed = True
106
78
 
79
+ new_elapsed_time = some(time_series["time"].iloc[-1] - time_series["time"].iloc[0])
80
+ if new_elapsed_time != activity.elapsed_time:
81
+ activity.elapsed_time = new_elapsed_time
82
+ changed = True
107
83
 
108
- def update_via_time_series(
109
- activity: Activity, time_series: pd.DataFrame
110
- ) -> ActivityMeta:
111
- activity.start = some(time_series["time"].iloc[0])
112
- activity.elapsed_time = some(
113
- time_series["time"].iloc[-1] - time_series["time"].iloc[0]
114
- )
115
- activity.distance_km = (
116
- time_series["distance_km"].iloc[-1] - time_series["distance_km"].iloc[0]
117
- )
118
- if "calories" in time_series.columns:
119
- activity.calories = (
120
- time_series["calories"].iloc[-1] - time_series["calories"].iloc[0]
84
+ return changed
85
+
86
+
87
+ def enrichment_rename_altitude(
88
+ activity: Activity, time_series: pd.DataFrame, config: Config
89
+ ) -> bool:
90
+ if "altitude" in time_series.columns:
91
+ time_series.rename(columns={"altitude": "elevation"}, inplace=True)
92
+ return True
93
+ else:
94
+ return False
95
+
96
+
97
+ def enrichment_compute_tile_xy(
98
+ activity: Activity, time_series: pd.DataFrame, config: Config
99
+ ) -> bool:
100
+ if "x" not in time_series.columns:
101
+ x, y = compute_tile_float(time_series["latitude"], time_series["longitude"], 0)
102
+ time_series["x"] = x
103
+ time_series["y"] = y
104
+ return True
105
+ else:
106
+ return False
107
+
108
+
109
+ def enrichment_copernicus_elevation(
110
+ activity: Activity, time_series: pd.DataFrame, config: Config
111
+ ) -> bool:
112
+ if "copernicus_elevation" not in time_series.columns:
113
+ time_series["copernicus_elevation"] = [
114
+ get_elevation(lat, lon)
115
+ for lat, lon in zip(time_series["latitude"], time_series["longitude"])
116
+ ]
117
+ return True
118
+ else:
119
+ return False
120
+
121
+
122
+ def enrichment_elevation_gain(
123
+ activity: Activity, time_series: pd.DataFrame, config: Config
124
+ ) -> bool:
125
+ if (
126
+ "elevation" in time_series.columns
127
+ or "copernicus_elevation" in time_series.columns
128
+ ) and "elevation_gain_cum" not in time_series.columns:
129
+ elevation = (
130
+ time_series["elevation"]
131
+ if "elevation" in time_series.columns
132
+ else time_series["copernicus_elevation"]
121
133
  )
122
- activity.moving_time = _compute_moving_time(time_series)
123
-
124
- activity.start_latitude = time_series["latitude"].iloc[0]
125
- activity.end_latitude = time_series["latitude"].iloc[-1]
126
- activity.start_longitude = time_series["longitude"].iloc[0]
127
- activity.end_longitude = time_series["longitude"].iloc[-1]
128
- if "elevation_gain_cum" in time_series.columns:
129
- elevation_gain_cum = time_series["elevation_gain_cum"].fillna(0)
134
+ elevation_diff = elevation.diff()
135
+ elevation_diff = elevation_diff.ewm(span=5, min_periods=5).mean()
136
+ elevation_diff.loc[elevation_diff.abs() > 30] = 0
137
+ elevation_diff.loc[elevation_diff < 0] = 0
138
+ time_series["elevation_gain_cum"] = elevation_diff.cumsum().fillna(0)
139
+
130
140
  activity.elevation_gain = (
131
- elevation_gain_cum.iloc[-1] - elevation_gain_cum.iloc[0]
141
+ time_series["elevation_gain_cum"].iloc[-1]
142
+ - time_series["elevation_gain_cum"].iloc[0]
132
143
  )
144
+ return True
145
+ else:
146
+ return False
133
147
 
134
148
 
135
- def _compute_moving_time(time_series: pd.DataFrame) -> datetime.timedelta:
136
- def moving_time(group) -> datetime.timedelta:
137
- selection = group["speed"] > 1.0
138
- time_diff = group["time"].diff().loc[selection]
139
- return time_diff.sum()
140
-
141
- return (
142
- time_series.groupby("segment_id").apply(moving_time, include_groups=False).sum()
143
- )
149
+ def enrichment_add_calories(
150
+ activity: Activity, time_series: pd.DataFrame, config: Config
151
+ ) -> bool:
152
+ if activity.calories is None and "calories" in time_series.columns:
153
+ activity.calories = (
154
+ time_series["calories"].iloc[-1] - time_series["calories"].iloc[0]
155
+ )
156
+ return True
157
+ else:
158
+ return False
144
159
 
145
160
 
146
- def _embellish_single_time_series(
147
- timeseries: pd.DataFrame,
148
- start: Optional[datetime.datetime],
149
- time_diff_threshold_seconds: int,
150
- ) -> pd.DataFrame:
151
- if start is not None and pd.api.types.is_dtype_equal(
152
- timeseries["time"].dtype, "int64"
153
- ):
154
- time = timeseries["time"]
155
- del timeseries["time"]
156
- timeseries["time"] = [
157
- convert_to_datetime_ns(start + datetime.timedelta(seconds=t)) for t in time
158
- ]
159
- timeseries["time"] = convert_to_datetime_ns(timeseries["time"])
160
- assert pd.api.types.is_dtype_equal(timeseries["time"].dtype, "datetime64[ns]"), (
161
- timeseries["time"].dtype,
162
- timeseries["time"].iloc[0],
163
- )
161
+ def enrichment_distance(
162
+ activity: Activity, time_series: pd.DataFrame, config: Config
163
+ ) -> bool:
164
+ changed = False
164
165
 
165
166
  distances = get_distance(
166
- timeseries["latitude"].shift(1),
167
- timeseries["longitude"].shift(1),
168
- timeseries["latitude"],
169
- timeseries["longitude"],
167
+ time_series["latitude"].shift(1),
168
+ time_series["longitude"].shift(1),
169
+ time_series["latitude"],
170
+ time_series["longitude"],
170
171
  ).fillna(0.0)
171
- if time_diff_threshold_seconds:
172
+
173
+ if config.time_diff_threshold_seconds:
172
174
  time_diff = (
173
- timeseries["time"] - timeseries["time"].shift(1)
175
+ time_series["time"] - time_series["time"].shift(1)
174
176
  ).dt.total_seconds()
175
- jump_indices = time_diff >= time_diff_threshold_seconds
177
+ jump_indices = time_diff >= config.time_diff_threshold_seconds
176
178
  distances.loc[jump_indices] = 0.0
177
179
 
178
- if "distance_km" not in timeseries.columns:
179
- timeseries["distance_km"] = pd.Series(np.cumsum(distances)) / 1000
180
+ if "distance_km" not in time_series.columns:
181
+ time_series["distance_km"] = pd.Series(np.cumsum(distances)) / 1000
182
+ changed = True
180
183
 
181
- if "speed" not in timeseries.columns:
182
- timeseries["speed"] = (
183
- timeseries["distance_km"].diff()
184
- / (timeseries["time"].diff().dt.total_seconds() + 1e-3)
184
+ if "speed" not in time_series.columns:
185
+ time_series["speed"] = (
186
+ time_series["distance_km"].diff()
187
+ / (time_series["time"].diff().dt.total_seconds() + 1e-3)
185
188
  * 3600
186
189
  )
190
+ changed = True
187
191
 
188
- potential_jumps = (timeseries["speed"] > 40) & (timeseries["speed"].diff() > 10)
192
+ potential_jumps = (time_series["speed"] > 40) & (time_series["speed"].diff() > 10)
189
193
  if np.any(potential_jumps):
190
- timeseries = timeseries.loc[~potential_jumps].copy()
194
+ time_series.replace(time_series.loc[~potential_jumps])
195
+ changed = True
191
196
 
192
- if "segment_id" not in timeseries.columns:
193
- if time_diff_threshold_seconds:
194
- timeseries["segment_id"] = np.cumsum(jump_indices)
197
+ if "segment_id" not in time_series.columns:
198
+ if config.time_diff_threshold_seconds:
199
+ time_series["segment_id"] = np.cumsum(jump_indices)
195
200
  else:
196
- timeseries["segment_id"] = 0
201
+ time_series["segment_id"] = 0
202
+ changed = True
203
+
204
+ new_distance_km = (
205
+ time_series["distance_km"].iloc[-1] - time_series["distance_km"].iloc[0]
206
+ )
207
+ if new_distance_km != activity.distance_km:
208
+ activity.distance_km = new_distance_km
209
+ changed = True
197
210
 
198
- if "x" not in timeseries.columns:
199
- x, y = compute_tile_float(timeseries["latitude"], timeseries["longitude"], 0)
200
- timeseries["x"] = x
201
- timeseries["y"] = y
211
+ return changed
212
+
213
+
214
+ def enrichment_moving_time(
215
+ activity: Activity, time_series: pd.DataFrame, config: Config
216
+ ) -> bool:
217
+ def moving_time(group) -> datetime.timedelta:
218
+ selection = group["speed"] > 1.0
219
+ time_diff = group["time"].diff().loc[selection]
220
+ return time_diff.sum()
221
+
222
+ new_moving_time = (
223
+ time_series.groupby("segment_id").apply(moving_time, include_groups=False).sum()
224
+ )
225
+ if new_moving_time != activity.moving_time:
226
+ activity.moving_time = new_moving_time
227
+ return True
228
+ else:
229
+ return False
230
+
231
+
232
+ def enrichment_copy_latlon(
233
+ activity: Activity, time_series: pd.DataFrame, config: Config
234
+ ) -> bool:
235
+ if activity.start_latitude is None:
236
+ activity.start_latitude = time_series["latitude"].iloc[0]
237
+ activity.end_latitude = time_series["latitude"].iloc[-1]
238
+ activity.start_longitude = time_series["longitude"].iloc[0]
239
+ activity.end_longitude = time_series["longitude"].iloc[-1]
240
+ return True
241
+ else:
242
+ return False
243
+
244
+
245
+ enrichments: list[Callable[[Activity, pd.DataFrame, Config], bool]] = [
246
+ enrichment_set_timezone,
247
+ enrichment_normalize_time,
248
+ enrichment_rename_altitude,
249
+ enrichment_compute_tile_xy,
250
+ enrichment_copernicus_elevation,
251
+ enrichment_elevation_gain,
252
+ enrichment_add_calories,
253
+ enrichment_distance,
254
+ enrichment_moving_time,
255
+ enrichment_copy_latlon,
256
+ ]
257
+
258
+
259
+ def apply_enrichments(
260
+ activity: Activity, time_series: pd.DataFrame, config: Config
261
+ ) -> bool:
262
+ was_changed = False
263
+ for enrichment in enrichments:
264
+ was_changed |= enrichment(activity, time_series, config)
265
+ return was_changed
202
266
 
203
- if "altitude" in timeseries.columns:
204
- timeseries.rename(columns={"altitude": "elevation"}, inplace=True)
205
- if "elevation" in timeseries.columns:
206
- elevation_diff = timeseries["elevation"].diff()
207
- elevation_diff = elevation_diff.ewm(span=5, min_periods=5).mean()
208
- elevation_diff.loc[elevation_diff.abs() > 30] = 0
209
- elevation_diff.loc[elevation_diff < 0] = 0
210
- timeseries["elevation_gain_cum"] = elevation_diff.cumsum()
211
267
 
212
- return timeseries
268
+ def update_and_commit(
269
+ activity: Activity, time_series: pd.DataFrame, config: Config
270
+ ) -> None:
271
+ changed = apply_enrichments(activity, time_series, config)
272
+ if not activity.time_series_uuid:
273
+ activity.time_series_uuid = str(uuid.uuid4())
274
+ if changed:
275
+ activity.replace_time_series(time_series)
276
+ DB.session.add(activity)
277
+ DB.session.commit()
@@ -3,10 +3,18 @@ import functools
3
3
  import pathlib
4
4
  import typing
5
5
 
6
+ import appdirs
7
+
6
8
  """
7
9
  Paths within the playground and cache.
8
10
  """
9
11
 
12
+ APPDIRS = appdirs.AppDirs(appname="Geo Activity Playground", appauthor="Martin Ueding")
13
+
14
+ USER_CACHE_DIR = pathlib.Path(APPDIRS.user_cache_dir)
15
+ USER_CONFIG_DIR = pathlib.Path(APPDIRS.user_config_dir)
16
+ USER_DATA_DIR = pathlib.Path(APPDIRS.user_data_dir)
17
+
10
18
 
11
19
  def dir_wrapper(path: pathlib.Path) -> typing.Callable[[], pathlib.Path]:
12
20
  def wrapper() -> pathlib.Path:
@@ -0,0 +1,36 @@
1
+ import datetime
2
+ import tempfile
3
+ import zoneinfo
4
+
5
+ import altair as alt
6
+ import pandas as pd
7
+
8
+
9
+ def test_dataframe_timezone() -> None:
10
+ df = pd.DataFrame(
11
+ {
12
+ "time": [
13
+ datetime.datetime(
14
+ 2025, 1, 1, 1, 1, 1, tzinfo=zoneinfo.ZoneInfo("Europe/Berlin")
15
+ )
16
+ ]
17
+ }
18
+ )
19
+
20
+ with tempfile.TemporaryFile() as f:
21
+ df.to_parquet(f)
22
+
23
+
24
+ def test_altair_timezone() -> None:
25
+ df = pd.DataFrame(
26
+ {
27
+ "time": [
28
+ datetime.datetime(
29
+ 2025, 1, 1, 1, 1, 1, tzinfo=zoneinfo.ZoneInfo("Europe/Berlin")
30
+ )
31
+ ]
32
+ }
33
+ )
34
+
35
+ chart = alt.Chart(df).mark_tick().encode(alt.X("time"))
36
+ chart.to_json(format="vega")
@@ -0,0 +1,7 @@
1
+ from .time_conversion import get_country_timezone
2
+
3
+
4
+ def test_time_zone_from_location() -> None:
5
+ country, iana_timezone = get_country_timezone(50, 7)
6
+ assert country == "Germany"
7
+ assert iana_timezone == "Europe/Berlin"
@@ -0,0 +1,93 @@
1
+ import datetime
2
+ import pathlib
3
+ import zoneinfo
4
+
5
+ from ..importers.activity_parsers import read_activity
6
+ from .time_conversion import sanitize_datetime
7
+
8
+
9
+ def test_time_zone_from_string() -> None:
10
+ """
11
+ Understanding test for zoneinfo.
12
+
13
+ A user from Helsinki has recorded an activity. His device recorded 2025-06-21 14:41:06 in UTC. The local time was 17:41:06. How would we represent that properly? We need to import the time as UTC and then convert into the Helsinki time zone. At the end we drop the time zone information to obtain a “naive” time that still reflects the local wall-clock time.
14
+ """
15
+ tz_helsinki = zoneinfo.ZoneInfo("Europe/Helsinki")
16
+ tz_utc = zoneinfo.ZoneInfo("UTC")
17
+ dt_utc = datetime.datetime(2025, 6, 21, 14, 41, 6, tzinfo=tz_utc)
18
+ dt_helsinki = dt_utc.astimezone(tz_helsinki)
19
+ assert dt_helsinki == datetime.datetime(2025, 6, 21, 17, 41, 6, tzinfo=tz_helsinki)
20
+ assert dt_helsinki.replace(tzinfo=None) == datetime.datetime(2025, 6, 21, 17, 41, 6)
21
+
22
+
23
+ def test_utc_to_helsinki() -> None:
24
+ assert sanitize_datetime(
25
+ datetime.datetime(2025, 6, 21, 14, 41, 6, tzinfo=zoneinfo.ZoneInfo("UTC")),
26
+ fallback_from="UTC",
27
+ fallback_to="Europe/Helsinki",
28
+ ) == datetime.datetime(
29
+ 2025, 6, 21, 17, 41, 6, tzinfo=zoneinfo.ZoneInfo("Europe/Helsinki")
30
+ )
31
+
32
+
33
+ def test_0200_to_helsinki() -> None:
34
+ assert sanitize_datetime(
35
+ datetime.datetime(
36
+ 2025,
37
+ 6,
38
+ 21,
39
+ 16,
40
+ 41,
41
+ 6,
42
+ tzinfo=datetime.timezone(datetime.timedelta(hours=2)),
43
+ ),
44
+ fallback_from="UTC",
45
+ fallback_to="Europe/Helsinki",
46
+ ) == datetime.datetime(
47
+ 2025, 6, 21, 17, 41, 6, tzinfo=zoneinfo.ZoneInfo("Europe/Helsinki")
48
+ )
49
+
50
+
51
+ def test_naive_utc_to_helsinki() -> None:
52
+ assert sanitize_datetime(
53
+ datetime.datetime(2025, 6, 21, 14, 41, 6),
54
+ fallback_from="UTC",
55
+ fallback_to="Europe/Helsinki",
56
+ ) == datetime.datetime(
57
+ 2025, 6, 21, 17, 41, 6, tzinfo=zoneinfo.ZoneInfo("Europe/Helsinki")
58
+ )
59
+
60
+
61
+ def test_naive_helsinki_to_helsinki() -> None:
62
+ assert sanitize_datetime(
63
+ datetime.datetime(2025, 6, 21, 17, 41, 6),
64
+ fallback_from="Europe/Helsinki",
65
+ fallback_to="Europe/Helsinki",
66
+ ) == datetime.datetime(
67
+ 2025, 6, 21, 17, 41, 6, tzinfo=zoneinfo.ZoneInfo("Europe/Helsinki")
68
+ )
69
+
70
+
71
+ def test_time_zone_from_abvio() -> None:
72
+ """
73
+ Apply local time zone from Abvio generated files.
74
+
75
+ As reported in https://github.com/martin-ueding/geo-activity-playground/issues/303, the GPX files from Abvio contain the time data as UTC, but there is a field `abvio:startTimeZone` which contains the local time zone.
76
+
77
+ ```
78
+ <desc>Cyclemeter Row 21. Jun 2025 at 17.41.06</desc>
79
+ <time>2025-06-21T15:10:41Z</time>
80
+ <trkpt lat="…" lon="…"><ele>137.7</ele><time>2025-06-21T14:41:06Z</time></trkpt>
81
+ ...
82
+ <abvio:startTime>2025-06-21 14:41:06.537</abvio:startTime>
83
+ <abvio:startTimeZone>Europe/Helsinki</abvio:startTimeZone>
84
+ ```
85
+ """
86
+ path = pathlib.Path(
87
+ "/home/mu/Dokumente/Geo Activity Playground/Test-Suite/b1b9ec9b-016a-4223-9218-12b97d7019f2.gpx"
88
+ )
89
+ meta, ts = read_activity(path)
90
+
91
+ assert ts["time"].iloc[0] == datetime.datetime(
92
+ 2025, 6, 21, 17, 41, 6, tzinfo=zoneinfo.ZoneInfo("Europe/Helsinki")
93
+ )
@@ -0,0 +1,44 @@
1
+ import datetime
2
+ import zoneinfo
3
+
4
+ import pytest
5
+ import sqlalchemy as sa
6
+ from sqlalchemy import Column
7
+ from sqlalchemy import ForeignKey
8
+ from sqlalchemy import String
9
+ from sqlalchemy import Table
10
+ from sqlalchemy.orm import DeclarativeBase
11
+ from sqlalchemy.orm import Mapped
12
+ from sqlalchemy.orm import mapped_column
13
+ from sqlalchemy.orm import relationship
14
+ from sqlalchemy.orm import Session
15
+
16
+
17
+ class MyTestBase(DeclarativeBase):
18
+ pass
19
+
20
+
21
+ class MyTestEvent(MyTestBase):
22
+ __tablename__ = "events"
23
+
24
+ # Housekeeping data:
25
+ id: Mapped[int] = mapped_column(primary_key=True)
26
+ time: Mapped[datetime.datetime] = mapped_column(
27
+ sa.DateTime(timezone=True), nullable=True
28
+ )
29
+
30
+
31
+ @pytest.mark.xfail(reason="SQLite cannot store time zones.")
32
+ def test_timezone_sqlalchemy() -> None:
33
+ engine = sa.create_engine("sqlite://", echo=False)
34
+ MyTestBase.metadata.create_all(engine)
35
+
36
+ dt_berlin = datetime.datetime(
37
+ 2025, 7, 1, 14, 0, 0, tzinfo=zoneinfo.ZoneInfo("Europe/Berlin")
38
+ )
39
+
40
+ with Session(engine) as session:
41
+ event = MyTestEvent(time=dt_berlin)
42
+ session.add(event)
43
+ session.commit()
44
+ assert event.time == dt_berlin
@@ -4,12 +4,15 @@ from typing import Iterator
4
4
  from typing import Optional
5
5
 
6
6
  import numpy as np
7
+ import pandas as pd
7
8
 
8
9
 
9
10
  logger = logging.getLogger(__name__)
10
11
 
11
12
 
12
- def compute_tile_float(lat: float, lon: float, zoom: int) -> tuple[float, float]:
13
+ def compute_tile_float(
14
+ lat: float | pd.Series, lon: float | pd.Series, zoom: int
15
+ ) -> tuple[float, float]:
13
16
  x = np.radians(lon)
14
17
  y = np.arcsinh(np.tan(np.radians(lat)))
15
18
  x = (1 + x / np.pi) / 2