geo-activity-playground 1.2.0__py3-none-any.whl → 1.3.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (39)
  1. geo_activity_playground/alembic/versions/85fe0348e8a2_add_time_series_uuid_field.py +28 -0
  2. geo_activity_playground/alembic/versions/f2f50843be2d_make_all_fields_in_activity_nullable.py +34 -0
  3. geo_activity_playground/core/coordinates.py +12 -1
  4. geo_activity_playground/core/copernicus_dem.py +95 -0
  5. geo_activity_playground/core/datamodel.py +43 -16
  6. geo_activity_playground/core/enrichment.py +226 -164
  7. geo_activity_playground/core/paths.py +8 -0
  8. geo_activity_playground/core/test_pandas_timezone.py +36 -0
  9. geo_activity_playground/core/test_time_zone_from_location.py +7 -0
  10. geo_activity_playground/core/test_time_zone_import.py +93 -0
  11. geo_activity_playground/core/test_timezone_sqlalchemy.py +44 -0
  12. geo_activity_playground/core/tiles.py +4 -1
  13. geo_activity_playground/core/time_conversion.py +42 -14
  14. geo_activity_playground/explorer/tile_visits.py +7 -4
  15. geo_activity_playground/importers/activity_parsers.py +21 -22
  16. geo_activity_playground/importers/directory.py +62 -108
  17. geo_activity_playground/importers/strava_api.py +53 -36
  18. geo_activity_playground/importers/strava_checkout.py +30 -56
  19. geo_activity_playground/webui/app.py +40 -2
  20. geo_activity_playground/webui/blueprints/activity_blueprint.py +13 -11
  21. geo_activity_playground/webui/blueprints/entry_views.py +1 -1
  22. geo_activity_playground/webui/blueprints/explorer_blueprint.py +1 -7
  23. geo_activity_playground/webui/blueprints/heatmap_blueprint.py +2 -2
  24. geo_activity_playground/webui/blueprints/settings_blueprint.py +3 -14
  25. geo_activity_playground/webui/blueprints/summary_blueprint.py +6 -6
  26. geo_activity_playground/webui/blueprints/time_zone_fixer_blueprint.py +69 -0
  27. geo_activity_playground/webui/blueprints/upload_blueprint.py +3 -16
  28. geo_activity_playground/webui/columns.py +9 -1
  29. geo_activity_playground/webui/templates/activity/show.html.j2 +3 -1
  30. geo_activity_playground/webui/templates/hall_of_fame/index.html.j2 +1 -1
  31. geo_activity_playground/webui/templates/home.html.j2 +3 -2
  32. geo_activity_playground/webui/templates/page.html.j2 +2 -0
  33. geo_activity_playground/webui/templates/time_zone_fixer/index.html.j2 +31 -0
  34. {geo_activity_playground-1.2.0.dist-info → geo_activity_playground-1.3.0.dist-info}/METADATA +7 -3
  35. {geo_activity_playground-1.2.0.dist-info → geo_activity_playground-1.3.0.dist-info}/RECORD +38 -30
  36. geo_activity_playground/core/test_time_conversion.py +0 -37
  37. {geo_activity_playground-1.2.0.dist-info → geo_activity_playground-1.3.0.dist-info}/LICENSE +0 -0
  38. {geo_activity_playground-1.2.0.dist-info → geo_activity_playground-1.3.0.dist-info}/WHEEL +0 -0
  39. {geo_activity_playground-1.2.0.dist-info → geo_activity_playground-1.3.0.dist-info}/entry_points.txt +0 -0
@@ -0,0 +1,28 @@
"""Add the ``time_series_uuid`` column to ``activities``."""
from typing import Sequence
from typing import Union

import sqlalchemy as sa
from alembic import op


# Revision identifiers, used by Alembic.
revision: str = "85fe0348e8a2"
down_revision: Union[str, None] = "f2f50843be2d"
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    """Add a nullable ``time_series_uuid`` string column to ``activities``.

    Uses batch mode so the change also works on SQLite, which cannot
    ALTER TABLE in place.
    """
    with op.batch_alter_table("activities", schema=None) as batch_op:
        batch_op.add_column(sa.Column("time_series_uuid", sa.String(), nullable=True))


def downgrade() -> None:
    """Drop the ``time_series_uuid`` column again."""
    with op.batch_alter_table("activities", schema=None) as batch_op:
        batch_op.drop_column("time_series_uuid")
@@ -0,0 +1,34 @@
"""Make ``name``/``distance_km`` nullable; add time-zone and country columns."""
from typing import Sequence
from typing import Union

import sqlalchemy as sa
from alembic import op


# Revision identifiers, used by Alembic.
revision: str = "f2f50843be2d"
down_revision: Union[str, None] = "dc8073871da7"
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    """Relax NOT NULL on ``name``/``distance_km`` and add two new columns.

    ``iana_timezone`` stores an IANA zone key (e.g. ``Europe/Berlin``);
    ``start_country`` stores the country at the activity's start point.
    Batch mode keeps this compatible with SQLite.
    """
    with op.batch_alter_table("activities", schema=None) as batch_op:
        batch_op.add_column(sa.Column("iana_timezone", sa.String(), nullable=True))
        batch_op.add_column(sa.Column("start_country", sa.String(), nullable=True))
        batch_op.alter_column("name", existing_type=sa.VARCHAR(), nullable=True)
        batch_op.alter_column("distance_km", existing_type=sa.FLOAT(), nullable=True)


def downgrade() -> None:
    """Revert the column additions and restore the NOT NULL constraints.

    Note: restoring NOT NULL fails if rows with NULL values were created
    in the meantime; that is inherent to this downgrade.
    """
    with op.batch_alter_table("activities", schema=None) as batch_op:
        batch_op.alter_column("distance_km", existing_type=sa.FLOAT(), nullable=False)
        batch_op.alter_column("name", existing_type=sa.VARCHAR(), nullable=False)
        batch_op.drop_column("start_country")
        batch_op.drop_column("iana_timezone")
@@ -1,4 +1,7 @@
1
+ import typing
2
+
1
3
  import numpy as np
4
+ import pandas as pd
2
5
 
3
6
 
4
7
  class Bounds:
@@ -15,7 +18,15 @@ class Bounds:
15
18
  return (self.x_min < x < self.x_max) and (self.y_min < y < self.y_max)
16
19
 
17
20
 
18
- def get_distance(lat_1: float, lon_1: float, lat_2: float, lon_2: float) -> float:
21
+ FloatOrSeries = typing.TypeVar("FloatOrSeries", float, np.ndarray, pd.Series)
22
+
23
+
24
+ def get_distance(
25
+ lat_1: FloatOrSeries,
26
+ lon_1: FloatOrSeries,
27
+ lat_2: FloatOrSeries,
28
+ lon_2: FloatOrSeries,
29
+ ) -> FloatOrSeries:
19
30
  """
20
31
  https://en.wikipedia.org/wiki/Haversine_formula
21
32
  """
@@ -0,0 +1,95 @@
1
+ import functools
2
+ import math
3
+ import pathlib
4
+ from typing import Optional
5
+
6
+ import boto3
7
+ import botocore.config
8
+ import botocore.exceptions
9
+ import geotiff
10
+ import numpy as np
11
+ from scipy.interpolate import RegularGridInterpolator
12
+
13
+ from .paths import USER_CACHE_DIR
14
+
15
+
def s3_path(lat: int, lon: int) -> pathlib.Path:
    """Return the local cache path for the Copernicus DEM tile of the
    1°×1° cell whose south-west corner is (lat, lon).

    The file name follows the Copernicus GLO-90 naming scheme, e.g.
    ``Copernicus_DSM_COG_30_N50_00_E006_00_DEM.tif``.
    """
    # Southern latitudes / western longitudes use S/W prefixes with the
    # absolute value, zero-padded to 2 resp. 3 digits.
    lat_str = f"N{(lat):02d}" if lat >= 0 else f"S{(-lat):02d}"
    lon_str = f"E{(lon):03d}" if lon >= 0 else f"W{(-lon):03d}"
    result = (
        USER_CACHE_DIR
        / "Copernicus DEM"
        / f"Copernicus_DSM_COG_30_{lat_str}_00_{lon_str}_00_DEM.tif"
    )

    # parents=True: a bare mkdir(exist_ok=True) raises FileNotFoundError
    # when the cache root itself does not exist yet.
    result.parent.mkdir(parents=True, exist_ok=True)
    return result
27
+
28
+
def ensure_copernicus_file(p: pathlib.Path) -> None:
    """Download the DEM tile at *p* from the public S3 bucket if missing.

    Best effort: tiles that do not exist upstream (e.g. open ocean cells)
    are skipped silently, so callers must check ``p.exists()`` afterwards.
    """
    if p.exists():
        return
    # The Copernicus bucket is public; unsigned requests avoid needing
    # AWS credentials.
    s3 = boto3.client(
        "s3", config=botocore.config.Config(signature_version=botocore.UNSIGNED)
    )
    try:
        # boto3's download_file documents a *string* filename, so convert
        # the pathlib.Path explicitly.
        s3.download_file("copernicus-dem-90m", f"{p.stem}/{p.name}", str(p))
    except botocore.exceptions.ClientError:
        # Expected for cells without DEM coverage (404); leave the file
        # absent and let the caller fall back.
        pass
39
+
40
+
@functools.lru_cache(9)
def get_elevation_arrays(p: pathlib.Path) -> Optional[np.ndarray]:
    """Load the DEM tile at *p* and return a stacked array.

    The result has shape (3, H, W): elevation, latitude grid, longitude
    grid. Returns ``None`` when the tile is unavailable (download failed
    or no coverage). Cached for up to 9 tiles (a 3×3 neighborhood).
    """
    ensure_copernicus_file(p)
    if not p.exists():
        return None
    tile = geotiff.GeoTiff(p)
    elevation = np.array(tile.read())
    lon_grid, lat_grid = tile.get_coord_arrays()
    return np.stack([elevation, lat_grid, lon_grid], axis=0)
50
+
51
+
@functools.lru_cache(1)
def get_interpolator(lat: int, lon: int) -> Optional[RegularGridInterpolator]:
    """Build an elevation interpolator for the 1°×1° cell at (lat, lon).

    Returns ``None`` when no DEM data is available for the cell. Only the
    most recent cell is cached, matching the access pattern of walking a
    track point by point.
    """
    arrays = get_elevation_arrays(s3_path(lat, lon))
    # If we don't have data for the current center, we cannot do anything.
    if arrays is None:
        return None

    # NOTE(review): a commented-out draft that stitched the 8 neighbor
    # tiles into one grid (to avoid extrapolation artifacts at cell
    # borders) was removed here; recover it from version control if
    # border accuracy becomes a problem.

    # The coordinate grids are regular, so one column/row suffices as axis
    # labels for the interpolator.
    lat_labels = arrays[1, :, 0]
    lon_labels = arrays[2, 0, :]

    # fill_value=None extrapolates beyond the tile instead of raising.
    return RegularGridInterpolator(
        (lat_labels, lon_labels), arrays[0], bounds_error=False, fill_value=None
    )
88
+
89
+
def get_elevation(lat: float, lon: float) -> float:
    """Interpolate the DEM elevation (in meters) at the given coordinate.

    Falls back to 0.0 where no DEM tile is available for the cell.
    """
    interpolator = get_interpolator(math.floor(lat), math.floor(lon))
    if interpolator is None:
        return 0.0
    return float(interpolator((lat, lon)))
@@ -1,7 +1,11 @@
1
1
  import datetime
2
2
  import json
3
3
  import logging
4
+ import os
4
5
  import pathlib
6
+ import shutil
7
+ import uuid
8
+ import zoneinfo
5
9
  from typing import Any
6
10
  from typing import Optional
7
11
  from typing import TypedDict
@@ -49,6 +53,7 @@ class ActivityMeta(TypedDict):
49
53
  calories: float
50
54
  commute: bool
51
55
  consider_for_achievements: bool
56
+ copernicus_elevation_gain: float
52
57
  distance_km: float
53
58
  elapsed_time: datetime.timedelta
54
59
  elevation_gain: float
@@ -85,27 +90,36 @@ class Activity(DB.Model):
85
90
 
86
91
  # Housekeeping data:
87
92
  id: Mapped[int] = mapped_column(primary_key=True)
88
- name: Mapped[str] = mapped_column(sa.String, nullable=False)
89
- distance_km: Mapped[float] = mapped_column(sa.Float, nullable=False)
93
+ name: Mapped[Optional[str]] = mapped_column(sa.String, nullable=True)
94
+ distance_km: Mapped[Optional[float]] = mapped_column(sa.Float, nullable=True)
95
+ time_series_uuid: Mapped[Optional[str]] = mapped_column(sa.String, nullable=True)
90
96
 
91
97
  # Where it comes from:
92
- path: Mapped[str] = mapped_column(sa.String, nullable=True)
93
- upstream_id: Mapped[str] = mapped_column(sa.String, nullable=True)
98
+ path: Mapped[Optional[str]] = mapped_column(sa.String, nullable=True)
99
+ upstream_id: Mapped[Optional[str]] = mapped_column(sa.String, nullable=True)
94
100
 
95
101
  # Crop data:
96
102
  index_begin: Mapped[int] = mapped_column(sa.Integer, nullable=True)
97
103
  index_end: Mapped[int] = mapped_column(sa.Integer, nullable=True)
98
104
 
99
105
  # Temporal data:
100
- start: Mapped[datetime.datetime] = mapped_column(sa.DateTime, nullable=True)
101
- elapsed_time: Mapped[datetime.timedelta] = mapped_column(sa.Interval, nullable=True)
102
- moving_time: Mapped[datetime.timedelta] = mapped_column(sa.Interval, nullable=True)
106
+ start: Mapped[Optional[datetime.datetime]] = mapped_column(
107
+ sa.DateTime, nullable=True
108
+ )
109
+ iana_timezone: Mapped[Optional[str]] = mapped_column(sa.String, nullable=True)
110
+ elapsed_time: Mapped[Optional[datetime.timedelta]] = mapped_column(
111
+ sa.Interval, nullable=True
112
+ )
113
+ moving_time: Mapped[Optional[datetime.timedelta]] = mapped_column(
114
+ sa.Interval, nullable=True
115
+ )
103
116
 
104
117
  # Geographic data:
105
118
  start_latitude: Mapped[float] = mapped_column(sa.Float, nullable=True)
106
119
  start_longitude: Mapped[float] = mapped_column(sa.Float, nullable=True)
107
120
  end_latitude: Mapped[float] = mapped_column(sa.Float, nullable=True)
108
121
  end_longitude: Mapped[float] = mapped_column(sa.Float, nullable=True)
122
+ start_country: Mapped[Optional[str]] = mapped_column(sa.String, nullable=True)
109
123
 
110
124
  # Elevation data:
111
125
  elevation_gain: Mapped[float] = mapped_column(sa.Float, nullable=True)
@@ -143,30 +157,36 @@ class Activity(DB.Model):
143
157
 
144
158
  @property
145
159
  def average_speed_moving_kmh(self) -> Optional[float]:
146
- if self.moving_time:
160
+ if self.distance_km and self.moving_time:
147
161
  return self.distance_km / (self.moving_time.total_seconds() / 3_600)
148
162
  else:
149
163
  return None
150
164
 
151
165
  @property
152
166
  def average_speed_elapsed_kmh(self) -> Optional[float]:
153
- if self.elapsed_time:
167
+ if self.distance_km and self.elapsed_time:
154
168
  return self.distance_km / (self.elapsed_time.total_seconds() / 3_600)
155
169
  else:
156
170
  return None
157
171
 
172
+ @property
173
+ def time_series_path(self) -> pathlib.Path:
174
+ return TIME_SERIES_DIR() / f"{self.time_series_uuid}.parquet"
175
+
158
176
  @property
159
177
  def raw_time_series(self) -> pd.DataFrame:
160
- path = TIME_SERIES_DIR() / f"{self.id}.parquet"
161
178
  try:
162
- time_series = pd.read_parquet(path)
179
+ time_series = pd.read_parquet(self.time_series_path)
163
180
  if "altitude" in time_series.columns:
164
181
  time_series.rename(columns={"altitude": "elevation"}, inplace=True)
165
182
  return time_series
166
183
  except OSError as e:
167
- logger.error(f"Error while reading {path}.")
184
+ logger.error(f"Error while reading {self.time_series_path}.")
168
185
  raise
169
186
 
187
+ def replace_time_series(self, time_series: pd.DataFrame) -> None:
188
+ time_series.to_parquet(self.time_series_path)
189
+
170
190
  @property
171
191
  def time_series(self) -> pd.DataFrame:
172
192
  if self.index_begin or self.index_end:
@@ -201,6 +221,15 @@ class Activity(DB.Model):
201
221
  ]:
202
222
  path.unlink(missing_ok=True)
203
223
 
224
+ @property
225
+ def start_local_tz(self) -> Optional[datetime.datetime]:
226
+ if self.start and self.iana_timezone:
227
+ return self.start.replace(
228
+ microsecond=0, tzinfo=zoneinfo.ZoneInfo("UTC")
229
+ ).astimezone(zoneinfo.ZoneInfo(self.iana_timezone))
230
+ else:
231
+ return self.start
232
+
204
233
 
205
234
  class Tag(DB.Model):
206
235
  __tablename__ = "tags"
@@ -329,7 +358,6 @@ def get_or_make_equipment(name: str, config: Config) -> Equipment:
329
358
  equipment = Equipment(
330
359
  name=name, offset_km=config.equipment_offsets.get(name, 0)
331
360
  )
332
- DB.session.add(equipment)
333
361
  return equipment
334
362
 
335
363
 
@@ -356,7 +384,7 @@ class Kind(DB.Model):
356
384
  __table_args__ = (sa.UniqueConstraint("name", name="kinds_name"),)
357
385
 
358
386
 
359
- def get_or_make_kind(name: str, config: Config) -> Kind:
387
+ def get_or_make_kind(name: str) -> Kind:
360
388
  kinds = DB.session.scalars(sqlalchemy.select(Kind).where(Kind.name == name)).all()
361
389
  if kinds:
362
390
  assert len(kinds) == 1, f"There must be only one kind with name '{name}'."
@@ -364,9 +392,8 @@ def get_or_make_kind(name: str, config: Config) -> Kind:
364
392
  else:
365
393
  kind = Kind(
366
394
  name=name,
367
- consider_for_achievements=name in config.kinds_without_achievements,
395
+ consider_for_achievements=True,
368
396
  )
369
- DB.session.add(kind)
370
397
  return kind
371
398
 
372
399