geo-activity-playground 0.22.0__tar.gz → 0.24.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (101)
  1. {geo_activity_playground-0.22.0 → geo_activity_playground-0.24.0}/PKG-INFO +2 -1
  2. {geo_activity_playground-0.22.0 → geo_activity_playground-0.24.0}/geo_activity_playground/__main__.py +1 -1
  3. {geo_activity_playground-0.22.0 → geo_activity_playground-0.24.0}/geo_activity_playground/core/activities.py +16 -9
  4. {geo_activity_playground-0.22.0 → geo_activity_playground-0.24.0}/geo_activity_playground/core/activity_parsers.py +17 -32
  5. {geo_activity_playground-0.22.0 → geo_activity_playground-0.24.0}/geo_activity_playground/core/cache_migrations.py +24 -0
  6. {geo_activity_playground-0.22.0 → geo_activity_playground-0.24.0}/geo_activity_playground/core/heatmap.py +21 -21
  7. geo_activity_playground-0.24.0/geo_activity_playground/core/privacy_zones.py +16 -0
  8. {geo_activity_playground-0.22.0 → geo_activity_playground-0.24.0}/geo_activity_playground/core/similarity.py +1 -1
  9. geo_activity_playground-0.24.0/geo_activity_playground/core/test_time_conversion.py +37 -0
  10. geo_activity_playground-0.24.0/geo_activity_playground/core/time_conversion.py +14 -0
  11. {geo_activity_playground-0.22.0 → geo_activity_playground-0.24.0}/geo_activity_playground/explorer/tile_visits.py +44 -27
  12. geo_activity_playground-0.24.0/geo_activity_playground/importers/__init__.py +0 -0
  13. {geo_activity_playground-0.22.0 → geo_activity_playground-0.24.0}/geo_activity_playground/importers/directory.py +7 -2
  14. {geo_activity_playground-0.22.0 → geo_activity_playground-0.24.0}/geo_activity_playground/importers/strava_api.py +6 -0
  15. {geo_activity_playground-0.22.0 → geo_activity_playground-0.24.0}/geo_activity_playground/importers/strava_checkout.py +12 -3
  16. geo_activity_playground-0.24.0/geo_activity_playground/webui/__init__.py +0 -0
  17. geo_activity_playground-0.24.0/geo_activity_playground/webui/activity/__init__.py +0 -0
  18. geo_activity_playground-0.24.0/geo_activity_playground/webui/activity/blueprint.py +58 -0
  19. geo_activity_playground-0.22.0/geo_activity_playground/webui/activity_controller.py → geo_activity_playground-0.24.0/geo_activity_playground/webui/activity/controller.py +128 -18
  20. geo_activity_playground-0.22.0/geo_activity_playground/webui/templates/activity-day.html.j2 → geo_activity_playground-0.24.0/geo_activity_playground/webui/activity/templates/activity/day.html.j2 +14 -2
  21. geo_activity_playground-0.22.0/geo_activity_playground/webui/templates/activity-name.html.j2 → geo_activity_playground-0.24.0/geo_activity_playground/webui/activity/templates/activity/name.html.j2 +1 -1
  22. geo_activity_playground-0.22.0/geo_activity_playground/webui/templates/activity.html.j2 → geo_activity_playground-0.24.0/geo_activity_playground/webui/activity/templates/activity/show.html.j2 +9 -4
  23. geo_activity_playground-0.24.0/geo_activity_playground/webui/app.py +145 -0
  24. geo_activity_playground-0.24.0/geo_activity_playground/webui/calendar/__init__.py +0 -0
  25. geo_activity_playground-0.24.0/geo_activity_playground/webui/calendar/blueprint.py +26 -0
  26. geo_activity_playground-0.22.0/geo_activity_playground/webui/calendar_controller.py → geo_activity_playground-0.24.0/geo_activity_playground/webui/calendar/controller.py +5 -5
  27. geo_activity_playground-0.22.0/geo_activity_playground/webui/templates/calendar.html.j2 → geo_activity_playground-0.24.0/geo_activity_playground/webui/calendar/templates/calendar/index.html.j2 +3 -2
  28. geo_activity_playground-0.22.0/geo_activity_playground/webui/templates/calendar-month.html.j2 → geo_activity_playground-0.24.0/geo_activity_playground/webui/calendar/templates/calendar/month.html.j2 +2 -2
  29. geo_activity_playground-0.24.0/geo_activity_playground/webui/eddington/__init__.py +0 -0
  30. geo_activity_playground-0.24.0/geo_activity_playground/webui/eddington/blueprint.py +19 -0
  31. geo_activity_playground-0.22.0/geo_activity_playground/webui/eddington_controller.py → geo_activity_playground-0.24.0/geo_activity_playground/webui/eddington/controller.py +14 -6
  32. geo_activity_playground-0.24.0/geo_activity_playground/webui/eddington/templates/eddington/index.html.j2 +56 -0
  33. {geo_activity_playground-0.22.0 → geo_activity_playground-0.24.0}/geo_activity_playground/webui/entry_controller.py +4 -2
  34. geo_activity_playground-0.24.0/geo_activity_playground/webui/equipment/__init__.py +0 -0
  35. geo_activity_playground-0.24.0/geo_activity_playground/webui/equipment/blueprint.py +19 -0
  36. geo_activity_playground-0.22.0/geo_activity_playground/webui/equipment_controller.py → geo_activity_playground-0.24.0/geo_activity_playground/webui/equipment/controller.py +5 -3
  37. geo_activity_playground-0.24.0/geo_activity_playground/webui/explorer/__init__.py +0 -0
  38. geo_activity_playground-0.24.0/geo_activity_playground/webui/explorer/blueprint.py +54 -0
  39. geo_activity_playground-0.22.0/geo_activity_playground/webui/explorer_controller.py → geo_activity_playground-0.24.0/geo_activity_playground/webui/explorer/controller.py +6 -2
  40. geo_activity_playground-0.22.0/geo_activity_playground/webui/templates/explorer.html.j2 → geo_activity_playground-0.24.0/geo_activity_playground/webui/explorer/templates/explorer/index.html.j2 +2 -2
  41. geo_activity_playground-0.24.0/geo_activity_playground/webui/heatmap/__init__.py +0 -0
  42. geo_activity_playground-0.24.0/geo_activity_playground/webui/heatmap/blueprint.py +41 -0
  43. {geo_activity_playground-0.22.0/geo_activity_playground/webui → geo_activity_playground-0.24.0/geo_activity_playground/webui/heatmap}/heatmap_controller.py +36 -13
  44. geo_activity_playground-0.22.0/geo_activity_playground/webui/templates/heatmap.html.j2 → geo_activity_playground-0.24.0/geo_activity_playground/webui/heatmap/templates/heatmap/index.html.j2 +17 -2
  45. {geo_activity_playground-0.22.0 → geo_activity_playground-0.24.0}/geo_activity_playground/webui/search_controller.py +1 -9
  46. geo_activity_playground-0.24.0/geo_activity_playground/webui/square_planner/__init__.py +0 -0
  47. geo_activity_playground-0.24.0/geo_activity_playground/webui/square_planner/blueprint.py +38 -0
  48. geo_activity_playground-0.24.0/geo_activity_playground/webui/summary/__init__.py +0 -0
  49. geo_activity_playground-0.24.0/geo_activity_playground/webui/summary/blueprint.py +16 -0
  50. geo_activity_playground-0.24.0/geo_activity_playground/webui/summary/controller.py +268 -0
  51. geo_activity_playground-0.24.0/geo_activity_playground/webui/summary/templates/summary/index.html.j2 +135 -0
  52. geo_activity_playground-0.22.0/geo_activity_playground/webui/templates/index.html.j2 → geo_activity_playground-0.24.0/geo_activity_playground/webui/templates/home.html.j2 +1 -1
  53. {geo_activity_playground-0.22.0 → geo_activity_playground-0.24.0}/geo_activity_playground/webui/templates/page.html.j2 +32 -19
  54. {geo_activity_playground-0.22.0 → geo_activity_playground-0.24.0}/geo_activity_playground/webui/templates/search.html.j2 +1 -1
  55. geo_activity_playground-0.24.0/geo_activity_playground/webui/tile/__init__.py +0 -0
  56. geo_activity_playground-0.24.0/geo_activity_playground/webui/tile/blueprint.py +31 -0
  57. geo_activity_playground-0.24.0/geo_activity_playground/webui/upload/__init__.py +0 -0
  58. geo_activity_playground-0.24.0/geo_activity_playground/webui/upload/blueprint.py +28 -0
  59. geo_activity_playground-0.22.0/geo_activity_playground/webui/upload_controller.py → geo_activity_playground-0.24.0/geo_activity_playground/webui/upload/controller.py +15 -6
  60. geo_activity_playground-0.22.0/geo_activity_playground/webui/templates/upload.html.j2 → geo_activity_playground-0.24.0/geo_activity_playground/webui/upload/templates/upload/index.html.j2 +12 -11
  61. {geo_activity_playground-0.22.0 → geo_activity_playground-0.24.0}/pyproject.toml +3 -1
  62. geo_activity_playground-0.22.0/geo_activity_playground/webui/app.py +0 -358
  63. geo_activity_playground-0.22.0/geo_activity_playground/webui/config_controller.py +0 -12
  64. geo_activity_playground-0.22.0/geo_activity_playground/webui/locations_controller.py +0 -28
  65. geo_activity_playground-0.22.0/geo_activity_playground/webui/summary_controller.py +0 -60
  66. geo_activity_playground-0.22.0/geo_activity_playground/webui/templates/config.html.j2 +0 -24
  67. geo_activity_playground-0.22.0/geo_activity_playground/webui/templates/eddington.html.j2 +0 -18
  68. geo_activity_playground-0.22.0/geo_activity_playground/webui/templates/locations.html.j2 +0 -38
  69. geo_activity_playground-0.22.0/geo_activity_playground/webui/templates/summary.html.j2 +0 -21
  70. {geo_activity_playground-0.22.0 → geo_activity_playground-0.24.0}/LICENSE +0 -0
  71. {geo_activity_playground-0.22.0 → geo_activity_playground-0.24.0}/geo_activity_playground/__init__.py +0 -0
  72. {geo_activity_playground-0.22.0 → geo_activity_playground-0.24.0}/geo_activity_playground/core/__init__.py +0 -0
  73. {geo_activity_playground-0.22.0 → geo_activity_playground-0.24.0}/geo_activity_playground/core/config.py +0 -0
  74. {geo_activity_playground-0.22.0 → geo_activity_playground-0.24.0}/geo_activity_playground/core/coordinates.py +0 -0
  75. {geo_activity_playground-0.22.0 → geo_activity_playground-0.24.0}/geo_activity_playground/core/paths.py +0 -0
  76. {geo_activity_playground-0.22.0 → geo_activity_playground-0.24.0}/geo_activity_playground/core/tasks.py +0 -0
  77. {geo_activity_playground-0.22.0 → geo_activity_playground-0.24.0}/geo_activity_playground/core/test_tiles.py +0 -0
  78. {geo_activity_playground-0.22.0 → geo_activity_playground-0.24.0}/geo_activity_playground/core/tiles.py +0 -0
  79. {geo_activity_playground-0.22.0 → geo_activity_playground-0.24.0}/geo_activity_playground/explorer/__init__.py +0 -0
  80. {geo_activity_playground-0.22.0 → geo_activity_playground-0.24.0}/geo_activity_playground/explorer/grid_file.py +0 -0
  81. {geo_activity_playground-0.22.0 → geo_activity_playground-0.24.0}/geo_activity_playground/explorer/video.py +0 -0
  82. {geo_activity_playground-0.22.0 → geo_activity_playground-0.24.0}/geo_activity_playground/importers/test_directory.py +0 -0
  83. {geo_activity_playground-0.22.0 → geo_activity_playground-0.24.0}/geo_activity_playground/importers/test_strava_api.py +0 -0
  84. /geo_activity_playground-0.22.0/geo_activity_playground/webui/templates/activity-lines.html.j2 → /geo_activity_playground-0.24.0/geo_activity_playground/webui/activity/templates/activity/lines.html.j2 +0 -0
  85. /geo_activity_playground-0.22.0/geo_activity_playground/webui/templates/equipment.html.j2 → /geo_activity_playground-0.24.0/geo_activity_playground/webui/equipment/templates/equipment/index.html.j2 +0 -0
  86. /geo_activity_playground-0.22.0/geo_activity_playground/webui/square_planner_controller.py → /geo_activity_playground-0.24.0/geo_activity_playground/webui/square_planner/controller.py +0 -0
  87. /geo_activity_playground-0.22.0/geo_activity_playground/webui/templates/square-planner.html.j2 → /geo_activity_playground-0.24.0/geo_activity_playground/webui/square_planner/templates/square_planner/index.html.j2 +0 -0
  88. {geo_activity_playground-0.22.0 → geo_activity_playground-0.24.0}/geo_activity_playground/webui/static/android-chrome-192x192.png +0 -0
  89. {geo_activity_playground-0.22.0 → geo_activity_playground-0.24.0}/geo_activity_playground/webui/static/android-chrome-384x384.png +0 -0
  90. {geo_activity_playground-0.22.0 → geo_activity_playground-0.24.0}/geo_activity_playground/webui/static/android-chrome-512x512.png +0 -0
  91. {geo_activity_playground-0.22.0 → geo_activity_playground-0.24.0}/geo_activity_playground/webui/static/apple-touch-icon.png +0 -0
  92. {geo_activity_playground-0.22.0 → geo_activity_playground-0.24.0}/geo_activity_playground/webui/static/browserconfig.xml +0 -0
  93. {geo_activity_playground-0.22.0 → geo_activity_playground-0.24.0}/geo_activity_playground/webui/static/favicon-16x16.png +0 -0
  94. {geo_activity_playground-0.22.0 → geo_activity_playground-0.24.0}/geo_activity_playground/webui/static/favicon-32x32.png +0 -0
  95. {geo_activity_playground-0.22.0 → geo_activity_playground-0.24.0}/geo_activity_playground/webui/static/favicon.ico +0 -0
  96. {geo_activity_playground-0.22.0 → geo_activity_playground-0.24.0}/geo_activity_playground/webui/static/mstile-150x150.png +0 -0
  97. {geo_activity_playground-0.22.0 → geo_activity_playground-0.24.0}/geo_activity_playground/webui/static/safari-pinned-tab.svg +0 -0
  98. {geo_activity_playground-0.22.0 → geo_activity_playground-0.24.0}/geo_activity_playground/webui/static/site.webmanifest +0 -0
  99. {geo_activity_playground-0.22.0 → geo_activity_playground-0.24.0}/geo_activity_playground/webui/strava_controller.py +0 -0
  100. {geo_activity_playground-0.22.0 → geo_activity_playground-0.24.0}/geo_activity_playground/webui/templates/strava-connect.html.j2 +0 -0
  101. /geo_activity_playground-0.22.0/geo_activity_playground/webui/tile_controller.py → /geo_activity_playground-0.24.0/geo_activity_playground/webui/tile/controller.py +0 -0
PKG-INFO
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: geo-activity-playground
- Version: 0.22.0
+ Version: 0.24.0
  Summary: Analysis of geo data activities like rides, runs or hikes.
  License: MIT
  Author: Martin Ueding
@@ -29,6 +29,7 @@ Requires-Dist: pyarrow (>=16.1.0,<17.0.0)
  Requires-Dist: python-dateutil (>=2.8.2,<3.0.0)
  Requires-Dist: requests (>=2.28.1,<3.0.0)
  Requires-Dist: scipy (>=1.8.1,<2.0.0)
+ Requires-Dist: shapely (>=2.0.5,<3.0.0)
  Requires-Dist: stravalib (>=1.3.3,<2.0.0)
  Requires-Dist: tcxreader (>=0.4.5,<0.5.0)
  Requires-Dist: tomli (>=2.0.1,<3.0.0) ; python_version < "3.11"

geo_activity_playground/__main__.py
@@ -13,7 +13,7 @@ from geo_activity_playground.core.config import get_config
  from geo_activity_playground.explorer.tile_visits import TileVisitAccessor
  from geo_activity_playground.explorer.video import explorer_video_main
  from geo_activity_playground.webui.app import webui_main
- from geo_activity_playground.webui.upload_controller import scan_for_activities
+ from geo_activity_playground.webui.upload.controller import scan_for_activities

  logger = logging.getLogger(__name__)

geo_activity_playground/core/activities.py
@@ -18,6 +18,7 @@ from geo_activity_playground.core.paths import activities_path
  from geo_activity_playground.core.paths import activity_timeseries_path
  from geo_activity_playground.core.tasks import WorkTracker
  from geo_activity_playground.core.tiles import compute_tile_float
+ from geo_activity_playground.core.time_conversion import convert_to_datetime_ns

  logger = logging.getLogger(__name__)

@@ -25,6 +26,7 @@ logger = logging.getLogger(__name__)
  class ActivityMeta(TypedDict):
  calories: float
  commute: bool
+ consider_for_achievements: bool
  distance_km: float
  elapsed_time: datetime.timedelta
  end_latitude: float
@@ -88,7 +90,7 @@ class ActivityRepository:
  old_df = self.meta
  self.meta = pd.concat([old_df, new_df])
  assert pd.api.types.is_dtype_equal(
- self.meta["start"].dtype, "datetime64[ns, UTC]"
+ self.meta["start"].dtype, "datetime64[ns]"
  ), (self.meta["start"].dtype, self.meta["start"].iloc[0])
  self.save()
  self._loose_activities = []
@@ -116,14 +118,17 @@ class ActivityRepository:
  else:
  return None

- @property
- def activity_ids(self) -> set[int]:
- return set(self.meta.index)
+ def get_activity_ids(self, only_achievements: bool = False) -> set[int]:
+ if only_achievements:
+ return set(self.meta.loc[self.meta["consider_for_achievements"]].index)
+ else:
+ return set(self.meta.index)

- def iter_activities(self, new_to_old=True) -> Iterator[ActivityMeta]:
+ def iter_activities(self, new_to_old=True, dropna=False) -> Iterator[ActivityMeta]:
  direction = -1 if new_to_old else 1
  for index, row in self.meta[::direction].iterrows():
- yield row
+ if not dropna or not pd.isna(row["start"]):
+ yield row

  @functools.lru_cache()
  def get_activity_by_id(self, id: int) -> ActivityMeta:
@@ -146,7 +151,7 @@ class ActivityRepository:

  def embellish_time_series(repository: ActivityRepository) -> None:
  work_tracker = WorkTracker("embellish-time-series")
- activities_to_process = work_tracker.filter(repository.activity_ids)
+ activities_to_process = work_tracker.filter(repository.get_activity_ids())
  for activity_id in tqdm(activities_to_process, desc="Embellish time series data"):
  path = activity_timeseries_path(activity_id)
  df = pd.read_parquet(path)
@@ -170,9 +175,11 @@ def embellish_single_time_series(
  ):
  time = timeseries["time"]
  del timeseries["time"]
- timeseries["time"] = [start + datetime.timedelta(seconds=t) for t in time]
+ timeseries["time"] = [
+ convert_to_datetime_ns(start + datetime.timedelta(seconds=t)) for t in time
+ ]
  changed = True
- assert pd.api.types.is_dtype_equal(timeseries["time"].dtype, "datetime64[ns, UTC]")
+ assert pd.api.types.is_dtype_equal(timeseries["time"].dtype, "datetime64[ns]")

  distances = get_distance(
  timeseries["latitude"].shift(1),

geo_activity_playground/core/activity_parsers.py
@@ -8,14 +8,13 @@ import charset_normalizer
  import dateutil.parser
  import fitdecode
  import gpxpy
- import numpy as np
  import pandas as pd
  import tcxreader.tcxreader
  import xmltodict

  from geo_activity_playground.core.activities import ActivityMeta
  from geo_activity_playground.core.activities import embellish_single_time_series
- from geo_activity_playground.core.coordinates import get_distance
+ from geo_activity_playground.core.time_conversion import convert_to_datetime_ns

  logger = logging.getLogger(__name__)

@@ -57,24 +56,6 @@ def read_activity(path: pathlib.Path) -> tuple[ActivityMeta, pd.DataFrame]:
  raise ActivityParseError(f"Unsupported file format: {file_type}")

  if len(timeseries):
- # Unify time zones to UTC.
- try:
- if timeseries["time"].dt.tz is not None:
- timeseries["time"] = timeseries["time"].dt.tz_localize(None)
- timeseries["time"] = timeseries["time"].dt.tz_localize("UTC")
- except AttributeError as e:
- print(timeseries)
- print(timeseries.dtypes)
- types = {}
- for elem in timeseries["time"]:
- t = str(type(elem))
- if t not in types:
- types[t] = elem
- print(types)
- raise ActivityParseError(
- "It looks like the date parsing has gone wrong."
- ) from e
-
  timeseries, changed = embellish_single_time_series(timeseries)

  # Extract some meta data from the time series.
@@ -126,8 +107,13 @@ def read_fit_activity(path: pathlib.Path, open) -> tuple[ActivityMeta, pd.DataFr
  and values.get("position_long", None)
  ):
  time = values["timestamp"]
- assert isinstance(time, datetime.datetime)
- time = time.astimezone(datetime.timezone.utc)
+ if isinstance(time, datetime.datetime):
+ pass
+ elif time is None or isinstance(time, int):
+ time = pd.NaT
+ else:
+ raise RuntimeError(f"Cannot parse time: {time} in {path}.")
+ time = convert_to_datetime_ns(time)
  row = {
  "time": time,
  "latitude": values["position_lat"] / ((2**32) / 360),
@@ -202,10 +188,11 @@ def read_gpx_activity(path: pathlib.Path, open) -> pd.DataFrame:
  for point in segment.points:
  if isinstance(point.time, datetime.datetime):
  time = point.time
- else:
+ elif isinstance(point.time, str):
  time = dateutil.parser.parse(str(point.time))
- assert isinstance(time, datetime.datetime)
- time = time.astimezone(datetime.timezone.utc)
+ else:
+ time = pd.NaT
+ time = convert_to_datetime_ns(time)
  points.append((time, point.latitude, point.longitude, point.elevation))

  df = pd.DataFrame(points, columns=["time", "latitude", "longitude", "altitude"])
@@ -233,6 +220,7 @@ def read_tcx_activity(path: pathlib.Path, opener) -> pd.DataFrame:
  content = f.read().strip()

  stripped_file = pathlib.Path("Cache/temp.tcx")
+ stripped_file.parent.mkdir(exist_ok=True)
  with open(stripped_file, "wb") as f:
  f.write(content)
  data = tcx_reader.read(str(stripped_file))
@@ -242,7 +230,7 @@ def read_tcx_activity(path: pathlib.Path, opener) -> pd.DataFrame:
  if trackpoint.latitude and trackpoint.longitude:
  time = trackpoint.time
  assert isinstance(time, datetime.datetime)
- time = time.astimezone(datetime.timezone.utc)
+ time = convert_to_datetime_ns(time)
  row = {
  "time": time,
  "latitude": trackpoint.latitude,
@@ -270,7 +258,8 @@ def read_kml_activity(path: pathlib.Path, opener) -> pd.DataFrame:
  track = placemark["gx:Track"]
  rows = []
  for when, where in zip(track["when"], track["gx:coord"]):
- time = dateutil.parser.parse(when).astimezone(datetime.timezone.utc)
+ time = dateutil.parser.parse(when)
+ time = convert_to_datetime_ns(time)
  parts = where.split(" ")
  if len(parts) == 2:
  lon, lat = parts
@@ -289,11 +278,7 @@ def read_simra_activity(path: pathlib.Path, opener) -> pd.DataFrame:
  data["time"] = data["timeStamp"].apply(
  lambda d: datetime.datetime.fromtimestamp(d / 1000)
  )
- tz = (
- datetime.datetime.now(datetime.timezone.utc).astimezone().tzinfo
- ) # get local timezone
- data["time"] = data["time"].dt.tz_localize(tz)
- data["time"] = data["time"].dt.tz_convert("UTC")
+ data["time"] = convert_to_datetime_ns(data["time"])
  data = data.rename(columns={"lat": "latitude", "lon": "longitude"})
  return data.dropna(subset=["latitude"], ignore_index=True)[
  ["time", "latitude", "longitude"]

geo_activity_playground/core/cache_migrations.py
@@ -34,6 +34,7 @@ def reset_time_series_embellishment() -> None:

  def delete_tile_visits() -> None:
  paths = [
+ pathlib.Path("Cache/activities-per-tile.pickle"),
  pathlib.Path("Cache/tile-evolution-state.pickle"),
  pathlib.Path("Cache/tile-history.pickle"),
  pathlib.Path("Cache/tile-visits.pickle"),
@@ -78,6 +79,24 @@ def convert_distances_to_km() -> None:
  time_series.to_parquet(time_series_path)


+ def add_consider_for_achievements() -> None:
+ activities_path = pathlib.Path("Cache/activities.parquet")
+ if activities_path.exists():
+ df = pd.read_parquet(activities_path)
+ if "consider_for_achievements" not in df.columns:
+ df["consider_for_achievements"] = True
+ else:
+ df.loc[
+ df["consider_for_achievements"].isna(), "consider_for_achievements"
+ ] = True
+ df.to_parquet("Cache/activities.parquet")
+
+
+ def delete_everything() -> None:
+ if pathlib.Path("Cache").exists():
+ shutil.rmtree("Cache")
+
+
  def apply_cache_migrations() -> None:
  logger.info("Apply cache migration if needed …")
  cache_status_file = pathlib.Path("Cache/status.json")
@@ -98,6 +117,11 @@ def apply_cache_migrations() -> None:
  delete_activity_metadata,
  delete_tile_visits,
  delete_heatmap_cache,
+ add_consider_for_achievements,
+ delete_tile_visits,
+ delete_heatmap_cache,
+ delete_tile_visits,
+ delete_everything,
  ]

  for migration in migrations[cache_status["num_applied_migrations"] :]:

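The migration list is append-only: Cache/status.json stores how many entries have already run, and the slice in the loop above only executes entries past that counter (which is why delete_tile_visits appears several times). A minimal self-contained sketch of that idea; the stand-in migration functions and the counter bookkeeping are assumptions, only the slicing follows the loop header shown above:

    # Sketch only: stand-ins for the real migration functions and status dict.
    def migration_a() -> None: ...
    def migration_b() -> None: ...

    migrations = [migration_a, migration_b]
    cache_status = {"num_applied_migrations": 1}  # migration_a ran in an earlier version

    # Only the entries past the stored counter run; the counter update is assumed.
    for migration in migrations[cache_status["num_applied_migrations"]:]:
        migration()
        cache_status["num_applied_migrations"] += 1
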
geo_activity_playground/core/heatmap.py
@@ -29,7 +29,7 @@ def get_bounds(lat_lon_data: np.ndarray) -> GeoBounds:
  def add_margin(lower: float, upper: float) -> tuple[float, float]:
  spread = upper - lower
  margin = spread / 20
- return max(0, lower - margin), upper + margin
+ return max(0.0, lower - margin), upper + margin


  def add_margin_to_geo_bounds(bounds: GeoBounds) -> GeoBounds:
@@ -51,11 +51,28 @@ class TileBounds:
  y_tile_min: int
  y_tile_max: int

+
+ @dataclasses.dataclass
+ class PixelBounds:
+ x_min: int
+ x_max: int
+ y_min: int
+ y_max: int
+
+ @classmethod
+ def from_tile_bounds(cls, tile_bounds: TileBounds) -> "PixelBounds":
+ return cls(
+ int(tile_bounds.x_tile_min) * OSM_TILE_SIZE,
+ int(tile_bounds.x_tile_max) * OSM_TILE_SIZE,
+ int(tile_bounds.y_tile_min) * OSM_TILE_SIZE,
+ int(tile_bounds.y_tile_max) * OSM_TILE_SIZE,
+ )
+
  @property
  def shape(self) -> tuple[int, int]:
  return (
- (self.y_tile_max - self.y_tile_min) * OSM_TILE_SIZE,
- (self.x_tile_max - self.x_tile_min) * OSM_TILE_SIZE,
+ self.y_max - self.y_min,
+ self.x_max - self.x_min,
  )


@@ -107,7 +124,7 @@ def get_sensible_zoom_level(


  def build_map_from_tiles(tile_bounds: TileBounds) -> np.ndarray:
- background = np.zeros((*tile_bounds.shape, 3))
+ background = np.zeros((*PixelBounds.from_tile_bounds(tile_bounds).shape, 3))

  for x in range(tile_bounds.x_tile_min, tile_bounds.x_tile_max):
  for y in range(tile_bounds.y_tile_min, tile_bounds.y_tile_max):
@@ -129,20 +146,3 @@ def convert_to_grayscale(image: np.ndarray) -> np.ndarray:
  image = np.sum(image * [0.2126, 0.7152, 0.0722], axis=2)
  image = np.dstack((image, image, image))
  return image
-
-
- def crop_image_to_bounds(
- image: np.ndarray, geo_bounds: GeoBounds, tile_bounds: TileBounds
- ) -> np.ndarray:
- min_x, min_y = compute_tile_float(
- geo_bounds.lat_max, geo_bounds.lon_min, tile_bounds.zoom
- )
- max_x, max_y = compute_tile_float(
- geo_bounds.lat_min, geo_bounds.lon_max, tile_bounds.zoom
- )
- min_x = int((min_x - tile_bounds.x_tile_min) * OSM_TILE_SIZE)
- min_y = int((min_y - tile_bounds.y_tile_min) * OSM_TILE_SIZE)
- max_x = int((max_x - tile_bounds.x_tile_min) * OSM_TILE_SIZE)
- max_y = int((max_y - tile_bounds.y_tile_min) * OSM_TILE_SIZE)
- image = image[min_y:max_y, min_x:max_x, :]
- return image

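To make the new PixelBounds helper concrete, here is a hedged sketch; the tile numbers are invented, OSM_TILE_SIZE is assumed to be 256, and TileBounds is assumed to take exactly the five fields referenced in this file:

    from geo_activity_playground.core.heatmap import PixelBounds, TileBounds

    # Invented bounds covering 10x10 OSM tiles at zoom 14 (field names as used above).
    tile_bounds = TileBounds(
        zoom=14, x_tile_min=8500, x_tile_max=8510, y_tile_min=5600, y_tile_max=5610
    )
    pixel_bounds = PixelBounds.from_tile_bounds(tile_bounds)

    # With 256-pixel tiles this is the 2560x2560 canvas that build_map_from_tiles allocates.
    assert pixel_bounds.shape == (2560, 2560)
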
geo_activity_playground/core/privacy_zones.py (new file)
@@ -0,0 +1,16 @@
+ import pandas as pd
+ import shapely
+
+
+ class PrivacyZone:
+ def __init__(self, points: list[list[float]]) -> None:
+ self.points = points
+ self._polygon = shapely.Polygon(points)
+ shapely.prepare(self._polygon)
+
+ def filter_time_series(self, time_series: pd.DataFrame) -> pd.DataFrame:
+ mask = [
+ not shapely.contains_xy(self._polygon, row["longitude"], row["latitude"])
+ for index, row in time_series.iterrows()
+ ]
+ return time_series.loc[mask]

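A hedged usage sketch of the new PrivacyZone class; the polygon corners (longitude/latitude pairs, matching the contains_xy call above) and the track points are invented:

    import pandas as pd

    from geo_activity_playground.core.privacy_zones import PrivacyZone

    # Invented privacy zone around a home location.
    zone = PrivacyZone(
        [[7.100, 50.700], [7.110, 50.700], [7.110, 50.710], [7.100, 50.710]]
    )

    # Invented track points; the second row falls inside the polygon.
    time_series = pd.DataFrame(
        {"longitude": [7.050, 7.105, 7.200], "latitude": [50.650, 50.705, 50.800]}
    )

    # Rows inside the zone are dropped, everything else is returned unchanged.
    filtered = zone.filter_time_series(time_series)
    assert len(filtered) == 2
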
geo_activity_playground/core/similarity.py
@@ -29,7 +29,7 @@ def precompute_activity_distances(repository: ActivityRepository) -> None:
  with stored_object(fingerprint_path, {}) as fingerprints, stored_object(
  distances_path, {}
  ) as distances:
- activity_ids = repository.activity_ids
+ activity_ids = repository.get_activity_ids()

  activity_ids_without_fingerprint = [
  activity_id

geo_activity_playground/core/test_time_conversion.py (new file)
@@ -0,0 +1,37 @@
+ import datetime
+
+ import numpy as np
+ import pandas as pd
+
+ from .time_conversion import convert_to_datetime_ns
+
+ target = np.datetime64(datetime.datetime(2000, 1, 2, 3, 4, 5))
+
+
+ def test_convert_to_datetime_ns() -> None:
+ dt_local = datetime.datetime(2000, 1, 2, 3, 4, 5)
+ dt_tz = datetime.datetime(
+ 2000, 1, 2, 3, 4, 5, tzinfo=datetime.timezone(datetime.timedelta(hours=3))
+ )
+ dt_utc = datetime.datetime(2000, 1, 2, 3, 4, 5, tzinfo=datetime.timezone.utc)
+
+ inputs = [
+ dt_local,
+ dt_tz,
+ dt_utc,
+ pd.Timestamp(dt_local),
+ pd.Timestamp(dt_tz),
+ pd.Timestamp(dt_utc),
+ ]
+
+ for d in inputs:
+ actual = convert_to_datetime_ns(d)
+ # assert pd.api.types.is_dtype_equal(actual.dtype, "datetime64[ns]")
+ assert actual == target
+
+ actual = convert_to_datetime_ns(pd.Series([d]))
+ assert actual.iloc[0] == target
+
+
+ def test_NaT() -> None:
+ assert pd.isna(convert_to_datetime_ns(pd.NaT))

geo_activity_playground/core/time_conversion.py (new file)
@@ -0,0 +1,14 @@
+ import numpy as np
+ import pandas as pd
+
+
+ def convert_to_datetime_ns(date) -> np.datetime64:
+ if isinstance(date, pd.Series):
+ ts = pd.to_datetime(date)
+ ts = ts.dt.tz_localize(None)
+ return ts
+ else:
+ ts = pd.to_datetime(date)
+ if ts.tzinfo is not None:
+ ts = ts.tz_localize(None)
+ return ts.to_datetime64()

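For orientation, a small sketch of what the new helper does (example values are invented): timezone-aware and naive inputs both end up as timezone-naive datetime64[ns] values, keeping the wall-clock time and dropping the offset, which is what the test above asserts.

    import datetime

    import pandas as pd

    from geo_activity_playground.core.time_conversion import convert_to_datetime_ns

    aware = datetime.datetime(2000, 1, 2, 3, 4, 5, tzinfo=datetime.timezone.utc)
    naive = datetime.datetime(2000, 1, 2, 3, 4, 5)

    # Both scalars become the same numpy datetime64: 2000-01-02T03:04:05.
    assert convert_to_datetime_ns(aware) == convert_to_datetime_ns(naive)

    # A Series is converted as a whole and comes back with dtype datetime64[ns].
    series = convert_to_datetime_ns(pd.Series([aware]))
    assert str(series.dtype) == "datetime64[ns]"
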
geo_activity_playground/explorer/tile_visits.py
@@ -25,11 +25,13 @@ class TileVisitAccessor:
  TILE_EVOLUTION_STATES_PATH = pathlib.Path("Cache/tile-evolution-state.pickle")
  TILE_HISTORIES_PATH = pathlib.Path(f"Cache/tile-history.pickle")
  TILE_VISITS_PATH = pathlib.Path(f"Cache/tile-visits.pickle")
+ ACTIVITIES_PER_TILE_PATH = pathlib.Path(f"Cache/activities-per-tile.pickle")

  def __init__(self) -> None:
  self.visits: dict[int, dict[tuple[int, int], dict[str, Any]]] = try_load_pickle(
  self.TILE_VISITS_PATH
  ) or collections.defaultdict(dict)
+ "zoom → (tile_x, tile_y) → tile_info"

  self.histories: dict[int, pd.DataFrame] = try_load_pickle(
  self.TILE_HISTORIES_PATH
@@ -39,6 +41,12 @@ class TileVisitAccessor:
  self.TILE_EVOLUTION_STATES_PATH
  ) or collections.defaultdict(TileEvolutionState)

+ self.activities_per_tile: dict[
+ int, dict[tuple[int, int], set[int]]
+ ] = try_load_pickle(self.ACTIVITIES_PER_TILE_PATH) or collections.defaultdict(
+ dict
+ )
+
  def save(self) -> None:
  with open(self.TILE_VISITS_PATH, "wb") as f:
  pickle.dump(self.visits, f)
@@ -49,13 +57,16 @@ class TileVisitAccessor:
  with open(self.TILE_EVOLUTION_STATES_PATH, "wb") as f:
  pickle.dump(self.states, f)

+ with open(self.ACTIVITIES_PER_TILE_PATH, "wb") as f:
+ pickle.dump(self.activities_per_tile, f)
+

  def compute_tile_visits(
  repository: ActivityRepository, tile_visits_accessor: TileVisitAccessor
  ) -> None:

  work_tracker = WorkTracker("tile-visits")
- activity_ids_to_process = work_tracker.filter(repository.activity_ids)
+ activity_ids_to_process = work_tracker.filter(repository.get_activity_ids())
  new_tile_history_rows = collections.defaultdict(list)
  for activity_id in tqdm(
  activity_ids_to_process, desc="Extract explorer tile visits"
@@ -64,34 +75,40 @@ def compute_tile_visits(
  for zoom in range(20):
  for time, tile_x, tile_y in _tiles_from_points(time_series, zoom):
  tile = (tile_x, tile_y)
- if tile in tile_visits_accessor.visits[zoom]:
- d = tile_visits_accessor.visits[zoom][tile]
- if d["first_time"] > time:
- d["first_time"] = time
- d["first_id"] = activity_id
- if d["last_time"] < time:
- d["last_time"] = time
- d["last_id"] = activity_id
- d["activity_ids"].add(activity_id)
- else:
- tile_visits_accessor.visits[zoom][tile] = {
- "first_time": time,
- "first_id": activity_id,
- "last_time": time,
- "last_id": activity_id,
- "activity_ids": {activity_id},
- }
- new_tile_history_rows[zoom].append(
- {
- "activity_id": activity_id,
- "time": time,
- "tile_x": tile_x,
- "tile_y": tile_y,
+ if not tile in tile_visits_accessor.activities_per_tile[zoom]:
+ tile_visits_accessor.activities_per_tile[zoom][tile] = set()
+ tile_visits_accessor.activities_per_tile[zoom][tile].add(activity_id)
+
+ activity = repository.get_activity_by_id(activity_id)
+ if activity["consider_for_achievements"]:
+ if tile in tile_visits_accessor.visits[zoom]:
+ d = tile_visits_accessor.visits[zoom][tile]
+ if d["first_time"] > time:
+ d["first_time"] = time
+ d["first_id"] = activity_id
+ if d["last_time"] < time:
+ d["last_time"] = time
+ d["last_id"] = activity_id
+ d["activity_ids"].add(activity_id)
+ else:
+ tile_visits_accessor.visits[zoom][tile] = {
+ "first_time": time,
+ "first_id": activity_id,
+ "last_time": time,
+ "last_id": activity_id,
+ "activity_ids": {activity_id},
  }
- )
+ new_tile_history_rows[zoom].append(
+ {
+ "activity_id": activity_id,
+ "time": time,
+ "tile_x": tile_x,
+ "tile_y": tile_y,
+ }
+ )
  work_tracker.mark_done(activity_id)

- if activity_ids_to_process:
+ if new_tile_history_rows:
  for zoom, new_rows in new_tile_history_rows.items():
  new_df = pd.DataFrame(new_rows)
  new_df.sort_values("time", inplace=True)
@@ -107,7 +124,7 @@ def compute_tile_visits(
  def _tiles_from_points(
  time_series: pd.DataFrame, zoom: int
  ) -> Iterator[tuple[datetime.datetime, int, int]]:
- assert pd.api.types.is_dtype_equal(time_series["time"].dtype, "datetime64[ns, UTC]")
+ assert pd.api.types.is_dtype_equal(time_series["time"].dtype, "datetime64[ns]")
  xf = time_series["x"] * 2**zoom
  yf = time_series["y"] * 2**zoom
  for t1, x1, y1, x2, y2, s1, s2 in zip(

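The new activities_per_tile structure is keyed by zoom level and then by tile coordinate, as its type annotation above shows. A hedged lookup sketch with invented zoom and tile numbers:

    from geo_activity_playground.explorer.tile_visits import TileVisitAccessor

    accessor = TileVisitAccessor()  # loads the pickles from Cache/ if they exist

    # Which activities crossed explorer tile (8508, 5610) at zoom 14?
    activity_ids = accessor.activities_per_tile[14].get((8508, 5610), set())
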
geo_activity_playground/importers/directory.py
@@ -6,6 +6,7 @@ import pickle
  import re
  import sys
  import traceback
+ from typing import Any
  from typing import Optional

  import pandas as pd
@@ -23,7 +24,9 @@ ACTIVITY_DIR = pathlib.Path("Activities")


  def import_from_directory(
- repository: ActivityRepository, metadata_extraction_regexes: list[str] = []
+ repository: ActivityRepository,
+ kind_defaults: dict[str, Any] = {},
+ metadata_extraction_regexes: list[str] = [],
  ) -> None:
  paths_with_errors = []
  work_tracker = WorkTracker("parse-activity-files")
@@ -66,9 +69,11 @@ def import_from_directory(
  path=str(path),
  kind="Unknown",
  equipment="Unknown",
+ consider_for_achievements=True,
  )
  activity_meta.update(activity_meta_from_file)
  activity_meta.update(_get_metadata_from_path(path, metadata_extraction_regexes))
+ activity_meta.update(kind_defaults.get(activity_meta["kind"], {}))
  repository.add_activity(activity_meta)

  if paths_with_errors:
@@ -97,7 +102,7 @@ def _cache_single_file(path: pathlib.Path) -> Optional[tuple[pathlib.Path, str]]
  except ActivityParseError as e:
  logger.error(f"Error while parsing file {path}:")
  traceback.print_exc()
- return (path, str(e))
+ return path, str(e)
  except:
  logger.error(f"Encountered a problem with {path=}, see details below.")
  raise

geo_activity_playground/importers/strava_api.py
@@ -9,6 +9,7 @@ from typing import Any

  import pandas as pd
  from stravalib import Client
+ from stravalib.exc import Fault
  from stravalib.exc import ObjectNotFound
  from stravalib.exc import RateLimitExceeded
  from tqdm import tqdm
@@ -173,6 +174,11 @@ def try_import_strava(repository: ActivityRepository) -> bool:
  limit_exceeded = False
  except RateLimitExceeded:
  limit_exceeded = True
+ except Fault as e:
+ if "Too Many Requests" in str(e):
+ limit_exceeded = True
+ else:
+ raise

  repository.commit()

geo_activity_playground/importers/strava_checkout.py
@@ -2,6 +2,7 @@ import datetime
  import logging
  import pathlib
  import shutil
+ import sys
  import traceback
  from typing import Optional
  from typing import Union
@@ -15,6 +16,7 @@ from geo_activity_playground.core.activities import ActivityRepository
  from geo_activity_playground.core.activity_parsers import ActivityParseError
  from geo_activity_playground.core.activity_parsers import read_activity
  from geo_activity_playground.core.tasks import WorkTracker
+ from geo_activity_playground.core.time_conversion import convert_to_datetime_ns


  logger = logging.getLogger(__name__)
@@ -133,6 +135,13 @@ def import_from_strava_checkout(repository: ActivityRepository) -> None:
  dayfirst = False
  if activities.columns[0] == "Aktivitäts-ID":
  activities = pd.read_csv(checkout_path / "activities.csv", decimal=",")
+ if len(activities.columns) != len(EXPECTED_COLUMNS):
+ logger.error(
+ f"You are trying to import a Strava checkout where the `activities.csv` contains German column headers. In order to import this, we need to map these to the English ones. Unfortunately Strava has changed the number of columns. Your file has {len(activities.columns)} but we expect {len(EXPECTED_COLUMNS)}. This means that the program needs to be updated to match the new Strava export format. Please go to https://github.com/martin-ueding/geo-activity-playground/issues and open a new issue and share the following output in the ticket:"
+ )
+ print(activities.columns)
+ print(activities.dtypes)
+ sys.exit(1)
  activities.columns = EXPECTED_COLUMNS
  dayfirst = True

@@ -166,9 +175,9 @@ def import_from_strava_checkout(repository: ActivityRepository) -> None:
  "id": activity_id,
  "name": row["Activity Name"],
  "path": str(activity_file),
- "start": dateutil.parser.parse(
- row["Activity Date"], dayfirst=dayfirst
- ).astimezone(datetime.timezone.utc),
+ "start": convert_to_datetime_ns(
+ dateutil.parser.parse(row["Activity Date"], dayfirst=dayfirst)
+ ),
  }

  time_series_path = activity_stream_dir / f"{activity_id}.parquet"