geo-activity-playground 0.42.0__py3-none-any.whl → 0.43.1__py3-none-any.whl

This diff shows the changes between two publicly released versions of the package as they appear in the public registry. It is provided for informational purposes only.
Files changed (32)
  1. geo_activity_playground/alembic/script.py.mako +0 -6
  2. geo_activity_playground/alembic/versions/da2cba03b71d_add_photos.py +40 -0
  3. geo_activity_playground/alembic/versions/script.py.mako +6 -0
  4. geo_activity_playground/core/activities.py +3 -1
  5. geo_activity_playground/core/datamodel.py +48 -22
  6. geo_activity_playground/core/enrichment.py +4 -2
  7. geo_activity_playground/core/meta_search.py +78 -34
  8. geo_activity_playground/core/missing_values.py +4 -3
  9. geo_activity_playground/core/paths.py +2 -0
  10. geo_activity_playground/core/test_missing_values.py +5 -0
  11. geo_activity_playground/webui/app.py +38 -13
  12. geo_activity_playground/webui/blueprints/activity_blueprint.py +25 -15
  13. geo_activity_playground/webui/blueprints/entry_views.py +4 -1
  14. geo_activity_playground/webui/blueprints/photo_blueprint.py +198 -0
  15. geo_activity_playground/webui/blueprints/upload_blueprint.py +11 -0
  16. geo_activity_playground/webui/search_util.py +23 -7
  17. geo_activity_playground/webui/templates/activity/show.html.j2 +46 -11
  18. geo_activity_playground/webui/templates/eddington/distance.html.j2 +1 -2
  19. geo_activity_playground/webui/templates/eddington/elevation_gain.html.j2 +1 -2
  20. geo_activity_playground/webui/templates/elevation_eddington/index.html.j2 +18 -15
  21. geo_activity_playground/webui/templates/heatmap/index.html.j2 +1 -2
  22. geo_activity_playground/webui/templates/page.html.j2 +8 -0
  23. geo_activity_playground/webui/templates/photo/map.html.j2 +45 -0
  24. geo_activity_playground/webui/templates/photo/new.html.j2 +13 -0
  25. geo_activity_playground/webui/templates/search/index.html.j2 +6 -3
  26. geo_activity_playground/webui/templates/search_form.html.j2 +47 -22
  27. geo_activity_playground/webui/templates/summary/index.html.j2 +12 -10
  28. {geo_activity_playground-0.42.0.dist-info → geo_activity_playground-0.43.1.dist-info}/METADATA +2 -1
  29. {geo_activity_playground-0.42.0.dist-info → geo_activity_playground-0.43.1.dist-info}/RECORD +32 -28
  30. {geo_activity_playground-0.42.0.dist-info → geo_activity_playground-0.43.1.dist-info}/LICENSE +0 -0
  31. {geo_activity_playground-0.42.0.dist-info → geo_activity_playground-0.43.1.dist-info}/WHEEL +0 -0
  32. {geo_activity_playground-0.42.0.dist-info → geo_activity_playground-0.43.1.dist-info}/entry_points.txt +0 -0

geo_activity_playground/alembic/script.py.mako
@@ -1,10 +1,4 @@
-"""${message}
 
-Revision ID: ${up_revision}
-Revises: ${down_revision | comma,n}
-Create Date: ${create_date}
-
-"""
 from typing import Sequence, Union
 
 from alembic import op

geo_activity_playground/alembic/versions/da2cba03b71d_add_photos.py
@@ -0,0 +1,40 @@
+from typing import Sequence
+from typing import Union
+
+import sqlalchemy as sa
+from alembic import op
+
+
+# revision identifiers, used by Alembic.
+revision: str = "da2cba03b71d"
+down_revision: Union[str, None] = "38882503dc7c"
+branch_labels: Union[str, Sequence[str], None] = None
+depends_on: Union[str, Sequence[str], None] = None
+
+
+def upgrade() -> None:
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.create_table(
+        "photos",
+        sa.Column("id", sa.Integer(), nullable=False),
+        sa.Column("filename", sa.String(), nullable=False),
+        sa.Column("time", sa.DateTime(), nullable=False),
+        sa.Column("latitude", sa.Float(), nullable=False),
+        sa.Column("longitude", sa.Float(), nullable=False),
+        sa.Column("activity_id", sa.Integer(), nullable=False),
+        sa.ForeignKeyConstraint(["activity_id"], ["activities.id"], name="activity_id"),
+        sa.PrimaryKeyConstraint("id"),
+    )
+    with op.batch_alter_table("tags", schema=None) as batch_op:
+        batch_op.create_unique_constraint("tags_tag", ["tag"])
+
+    # ### end Alembic commands ###
+
+
+def downgrade() -> None:
+    # ### commands auto generated by Alembic - please adjust! ###
+    with op.batch_alter_table("tags", schema=None) as batch_op:
+        batch_op.drop_constraint("tags_tag", type_="unique")
+
+    op.drop_table("photos")
+    # ### end Alembic commands ###

geo_activity_playground/alembic/versions/script.py.mako
@@ -1,4 +1,10 @@
+"""${message}
 
+Revision ID: ${up_revision}
+Revises: ${down_revision | comma,n}
+Create Date: ${create_date}
+
+"""
 from typing import Sequence, Union
 
 from alembic import op

geo_activity_playground/core/activities.py
@@ -24,7 +24,9 @@ logger = logging.getLogger(__name__)
 
 class ActivityRepository:
     def __len__(self) -> int:
-        return len(self.get_activity_ids())
+        return DB.session.scalars(
+            sqlalchemy.select(sqlalchemy.func.count()).select_from(Activity)
+        ).one()
 
     def has_activity(self, activity_id: int) -> bool:
         return bool(

geo_activity_playground/core/datamodel.py
@@ -1,6 +1,7 @@
 import datetime
 import json
 import logging
+import pathlib
 from typing import Any
 from typing import Optional
 from typing import TypedDict
@@ -117,6 +118,10 @@ class Activity(DB.Model):
         secondary=activity_tag_association_table, back_populates="activities"
     )
 
+    photos: Mapped[list["Photo"]] = relationship(
+        back_populates="activity", cascade="all, delete-orphan"
+    )
+
     def __str__(self) -> str:
         return f"{self.start} {self.name}"
 
@@ -179,7 +184,7 @@ def get_or_make_tag(tag: str) -> Tag:
     return tag
 
 
-def query_activity_meta() -> pd.DataFrame:
+def query_activity_meta(clauses: list = []) -> pd.DataFrame:
     rows = DB.session.execute(
         sqlalchemy.select(
             Activity.id,
@@ -206,31 +211,33 @@
         )
         .join(Activity.equipment)
         .join(Activity.kind)
+        .where(*clauses)
         .order_by(Activity.start)
     ).all()
     df = pd.DataFrame(rows)
 
-    for old, new in [
-        ("elapsed_time", "average_speed_elapsed_kmh"),
-        ("moving_time", "average_speed_moving_kmh"),
-    ]:
-        df[new] = pd.NA
-        mask = df[old].dt.total_seconds() > 0
-        df.loc[mask, new] = df.loc[mask, "distance_km"] / (
-            df.loc[mask, old].dt.total_seconds() / 3_600
-        )
-
-    df["date"] = df["start"].dt.date
-    df["year"] = df["start"].dt.year
-    df["month"] = df["start"].dt.month
-    df["day"] = df["start"].dt.day
-    df["week"] = df["start"].dt.isocalendar().week
-    df["day_of_week"] = df["start"].dt.day_of_week
-    df["iso_year"] = df["start"].dt.isocalendar().year
-    df["hours"] = df["elapsed_time"].dt.total_seconds() / 3_600
-    df["hours_moving"] = df["moving_time"].dt.total_seconds() / 3_600
-
-    df.index = df["id"]
+    if len(df):
+        for old, new in [
+            ("elapsed_time", "average_speed_elapsed_kmh"),
+            ("moving_time", "average_speed_moving_kmh"),
+        ]:
+            df[new] = pd.NA
+            mask = df[old].dt.total_seconds() > 0
+            df.loc[mask, new] = df.loc[mask, "distance_km"] / (
+                df.loc[mask, old].dt.total_seconds() / 3_600
+            )
+
+        df["date"] = df["start"].dt.date
+        df["year"] = df["start"].dt.year
+        df["month"] = df["start"].dt.month
+        df["day"] = df["start"].dt.day
+        df["week"] = df["start"].dt.isocalendar().week
+        df["day_of_week"] = df["start"].dt.day_of_week
+        df["iso_year"] = df["start"].dt.isocalendar().year
+        df["hours"] = df["elapsed_time"].dt.total_seconds() / 3_600
+        df["hours_moving"] = df["moving_time"].dt.total_seconds() / 3_600
+
+        df.index = df["id"]
 
     return df
 
@@ -363,3 +370,22 @@ class PlotSpec(DB.Model):
         return json.dumps(
             {key: getattr(self, key) for key in self.FIELDS if getattr(self, key)}
         )
+
+
+class Photo(DB.Model):
+    __tablename__ = "photos"
+    id: Mapped[int] = mapped_column(primary_key=True)
+
+    filename: Mapped[str] = mapped_column(sa.String, nullable=False)
+    time: Mapped[datetime.datetime] = mapped_column(sa.DateTime, nullable=False)
+    latitude: Mapped[float] = mapped_column(sa.Float, nullable=False)
+    longitude: Mapped[float] = mapped_column(sa.Float, nullable=False)
+
+    activity_id: Mapped[int] = mapped_column(
+        ForeignKey("activities.id", name="activity_id"), nullable=False
+    )
+    activity: Mapped["Activity"] = relationship(back_populates="photos")
+
+    @property
+    def path(self) -> pathlib.Path:
+        return pathlib.Path(self.filename)
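
The new Photo model hangs photos off an activity through a bidirectional relationship with delete-orphan cascade, and Photo.path exposes the stored filename as a pathlib.Path. A minimal usage sketch follows (values are invented and an active database session from the running web app is assumed):

# Hypothetical sketch only: field values are invented and a live DB.session
# from the running web app is assumed.
import datetime

from geo_activity_playground.core.datamodel import DB, Activity, Photo

activity = DB.session.get(Activity, 1)
photo = Photo(
    filename="2024-06-01-summit.jpg",
    time=datetime.datetime(2024, 6, 1, 12, 30),
    latitude=47.42,
    longitude=10.98,
    activity=activity,  # fills activity_id through the relationship
)
DB.session.add(photo)
DB.session.commit()

print(photo.path)            # pathlib.Path("2024-06-01-summit.jpg")
print(len(activity.photos))  # photos are reachable from the Activity side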

geo_activity_playground/core/enrichment.py
@@ -108,8 +108,10 @@ def populate_database_from_extracted(config: Config) -> None:
 def update_via_time_series(
     activity: Activity, time_series: pd.DataFrame
 ) -> ActivityMeta:
-    activity.start = time_series["time"].iloc[0]
-    activity.elapsed_time = time_series["time"].iloc[-1] - time_series["time"].iloc[0]
+    activity.start = some(time_series["time"].iloc[0])
+    activity.elapsed_time = some(
+        time_series["time"].iloc[-1] - time_series["time"].iloc[0]
+    )
     activity.distance_km = (
         time_series["distance_km"].iloc[-1] - time_series["distance_km"].iloc[0]
     )

geo_activity_playground/core/meta_search.py
@@ -2,21 +2,33 @@ import dataclasses
 import datetime
 import re
 import urllib.parse
+from collections.abc import Sequence
 from typing import Optional
 
 import dateutil.parser
 import numpy as np
 import pandas as pd
+import sqlalchemy
+
+from .datamodel import Activity
+from .datamodel import DB
+from .datamodel import Equipment
+from .datamodel import Kind
+from .datamodel import query_activity_meta
+from .datamodel import Tag
 
 
 @dataclasses.dataclass
 class SearchQuery:
-    equipment: list[str] = dataclasses.field(default_factory=list)
-    kind: list[str] = dataclasses.field(default_factory=list)
+    equipment: list[Equipment] = dataclasses.field(default_factory=list)
+    kind: list[Kind] = dataclasses.field(default_factory=list)
+    tag: list[Tag] = dataclasses.field(default_factory=list)
     name: Optional[str] = None
     name_case_sensitive: bool = False
     start_begin: Optional[datetime.date] = None
     start_end: Optional[datetime.date] = None
+    distance_km_min: Optional[float] = None
+    distance_km_max: Optional[float] = None
 
     def __str__(self) -> str:
         bits = []
@@ -25,10 +37,14 @@ class SearchQuery:
         if self.equipment:
             bits.append(
                 "equipment is "
-                + (" or ".join(f"“{equipment}”" for equipment in self.equipment))
+                + (" or ".join(f"“{equipment.name}”" for equipment in self.equipment))
             )
         if self.kind:
-            bits.append("kind is " + (" or ".join(f"“{kind}”" for kind in self.kind)))
+            bits.append(
+                "kind is " + (" or ".join(f"“{kind.name}”" for kind in self.kind))
+            )
+        if self.tag:
+            bits.append("tag is " + (" or ".join(f"“{tag.tag}”" for tag in self.tag)))
         if self.start_begin:
             bits.append(f"after “{self.start_begin.isoformat()}”")
         if self.start_end:
@@ -43,27 +59,38 @@
             or self.name
             or self.start_begin
             or self.start_end
+            or self.tag
+            or self.distance_km_min
+            or self.distance_km_max
         )
 
     def to_primitives(self) -> dict:
         return {
-            "equipment": self.equipment,
-            "kind": self.kind,
+            "equipment": [equipment.id for equipment in self.equipment],
+            "kind": [kind.id for kind in self.kind],
+            "tag": [tag.id for tag in self.tag],
             "name": self.name or "",
             "name_case_sensitive": self.name_case_sensitive,
             "start_begin": _format_optional_date(self.start_begin),
             "start_end": _format_optional_date(self.start_end),
+            "distance_km_min": self.distance_km_min,
+            "distance_km_max": self.distance_km_max,
         }
 
     @classmethod
     def from_primitives(cls, d: dict) -> "SearchQuery":
         return cls(
-            equipment=d.get("equipment", []),
-            kind=d.get("kind", []),
+            equipment=[
+                DB.session.get_one(Equipment, id) for id in d.get("equipment", [])
+            ],
+            kind=[DB.session.get_one(Kind, id) for id in d.get("kind", [])],
+            tag=[DB.session.get_one(Tag, id) for id in d.get("tag", [])],
             name=d.get("name", None),
             name_case_sensitive=d.get("name_case_sensitive", False),
             start_begin=_parse_date_or_none(d.get("start_begin", None)),
             start_end=_parse_date_or_none(d.get("start_end", None)),
+            distance_km_min=d.get("distance_km_min", None),
+            distance_km_max=d.get("distance_km_max", None),
         )
 
     def to_jinja(self) -> dict:
@@ -74,9 +101,11 @@ class SearchQuery:
     def to_url_str(self) -> str:
         variables = []
         for equipment in self.equipment:
-            variables.append(("equipment", equipment))
+            variables.append(("equipment", equipment.id))
         for kind in self.kind:
-            variables.append(("kind", kind))
+            variables.append(("kind", kind.id))
+        for tag in self.tag:
+            variables.append(("tag", tag.id))
         if self.name:
             variables.append(("name", self.name))
         if self.name_case_sensitive:
@@ -85,6 +114,10 @@
             variables.append(("start_begin", self.start_begin.isoformat()))
         if self.start_end:
             variables.append(("start_end", self.start_end.isoformat()))
+        if self.distance_km_min:
+            variables.append(("distance_km_min", self.distance_km_min))
+        if self.distance_km_max:
+            variables.append(("distance_km_max", self.distance_km_max))
 
         return "&".join(
             f"{key}={urllib.parse.quote_plus(value)}" for key, value in variables
@@ -94,36 +127,47 @@
 def apply_search_query(
     activity_meta: pd.DataFrame, search_query: SearchQuery
 ) -> pd.DataFrame:
-    mask = _make_mask(activity_meta.index, True)
+
+    filter_clauses = []
 
     if search_query.equipment:
-        mask &= _filter_column(activity_meta["equipment"], search_query.equipment)
+        filter_clauses.append(
+            sqlalchemy.or_(
+                *[
+                    Activity.equipment == equipment
+                    for equipment in search_query.equipment
+                ]
+            )
+        )
+
     if search_query.kind:
-        mask &= _filter_column(activity_meta["kind"], search_query.kind)
-    if search_query.name:
-        mask &= pd.Series(
-            [
-                bool(
-                    re.search(
-                        search_query.name,
-                        activity_name,
-                        0 if search_query.name_case_sensitive else re.IGNORECASE,
-                    )
-                )
-                for activity_name in activity_meta["name"]
-            ],
-            index=activity_meta.index,
+        filter_clauses.append(
+            sqlalchemy.or_(*[Activity.kind == kind for kind in search_query.kind])
+        )
+
+    if search_query.tag:
+        filter_clauses.append(
+            sqlalchemy.or_(*[Activity.tags.contains(tag) for tag in search_query.tag])
         )
-    if search_query.start_begin is not None:
-        start_begin = datetime.datetime.combine(
-            search_query.start_begin, datetime.time.min
+
+    if search_query.name:
+        filter_clauses.append(
+            Activity.name.contains(search_query.name)
+            if search_query.name_case_sensitive
+            else Activity.name.icontains(search_query.name)
         )
-        mask &= start_begin <= activity_meta["start"]
-    if search_query.start_end is not None:
-        start_end = datetime.datetime.combine(search_query.start_end, datetime.time.max)
-        mask &= activity_meta["start"] <= start_end
 
-    return activity_meta.loc[mask]
+    if search_query.start_begin:
+        filter_clauses.append(Activity.start <= search_query.start_begin)
+    if search_query.start_end:
+        filter_clauses.append(Activity.start < search_query.start_end)
+
+    if search_query.distance_km_min:
+        filter_clauses.append(Activity.distance_km >= search_query.distance_km_min)
+    if search_query.distance_km_max:
+        filter_clauses.append(Activity.distance_km <= search_query.distance_km_max)
+
+    return query_activity_meta(filter_clauses)
 
 
 def _format_optional_date(date: Optional[datetime.date]) -> str:
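
With this rework, SearchQuery holds ORM rows (Equipment, Kind, Tag) instead of plain strings, and apply_search_query no longer masks a pandas DataFrame: it turns the filters into SQLAlchemy clauses and hands them to query_activity_meta, so filtering happens in SQL. A hedged sketch of the new flow (assumes an application context with a database session; the ids are invented):

# Hedged sketch: requires the app's database session; the ids are invented.
import datetime

from geo_activity_playground.core.datamodel import DB, Kind, Tag, query_activity_meta
from geo_activity_playground.core.meta_search import SearchQuery, apply_search_query

query = SearchQuery(
    kind=[DB.session.get_one(Kind, 1)],
    tag=[DB.session.get_one(Tag, 3)],
    start_begin=datetime.date(2024, 1, 1),
    distance_km_min=20.0,
)

# The DataFrame argument is kept for the existing call sites, but the result
# now comes from query_activity_meta(filter_clauses) rather than from masking.
meta = query_activity_meta()
matching = apply_search_query(meta, query)
print(len(matching), "activities match")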

geo_activity_playground/core/missing_values.py
@@ -1,13 +1,14 @@
+from typing import Any
 from typing import Optional
-from typing import Union
 
 import numpy as np
+import pandas as pd
 
 
-def some(value) -> Optional[Union[float, int]]:
+def some(value: Any) -> Optional[Any]:
     if value is None:
         return None
-    elif np.isnan(value):
+    elif pd.isna(value):
         return None
     else:
         return value
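
The some() helper now normalises every pandas missing-value marker, not just float NaN; in particular pd.NaT, which the previous np.isnan-based check could not handle, comes back as None (see the new test_nat below). In short:

import numpy as np
import pandas as pd

from geo_activity_playground.core.missing_values import some

print(some(1))       # 1
print(some(np.nan))  # None
print(some(pd.NaT))  # None - the case the switch to pd.isna addresses
print(some(None))    # None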

geo_activity_playground/core/paths.py
@@ -53,6 +53,7 @@ _strava_last_activity_date_path = _cache_dir / "strava-last-activity-date.json"
 _new_config_file = pathlib.Path("config.json")
 _activity_meta_override_dir = pathlib.Path("Metadata Override")
 _time_series_dir = pathlib.Path("Time Series")
+_photos_dir = pathlib.Path("Photos")
 
 
 cache_dir = dir_wrapper(_cache_dir)
@@ -65,6 +66,7 @@ tiles_per_time_series = dir_wrapper(_tiles_per_time_series)
 strava_api_dir = dir_wrapper(_strava_api_dir)
 activity_meta_override_dir = dir_wrapper(_activity_meta_override_dir)
 time_series_dir = dir_wrapper(_time_series_dir)
+PHOTOS_DIR = dir_wrapper(_photos_dir)
 
 activities_file = file_wrapper(_activities_file)
 strava_dynamic_config_path = file_wrapper(_strava_dynamic_config_path)

geo_activity_playground/core/test_missing_values.py
@@ -1,4 +1,5 @@
 import numpy as np
+import pandas as pd
 
 from .missing_values import some
 
@@ -17,3 +18,7 @@ def test_float() -> None:
 
 def test_integer() -> None:
     assert some(1) == 1
+
+
+def test_nat() -> None:
+    assert some(pd.NaT) == None

geo_activity_playground/webui/app.py
@@ -8,6 +8,8 @@ import secrets
 import shutil
 import urllib.parse
 
+import pandas as pd
+import sqlalchemy
 from flask import Flask
 from flask import request
 from flask_alembic import Alembic
@@ -17,6 +19,10 @@ from ..core.config import ConfigAccessor
 from ..core.config import import_old_config
 from ..core.config import import_old_strava_config
 from ..core.datamodel import DB
+from ..core.datamodel import Equipment
+from ..core.datamodel import Kind
+from ..core.datamodel import Photo
+from ..core.datamodel import Tag
 from ..core.heart_rate import HeartRateZoneComputer
 from ..core.raster_map import GrayscaleImageTransform
 from ..core.raster_map import IdentityImageTransform
@@ -33,6 +39,7 @@ from .blueprints.entry_views import register_entry_views
 from .blueprints.equipment_blueprint import make_equipment_blueprint
 from .blueprints.explorer_blueprint import make_explorer_blueprint
 from .blueprints.heatmap_blueprint import make_heatmap_blueprint
+from .blueprints.photo_blueprint import make_photo_blueprint
 from .blueprints.plot_builder_blueprint import make_plot_builder_blueprint
 from .blueprints.search_blueprint import make_search_blueprint
 from .blueprints.settings_blueprint import make_settings_blueprint
@@ -99,15 +106,25 @@ def web_ui_main(
 
     @app.template_filter()
     def dt(value: datetime.datetime):
-        return value.strftime("%Y-%m-%d %H:%M")
+        if pd.isna(value):
+            return "—"
+        else:
+            return value.strftime("%Y-%m-%d %H:%M")
 
     @app.template_filter()
     def td(v: datetime.timedelta):
-        seconds = v.total_seconds()
-        h = int(seconds // 3600)
-        m = int(seconds // 60 % 60)
-        s = int(seconds // 1 % 60)
-        return f"{h}:{m:02d}:{s:02d}"
+        if pd.isna(v):
+            return "—"
+        else:
+            seconds = v.total_seconds()
+            h = int(seconds // 3600)
+            m = int(seconds // 60 % 60)
+            s = int(seconds // 1 % 60)
+            return f"{h}:{m:02d}:{s:02d}"
+
+    @app.template_filter()
+    def isna(value):
+        return pd.isna(value)
 
     authenticator = Authenticator(config_accessor())
     search_query_history = SearchQueryHistory(config_accessor, authenticator)
@@ -146,6 +163,7 @@ def web_ui_main(
         "/heatmap": make_heatmap_blueprint(
             repository, tile_visit_accessor, config_accessor(), search_query_history
         ),
+        "/photo": make_photo_blueprint(config_accessor, authenticator, flasher),
         "/plot-builder": make_plot_builder_blueprint(
             repository, flasher, authenticator
         ),
@@ -178,15 +196,22 @@ def web_ui_main(
             "version": _try_get_version(),
             "num_activities": len(repository),
             "map_tile_attribution": config_accessor().map_tile_attribution,
-            "search_query_favorites": search_query_history.prepare_favorites(),
-            "search_query_last": search_query_history.prepare_last(),
+            # "search_query_favorites": search_query_history.prepare_favorites(),
+            # "search_query_last": search_query_history.prepare_last(),
             "request_url": urllib.parse.quote_plus(request.url),
         }
-        if len(repository):
-            variables["equipments_avail"] = sorted(
-                repository.meta["equipment"].unique()
-            )
-            variables["kinds_avail"] = sorted(repository.meta["kind"].unique())
+        variables["equipments_avail"] = DB.session.scalars(
+            sqlalchemy.select(Equipment).order_by(Equipment.name)
+        ).all()
+        variables["kinds_avail"] = DB.session.scalars(
+            sqlalchemy.select(Kind).order_by(Kind.name)
+        ).all()
+        variables["tags_avail"] = DB.session.scalars(
+            sqlalchemy.select(Tag).order_by(Tag.tag)
+        ).all()
+        variables["photo_count"] = DB.session.scalar(
+            sqlalchemy.select(sqlalchemy.func.count()).select_from(Photo)
+        )
         return variables
 
     app.run(host=host, port=port)
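
The dt and td template filters now tolerate missing values and render an em dash instead of raising, and the new isna filter exposes pd.isna to the templates. A standalone re-implementation to illustrate the behaviour (not the app's actual registration code, which goes through @app.template_filter()):

# Standalone illustration of the filter behaviour added above.
import datetime

import pandas as pd


def dt(value):
    return "—" if pd.isna(value) else value.strftime("%Y-%m-%d %H:%M")


def td(v):
    if pd.isna(v):
        return "—"
    seconds = v.total_seconds()
    return f"{int(seconds // 3600)}:{int(seconds // 60 % 60):02d}:{int(seconds % 60):02d}"


print(dt(pd.NaT))                                  # —
print(dt(datetime.datetime(2025, 1, 1, 8, 5)))     # 2025-01-01 08:05
print(td(datetime.timedelta(hours=1, minutes=5)))  # 1:05:00
print(td(pd.NaT))                                  # —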

geo_activity_playground/webui/blueprints/activity_blueprint.py
@@ -111,11 +111,14 @@ def make_activity_blueprint(
                 == activity.id
             )
             for zoom in sorted(config.explorer_zoom_levels)
+            if not tile_visit_accessor.tile_state["tile_history"][zoom].empty
         }
 
         new_tiles_geojson = {}
         new_tiles_per_zoom = {}
         for zoom in sorted(config.explorer_zoom_levels):
+            if tile_visit_accessor.tile_state["tile_history"][zoom].empty:
+                continue
             new_tiles = tile_visit_accessor.tile_state["tile_history"][zoom].loc[
                 tile_visit_accessor.tile_state["tile_history"][zoom]["activity_id"]
                 == activity.id
@@ -141,25 +144,32 @@
 
         context = {
             "activity": activity,
-            "line_json": line_json,
-            "distance_time_plot": distance_time_plot(time_series),
-            "color_line_geojson": make_geojson_color_line(
-                time_series, line_color_column
-            ),
-            "speed_time_plot": speed_time_plot(time_series),
-            "speed_distribution_plot": speed_distribution_plot(time_series),
+            "color_line_geojson": line_json,
             "similar_activites": similar_activities,
-            "line_color_bar": make_color_bar(
-                time_series[line_color_column],
-                line_color_columns_avail[line_color_column].format,
-            ),
-            "date": activity.start.date(),
-            "time": activity.start.time(),
             "new_tiles": new_tiles_per_zoom,
             "new_tiles_geojson": new_tiles_geojson,
-            "line_color_column": line_color_column,
-            "line_color_columns_avail": line_color_columns_avail,
         }
+
+        if not pd.isna(time_series["time"]).all():
+            context.update(
+                {
+                    "distance_time_plot": distance_time_plot(time_series),
+                    "color_line_geojson": make_geojson_color_line(
+                        time_series, line_color_column
+                    ),
+                    "speed_time_plot": speed_time_plot(time_series),
+                    "speed_distribution_plot": speed_distribution_plot(time_series),
+                    "line_color_bar": make_color_bar(
+                        time_series[line_color_column],
+                        line_color_columns_avail[line_color_column].format,
+                    ),
+                    "date": activity.start.date(),
+                    "time": activity.start.time(),
+                    "line_color_column": line_color_column,
+                    "line_color_columns_avail": line_color_columns_avail,
+                }
+            )
+
         if (
             heart_zones := _extract_heart_rate_zones(
                 time_series, heart_rate_zone_computer

geo_activity_playground/webui/blueprints/entry_views.py
@@ -37,7 +37,10 @@ def register_entry_views(
 
     context["latest_activities"] = collections.defaultdict(list)
     for activity in DB.session.scalars(
-        sqlalchemy.select(Activity).order_by(Activity.start.desc()).limit(100)
+        sqlalchemy.select(Activity)
+        .where(Activity.start.is_not(None))
+        .order_by(Activity.start.desc())
+        .limit(100)
     ):
         context["latest_activities"][activity.start.date()].append(
             {