geo-activity-playground 0.17.4__py3-none-any.whl → 0.18.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -7,6 +7,7 @@ import sys
  import coloredlogs

  from .importers.strava_checkout import convert_strava_checkout
+ from .importers.strava_checkout import import_from_strava_checkout
  from geo_activity_playground.core.activities import ActivityRepository
  from geo_activity_playground.core.activities import embellish_time_series
  from geo_activity_playground.core.cache_migrations import apply_cache_migrations
@@ -99,17 +100,16 @@ def make_activity_repository(basedir: pathlib.Path) -> ActivityRepository:
      os.chdir(basedir)
      apply_cache_migrations()
      config = get_config()
-     if pathlib.Path("Activities").exists():
-         import_from_directory()
-     elif config:
-         if "strava" in config:
-             import_from_strava_api()
-         else:
-             logger.error(
-                 "You need to either have (1) an “Activities” directory with GPX/FIT/TCX/KML files in there or (2) a “config.toml” with information for the Strava API (see https://martin-ueding.github.io/geo-activity-playground/getting-started/using-strava-api/)."
-             )
-             sys.exit(1)
+
      repository = ActivityRepository()
+
+     if pathlib.Path("Activities").exists():
+         import_from_directory(repository)
+     if pathlib.Path("Strava Export").exists():
+         import_from_strava_checkout(repository)
+     if "strava" in config:
+         import_from_strava_api(repository)
+
      embellish_time_series(repository)
      compute_tile_visits(repository)
      compute_tile_evolution()
@@ -33,14 +33,52 @@ class ActivityMeta(TypedDict):
      start: datetime.datetime


+ activity_path = pathlib.Path("Cache/activities.parquet")
+
+
  class ActivityRepository:
      def __init__(self) -> None:
-         self.meta = pd.read_parquet("Cache/activities.parquet")
-         self.meta.index = self.meta["id"]
-         self.meta.index.name = "index"
-         self.meta["distance"] /= 1000
-         self.meta["kind"].fillna("Unknown", inplace=True)
-         self.meta["equipment"].fillna("Unknown", inplace=True)
+         if activity_path.exists():
+             self.meta = pd.read_parquet(activity_path)
+         else:
+             self.meta = pd.DataFrame()
+
+         self._loose_activities: list[ActivityMeta] = []
+
+     def add_activity(self, activity_meta: ActivityMeta) -> None:
+         self._loose_activities.append(activity_meta)
+
+     def commit(self) -> None:
+         if self._loose_activities:
+             logger.debug(
+                 f"Adding {len(self._loose_activities)} activities to the repository …"
+             )
+             new_df = pd.DataFrame(self._loose_activities)
+             self.meta = pd.concat([self.meta, new_df])
+             assert pd.api.types.is_dtype_equal(
+                 self.meta["start"].dtype, "datetime64[ns, UTC]"
+             ), self.meta["start"].dtype
+             self.meta.index = self.meta["id"]
+             self.meta.index.name = "index"
+             self.meta.sort_values("start", inplace=True)
+             activity_path.parent.mkdir(exist_ok=True, parents=True)
+             self.meta.to_parquet(activity_path)
+             self._loose_activities = []
+
+     def has_activity(self, activity_id: int) -> bool:
+         if len(self.meta):
+             if activity_id in self.meta["id"]:
+                 return True
+
+         for activity_meta in self._loose_activities:
+             if activity_meta["id"] == activity_id:
+                 return True
+
+         return False
+
+     def last_activity_date(self) -> Optional[datetime.datetime]:
+         if len(self.meta):
+             return self.meta.iloc[-1]["start"]

      @property
      def activity_ids(self) -> set[int]:
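
Note: the hunk above (apparently geo_activity_playground/core/activities.py) makes ActivityRepository the single write path for activity metadata. Importers stage rows with add_activity() and persist them with commit(), which concatenates the staged rows, asserts a UTC start dtype, re-indexes by id, sorts by start and writes Cache/activities.parquet. A minimal sketch of that staging pattern, run from a playground directory with the package installed; every metadata value below is invented for illustration, only the method names come from the diff:

    import datetime

    from geo_activity_playground.core.activities import ActivityRepository

    # Reads Cache/activities.parquet if it exists, otherwise starts empty.
    repository = ActivityRepository()

    # Stage one activity. "start" must be timezone-aware so the
    # datetime64[ns, UTC] assertion in commit() holds.
    repository.add_activity(
        {
            "id": 123,
            "name": "Morning ride",
            "kind": "Ride",
            "equipment": "Gravel bike",
            "commute": False,
            "distance": 42_000.0,  # stored unit (metres, matching the /1000 display conversions further down)
            "start": datetime.datetime(2023, 11, 1, 8, 0, tzinfo=datetime.timezone.utc),
        }
    )

    print(repository.has_activity(123))     # True even before the commit
    repository.commit()                      # writes Cache/activities.parquet
    print(repository.last_activity_date())  # the "start" of the newest activity
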
@@ -64,43 +102,6 @@ class ActivityRepository:
              logger.error(f"Error while reading {path}, deleting cache file …")
              path.unlink(missing_ok=True)
              raise
-         changed = False
-         if pd.api.types.is_dtype_equal(df["time"].dtype, "int64"):
-             start = self.get_activity_by_id(id)["start"]
-             time = df["time"]
-             del df["time"]
-             df["time"] = [start + datetime.timedelta(seconds=t) for t in time]
-             changed = True
-         assert pd.api.types.is_dtype_equal(df["time"].dtype, "datetime64[ns, UTC]")
-
-         if "distance" in df.columns:
-             if "distance/km" not in df.columns:
-                 df["distance/km"] = df["distance"] / 1000
-                 changed = True
-
-             if "speed" not in df.columns:
-                 df["speed"] = (
-                     df["distance"].diff()
-                     / (df["time"].diff().dt.total_seconds() + 1e-3)
-                     * 3.6
-                 )
-                 changed = True
-
-         if "latitude" in df.columns and "x" not in df.columns:
-             x, y = compute_tile_float(df["latitude"], df["longitude"], 0)
-             df["x"] = x
-             df["y"] = y
-             changed = True
-
-         if "segment_id" not in df.columns:
-             time_diff = (df["time"] - df["time"].shift(1)).dt.total_seconds()
-             jump_indices = time_diff >= 30
-             df["segment_id"] = np.cumsum(jump_indices)
-             changed = True
-
-         if changed:
-             logger.info(f"Updating activity time series for {id = } …")
-             df.to_parquet(path)

          return df

@@ -169,29 +170,25 @@ def make_geojson_from_time_series(time_series: pd.DataFrame) -> str:

  def make_geojson_color_line(time_series: pd.DataFrame) -> str:
      cmap = matplotlib.colormaps["viridis"]
-     return geojson.dumps(
-         geojson.FeatureCollection(
-             features=[
-                 geojson.Feature(
-                     geometry=geojson.LineString(
-                         coordinates=[
-                             [row["longitude"], row["latitude"]],
-                             [next["longitude"], next["latitude"]],
-                         ]
-                     ),
-                     properties={
-                         "speed": next["speed"],
-                         "color": matplotlib.colors.to_hex(
-                             cmap(min(next["speed"] / 35, 1.0))
-                         ),
-                     },
-                 )
-                 for (_, row), (_, next) in zip(
-                     time_series.iterrows(), time_series.iloc[1:].iterrows()
-                 )
-             ]
+     features = [
+         geojson.Feature(
+             geometry=geojson.LineString(
+                 coordinates=[
+                     [row["longitude"], row["latitude"]],
+                     [next["longitude"], next["latitude"]],
+                 ]
+             ),
+             properties={
+                 "speed": next["speed"] if np.isfinite(next["speed"]) else 0.0,
+                 "color": matplotlib.colors.to_hex(cmap(min(next["speed"] / 35, 1.0))),
+             },
          )
-     )
+         for (_, row), (_, next) in zip(
+             time_series.iterrows(), time_series.iloc[1:].iterrows()
+         )
+     ]
+     feature_collection = geojson.FeatureCollection(features)
+     return geojson.dumps(feature_collection)


  def extract_heart_rate_zones(time_series: pd.DataFrame) -> Optional[pd.DataFrame]:
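
Note: the substantive change in the hunk above is the np.isfinite guard on the speed property; the rest restructures the same comprehension into a named list. My reading (an assumption, not stated in the diff) is that diff()-derived speed values can come out NaN or otherwise non-finite where the underlying samples have gaps, and Python's json machinery serialises NaN as a bare NaN token, which is not valid JSON:

    import json
    import numpy as np

    speed = float("nan")  # e.g. a sample of a diff()-derived speed column with a data gap

    print(json.dumps({"speed": speed}))                                  # {"speed": NaN}, not valid JSON
    print(json.dumps({"speed": speed if np.isfinite(speed) else 0.0}))   # {"speed": 0.0}
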
@@ -175,9 +175,9 @@ def read_fit_activity(path: pathlib.Path, open) -> tuple[ActivityMeta, pd.DataFr
                  if "wkt_name" in fields:
                      metadata["name"] = values["wkt_name"]
                  if "sport" in fields:
-                     metadata["kind"] = values["sport"]
+                     metadata["kind"] = str(values["sport"])
                      if "sub_sport" in values:
-                         metadata["kind"] += " " + values["sub_sport"]
+                         metadata["kind"] += " " + str(values["sub_sport"])

      return metadata, pd.DataFrame(rows)

@@ -64,6 +64,7 @@ def apply_cache_migrations() -> None:
          delete_tile_visits,
          delete_heatmap_cache,
          delete_activity_metadata,
+         delete_activity_metadata,
      ]

      for migration in migrations[cache_status["num_applied_migrations"] :]:
@@ -8,6 +8,7 @@ import pandas as pd
  from tqdm import tqdm

  from geo_activity_playground.core.activities import ActivityMeta
+ from geo_activity_playground.core.activities import ActivityRepository
  from geo_activity_playground.core.activity_parsers import ActivityParseError
  from geo_activity_playground.core.activity_parsers import read_activity
  from geo_activity_playground.core.tasks import WorkTracker
@@ -15,26 +16,19 @@ from geo_activity_playground.core.tasks import WorkTracker
  logger = logging.getLogger(__name__)


- def import_from_directory() -> None:
-     meta_file = pathlib.Path("Cache") / "activities.parquet"
-     if meta_file.exists():
-         meta = pd.read_parquet(meta_file)
-     else:
-         meta = None
-
+ def import_from_directory(repository: ActivityRepository) -> None:
      paths_with_errors = []
      work_tracker = WorkTracker("parse-activity-files")

      activity_paths = {
          int(hashlib.sha3_224(str(path).encode()).hexdigest(), 16) % 2**62: path
          for path in pathlib.Path("Activities").rglob("*.*")
-         if path.is_file() and path.suffixes
+         if path.is_file() and path.suffixes and not path.stem.startswith(".")
      }
      activities_ids_to_parse = work_tracker.filter(activity_paths.keys())

      activity_stream_dir = pathlib.Path("Cache/Activity Timeseries")
      activity_stream_dir.mkdir(exist_ok=True, parents=True)
-     new_rows: list[dict] = []
      for activity_id in tqdm(activities_ids_to_parse, desc="Parse activity files"):
          path = activity_paths[activity_id]
          try:
@@ -62,6 +56,8 @@ def import_from_directory() -> None:
              # https://stackoverflow.com/a/74718395/653152
              name=path.name.removesuffix("".join(path.suffixes)),
              path=str(path),
+             kind="Unknown",
+             equipment="Unknown",
          )
          if len(path.parts) >= 3 and path.parts[1] != "Commute":
              activity_meta["kind"] = path.parts[1]
@@ -69,7 +65,7 @@ def import_from_directory() -> None:
              activity_meta["equipment"] = path.parts[2]

          activity_meta.update(activity_meta_from_file)
-         new_rows.append(activity_meta)
+         repository.add_activity(activity_meta)

      if paths_with_errors:
          logger.warning(
@@ -78,18 +74,6 @@ def import_from_directory() -> None:
          for path, error in paths_with_errors:
              logger.error(f"{path}: {error}")

-     new_df = pd.DataFrame(new_rows)
-     merged = pd.concat([meta, new_df])
-
-     if len(merged) == 0:
-         activities_dir = pathlib.Path("Activities").resolve()
-         logger.error(
-             f"You seemingly want to use activity files as a data source, but you have not copied any GPX/FIT/TCX/KML files."
-             f"Please copy at least one such file into {activities_dir}."
-         )
-         sys.exit(1)
+     repository.commit()

-     merged.sort_values("start", inplace=True)
-     meta_file.parent.mkdir(exist_ok=True, parents=True)
-     merged.to_parquet(meta_file)
      work_tracker.close()
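
Note: one behavioural change hides in the comprehension filter above: paths whose stem starts with a dot are now skipped. The condition presumably targets hidden helper files (for example the ._foo.gpx resource forks that macOS drops next to copied files). The file names in this sketch are invented, and the real comprehension additionally requires path.is_file():

    import pathlib

    for name in ["track.gpx", "._track.gpx", ".hidden.fit"]:
        path = pathlib.Path("Activities") / name
        # Mirrors the added filter: keep only files with a suffix whose stem
        # does not start with a dot.
        accepted = bool(path.suffixes) and not path.stem.startswith(".")
        print(name, accepted)

    # track.gpx True
    # ._track.gpx False
    # .hidden.fit False
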
@@ -14,6 +14,7 @@ from stravalib.exc import ObjectNotFound
  from stravalib.exc import RateLimitExceeded
  from tqdm import tqdm

+ from geo_activity_playground.core.activities import ActivityRepository
  from geo_activity_playground.core.config import get_config


@@ -91,8 +92,8 @@ def round_to_next_quarter_hour(date: datetime.datetime) -> datetime.datetime:
      return next_quarter


- def import_from_strava_api() -> None:
-     while try_import_strava():
+ def import_from_strava_api(repository: ActivityRepository) -> None:
+     while try_import_strava(repository):
          now = datetime.datetime.now()
          next_quarter = round_to_next_quarter_hour(now)
          seconds_to_wait = (next_quarter - now).total_seconds() + 10
@@ -102,22 +103,17 @@
          time.sleep(seconds_to_wait)


- def try_import_strava() -> None:
-     meta_file = pathlib.Path("Cache") / "activities.parquet"
-     if meta_file.exists():
-         logger.info("Loading metadata file …")
-         meta = pd.read_parquet(meta_file)
-         get_after = meta.iloc[-1]["start"].isoformat().replace("+00:00", "Z")
-     else:
-         logger.info("Didn't find a metadata file.")
-         meta = None
-         get_after = "2000-01-01T00:00:00Z"
+ def try_import_strava(repository: ActivityRepository) -> None:
+     get_after = (
+         repository.last_activity_date().isoformat().replace("+00:00", "Z")
+         if repository.last_activity_date() is not None
+         else "2000-01-01T00:00:00Z"
+     )

      gear_names = {None: "None"}

      client = Client(access_token=get_current_access_token())

-     new_rows: list[dict] = []
      try:
          for activity in tqdm(
              client.get_activities(after=get_after), desc="Downloading Strava activities"
@@ -155,7 +151,7 @@ def try_import_strava() -> None:
              time_series.to_parquet(time_series_path)

              if len(time_series) > 0 and "latitude" in time_series.columns:
-                 new_rows.append(
+                 repository.add_activity(
                      {
                          "id": activity.id,
                          "commute": activity.commute,
@@ -172,11 +168,7 @@ def try_import_strava() -> None:
      except RateLimitExceeded:
          limit_exceeded = True

-     new_df = pd.DataFrame(new_rows)
-     merged: pd.DataFrame = pd.concat([meta, new_df])
-     merged.sort_values("start", inplace=True)
-     meta_file.parent.mkdir(exist_ok=True, parents=True)
-     merged.to_parquet(meta_file)
+     repository.commit()

      return limit_exceeded

@@ -1,11 +1,22 @@
+ import datetime
+ import logging
  import pathlib
  import shutil
+ import traceback

  import dateutil.parser
  import numpy as np
  import pandas as pd
  from tqdm import tqdm

+ from geo_activity_playground.core.activities import ActivityRepository
+ from geo_activity_playground.core.activity_parsers import ActivityParseError
+ from geo_activity_playground.core.activity_parsers import read_activity
+ from geo_activity_playground.core.tasks import WorkTracker
+
+
+ logger = logging.getLogger(__name__)
+

  def nan_as_none(elem):
      if isinstance(elem, float) and np.isnan(elem):
@@ -14,6 +25,65 @@ def nan_as_none(elem):
      return elem


+ def import_from_strava_checkout(repository: ActivityRepository) -> None:
+     checkout_path = pathlib.Path("Strava Export")
+     activities = pd.read_csv(checkout_path / "activities.csv")
+     activities.index = activities["Activity ID"]
+     work_tracker = WorkTracker("import-strava-checkout-activities")
+     activities_ids_to_parse = work_tracker.filter(activities["Activity ID"])
+
+     activity_stream_dir = pathlib.Path("Cache/Activity Timeseries")
+     activity_stream_dir.mkdir(exist_ok=True, parents=True)
+
+     for activity_id in tqdm(activities_ids_to_parse, desc="Import from Strava export"):
+         row = activities.loc[activity_id]
+         activity_file = checkout_path / row["Filename"]
+         table_activity_meta = {
+             "calories": row["Calories"],
+             "commute": row["Commute"] == "true",
+             "distance": row["Distance"],
+             "elapsed_time": datetime.timedelta(seconds=int(row["Elapsed Time"])),
+             "equipment": str(
+                 nan_as_none(row["Activity Gear"])
+                 or nan_as_none(row["Bike"])
+                 or nan_as_none(row["Gear"])
+                 or ""
+             ),
+             "kind": row["Activity Type"],
+             "id": activity_id,
+             "name": row["Activity Name"],
+             "path": str(activity_file),
+             "start": dateutil.parser.parse(row["Activity Date"]).astimezone(
+                 datetime.timezone.utc
+             ),
+         }
+
+         time_series_path = activity_stream_dir / f"{activity_id}.parquet"
+         if not time_series_path.exists():
+             try:
+                 file_activity_meta, time_series = read_activity(activity_file)
+             except ActivityParseError as e:
+                 logger.error(f"Error while parsing file {activity_file}:")
+                 traceback.print_exc()
+                 continue
+             except:
+                 logger.error(
+                     f"Encountered a problem with {activity_file=}, see details below."
+                 )
+                 raise
+
+             if not len(time_series):
+                 continue
+
+             time_series.to_parquet(time_series_path)
+
+         work_tracker.mark_done(activity_id)
+         repository.add_activity(table_activity_meta)
+
+     repository.commit()
+     work_tracker.close()
+
+
  def convert_strava_checkout(
      checkout_path: pathlib.Path, playground_path: pathlib.Path
  ) -> None:
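
Note: judging from the function added above, the new checkout importer expects the unzipped Strava account export to sit inside the playground as a directory literally named "Strava Export", with its activities.csv index and the per-activity files referenced by the "Filename" column; the columns it reads are "Activity ID", "Activity Date", "Activity Name", "Activity Type", "Activity Gear", "Bike", "Gear", "Filename", "Calories", "Commute", "Distance" and "Elapsed Time". A minimal driver sketch; the layout check is mine, only the import paths and the directory name come from the diff:

    import pathlib

    from geo_activity_playground.core.activities import ActivityRepository
    from geo_activity_playground.importers.strava_checkout import import_from_strava_checkout

    # Run from the playground root; the importer hard-codes the directory name.
    assert (pathlib.Path("Strava Export") / "activities.csv").exists()

    repository = ActivityRepository()
    import_from_strava_checkout(repository)  # parses files, stages rows, commits the parquet file
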
@@ -19,7 +19,7 @@ class CalendarController:
          meta["month"] = meta["start"].dt.month

          monthly_distance = meta.groupby(["year", "month"]).apply(
-             lambda group: sum(group["distance"])
+             lambda group: sum(group["distance"]) / 1000
          )
          monthly_distance.name = "total_distance"
          monthly_pivot = (
@@ -28,9 +28,19 @@ class CalendarController:
              .fillna(0.0)
          )

+         yearly_distance = meta.groupby(["year"]).apply(
+             lambda group: sum(group["distance"]) / 1000
+         )
+         yearly_distance.name = "total_distance"
+         yearly_distances = {
+             row["year"]: row["total_distance"]
+             for index, row in yearly_distance.reset_index().iterrows()
+         }
+
          return {
              "num_activities": len(self._repository.meta),
              "monthly_distances": monthly_pivot,
+             "yearly_distances": yearly_distances,
          }

      @functools.cache
@@ -19,7 +19,7 @@ class EquipmentController:
                  lambda group: pd.DataFrame(
                      {
                          "time": group["start"],
-                         "total_distance": group["distance"].cumsum(),
+                         "total_distance": group["distance"].cumsum() / 1000,
                      }
                  )
              )
@@ -50,7 +50,7 @@ class EquipmentController:
              .apply(
                  lambda group: pd.DataFrame(
                      {
-                         "total_distance": group["distance"].sum(),
+                         "total_distance": group["distance"].sum() / 1000,
                          "first_use": group["start"].iloc[0],
                          "last_use": group["start"].iloc[-1],
                      },
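
Note: the calendar and equipment controller hunks above, the activity template hunk below and the earlier removal of self.meta["distance"] /= 1000 from ActivityRepository.__init__ all follow one convention: distances stay in their stored unit (metres, judging by the kilometre conversions) and are divided by 1000 only at presentation time. A small sketch of that convention with an invented frame:

    import pandas as pd

    # Invented sample; "distance" is kept in metres, as the repository now stores it.
    meta = pd.DataFrame(
        {
            "year": [2023, 2023, 2024],
            "distance": [12_500.0, 30_000.0, 8_000.0],
        }
    )

    # Mirrors the "/ 1000" added in the controllers: convert to kilometres
    # only when aggregating for display.
    yearly_km = meta.groupby("year")["distance"].sum() / 1000
    print(yearly_km.to_dict())  # {2023: 42.5, 2024: 8.0}
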
@@ -17,7 +17,7 @@
  <dt>Commute</dt>
  <dd>{{ activity.commute }}</dd>
  <dt>Distance</dt>
- <dd>{{ activity.distance|round(1) }} km</dd>
+ <dd>{{ (activity.distance / 1000)|round(1) }} km</dd>
  <dt>Elapsed time</dt>
  <dd>{{ activity.elapsed_time }}</dd>
  <dt>Start time</dt>
@@ -28,6 +28,8 @@
  <dd>{{ activity.equipment }}</dd>
  <dt>ID</dt>
  <dd>{{ activity.id }}</dd>
+ <dt>Source path</dt>
+ <dd>{{ activity.path }}</dd>
  </dl>
  </div>
  <div class="col-8">
@@ -16,6 +16,7 @@
  {% for i in range(1, 13) %}
  <th style="text-align: right;">{{ i }}</th>
  {% endfor %}
+ <th style="text-align: right;">Total</th>
  </tr>
  </thead>
  <tbody>
@@ -32,6 +33,7 @@
  {% endif %}
  </td>
  {% endfor %}
+ <td align="right">{{ yearly_distances[year]|int() }} km</td>
  </tr>
  {% endfor %}
  </tbody>
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: geo-activity-playground
- Version: 0.17.4
+ Version: 0.18.0
  Summary: Analysis of geo data activities like rides, runs or hikes.
  License: MIT
  Author: Martin Ueding
@@ -1,9 +1,9 @@
  geo_activity_playground/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- geo_activity_playground/__main__.py,sha256=kEMt2o36XKGegtoDl1hZouqbml0KD_CFKCaYl1SUvTM,4199
+ geo_activity_playground/__main__.py,sha256=f1OunKE5hK-Nb319tiAlgtFcn2vLcVJPxorPYSM8Mjs,4029
  geo_activity_playground/core/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- geo_activity_playground/core/activities.py,sha256=gbNNKN2yrYSUoTzxIWW66Fvs0HpTbvYInMqBdBo1hUg,7925
- geo_activity_playground/core/activity_parsers.py,sha256=vt2UiCHgHT9Kavcl3gjTtV6Sp-CDZSFiUpVj1Kyb7kk,11358
- geo_activity_playground/core/cache_migrations.py,sha256=jr43FY0w24nmWqew3wEfeaD8Vw10oWyP3PMcYdykGac,2229
+ geo_activity_playground/core/activities.py,sha256=BjPCGlF4OGOKlWrn2F9KtEnmdTTiAT-b_bmUP8RQHVI,7676
+ geo_activity_playground/core/activity_parsers.py,sha256=aqrprDClXLmxeuCIsV5Vh7rNwUndwO8vgbvA3f-vmD4,11368
+ geo_activity_playground/core/cache_migrations.py,sha256=P0JJ853UclERKtzhx5mdyVOFFOgFj_cHueDbuNPw6lE,2263
  geo_activity_playground/core/config.py,sha256=GNHEIeFI3dNRiFSMburn5URZHx0qkiitvePAx2toYUQ,456
  geo_activity_playground/core/coordinates.py,sha256=tDfr9mlXhK6E_MMIJ0vYWVCoH0Lq8uyuaqUgaa8i0jg,966
  geo_activity_playground/core/heatmap.py,sha256=JrYGaO36e2XXLhogHvBhY8bhpKYRtfrmk86Zci__iaM,5212
@@ -14,17 +14,17 @@ geo_activity_playground/explorer/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRk
  geo_activity_playground/explorer/grid_file.py,sha256=kFpIRyUL4NG6xHYcxsHlS7wv4_mtaYNNykcSSHYZQ7c,3158
  geo_activity_playground/explorer/tile_visits.py,sha256=CaXkACwvw61QZWiyibkdX1AqPiPDzf_Cz05BWUHAkT8,10665
  geo_activity_playground/explorer/video.py,sha256=RGZik93ghfZuRILXf8pfUbPh5VV37_QRAR4FgOMfZqQ,4354
- geo_activity_playground/importers/directory.py,sha256=PdZMXQOvSmT18yLGK4fsf5NVIhHxSnuVe0aEuH0oPY0,3355
- geo_activity_playground/importers/strava_api.py,sha256=2NDqwGIDRpksktvN9lqeoQ1WxiKN79EILo5rWtFiamw,6882
- geo_activity_playground/importers/strava_checkout.py,sha256=xwfQERuNCDbkVyfUGTw-Eb_b4JkI5TzyZBD94Ror1HM,1653
+ geo_activity_playground/importers/directory.py,sha256=20N9X0j42cuR5TMAIw1w5gL-jQxzyCAnEcb0aqhfkyI,2844
+ geo_activity_playground/importers/strava_api.py,sha256=B2nVOpg53iqRH6vODuW4QG0DWqt7QEUrjBxDeNNXQYM,6609
+ geo_activity_playground/importers/strava_checkout.py,sha256=v-TMLNgXXiaDnydK-4u4I-u5TwQ3sL_fLzRM-ZoBukA,4299
  geo_activity_playground/importers/test_strava_api.py,sha256=4vX7wDr1a9aRh8myxNrIq6RwDBbP8ZeoXXPc10CAbW4,431
  geo_activity_playground/webui/activity_controller.py,sha256=b_y-B9HcFu-5rQgyoJKRiNX_2zGpdYEJpTYqKVxK7vQ,5889
  geo_activity_playground/webui/app.py,sha256=rfTa0m5demmIdr7NSYA2EGAj9qgDbODUcMTUy0Rj3Kk,8705
- geo_activity_playground/webui/calendar_controller.py,sha256=maQ1RlrD99pncOts3ue5ye4OHr6WB-E40eAzs8ZxwPI,2239
+ geo_activity_playground/webui/calendar_controller.py,sha256=HEiRvrGpEU-DZCNXM96YTfR4vp5-V83_fMopidicO9o,2624
  geo_activity_playground/webui/config_controller.py,sha256=4M8mQc58Hkm-ssfYF1gKRepiAXFIzkZdIMRSbX-aI1U,320
  geo_activity_playground/webui/eddington_controller.py,sha256=b5mYkciv7Wkd5zord-WsdrV_8c-qpVi-8DG3jIUEKhs,2616
  geo_activity_playground/webui/entry_controller.py,sha256=n_p9ZUC47t4f1uyW0C4hELzFS1G9eQBtTUqAsGgAico,1733
- geo_activity_playground/webui/equipment_controller.py,sha256=lF6gV69NuGQYxeheSO3tDTPcwTG51C4N3y8rOX4wcCA,2370
+ geo_activity_playground/webui/equipment_controller.py,sha256=4tKGDCvd_vmxl0X0pHG_u4VD2GkjJbjNgjQrlLn3-lY,2384
  geo_activity_playground/webui/explorer_controller.py,sha256=5CmlhTjQE8NNDHviL1QcBmVxgYjAVSiJLPSUL-EPcRY,10618
  geo_activity_playground/webui/heatmap_controller.py,sha256=XlccVtX1fwWlSZPNIlWrrSWj_VenORxxJP7H1pUAoWI,5672
  geo_activity_playground/webui/search_controller.py,sha256=ezCPSy6ji7knd9bz7ctipkPjsorm8XUo5CxE2Q5y3l8,942
@@ -40,9 +40,9 @@ geo_activity_playground/webui/static/safari-pinned-tab.svg,sha256=OzoEVGY0igWRXM
  geo_activity_playground/webui/static/site.webmanifest,sha256=4vYxdPMpwTdB8EmOvHkkYcjZ8Yrci3pOwwY3o_VwACA,440
  geo_activity_playground/webui/strava_controller.py,sha256=-DZ1Ae-0cWx5tia2dJpGfsBBoIya0QO7IC2qa1-7Q_U,779
  geo_activity_playground/webui/summary_controller.py,sha256=FrMURFw5lFqR2iW0y2qOfUPoKq-jm-3GX-6SNOfAggA,1756
- geo_activity_playground/webui/templates/activity.html.j2,sha256=GU7v5aYV-rNexu3Ykh8NwKUQTTco55QThwXDC-6TjCg,2897
+ geo_activity_playground/webui/templates/activity.html.j2,sha256=HHATMlq4tCirUq8HhKZYceVGFDeodi-6wpPewW1zxjA,2980
  geo_activity_playground/webui/templates/calendar-month.html.j2,sha256=LVokl95lPlYpUo-5FbDe3n3SES3LE-MABg0BOcdqP7s,1384
- geo_activity_playground/webui/templates/calendar.html.j2,sha256=KyP3B7ab1OKydhpUiQEN81gqERpEn0rgh3SNnmfzgic,1206
+ geo_activity_playground/webui/templates/calendar.html.j2,sha256=x3E1R6KoscVxfcndFePEA855tYz5UoHDSrDbjkhuOOs,1349
  geo_activity_playground/webui/templates/config.html.j2,sha256=pmec-TqSl5CVznQlyHuC91o18qa0ZQWHXxSBrlV4au4,796
  geo_activity_playground/webui/templates/eddington.html.j2,sha256=yl75IzWeIkFpwPj8FjTrzJsz_f-qdETPmNnAGLPJuL8,487
  geo_activity_playground/webui/templates/equipment.html.j2,sha256=BwZzbZ2AuFuiM_Fxu2KOqvhcgHd9yr1xL76ihb_6YKc,1317
@@ -54,8 +54,8 @@ geo_activity_playground/webui/templates/search.html.j2,sha256=lYFe9PzP8gqTenhZuf
  geo_activity_playground/webui/templates/strava-connect.html.j2,sha256=vLMqTnTV-DZJ1FHRjpm4OMgbABMwZQvbs8Ru9baKeBg,1111
  geo_activity_playground/webui/templates/summary.html.j2,sha256=eEwcPOURJ-uT89jeJGZHq_5pSq56_fTC7z-j_m5nQiA,471
  geo_activity_playground/webui/tile_controller.py,sha256=kkZvZ4wUdp-HLoHDmhX1IVdCYKsQR_vg9i5mMI9N0R4,745
- geo_activity_playground-0.17.4.dist-info/LICENSE,sha256=4RpAwKO8bPkfXH2lnpeUW0eLkNWglyG4lbrLDU_MOwY,1070
- geo_activity_playground-0.17.4.dist-info/METADATA,sha256=JmAnuo_a_ZLSsoJgIS17XWA0q4OY2h_uZLDpRaM8HYs,1566
- geo_activity_playground-0.17.4.dist-info/WHEEL,sha256=Zb28QaM1gQi8f4VCBhsUklF61CTlNYfs9YAZn-TOGFk,88
- geo_activity_playground-0.17.4.dist-info/entry_points.txt,sha256=pbNlLI6IIZIp7nPYCfAtiSiz2oxJSCl7DODD6SPkLKk,81
- geo_activity_playground-0.17.4.dist-info/RECORD,,
+ geo_activity_playground-0.18.0.dist-info/LICENSE,sha256=4RpAwKO8bPkfXH2lnpeUW0eLkNWglyG4lbrLDU_MOwY,1070
+ geo_activity_playground-0.18.0.dist-info/METADATA,sha256=aoIpLqMtjY-LsBZvePYet4iRiDJT1pOqaCUppSESN-c,1566
+ geo_activity_playground-0.18.0.dist-info/WHEEL,sha256=Zb28QaM1gQi8f4VCBhsUklF61CTlNYfs9YAZn-TOGFk,88
+ geo_activity_playground-0.18.0.dist-info/entry_points.txt,sha256=pbNlLI6IIZIp7nPYCfAtiSiz2oxJSCl7DODD6SPkLKk,81
+ geo_activity_playground-0.18.0.dist-info/RECORD,,