geo-activity-playground 0.38.2__py3-none-any.whl → 0.39.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (113)
  1. geo_activity_playground/__main__.py +5 -47
  2. geo_activity_playground/alembic/README +1 -0
  3. geo_activity_playground/alembic/env.py +76 -0
  4. geo_activity_playground/alembic/script.py.mako +26 -0
  5. geo_activity_playground/alembic/versions/451e7836b53d_add_square_planner_bookmark.py +33 -0
  6. geo_activity_playground/alembic/versions/63d3b7f6f93c_initial_version.py +73 -0
  7. geo_activity_playground/alembic/versions/ab83b9d23127_add_upstream_id.py +28 -0
  8. geo_activity_playground/alembic/versions/b03491c593f6_add_crop_indices.py +30 -0
  9. geo_activity_playground/alembic/versions/e02e27876deb_add_square_planner_bookmark_name.py +28 -0
  10. geo_activity_playground/alembic/versions/script.py.mako +28 -0
  11. geo_activity_playground/core/activities.py +50 -136
  12. geo_activity_playground/core/config.py +3 -3
  13. geo_activity_playground/core/datamodel.py +257 -0
  14. geo_activity_playground/core/enrichment.py +90 -92
  15. geo_activity_playground/core/heart_rate.py +1 -2
  16. geo_activity_playground/core/paths.py +6 -7
  17. geo_activity_playground/core/raster_map.py +43 -4
  18. geo_activity_playground/core/similarity.py +1 -2
  19. geo_activity_playground/core/tasks.py +2 -2
  20. geo_activity_playground/core/test_meta_search.py +3 -3
  21. geo_activity_playground/core/test_summary_stats.py +1 -1
  22. geo_activity_playground/explorer/grid_file.py +2 -2
  23. geo_activity_playground/explorer/tile_visits.py +8 -10
  24. geo_activity_playground/heatmap_video.py +7 -8
  25. geo_activity_playground/importers/activity_parsers.py +2 -2
  26. geo_activity_playground/importers/directory.py +9 -10
  27. geo_activity_playground/importers/strava_api.py +9 -9
  28. geo_activity_playground/importers/strava_checkout.py +12 -13
  29. geo_activity_playground/importers/test_csv_parser.py +3 -3
  30. geo_activity_playground/importers/test_directory.py +1 -1
  31. geo_activity_playground/importers/test_strava_api.py +1 -1
  32. geo_activity_playground/webui/app.py +94 -86
  33. geo_activity_playground/webui/authenticator.py +1 -1
  34. geo_activity_playground/webui/{activity/controller.py → blueprints/activity_blueprint.py} +246 -108
  35. geo_activity_playground/webui/{auth_blueprint.py → blueprints/auth_blueprint.py} +1 -1
  36. geo_activity_playground/webui/blueprints/bubble_chart_blueprint.py +61 -0
  37. geo_activity_playground/webui/{calendar/controller.py → blueprints/calendar_blueprint.py} +19 -19
  38. geo_activity_playground/webui/{eddington_blueprint.py → blueprints/eddington_blueprint.py} +5 -5
  39. geo_activity_playground/webui/blueprints/entry_views.py +68 -0
  40. geo_activity_playground/webui/{equipment_blueprint.py → blueprints/equipment_blueprint.py} +37 -4
  41. geo_activity_playground/webui/{explorer/controller.py → blueprints/explorer_blueprint.py} +88 -54
  42. geo_activity_playground/webui/blueprints/heatmap_blueprint.py +233 -0
  43. geo_activity_playground/webui/{search_blueprint.py → blueprints/search_blueprint.py} +7 -11
  44. geo_activity_playground/webui/blueprints/settings_blueprint.py +446 -0
  45. geo_activity_playground/webui/{square_planner_blueprint.py → blueprints/square_planner_blueprint.py} +31 -6
  46. geo_activity_playground/webui/{summary_blueprint.py → blueprints/summary_blueprint.py} +11 -23
  47. geo_activity_playground/webui/blueprints/tile_blueprint.py +27 -0
  48. geo_activity_playground/webui/{upload_blueprint.py → blueprints/upload_blueprint.py} +13 -18
  49. geo_activity_playground/webui/flasher.py +26 -0
  50. geo_activity_playground/webui/plot_util.py +1 -1
  51. geo_activity_playground/webui/search_util.py +4 -6
  52. geo_activity_playground/webui/static/images/layers-2x.png +0 -0
  53. geo_activity_playground/webui/static/images/layers.png +0 -0
  54. geo_activity_playground/webui/static/images/marker-icon-2x.png +0 -0
  55. geo_activity_playground/webui/static/images/marker-icon.png +0 -0
  56. geo_activity_playground/webui/static/images/marker-shadow.png +0 -0
  57. geo_activity_playground/webui/templates/activity/day.html.j2 +81 -0
  58. geo_activity_playground/webui/templates/activity/edit.html.j2 +38 -0
  59. geo_activity_playground/webui/{activity/templates → templates}/activity/name.html.j2 +29 -27
  60. geo_activity_playground/webui/{activity/templates → templates}/activity/show.html.j2 +57 -33
  61. geo_activity_playground/webui/templates/activity/trim.html.j2 +68 -0
  62. geo_activity_playground/webui/templates/bubble_chart/index.html.j2 +26 -0
  63. geo_activity_playground/webui/templates/calendar/index.html.j2 +48 -0
  64. geo_activity_playground/webui/templates/calendar/month.html.j2 +57 -0
  65. geo_activity_playground/webui/templates/equipment/index.html.j2 +7 -0
  66. geo_activity_playground/webui/templates/home.html.j2 +6 -6
  67. geo_activity_playground/webui/templates/page.html.j2 +2 -1
  68. geo_activity_playground/webui/{settings/templates → templates}/settings/index.html.j2 +9 -20
  69. geo_activity_playground/webui/templates/settings/manage-equipments.html.j2 +49 -0
  70. geo_activity_playground/webui/templates/settings/manage-kinds.html.j2 +48 -0
  71. geo_activity_playground/webui/{settings/templates → templates}/settings/privacy-zones.html.j2 +2 -0
  72. geo_activity_playground/webui/{settings/templates → templates}/settings/strava.html.j2 +2 -0
  73. geo_activity_playground/webui/templates/square_planner/index.html.j2 +63 -13
  74. {geo_activity_playground-0.38.2.dist-info → geo_activity_playground-0.39.0.dist-info}/METADATA +5 -1
  75. geo_activity_playground-0.39.0.dist-info/RECORD +133 -0
  76. geo_activity_playground/__init__.py +0 -0
  77. geo_activity_playground/core/__init__.py +0 -0
  78. geo_activity_playground/explorer/__init__.py +0 -0
  79. geo_activity_playground/importers/__init__.py +0 -0
  80. geo_activity_playground/webui/__init__.py +0 -0
  81. geo_activity_playground/webui/activity/__init__.py +0 -0
  82. geo_activity_playground/webui/activity/blueprint.py +0 -109
  83. geo_activity_playground/webui/activity/templates/activity/day.html.j2 +0 -80
  84. geo_activity_playground/webui/activity/templates/activity/edit.html.j2 +0 -42
  85. geo_activity_playground/webui/calendar/__init__.py +0 -0
  86. geo_activity_playground/webui/calendar/blueprint.py +0 -23
  87. geo_activity_playground/webui/calendar/templates/calendar/index.html.j2 +0 -46
  88. geo_activity_playground/webui/calendar/templates/calendar/month.html.j2 +0 -55
  89. geo_activity_playground/webui/entry_controller.py +0 -63
  90. geo_activity_playground/webui/explorer/__init__.py +0 -0
  91. geo_activity_playground/webui/explorer/blueprint.py +0 -62
  92. geo_activity_playground/webui/heatmap/__init__.py +0 -0
  93. geo_activity_playground/webui/heatmap/blueprint.py +0 -51
  94. geo_activity_playground/webui/heatmap/heatmap_controller.py +0 -216
  95. geo_activity_playground/webui/settings/blueprint.py +0 -262
  96. geo_activity_playground/webui/settings/controller.py +0 -272
  97. geo_activity_playground/webui/settings/templates/settings/equipment-offsets.html.j2 +0 -44
  98. geo_activity_playground/webui/settings/templates/settings/kind-renames.html.j2 +0 -25
  99. geo_activity_playground/webui/settings/templates/settings/kinds-without-achievements.html.j2 +0 -30
  100. geo_activity_playground/webui/tile_blueprint.py +0 -42
  101. geo_activity_playground-0.38.2.dist-info/RECORD +0 -129
  102. /geo_activity_playground/webui/{activity/templates → templates}/activity/lines.html.j2 +0 -0
  103. /geo_activity_playground/webui/{explorer/templates → templates}/explorer/index.html.j2 +0 -0
  104. /geo_activity_playground/webui/{heatmap/templates → templates}/heatmap/index.html.j2 +0 -0
  105. /geo_activity_playground/webui/{settings/templates → templates}/settings/admin-password.html.j2 +0 -0
  106. /geo_activity_playground/webui/{settings/templates → templates}/settings/color-schemes.html.j2 +0 -0
  107. /geo_activity_playground/webui/{settings/templates → templates}/settings/heart-rate.html.j2 +0 -0
  108. /geo_activity_playground/webui/{settings/templates → templates}/settings/metadata-extraction.html.j2 +0 -0
  109. /geo_activity_playground/webui/{settings/templates → templates}/settings/segmentation.html.j2 +0 -0
  110. /geo_activity_playground/webui/{settings/templates → templates}/settings/sharepic.html.j2 +0 -0
  111. {geo_activity_playground-0.38.2.dist-info → geo_activity_playground-0.39.0.dist-info}/LICENSE +0 -0
  112. {geo_activity_playground-0.38.2.dist-info → geo_activity_playground-0.39.0.dist-info}/WHEEL +0 -0
  113. {geo_activity_playground-0.38.2.dist-info → geo_activity_playground-0.39.0.dist-info}/entry_points.txt +0 -0
--- a/geo_activity_playground/__main__.py
+++ b/geo_activity_playground/__main__.py
@@ -1,20 +1,13 @@
 import argparse
 import logging
-import os
 import pathlib
 
 import coloredlogs
 
+from .explorer.video import explorer_video_main
+from .heatmap_video import main_heatmap_video
 from .importers.strava_checkout import convert_strava_checkout
-from geo_activity_playground.core.activities import ActivityRepository
-from geo_activity_playground.core.config import ConfigAccessor
-from geo_activity_playground.core.config import import_old_config
-from geo_activity_playground.core.config import import_old_strava_config
-from geo_activity_playground.explorer.tile_visits import TileVisitAccessor
-from geo_activity_playground.explorer.video import explorer_video_main
-from geo_activity_playground.heatmap_video import main_heatmap_video
-from geo_activity_playground.webui.app import web_ui_main
-from geo_activity_playground.webui.upload_blueprint import scan_for_activities
+from .webui.app import web_ui_main
 
 logger = logging.getLogger(__name__)
 
@@ -35,16 +28,6 @@ def main() -> None:
         description="The tools are organized in subcommands.", metavar="Command"
     )
 
-    # subparser = subparsers.add_parser(
-    #     "explorer",
-    #     help="Generate GeoJSON/GPX files with explored and missing explorer tiles.",
-    # )
-    # subparser.set_defaults(
-    #     func=lambda options: main_explorer(
-    #         make_time_series_source(options.basedir)
-    #     )
-    # )
-
     subparser = subparsers.add_parser(
         "explorer-video", help="Generate video with explorer timeline."
     )
@@ -65,7 +48,8 @@ def main() -> None:
     subparser = subparsers.add_parser("serve", help="Launch webserver")
     subparser.set_defaults(
         func=lambda options: web_ui_main(
-            *make_activity_repository(options.basedir, options.skip_reload),
+            options.basedir,
+            options.skip_reload,
             host=options.host,
             port=options.port,
         )
@@ -78,9 +62,6 @@ def main() -> None:
     )
     subparser.add_argument("--skip-reload", action=argparse.BooleanOptionalAction)
 
-    subparser = subparsers.add_parser("cache", help="Cache stuff")
-    subparser.set_defaults(func=lambda options: main_cache(options.basedir))
-
     subparser = subparsers.add_parser(
         "heatmap-video", help="Create a video with the evolution of the heatmap"
     )
@@ -103,28 +84,5 @@ def main() -> None:
     options.func(options)
 
 
-def make_activity_repository(
-    basedir: pathlib.Path, skip_reload: bool
-) -> tuple[ActivityRepository, TileVisitAccessor, ConfigAccessor]:
-    os.chdir(basedir)
-
-    repository = ActivityRepository()
-    tile_visit_accessor = TileVisitAccessor()
-    config_accessor = ConfigAccessor()
-    import_old_config(config_accessor)
-    import_old_strava_config(config_accessor)
-
-    if not skip_reload:
-        scan_for_activities(repository, tile_visit_accessor, config_accessor())
-
-    return repository, tile_visit_accessor, config_accessor
-
-
-def main_cache(basedir: pathlib.Path) -> None:
-    (repository, tile_visit_accessor, config_accessor) = make_activity_repository(
-        basedir, False
-    )
-
-
 if __name__ == "__main__":
     main()
--- /dev/null
+++ b/geo_activity_playground/alembic/README
@@ -0,0 +1 @@
+Generic single-database configuration.
--- /dev/null
+++ b/geo_activity_playground/alembic/env.py
@@ -0,0 +1,76 @@
+from logging.config import fileConfig
+
+from alembic import context
+from sqlalchemy import engine_from_config
+from sqlalchemy import pool
+
+from geo_activity_playground.core.datamodel import Base
+
+# this is the Alembic Config object, which provides
+# access to the values within the .ini file in use.
+config = context.config
+
+# Interpret the config file for Python logging.
+# This line sets up loggers basically.
+if config.config_file_name is not None:
+    fileConfig(config.config_file_name)
+
+target_metadata = Base.metadata
+
+# other values from the config, defined by the needs of env.py,
+# can be acquired:
+# my_important_option = config.get_main_option("my_important_option")
+# ... etc.
+
+
+def run_migrations_offline() -> None:
+    """Run migrations in 'offline' mode.
+
+    This configures the context with just a URL
+    and not an Engine, though an Engine is acceptable
+    here as well. By skipping the Engine creation
+    we don't even need a DBAPI to be available.
+
+    Calls to context.execute() here emit the given string to the
+    script output.
+
+    """
+    url = config.get_main_option("sqlalchemy.url")
+    context.configure(
+        url=url,
+        target_metadata=target_metadata,
+        literal_binds=True,
+        dialect_opts={"paramstyle": "named"},
+        render_as_batch=True,
+    )
+
+    with context.begin_transaction():
+        context.run_migrations()
+
+
+def run_migrations_online() -> None:
+    """Run migrations in 'online' mode.
+
+    In this scenario we need to create an Engine
+    and associate a connection with the context.
+
+    """
+    connectable = engine_from_config(
+        config.get_section(config.config_ini_section, {}),
+        prefix="sqlalchemy.",
+        poolclass=pool.NullPool,
+    )
+
+    with connectable.connect() as connection:
+        context.configure(
+            connection=connection, target_metadata=target_metadata, render_as_batch=True
+        )
+
+        with context.begin_transaction():
+            context.run_migrations()
+
+
+if context.is_offline_mode():
+    run_migrations_offline()
+else:
+    run_migrations_online()
--- /dev/null
+++ b/geo_activity_playground/alembic/script.py.mako
@@ -0,0 +1,26 @@
+"""${message}
+
+Revision ID: ${up_revision}
+Revises: ${down_revision | comma,n}
+Create Date: ${create_date}
+
+"""
+from typing import Sequence, Union
+
+from alembic import op
+import sqlalchemy as sa
+${imports if imports else ""}
+
+# revision identifiers, used by Alembic.
+revision: str = ${repr(up_revision)}
+down_revision: Union[str, None] = ${repr(down_revision)}
+branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)}
+depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)}
+
+
+def upgrade() -> None:
+    ${upgrades if upgrades else "pass"}
+
+
+def downgrade() -> None:
+    ${downgrades if downgrades else "pass"}
--- /dev/null
+++ b/geo_activity_playground/alembic/versions/451e7836b53d_add_square_planner_bookmark.py
@@ -0,0 +1,33 @@
+from typing import Sequence
+from typing import Union
+
+import sqlalchemy as sa
+from alembic import op
+
+
+# revision identifiers, used by Alembic.
+revision: str = "451e7836b53d"
+down_revision: Union[str, None] = "ab83b9d23127"
+branch_labels: Union[str, Sequence[str], None] = None
+depends_on: Union[str, Sequence[str], None] = None
+
+
+def upgrade() -> None:
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.create_table(
+        "square_planner_bookmarks",
+        sa.Column("id", sa.Integer(), nullable=False),
+        sa.Column("zoom", sa.Integer(), nullable=False),
+        sa.Column("x", sa.Integer(), nullable=False),
+        sa.Column("y", sa.Integer(), nullable=False),
+        sa.Column("size", sa.Integer(), nullable=False),
+        sa.PrimaryKeyConstraint("id"),
+        sa.UniqueConstraint("zoom", "x", "y", "size", name="kinds_name"),
+    )
+    # ### end Alembic commands ###
+
+
+def downgrade() -> None:
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.drop_table("square_planner_bookmarks")
+    # ### end Alembic commands ###
--- /dev/null
+++ b/geo_activity_playground/alembic/versions/63d3b7f6f93c_initial_version.py
@@ -0,0 +1,73 @@
+from typing import Sequence
+from typing import Union
+
+import sqlalchemy as sa
+from alembic import op
+
+
+# revision identifiers, used by Alembic.
+revision: str = "63d3b7f6f93c"
+down_revision: Union[str, None] = None
+branch_labels: Union[str, Sequence[str], None] = None
+depends_on: Union[str, Sequence[str], None] = None
+
+
+def upgrade() -> None:
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.create_table(
+        "equipments",
+        sa.Column("id", sa.Integer(), nullable=False),
+        sa.Column("name", sa.String(), nullable=False),
+        sa.Column("offset_km", sa.Integer(), nullable=False),
+        sa.PrimaryKeyConstraint("id"),
+        sa.UniqueConstraint("name", name="equipments_name"),
+    )
+    op.create_table(
+        "kinds",
+        sa.Column("id", sa.Integer(), nullable=False),
+        sa.Column("name", sa.String(), nullable=False),
+        sa.Column("consider_for_achievements", sa.Boolean(), nullable=False),
+        sa.Column("default_equipment_id", sa.Integer(), nullable=True),
+        sa.ForeignKeyConstraint(
+            ["default_equipment_id"], ["equipments.id"], name="default_equipment_id"
+        ),
+        sa.PrimaryKeyConstraint("id"),
+        sa.UniqueConstraint("name", name="kinds_name"),
+    )
+    op.create_table(
+        "activities",
+        sa.Column("id", sa.Integer(), nullable=False),
+        sa.Column("name", sa.String(), nullable=False),
+        sa.Column("path", sa.String(), nullable=True),
+        sa.Column("distance_km", sa.Float(), nullable=False),
+        sa.Column("start", sa.DateTime(), nullable=True),
+        sa.Column("elapsed_time", sa.Interval(), nullable=True),
+        sa.Column("moving_time", sa.Interval(), nullable=True),
+        sa.Column("start_latitude", sa.Float(), nullable=True),
+        sa.Column("start_longitude", sa.Float(), nullable=True),
+        sa.Column("end_latitude", sa.Float(), nullable=True),
+        sa.Column("end_longitude", sa.Float(), nullable=True),
+        sa.Column("elevation_gain", sa.Float(), nullable=True),
+        sa.Column("start_elevation", sa.Float(), nullable=True),
+        sa.Column("end_elevation", sa.Float(), nullable=True),
+        sa.Column("calories", sa.Integer(), nullable=True),
+        sa.Column("steps", sa.Integer(), nullable=True),
+        sa.Column("num_new_tiles_14", sa.Integer(), nullable=True),
+        sa.Column("num_new_tiles_17", sa.Integer(), nullable=True),
+        sa.Column("equipment_id", sa.Integer(), nullable=True),
+        sa.Column("kind_id", sa.Integer(), nullable=True),
+        sa.ForeignKeyConstraint(
+            ["equipment_id"], ["equipments.id"], name="equipment_id"
+        ),
+        sa.ForeignKeyConstraint(["kind_id"], ["kinds.id"], name="kind_id"),
+        sa.PrimaryKeyConstraint("id"),
+    )
+    # ### end Alembic commands ###
+
+
+def downgrade() -> None:
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.drop_table("activities")
+    op.drop_table("kinds")
+    op.drop_table("equipments")
+    # ### end Alembic commands ###
--- /dev/null
+++ b/geo_activity_playground/alembic/versions/ab83b9d23127_add_upstream_id.py
@@ -0,0 +1,28 @@
+from typing import Sequence
+from typing import Union
+
+import sqlalchemy as sa
+from alembic import op
+
+
+# revision identifiers, used by Alembic.
+revision: str = "ab83b9d23127"
+down_revision: Union[str, None] = "b03491c593f6"
+branch_labels: Union[str, Sequence[str], None] = None
+depends_on: Union[str, Sequence[str], None] = None
+
+
+def upgrade() -> None:
+    # ### commands auto generated by Alembic - please adjust! ###
+    with op.batch_alter_table("activities", schema=None) as batch_op:
+        batch_op.add_column(sa.Column("upstream_id", sa.String(), nullable=True))
+
+    # ### end Alembic commands ###
+
+
+def downgrade() -> None:
+    # ### commands auto generated by Alembic - please adjust! ###
+    with op.batch_alter_table("activities", schema=None) as batch_op:
+        batch_op.drop_column("upstream_id")
+
+    # ### end Alembic commands ###
--- /dev/null
+++ b/geo_activity_playground/alembic/versions/b03491c593f6_add_crop_indices.py
@@ -0,0 +1,30 @@
+from typing import Sequence
+from typing import Union
+
+import sqlalchemy as sa
+from alembic import op
+
+
+# revision identifiers, used by Alembic.
+revision: str = "b03491c593f6"
+down_revision: Union[str, None] = "63d3b7f6f93c"
+branch_labels: Union[str, Sequence[str], None] = None
+depends_on: Union[str, Sequence[str], None] = None
+
+
+def upgrade() -> None:
+    # ### commands auto generated by Alembic - please adjust! ###
+    with op.batch_alter_table("activities", schema=None) as batch_op:
+        batch_op.add_column(sa.Column("index_begin", sa.Integer(), nullable=True))
+        batch_op.add_column(sa.Column("index_end", sa.Integer(), nullable=True))
+
+    # ### end Alembic commands ###
+
+
+def downgrade() -> None:
+    # ### commands auto generated by Alembic - please adjust! ###
+    with op.batch_alter_table("activities", schema=None) as batch_op:
+        batch_op.drop_column("index_end")
+        batch_op.drop_column("index_begin")
+
+    # ### end Alembic commands ###
--- /dev/null
+++ b/geo_activity_playground/alembic/versions/e02e27876deb_add_square_planner_bookmark_name.py
@@ -0,0 +1,28 @@
+from typing import Sequence
+from typing import Union
+
+import sqlalchemy as sa
+from alembic import op
+
+
+# revision identifiers, used by Alembic.
+revision: str = "e02e27876deb"
+down_revision: Union[str, None] = "451e7836b53d"
+branch_labels: Union[str, Sequence[str], None] = None
+depends_on: Union[str, Sequence[str], None] = None
+
+
+def upgrade() -> None:
+    # ### commands auto generated by Alembic - please adjust! ###
+    with op.batch_alter_table("square_planner_bookmarks", schema=None) as batch_op:
+        batch_op.add_column(sa.Column("name", sa.String(), nullable=False))
+
+    # ### end Alembic commands ###
+
+
+def downgrade() -> None:
+    # ### commands auto generated by Alembic - please adjust! ###
+    with op.batch_alter_table("square_planner_bookmarks", schema=None) as batch_op:
+        batch_op.drop_column("name")
+
+    # ### end Alembic commands ###
--- /dev/null
+++ b/geo_activity_playground/alembic/versions/script.py.mako
@@ -0,0 +1,28 @@
+"""${message}
+
+Revision ID: ${up_revision}
+Revises: ${down_revision | comma,n}
+Create Date: ${create_date}
+
+"""
+from typing import Sequence, Union
+
+from alembic import op
+import sqlalchemy as sa
+${imports if imports else ""}
+
+# revision identifiers, used by Alembic.
+revision: str = ${repr(up_revision)}
+down_revision: Union[str, None] = ${repr(down_revision)}
+branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)}
+depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)}
+
+
+def upgrade() -> None:
+    """Upgrade schema."""
+    ${upgrades if upgrades else "pass"}
+
+
+def downgrade() -> None:
+    """Downgrade schema."""
+    ${downgrades if downgrades else "pass"}
--- a/geo_activity_playground/core/activities.py
+++ b/geo_activity_playground/core/activities.py
@@ -1,51 +1,25 @@
 import datetime
 import functools
-import json
 import logging
-import pickle
 from collections.abc import Callable
 from typing import Any
-from typing import Iterator
 from typing import Optional
-from typing import TypedDict
 
 import geojson
 import matplotlib
 import numpy as np
 import pandas as pd
+import sqlalchemy
 from tqdm import tqdm
 
-from geo_activity_playground.core.paths import activities_file
-from geo_activity_playground.core.paths import activity_enriched_meta_dir
-from geo_activity_playground.core.paths import activity_enriched_time_series_dir
-from geo_activity_playground.core.paths import activity_meta_override_dir
+from geo_activity_playground.core.datamodel import Activity
+from geo_activity_playground.core.datamodel import ActivityMeta
+from geo_activity_playground.core.datamodel import DB
+from geo_activity_playground.core.datamodel import Kind
 
 logger = logging.getLogger(__name__)
 
 
-class ActivityMeta(TypedDict):
-    average_speed_elapsed_kmh: float
-    average_speed_moving_kmh: float
-    calories: float
-    commute: bool
-    consider_for_achievements: bool
-    distance_km: float
-    elapsed_time: datetime.timedelta
-    elevation_gain: float
-    end_latitude: float
-    end_longitude: float
-    equipment: str
-    id: int
-    kind: str
-    moving_time: datetime.timedelta
-    name: str
-    path: str
-    start_latitude: float
-    start_longitude: float
-    start: np.datetime64
-    steps: int
-
-
 def make_activity_meta() -> ActivityMeta:
     return ActivityMeta(
         calories=None,
@@ -57,129 +31,69 @@ def make_activity_meta() -> ActivityMeta:
     )
 
 
-def build_activity_meta() -> None:
-    if activities_file().exists():
-        meta = pd.read_parquet(activities_file())
-        present_ids = set(meta["id"])
-    else:
-        meta = pd.DataFrame(columns=["id"])
-        present_ids = set()
-
-    available_ids = {
-        int(path.stem) for path in activity_enriched_meta_dir().glob("*.pickle")
-    }
-    new_ids = available_ids - present_ids
-    deleted_ids = present_ids - available_ids
-
-    # Remove updated activities and read these again.
-    if activities_file().exists():
-        meta_mtime = activities_file().stat().st_mtime
-        updated_ids = {
-            int(path.stem)
-            for path in activity_enriched_meta_dir().glob("*.pickle")
-            if path.stat().st_mtime > meta_mtime
-        }
-        new_ids.update(updated_ids)
-        deleted_ids.update(updated_ids & present_ids)
-
-    if deleted_ids:
-        logger.debug(f"Removing activities {deleted_ids} from repository.")
-        meta.drop(sorted(deleted_ids), axis="index", inplace=True)
-
-    rows = []
-    for new_id in tqdm(new_ids, desc="Register new activities"):
-        with open(activity_enriched_meta_dir() / f"{new_id}.pickle", "rb") as f:
-            data = pickle.load(f)
-        override_file = activity_meta_override_dir() / f"{new_id}.json"
-        if override_file.exists():
-            with open(override_file) as f:
-                data.update(json.load(f))
-        rows.append(data)
-
-    if rows:
-        new_shard = pd.DataFrame(rows)
-        new_shard.index = new_shard["id"]
-        new_shard.index.name = "index"
-        if len(meta):
-            meta = pd.concat([meta, new_shard])
-        else:
-            meta = new_shard
-
-    if len(meta):
-        assert pd.api.types.is_dtype_equal(meta["start"].dtype, "datetime64[ns]"), (
-            meta["start"].dtype,
-            meta["start"].iloc[0],
-        )
-
-        meta.sort_values("start", inplace=True)
-
-        meta.loc[meta["kind"] == "", "kind"] = "Unknown"
-        meta.loc[meta["equipment"] == "", "equipment"] = "Unknown"
-        meta["average_speed_moving_kmh"] = meta["distance_km"] / (
-            meta["moving_time"].dt.total_seconds() / 3_600
-        )
-        meta["average_speed_elapsed_kmh"] = meta["distance_km"] / (
-            meta["elapsed_time"].dt.total_seconds() / 3_600
-        )
-
-    meta.to_parquet(activities_file())
-
-
 class ActivityRepository:
-    def __init__(self) -> None:
-        self.meta = pd.DataFrame()
-
     def __len__(self) -> int:
-        return len(self.meta)
-
-    def reload(self) -> None:
-        self.meta = pd.read_parquet(activities_file())
+        return len(self.get_activity_ids())
 
     def has_activity(self, activity_id: int) -> bool:
-        if len(self.meta):
-            if activity_id in self.meta["id"]:
-                return True
-
-        return False
+        return bool(
+            DB.session.scalars(
+                sqlalchemy.query(Activity).where(Activity.id == activity_id)
+            ).all()
+        )
 
     def last_activity_date(self) -> Optional[datetime.datetime]:
-        if len(self.meta):
-            return self.meta.iloc[-1]["start"]
+        result = DB.session.scalars(
+            sqlalchemy.select(Activity).order_by(Activity.start)
+        ).all()
+        if result:
+            return result[-1].start
         else:
             return None
 
     def get_activity_ids(self, only_achievements: bool = False) -> list[int]:
+        query = sqlalchemy.select(Activity.id)
         if only_achievements:
-            return list(self.meta.loc[self.meta["consider_for_achievements"]].index)
-        else:
-            return list(self.meta.index)
-
-    def iter_activities(self, new_to_old=True, dropna=False) -> Iterator[ActivityMeta]:
+            query = query.where(Kind.consider_for_achievements)
+        result = DB.session.scalars(query).all()
+        return result
+
+    def iter_activities(self, new_to_old=True, drop_na=False) -> list[Activity]:
+        query = sqlalchemy.select(Activity)
+        if drop_na:
+            query = query.where(Activity.start.is_not(None))
+        result = DB.session.scalars(query.order_by(Activity.start)).all()
         direction = -1 if new_to_old else 1
-        for index, row in self.meta[::direction].iterrows():
-            if not dropna or not pd.isna(row["start"]):
-                yield row
+        return result[::direction]
 
-    def get_activity_by_id(self, id: int) -> ActivityMeta:
-        activity = self.meta.loc[id]
-        assert isinstance(activity["name"], str), activity["name"]
+    def get_activity_by_id(self, id: int) -> Activity:
+        activity = DB.session.scalar(
+            sqlalchemy.select(Activity).where(Activity.id == int(id))
+        )
+        if activity is None:
+            raise ValueError(f"Cannot find activity {id} in DB.session.")
         return activity
 
-    @functools.lru_cache(maxsize=3000)
     def get_time_series(self, id: int) -> pd.DataFrame:
-        path = activity_enriched_time_series_dir() / f"{id}.parquet"
-        try:
-            df = pd.read_parquet(path)
-        except OSError as e:
-            logger.error(f"Error while reading {path}, deleting cache file …")
-            path.unlink(missing_ok=True)
-            raise
-
+        return self.get_activity_by_id(id).time_series
+
+    @property
+    def meta(self) -> pd.DataFrame:
+        activities = self.iter_activities(new_to_old=False, drop_na=True)
+        df = pd.DataFrame([activity.to_dict() for activity in activities])
+        df["date"] = df["start"].dt.date
+        df["year"] = [start.year for start in df["start"]]
+        df["month"] = [start.month for start in df["start"]]
+        df["day"] = [start.day for start in df["start"]]
+        df["week"] = [start.isocalendar().week for start in df["start"]]
+        df["day_of_week"] = df["start"].dt.day_of_week
+        df["iso_year"] = [start.isocalendar().year for start in df["start"]]
+        df["hours"] = [
+            elapsed_time.total_seconds() / 3600 for elapsed_time in df["elapsed_time"]
+        ]
+        df.index = df["id"]
         return df
 
-    def save(self) -> None:
-        self.meta.to_parquet(activities_file())
-
 
 def make_geojson_from_time_series(time_series: pd.DataFrame) -> str:
     fc = geojson.FeatureCollection(
--- a/geo_activity_playground/core/config.py
+++ b/geo_activity_playground/core/config.py
@@ -5,14 +5,14 @@ import logging
 import pathlib
 from typing import Optional
 
-from geo_activity_playground.core.paths import new_config_file
-from geo_activity_playground.core.paths import strava_dynamic_config_path
+from .paths import new_config_file
+from .paths import strava_dynamic_config_path
 
 
 try:
     import tomllib
 except ModuleNotFoundError:
-    import tomli as tomllib
+    import tomli as tomllib  # type: ignore
 
 
 logger = logging.getLogger(__name__)