goodmap 0.5.2__tar.gz → 1.3.0__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {goodmap-0.5.2 → goodmap-1.3.0}/PKG-INFO +21 -11
- {goodmap-0.5.2 → goodmap-1.3.0}/README.md +9 -7
- goodmap-1.3.0/goodmap/admin_api.py +251 -0
- goodmap-1.3.0/goodmap/api_models.py +105 -0
- goodmap-1.3.0/goodmap/clustering.py +75 -0
- goodmap-1.3.0/goodmap/config.py +42 -0
- {goodmap-0.5.2 → goodmap-1.3.0}/goodmap/core.py +39 -0
- goodmap-1.3.0/goodmap/core_api.py +437 -0
- goodmap-1.3.0/goodmap/data_models/location.py +195 -0
- goodmap-1.3.0/goodmap/db.py +1466 -0
- goodmap-1.3.0/goodmap/exceptions.py +100 -0
- {goodmap-0.5.2 → goodmap-1.3.0}/goodmap/formatter.py +20 -0
- goodmap-1.3.0/goodmap/goodmap.py +195 -0
- goodmap-1.3.0/goodmap/json_security.py +102 -0
- {goodmap-0.5.2 → goodmap-1.3.0}/goodmap/templates/goodmap-admin.html +3 -1
- {goodmap-0.5.2 → goodmap-1.3.0}/goodmap/templates/map.html +8 -1
- goodmap-1.3.0/pyproject.toml +137 -0
- goodmap-0.5.2/goodmap/core_api.py +0 -352
- goodmap-0.5.2/goodmap/data_models/location.py +0 -38
- goodmap-0.5.2/goodmap/db.py +0 -564
- goodmap-0.5.2/goodmap/goodmap.py +0 -78
- goodmap-0.5.2/pyproject.toml +0 -83
- {goodmap-0.5.2 → goodmap-1.3.0}/LICENSE.md +0 -0
- {goodmap-0.5.2 → goodmap-1.3.0}/goodmap/__init__.py +0 -0
- {goodmap-0.5.2 → goodmap-1.3.0}/goodmap/data_validator.py +0 -0
{goodmap-0.5.2 → goodmap-1.3.0}/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.3
 Name: goodmap
-Version: 0.5.2
+Version: 1.3.0
 Summary: Map engine to serve all the people :)
 Author: Krzysztof Kolodzinski
 Author-email: krzysztof.kolodzinski@problematy.pl
@@ -10,6 +10,7 @@ Classifier: Programming Language :: Python :: 3.10
 Classifier: Programming Language :: Python :: 3.11
 Classifier: Programming Language :: Python :: 3.12
 Classifier: Programming Language :: Python :: 3.13
+Provides-Extra: docs
 Requires-Dist: Babel (>=2.10.3,<3.0.0)
 Requires-Dist: Flask (==3.0.3)
 Requires-Dist: Flask-Babel (>=4.0.0,<5.0.0)
@@ -17,16 +18,23 @@ Requires-Dist: Flask-WTF (>=1.2.1,<2.0.0)
 Requires-Dist: PyYAML (>=6.0,<7.0)
 Requires-Dist: aiohttp (>=3.8.4,<4.0.0)
 Requires-Dist: deprecation (>=2.1.0,<3.0.0)
-Requires-Dist: flask-restx (>=1.3.0,<2.0.0)
 Requires-Dist: google-cloud-storage (>=2.7.0,<3.0.0)
 Requires-Dist: gql (>=3.4.0,<4.0.0)
-Requires-Dist: gunicorn (>=20.1
+Requires-Dist: gunicorn (>=20.1,<24.0)
 Requires-Dist: humanize (>=4.6.0,<5.0.0)
-Requires-Dist:
+Requires-Dist: myst-parser (>=4.0.0,<5.0.0) ; extra == "docs"
+Requires-Dist: numpy (>=2.2.0,<3.0.0)
+Requires-Dist: platzky (>=1.0.0,<2.0.0)
 Requires-Dist: pydantic (>=2.7.1,<3.0.0)
+Requires-Dist: pysupercluster-problematy (>=0.7.8,<0.8.0)
+Requires-Dist: scipy (>=1.15.1,<2.0.0)
+Requires-Dist: spectree (>=2.0.1,<3.0.0)
+Requires-Dist: sphinx (>=8.0.0,<9.0.0) ; extra == "docs"
+Requires-Dist: sphinx-rtd-theme (>=3.0.0,<4.0.0) ; extra == "docs"
+Requires-Dist: tomli (>=2.0.0,<3.0.0) ; extra == "docs"
 Description-Content-Type: text/markdown
 
-
 [](https://coveralls.io/github/Problematy/goodmap)
 
 # Good Map
@@ -75,9 +83,9 @@ poetry run <command>
 
 ### TL;DR
 If you don't want to go through all the configuration, e.g. you just simply want to test if everything works,
-you can simply run app with test dataset provided in `
+you can simply run app with test dataset provided in `examples` directory:
 
-> poetry run flask --app 'goodmap.goodmap:create_app(config_path="./
+> poetry run flask --app 'goodmap.goodmap:create_app(config_path="./examples/e2e_test_config.yml")' run
 
 ### Configuration
 
@@ -95,7 +103,7 @@ Afterwards run it with:
 
 ## Database
 
-The database is stored in JSON, in the `map` section. For an example database see `
+The database is stored in JSON, in the `map` section. For an example database see `examples/e2e_test_data.json`. The first subsection `data` consists of the actual datapoints, representing points on a map.
 
 Datapoints have fields. The next subsections define special types of fields:
 - `obligatory_fields` - here are explicitely stated all the fields that the application assumes are presnt in all datapoints. E.g.
@@ -123,9 +131,11 @@ You can define the fields in all these subsections. Besides these types of field
 
 ## Examples
 
-You can find examples of working configuration and database in `
-- `e2e_test_config.yml`
-- `e2e_test_data.json`
+You can find examples of working configuration and database in `examples/` directory:
+- `e2e_test_config.yml` - Basic configuration example
+- `e2e_test_data.json` - Example database with sample location data
+- `mongo_e2e_test_config.yml` - MongoDB configuration example
 
 
+# final test
 
{goodmap-0.5.2 → goodmap-1.3.0}/README.md
@@ -1,4 +1,4 @@
-
 [](https://coveralls.io/github/Problematy/goodmap)
 
 # Good Map
@@ -47,9 +47,9 @@ poetry run <command>
 
 ### TL;DR
 If you don't want to go through all the configuration, e.g. you just simply want to test if everything works,
-you can simply run app with test dataset provided in `
+you can simply run app with test dataset provided in `examples` directory:
 
-> poetry run flask --app 'goodmap.goodmap:create_app(config_path="./
+> poetry run flask --app 'goodmap.goodmap:create_app(config_path="./examples/e2e_test_config.yml")' run
 
 ### Configuration
 
@@ -67,7 +67,7 @@ Afterwards run it with:
 
 ## Database
 
-The database is stored in JSON, in the `map` section. For an example database see `
+The database is stored in JSON, in the `map` section. For an example database see `examples/e2e_test_data.json`. The first subsection `data` consists of the actual datapoints, representing points on a map.
 
 Datapoints have fields. The next subsections define special types of fields:
 - `obligatory_fields` - here are explicitely stated all the fields that the application assumes are presnt in all datapoints. E.g.
@@ -95,8 +95,10 @@ You can define the fields in all these subsections. Besides these types of field
 
 ## Examples
 
-You can find examples of working configuration and database in `
-- `e2e_test_config.yml`
-- `e2e_test_data.json`
+You can find examples of working configuration and database in `examples/` directory:
+- `e2e_test_config.yml` - Basic configuration example
+- `e2e_test_data.json` - Example database with sample location data
+- `mongo_e2e_test_config.yml` - MongoDB configuration example
 
 
+# final test
goodmap-1.3.0/goodmap/admin_api.py (new file)
@@ -0,0 +1,251 @@
import logging
import uuid
from typing import Any, Type

from flask import Blueprint, jsonify, make_response, request
from spectree import Response, SpecTree
from werkzeug.exceptions import BadRequest

from goodmap.api_models import (
    ErrorResponse,
    ReportUpdateRequest,
    SuggestionStatusRequest,
)
from goodmap.exceptions import (
    LocationAlreadyExistsError,
    LocationNotFoundError,
    LocationValidationError,
    ReportNotFoundError,
)

# Error message constants
ERROR_INVALID_REQUEST_DATA = "Invalid request data"
ERROR_INVALID_LOCATION_DATA = "Invalid location data"
ERROR_INTERNAL_ERROR = "An internal error occurred"
ERROR_LOCATION_NOT_FOUND = "Location not found"

logger = logging.getLogger(__name__)


def _clean_model_name(model: Type[Any]) -> str:
    return model.__name__


def _handle_location_validation_error(e: LocationValidationError):
    """Handle LocationValidationError and return appropriate response."""
    logger.warning(
        "Location validation failed",
        extra={"uuid": e.uuid, "errors": e.validation_errors},
    )
    return make_response(jsonify({"message": ERROR_INVALID_LOCATION_DATA}), 400)


def _get_locations_handler(database):
    """Handle GET /locations request."""
    query_params = request.args.to_dict(flat=False)
    if "sort_by" not in query_params:
        query_params["sort_by"] = ["name"]
    result = database.get_locations_paginated(query_params)
    return jsonify(result)


def _create_location_handler(database, location_model):
    """Handle POST /locations request."""
    location_data = request.get_json()
    if location_data is None:
        logger.warning("Empty or invalid JSON in admin create location endpoint")
        return make_response(jsonify({"message": ERROR_INVALID_REQUEST_DATA}), 400)
    # TODO: Catch pydantic.ValidationError separately to return 400 instead of 500
    try:
        location_data.update({"uuid": str(uuid.uuid4())})
        location = location_model.model_validate(location_data)
        database.add_location(location.model_dump())
    except LocationValidationError as e:
        return _handle_location_validation_error(e)
    except Exception:
        logger.error("Error creating location", exc_info=True)
        return make_response(jsonify({"message": ERROR_INTERNAL_ERROR}), 500)
    return jsonify(location.model_dump())


def _update_location_handler(database, location_model, location_id):
    """Handle PUT /locations/<location_id> request."""
    location_data = request.get_json()
    if location_data is None:
        logger.warning("Empty or invalid JSON in admin update location endpoint")
        return make_response(jsonify({"message": ERROR_INVALID_REQUEST_DATA}), 400)
    # TODO: Catch pydantic.ValidationError separately to return 400 instead of 500
    try:
        location_data.update({"uuid": location_id})
        location = location_model.model_validate(location_data)
        database.update_location(location_id, location.model_dump())
    except LocationValidationError as e:
        return _handle_location_validation_error(e)
    except LocationNotFoundError as e:
        logger.info("Location not found for update", extra={"uuid": e.uuid})
        return make_response(jsonify({"message": ERROR_LOCATION_NOT_FOUND}), 404)
    except Exception:
        logger.error("Error updating location", exc_info=True)
        return make_response(jsonify({"message": ERROR_INTERNAL_ERROR}), 500)
    return jsonify(location.model_dump())


def _delete_location_handler(database, location_id):
    """Handle DELETE /locations/<location_id> request."""
    try:
        database.delete_location(location_id)
    except LocationNotFoundError as e:
        logger.info("Location not found for deletion", extra={"uuid": e.uuid})
        return make_response(jsonify({"message": ERROR_LOCATION_NOT_FOUND}), 404)
    except Exception:
        logger.error("Error deleting location", exc_info=True)
        return make_response(jsonify({"message": ERROR_INTERNAL_ERROR}), 500)
    return "", 204


def _get_suggestions_handler(database):
    """Handle GET /suggestions request."""
    query_params = request.args.to_dict(flat=False)
    result = database.get_suggestions_paginated(query_params)
    return jsonify(result)


def _update_suggestion_handler(database, suggestion_id):
    """Handle PUT /suggestions/<suggestion_id> request."""
    try:
        data = request.get_json()
        status = data["status"]  # Validated by Spectree
        suggestion = database.get_suggestion(suggestion_id)
        if not suggestion:
            return make_response(jsonify({"message": "Suggestion not found"}), 404)
        if suggestion.get("status") != "pending":
            return make_response(jsonify({"message": "Suggestion already processed"}), 409)
        if status == "accepted":
            suggestion_data = {k: v for k, v in suggestion.items() if k != "status"}
            database.add_location(suggestion_data)
        database.update_suggestion(suggestion_id, status)
    except LocationValidationError as e:
        logger.warning(
            "Location validation failed in suggestion",
            extra={"uuid": e.uuid, "errors": e.validation_errors},
        )
        return make_response(jsonify({"message": ERROR_INVALID_LOCATION_DATA}), 400)
    except LocationAlreadyExistsError as e:
        logger.warning(
            "Attempted to create duplicate location from suggestion", extra={"uuid": e.uuid}
        )
        return make_response(jsonify({"message": "Location already exists"}), 409)
    except Exception:
        logger.error("Error processing suggestion", exc_info=True)
        return make_response(jsonify({"message": ERROR_INTERNAL_ERROR}), 500)
    return jsonify(database.get_suggestion(suggestion_id))


def _get_reports_handler(database):
    """Handle GET /reports request."""
    query_params = request.args.to_dict(flat=False)
    result = database.get_reports_paginated(query_params)
    return jsonify(result)


def _update_report_handler(database, report_id):
    """Handle PUT /reports/<report_id> request."""
    try:
        data = request.get_json()
        status = data.get("status")
        priority = data.get("priority")
        report = database.get_report(report_id)
        if not report:
            return make_response(jsonify({"message": "Report not found"}), 404)
        database.update_report(report_id, status=status, priority=priority)
    except BadRequest:
        logger.warning("Invalid JSON in report update endpoint")
        return make_response(jsonify({"message": ERROR_INVALID_REQUEST_DATA}), 400)
    except ReportNotFoundError as e:
        logger.info("Report not found for update", extra={"uuid": e.uuid})
        return make_response(jsonify({"message": "Report not found"}), 404)
    except Exception:
        logger.error("Error updating report", exc_info=True)
        return make_response(jsonify({"message": ERROR_INTERNAL_ERROR}), 500)
    return jsonify(database.get_report(report_id))


def admin_pages(database, location_model) -> Blueprint:
    """Create and return the admin API blueprint.

    Args:
        database: Database instance for data operations
        location_model: Pydantic model for location validation

    Returns:
        Blueprint: Flask blueprint with all admin endpoints
    """
    admin_api_blueprint = Blueprint("admin_api", __name__, url_prefix="/api/admin")

    spec = SpecTree(
        "flask",
        title="Goodmap Admin API",
        version="0.1",
        path="doc",
        annotations=True,
        naming_strategy=_clean_model_name,
    )

    @admin_api_blueprint.route("/locations", methods=["GET"])
    @spec.validate()
    def admin_get_locations():
        """Get paginated list of all locations for admin panel."""
        return _get_locations_handler(database)

    @admin_api_blueprint.route("/locations", methods=["POST"])
    @spec.validate(resp=Response(HTTP_400=ErrorResponse))
    def admin_create_location():
        """Create a new location (admin only)."""
        return _create_location_handler(database, location_model)

    @admin_api_blueprint.route("/locations/<location_id>", methods=["PUT"])
    @spec.validate(resp=Response(HTTP_400=ErrorResponse, HTTP_404=ErrorResponse))
    def admin_update_location(location_id):
        """Update an existing location (admin only)."""
        return _update_location_handler(database, location_model, location_id)

    @admin_api_blueprint.route("/locations/<location_id>", methods=["DELETE"])
    @spec.validate(resp=Response(HTTP_404=ErrorResponse))
    def admin_delete_location(location_id):
        """Delete a location (admin only)."""
        return _delete_location_handler(database, location_id)

    @admin_api_blueprint.route("/suggestions", methods=["GET"])
    @spec.validate()
    def admin_get_suggestions():
        """Get paginated list of location suggestions (admin only)."""
        return _get_suggestions_handler(database)

    @admin_api_blueprint.route("/suggestions/<suggestion_id>", methods=["PUT"])
    @spec.validate(
        json=SuggestionStatusRequest,
        resp=Response(HTTP_400=ErrorResponse, HTTP_404=ErrorResponse, HTTP_409=ErrorResponse),
    )
    def admin_update_suggestion(suggestion_id):
        """Accept or reject a location suggestion (admin only)."""
        return _update_suggestion_handler(database, suggestion_id)

    @admin_api_blueprint.route("/reports", methods=["GET"])
    @spec.validate()
    def admin_get_reports():
        """Get paginated list of location reports (admin only)."""
        return _get_reports_handler(database)

    @admin_api_blueprint.route("/reports/<report_id>", methods=["PUT"])
    @spec.validate(
        json=ReportUpdateRequest,
        resp=Response(HTTP_400=ErrorResponse, HTTP_404=ErrorResponse),
    )
    def admin_update_report(report_id):
        """Update a report's status and/or priority (admin only)."""
        return _update_report_handler(database, report_id)

    # Register Spectree with blueprint after all routes are defined
    spec.register(admin_api_blueprint)

    return admin_api_blueprint
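
For context, a minimal wiring sketch of how the blueprint returned by `admin_pages()` might be mounted on a Flask app. `FakeDb` and `Location` below are invented stand-ins for goodmap's real database backend and its dynamically built location model; they are not classes shipped by this package.

```python
from flask import Flask
from pydantic import BaseModel

from goodmap.admin_api import admin_pages


class Location(BaseModel):
    # Stand-in for the dynamically created location model.
    uuid: str
    name: str
    position: tuple[float, float]


class FakeDb:
    # Implements only what GET /api/admin/locations needs for this sketch.
    def get_locations_paginated(self, query_params):
        return {"items": [], "total": 0}


app = Flask(__name__)
app.register_blueprint(admin_pages(FakeDb(), Location))
# GET /api/admin/locations now answers with the fake paginated payload.
```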
goodmap-1.3.0/goodmap/api_models.py (new file)
@@ -0,0 +1,105 @@
"""Pydantic models for API request/response validation.

This module defines request and response models for the Goodmap REST API.
These models are used by Spectree for automatic OpenAPI schema generation
and request/response validation.
"""

from typing import Literal

from pydantic import BaseModel, Field


class LocationReportRequest(BaseModel):
    """Request model for reporting a location issue."""

    id: str = Field(..., description="Location UUID to report")
    description: str = Field(..., min_length=1, description="Description of the problem")


class LocationReportResponse(BaseModel):
    """Response model for location report submission."""

    message: str = Field(..., description="Success message")


class SuggestionStatusRequest(BaseModel):
    """Request model for updating suggestion status."""

    status: Literal["accepted", "rejected"] = Field(
        ..., description="Status to set for the suggestion"
    )


class ReportUpdateRequest(BaseModel):
    """Request model for updating a report's status and priority."""

    status: Literal["resolved", "rejected"] | None = Field(
        None, description="New status for the report"
    )
    priority: Literal["critical", "high", "medium", "low"] | None = Field(
        None, description="New priority for the report"
    )


class VersionResponse(BaseModel):
    """Response model for version endpoint."""

    backend: str = Field(..., description="Backend version")


class CSRFTokenResponse(BaseModel):
    """Response model for CSRF token endpoint (deprecated)."""

    csrf_token: str = Field(..., description="CSRF token")


class PaginationParams(BaseModel):
    """Common pagination and filtering parameters."""

    page: int | None = Field(None, ge=1, description="Page number (1-indexed)")
    per_page: int | None = Field(None, ge=1, le=100, description="Items per page")
    sort_by: str | None = Field(None, description="Field to sort by")
    sort_order: Literal["asc", "desc"] | None = Field(None, description="Sort direction")


class ClusteringParams(BaseModel):
    """Parameters for clustering request."""

    zoom: int = Field(7, ge=0, le=16, description="Map zoom level for clustering")


class ErrorResponse(BaseModel):
    """Standard error response."""

    message: str = Field(..., description="Error message")
    error: str | None = Field(None, description="Detailed error information")


class SuccessResponse(BaseModel):
    """Standard success response."""

    message: str = Field(..., description="Success message")


class BasicLocationInfo(BaseModel):
    """Basic location information (uuid + position)."""

    uuid: str = Field(..., description="Location UUID")
    position: tuple[float, float] = Field(
        ..., description="Location coordinates as (latitude, longitude)"
    )
    remark: bool = Field(False, description="Whether location has a remark")


class ClusterInfo(BaseModel):
    """Cluster information for map display."""

    uuid: str | None = Field(None, description="Location UUID (None for multi-point clusters)")
    position: tuple[float, float] = Field(..., description="Cluster center coordinates")
    count: int = Field(..., description="Number of locations in cluster")


# Note: Full location model is dynamically created from LocationBase
# and cannot be statically defined here. API endpoints will use the
# dynamically created location_model passed to core_pages() function.
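
A short usage sketch of the request models above; the payload values are made up, but the validation behaviour follows directly from the `Literal` and optional fields declared in the file.

```python
from pydantic import ValidationError

from goodmap.api_models import ReportUpdateRequest, SuggestionStatusRequest

# Both ReportUpdateRequest fields are optional, so a partial update validates.
update = ReportUpdateRequest.model_validate({"priority": "high"})
print(update.status, update.priority)  # None high

# SuggestionStatusRequest only accepts the two declared literal values.
SuggestionStatusRequest.model_validate({"status": "accepted"})
try:
    SuggestionStatusRequest.model_validate({"status": "maybe"})
except ValidationError as err:
    print("rejected with", err.error_count(), "validation error")
```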
goodmap-1.3.0/goodmap/clustering.py (new file)
@@ -0,0 +1,75 @@
import logging
import uuid

from scipy.spatial import KDTree

# Maximum distance to consider a point-cluster match (accounts for floating point errors)
DISTANCE_THRESHOLD = 1e-8

logger = logging.getLogger(__name__)


def map_clustering_data_to_proper_lazy_loading_object(input_array):
    response_array = []
    for item in input_array:
        if item["count"] == 1:
            response_object = {
                "position": [item["longitude"], item["latitude"]],
                "uuid": item["uuid"],
                "cluster_uuid": None,
                "cluster_count": None,
                "type": "point",
            }
            response_array.append(response_object)
            continue
        response_object = {
            "position": [item["longitude"], item["latitude"]],
            "uuid": None,
            "cluster_uuid": str(uuid.uuid4()),
            "cluster_count": item["count"],
            "type": "cluster",
        }
        response_array.append(response_object)
    return response_array


# Since there can be some floating point errors
# we need to check if the distance is close enough to 0
def match_clusters_uuids(points, clusters):
    """
    Match single-point clusters to their original point UUIDs.

    For clusters containing exactly one point, this function attempts to match the cluster
    coordinates back to the original point to retrieve its UUID. The 'uuid' key is optional
    and will only be present in single-point clusters where a matching point is found.

    Args:
        points: List of point dicts with 'position' and 'uuid' keys
        clusters: List of cluster dicts with 'longitude', 'latitude', and 'count' keys.
            For single-point clusters (count=1), a 'uuid' key will be added if a
            matching point is found (modified in place)

    Returns:
        The modified clusters list with 'uuid' keys added to matched single-point clusters
    """
    points_coords = [(point["position"][0], point["position"][1]) for point in points]
    tree = KDTree(points_coords)
    for cluster in clusters:
        if cluster["count"] == 1:
            cluster_coords = (cluster["longitude"], cluster["latitude"])
            dist, idx = tree.query(cluster_coords)
            if dist < DISTANCE_THRESHOLD:
                closest_point = points[idx]
                cluster["uuid"] = closest_point["uuid"]
            else:
                # Log warning when no match is found - indicates data inconsistency
                logger.warning(
                    "No matching UUID found for cluster at coordinates (%f, %f). "
                    "Distance to nearest point: %f (threshold: %f)",
                    cluster["longitude"],
                    cluster["latitude"],
                    dist,
                    DISTANCE_THRESHOLD,
                )
                cluster["uuid"] = None
    return clusters
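
A toy illustration of `match_clusters_uuids()` with invented coordinates and UUIDs. Note that the function compares `position[0]` against `longitude` and `position[1]` against `latitude`, so the point tuples below follow that ordering.

```python
from goodmap.clustering import match_clusters_uuids

points = [
    {"uuid": "a", "position": (21.01, 52.23)},
    {"uuid": "b", "position": (19.94, 50.06)},
]
clusters = [
    {"longitude": 21.01, "latitude": 52.23, "count": 1},  # exact match -> gets uuid "a"
    {"longitude": 20.50, "latitude": 51.10, "count": 3},  # multi-point cluster, left untouched
]
print(match_clusters_uuids(points, clusters))
```

Multi-point clusters keep no `uuid` key, which is why the function's docstring calls the key optional.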
goodmap-1.3.0/goodmap/config.py (new file)
@@ -0,0 +1,42 @@
import sys
import typing as t

import yaml
from platzky.config import Config as PlatzkyConfig
from pydantic import Field


class GoodmapConfig(PlatzkyConfig):
    """Extended configuration for Goodmap with additional frontend library URL."""

    goodmap_frontend_lib_url: str = Field(
        default="https://cdn.jsdelivr.net/npm/@problematy/goodmap@1.0.4",
        alias="GOODMAP_FRONTEND_LIB_URL",
    )

    @classmethod
    def model_validate(
        cls,
        obj: t.Any,
        *,
        strict: bool | None = None,
        from_attributes: bool | None = None,
        context: dict[str, t.Any] | None = None,
    ) -> "GoodmapConfig":
        """Override to return correct type for GoodmapConfig."""
        return t.cast(
            "GoodmapConfig",
            super().model_validate(
                obj, strict=strict, from_attributes=from_attributes, context=context
            ),
        )

    @classmethod
    def parse_yaml(cls, path: str) -> "GoodmapConfig":
        """Parse YAML configuration file and return GoodmapConfig instance."""
        try:
            with open(path, "r") as f:
                return cls.model_validate(yaml.safe_load(f))
        except FileNotFoundError:
            print(f"Config file not found: {path}", file=sys.stderr)
            raise SystemExit(1)
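
A call-pattern sketch for the new config class, reusing the `examples/e2e_test_config.yml` path referenced in the README diff above; the platzky-level settings are assumed to come from that YAML file rather than from this snippet.

```python
from goodmap.config import GoodmapConfig

# Loads the base platzky configuration plus the goodmap-specific field.
config = GoodmapConfig.parse_yaml("./examples/e2e_test_config.yml")

# Falls back to the CDN default unless GOODMAP_FRONTEND_LIB_URL is set in the YAML.
print(config.goodmap_frontend_lib_url)
```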
{goodmap-0.5.2 → goodmap-1.3.0}/goodmap/core.py
@@ -1,9 +1,20 @@
+"""Core data filtering and sorting utilities for location queries."""
+
 from typing import Any, Dict, List
 
 # TODO move filtering to db site
 
 
 def does_fulfill_requirement(entry, requirements):
+    """Check if an entry fulfills all category requirements.
+
+    Args:
+        entry: Location data entry to check
+        requirements: List of (category, values) tuples to match
+
+    Returns:
+        bool: True if entry matches all non-empty requirements
+    """
     matches = []
     for category, values in requirements:
         if not values:
@@ -13,6 +24,15 @@ def does_fulfill_requirement(entry, requirements):
 
 
 def sort_by_distance(data: List[Dict[str, Any]], query_params: Dict[str, List[str]]):
+    """Sort locations by distance from query coordinates.
+
+    Args:
+        data: List of location dictionaries
+        query_params: Query parameters containing 'lat' and 'lon'
+
+    Returns:
+        List[Dict[str, Any]]: Sorted data (or original if no coordinates provided)
+    """
     try:
         if "lat" in query_params and "lon" in query_params:
             lat = float(query_params["lat"][0])
@@ -25,6 +45,15 @@ def sort_by_distance(data: List[Dict[str, Any]], query_params: Dict[str, List[str]]):
 
 
 def limit(data, query_params):
+    """Limit number of results based on query parameter.
+
+    Args:
+        data: List of data to limit
+        query_params: Query parameters containing optional 'limit'
+
+    Returns:
+        Limited data (or original if no limit specified)
+    """
     try:
         if "limit" in query_params:
             limit = int(query_params["limit"][0])
@@ -36,6 +65,16 @@ def limit(data, query_params):
 
 
 def get_queried_data(all_data, categories, query_params):
+    """Filter, sort, and limit location data based on query parameters.
+
+    Args:
+        all_data: Complete list of location data
+        categories: Available categories for filtering
+        query_params: Query parameters for filtering, sorting, and limiting
+
+    Returns:
+        Filtered, sorted, and limited location data
+    """
     requirements = []
     for key in categories.keys():
         requirements.append((key, query_params.get(key)))