goodmap 1.1.7__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- goodmap/__init__.py +1 -0
- goodmap/clustering.py +75 -0
- goodmap/config.py +42 -0
- goodmap/core.py +46 -0
- goodmap/core_api.py +467 -0
- goodmap/data_models/location.py +68 -0
- goodmap/data_validator.py +119 -0
- goodmap/db.py +1466 -0
- goodmap/exceptions.py +100 -0
- goodmap/formatter.py +27 -0
- goodmap/goodmap.py +89 -0
- goodmap/templates/goodmap-admin.html +743 -0
- goodmap/templates/map.html +124 -0
- goodmap-1.1.7.dist-info/METADATA +142 -0
- goodmap-1.1.7.dist-info/RECORD +17 -0
- goodmap-1.1.7.dist-info/WHEEL +4 -0
- goodmap-1.1.7.dist-info/licenses/LICENSE.md +21 -0
goodmap/db.py
ADDED
|
@@ -0,0 +1,1466 @@
|
|
|
1
|
+
import json
|
|
2
|
+
import logging
|
|
3
|
+
import os
|
|
4
|
+
import tempfile
|
|
5
|
+
from functools import partial
|
|
6
|
+
from typing import Any
|
|
7
|
+
|
|
8
|
+
from goodmap.core import get_queried_data
|
|
9
|
+
from goodmap.data_models.location import LocationBase
|
|
10
|
+
from goodmap.exceptions import (
|
|
11
|
+
AlreadyExistsError,
|
|
12
|
+
LocationAlreadyExistsError,
|
|
13
|
+
LocationNotFoundError,
|
|
14
|
+
ReportNotFoundError,
|
|
15
|
+
)
|
|
16
|
+
|
|
17
|
+
logger = logging.getLogger(__name__)
|
|
18
|
+
|
|
19
|
+
# TODO file is temporary solution to be compatible with old, static code,
|
|
20
|
+
# it should be replaced with dynamic solution
|
|
21
|
+
|
|
22
|
+
|
|
23
|
+
def __parse_pagination_params(query):
    """Extract and validate pagination parameters from a multi-value query.

    Args:
        query: Mapping of parameter name to a list of string values.

    Returns:
        tuple: ``(page, per_page, sort_by, sort_order)`` where ``per_page``
        is ``None`` when the caller asked for all items ("all").
    """
    try:
        requested = int(query.get("page", ["1"])[0])
    except (ValueError, IndexError, TypeError):
        requested = 1
    page = requested if requested > 1 else 1

    per_page_values = query.get("per_page")
    per_page_raw = per_page_values[0] if per_page_values else "20"
    if per_page_raw == "all":
        per_page = None
    else:
        try:
            # Clamp into the inclusive range 1..1000.
            per_page = min(max(int(per_page_raw), 1), 1000)
        except (ValueError, TypeError):
            per_page = 20

    sort_by = query.get("sort_by", [None])[0]
    sort_order_values = query.get("sort_order")
    sort_order = sort_order_values[0] if sort_order_values else "asc"

    return page, per_page, sort_by, sort_order.lower()
|
|
43
|
+
|
|
44
|
+
|
|
45
|
+
def __build_pagination_response(items, total, page, per_page):
    """Wrap *items* in the standard pagination envelope.

    Args:
        items: The page of items to return.
        total: Total number of matching items across all pages.
        page: 1-based page number.
        per_page: Page size, or a falsy value meaning "everything at once".

    Returns:
        dict: ``{"items": ..., "pagination": {...}}``.
    """
    if not per_page:
        total_pages = 1
        per_page = total
    else:
        # Ceiling division without importing math.
        total_pages = -(-total // per_page)

    pagination = {
        "total": total,
        "page": page,
        "per_page": per_page,
        "total_pages": total_pages,
    }
    return {"items": items, "pagination": pagination}
|
|
62
|
+
|
|
63
|
+
|
|
64
|
+
def json_file_atomic_dump(data, file_path):
    """Atomically serialize *data* as JSON into *file_path*.

    Writes to a temporary file in the destination's directory (so the final
    ``os.replace`` is an atomic rename on the same filesystem), flushes and
    fsyncs it, then swaps it into place.

    Fix: the temporary file is now removed when serialization fails, instead
    of leaking a stray file next to the target.

    Args:
        data: Any JSON-serializable object.
        file_path: Destination path; its directory must exist and be writable.
    """
    dir_name = os.path.dirname(file_path)
    temp_file = tempfile.NamedTemporaryFile("w", dir=dir_name, delete=False)
    try:
        with temp_file:
            json.dump(data, temp_file)
            temp_file.flush()
            os.fsync(temp_file.fileno())
    except BaseException:
        # Don't leave a half-written temp file behind on failure.
        os.unlink(temp_file.name)
        raise
    os.replace(temp_file.name, file_path)
|
|
71
|
+
|
|
72
|
+
|
|
73
|
+
class PaginationHelper:
    """Shared filtering/sorting/pagination logic used by every backend."""

    @staticmethod
    def get_sort_key(item, sort_by):
        """Return a tuple key that sorts missing values before present ones.

        Handles both plain dicts and objects with attributes; "name" is
        special-cased for model objects exposing a ``name`` attribute.
        """
        if sort_by == "name" and hasattr(item, "name"):
            raw = item.name
        elif isinstance(item, dict):
            raw = item.get(sort_by)
        else:
            raw = getattr(item, sort_by, None)
        return (raw is not None, raw or "")

    @staticmethod
    def apply_pagination_and_sorting(items, page, per_page, sort_by, sort_order):
        """Sort *items* in place (when requested) and slice out one page.

        Returns:
            tuple: (items for the requested page, total item count).
        """
        if sort_by:
            items.sort(
                key=lambda entry: PaginationHelper.get_sort_key(entry, sort_by),
                reverse=(sort_order == "desc"),
            )

        total = len(items)
        if not per_page:
            return items, total

        offset = (page - 1) * per_page
        return items[offset : offset + per_page], total

    @staticmethod
    def apply_filters(items, filters):
        """Filter *items* by the "status" and "priority" entries of *filters*.

        An entry that is present but empty leaves the items untouched.
        """
        result = items
        for field in ("status", "priority"):
            allowed = filters.get(field)
            if field in filters and allowed:
                result = [
                    entry
                    for entry in result
                    if (
                        entry.get(field)
                        if isinstance(entry, dict)
                        else getattr(entry, field, None)
                    )
                    in allowed
                ]
        return result

    @staticmethod
    def serialize_items(items):
        """Dump pydantic-style models to plain dicts; pass plain items through."""
        if items and hasattr(items[0], "model_dump"):
            return [entry.model_dump() for entry in items]
        return items

    @staticmethod
    def create_paginated_response(items, query, extract_filters_func=None):
        """Filter, sort, paginate and serialize *items* into the full envelope.

        NOTE: pagination parsing is duplicated from the module-level helper
        because that helper's double-underscore name would be mangled if
        referenced from inside this class body.
        """
        try:
            page = max(1, int(query.get("page", ["1"])[0]))
        except (ValueError, IndexError, TypeError):
            page = 1

        per_page_values = query.get("per_page")
        per_page_raw = per_page_values[0] if per_page_values else "20"
        if per_page_raw == "all":
            per_page = None
        else:
            try:
                # Clamp to at least 1, hard-capped at 1000.
                per_page = max(1, min(int(per_page_raw), 1000))
            except (ValueError, TypeError):
                per_page = 20

        sort_by = query.get("sort_by", [None])[0]
        sort_order_values = query.get("sort_order")
        sort_order = (sort_order_values[0] if sort_order_values else "asc").lower()

        # Collect the built-in filters plus any caller-supplied extras.
        filters = {}
        if query:
            for field in ("status", "priority"):
                if field in query:
                    filters[field] = query[field]
        if extract_filters_func:
            filters.update(extract_filters_func(query))
        if filters:
            items = PaginationHelper.apply_filters(items, filters)

        page_items, total = PaginationHelper.apply_pagination_and_sorting(
            items, page, per_page, sort_by, sort_order
        )
        serialized = PaginationHelper.serialize_items(page_items)

        if per_page:
            total_pages = -(-total // per_page)  # ceiling division
        else:
            total_pages = 1
            per_page = total

        return {
            "items": serialized,
            "pagination": {
                "total": total,
                "page": page,
                "per_page": per_page,
                "total_pages": total_pages,
            },
        }
|
|
216
|
+
|
|
217
|
+
|
|
218
|
+
class FileIOHelper:
    """Shared JSON file I/O helpers used by the file-backed database."""

    @staticmethod
    def read_json_file(file_path):
        """Load and return the parsed contents of a JSON file."""
        with open(file_path, "r") as handle:
            return json.load(handle)

    @staticmethod
    def write_json_file_atomic(data, file_path):
        """Persist *data* to *file_path* through the atomic-dump helper."""
        json_file_atomic_dump(data, file_path)

    @staticmethod
    def get_data_from_file(file_path, data_key="map"):
        """Return the *data_key* section of a JSON file (empty dict if absent)."""
        return FileIOHelper.read_json_file(file_path).get(data_key, {})
|
|
237
|
+
|
|
238
|
+
|
|
239
|
+
class CRUDHelper:
    """Common CRUD operation utilities shared by the storage backends."""

    @staticmethod
    def _find_by_uuid(collection, uuid):
        """Return the first item in *collection* with a matching uuid, else None.

        Works for both dict items and objects exposing a ``uuid`` attribute.
        Extracted here because the same lookup was duplicated verbatim in the
        in-memory and file-backed add methods.
        """
        for item in collection:
            item_uuid = (
                item.get("uuid") if isinstance(item, dict) else getattr(item, "uuid", None)
            )
            if item_uuid == uuid:
                return item
        return None

    @staticmethod
    def _prepare_record(item_data, default_status):
        """Copy *item_data*, stamping *default_status* onto it when provided."""
        record = dict(item_data)
        if default_status:
            record["status"] = default_status
        return record

    @staticmethod
    def add_item_to_json_db(db_data, collection_name, item_data, default_status=None):
        """Append *item_data* to the in-memory JSON db, rejecting duplicate uuids.

        Raises:
            AlreadyExistsError: if an item with the same uuid already exists.
        """
        collection = db_data.setdefault(collection_name, [])
        uuid = item_data.get("uuid")
        if CRUDHelper._find_by_uuid(collection, uuid) is not None:
            # e.g. "locations" -> "Location" for the error message
            raise AlreadyExistsError(uuid, collection_name.rstrip("s").capitalize())
        collection.append(CRUDHelper._prepare_record(item_data, default_status))

    @staticmethod
    def add_item_to_json_file_db(file_path, collection_name, item_data, default_status=None):
        """Append *item_data* to the JSON-file db and write it back atomically.

        Raises:
            AlreadyExistsError: if an item with the same uuid already exists.
        """
        json_file = FileIOHelper.read_json_file(file_path)
        collection = json_file["map"].get(collection_name, [])
        uuid = item_data.get("uuid")
        if CRUDHelper._find_by_uuid(collection, uuid) is not None:
            raise AlreadyExistsError(uuid, collection_name.rstrip("s").capitalize())
        collection.append(CRUDHelper._prepare_record(item_data, default_status))
        json_file["map"][collection_name] = collection
        FileIOHelper.write_json_file_atomic(json_file, file_path)

    @staticmethod
    def add_item_to_mongodb(db_collection, item_data, item_type, default_status=None):
        """Insert *item_data* into a MongoDB collection, rejecting duplicate uuids.

        Raises:
            AlreadyExistsError: if a document with the same uuid already exists.
        """
        uuid = item_data.get("uuid")
        if db_collection.find_one({"uuid": uuid}):
            raise AlreadyExistsError(uuid, item_type)
        db_collection.insert_one(CRUDHelper._prepare_record(item_data, default_status))
|
|
308
|
+
|
|
309
|
+
|
|
310
|
+
# ------------------------------------------------
|
|
311
|
+
# get_location_obligatory_fields
|
|
312
|
+
|
|
313
|
+
|
|
314
|
+
def json_db_get_location_obligatory_fields(db):
    """Return the obligatory location fields from the in-memory JSON db."""
    return db.data["location_obligatory_fields"]


def json_file_db_get_location_obligatory_fields(db):
    """Read the data file and return its obligatory location fields."""
    with open(db.data_file_path, "r") as handle:
        content = json.load(handle)
    return content["map"]["location_obligatory_fields"]


def google_json_db_get_location_obligatory_fields(db):
    """Return the obligatory location fields from the GCS JSON blob."""
    return db.data.get("map", {}).get("location_obligatory_fields", [])


def mongodb_db_get_location_obligatory_fields(db):
    """Return the obligatory location fields from the MongoDB config document."""
    config_doc = db.db.config.find_one({"_id": "map_config"})
    if not config_doc:
        return []
    return config_doc.get("location_obligatory_fields", [])


def get_location_obligatory_fields(db):
    """Dispatch to the backend-specific implementation and return its result."""
    return globals()[f"{db.module_name}_get_location_obligatory_fields"](db)
|
|
336
|
+
|
|
337
|
+
|
|
338
|
+
# ------------------------------------------------
|
|
339
|
+
# get_data
|
|
340
|
+
|
|
341
|
+
|
|
342
|
+
def google_json_db_get_data(self):
    """Return the whole "map" section of the GCS JSON blob."""
    return self.data.get("map", {})


def json_file_db_get_data(self):
    """Read the JSON data file and return its "map" section."""
    with open(self.data_file_path, "r") as handle:
        return json.load(handle)["map"]


def json_db_get_data(self):
    """Return the in-memory JSON data as-is."""
    return self.data


def mongodb_db_get_data(self):
    """Assemble a map-shaped payload from the MongoDB collections.

    Combines all location documents with the config document; falls back to
    an all-empty payload when no config document exists.
    """
    config_doc = self.db.config.find_one({"_id": "map_config"})
    if not config_doc:
        return {
            "data": [],
            "categories": {},
            "location_obligatory_fields": [],
            "visible_data": {},
            "meta_data": {},
        }
    return {
        "data": list(self.db.locations.find({}, {"_id": 0})),
        "categories": config_doc.get("categories", {}),
        "location_obligatory_fields": config_doc.get("location_obligatory_fields", []),
        # Backward-compat keys expected by core_api today
        "visible_data": config_doc.get("visible_data", {}),
        "meta_data": config_doc.get("meta_data", {}),
    }


def get_data(db):
    """Return the backend-specific get_data function (not its result)."""
    return globals()[f"{db.module_name}_get_data"]
|
|
377
|
+
|
|
378
|
+
|
|
379
|
+
# ------------------------------------------------
|
|
380
|
+
# get_visible_data
|
|
381
|
+
|
|
382
|
+
|
|
383
|
+
def google_json_db_get_visible_data(self) -> dict[str, Any]:
    """Return the field-visibility config from the GCS JSON blob.

    Returns:
        dict: field visibility configuration; empty dict if absent.
    """
    return self.data.get("map", {}).get("visible_data", {})


def json_file_db_get_visible_data(self) -> dict[str, Any]:
    """Return the field-visibility config from the JSON file database.

    Fix: reads the backing file on every call, like every other
    ``json_file_db_*`` accessor in this module — the previous implementation
    read ``self.data``, which the file backend does not keep in memory.

    Returns:
        dict: field visibility configuration; empty dict if absent.
    """
    with open(self.data_file_path, "r") as handle:
        return json.load(handle).get("map", {}).get("visible_data", {})


def json_db_get_visible_data(self) -> dict[str, Any]:
    """Return the field-visibility config from the in-memory JSON database.

    Returns:
        dict: field visibility configuration; empty dict if absent.
    """
    return self.data.get("visible_data", {})


def mongodb_db_get_visible_data(self) -> dict[str, Any]:
    """Return the field-visibility config from the MongoDB config document.

    Returns:
        dict: field visibility configuration; empty dict if the config
        document or field is missing.
    """
    config_doc = self.db.config.find_one({"_id": "map_config"})
    return config_doc.get("visible_data", {}) if config_doc else {}


def get_visible_data(db):
    """Return the backend-specific get_visible_data function.

    Args:
        db: Database instance (must have a ``module_name`` attribute).
    """
    return globals()[f"{db.module_name}_get_visible_data"]
|
|
445
|
+
|
|
446
|
+
|
|
447
|
+
# ------------------------------------------------
|
|
448
|
+
# get_meta_data
|
|
449
|
+
|
|
450
|
+
|
|
451
|
+
def google_json_db_get_meta_data(self) -> dict[str, Any]:
    """Return the metadata config from the GCS JSON blob.

    Returns:
        dict: metadata configuration; empty dict if absent.
    """
    return self.data.get("map", {}).get("meta_data", {})


def json_file_db_get_meta_data(self) -> dict[str, Any]:
    """Return the metadata config from the JSON file database.

    Fix: reads the backing file on every call, like every other
    ``json_file_db_*`` accessor in this module — the previous implementation
    read ``self.data``, which the file backend does not keep in memory.

    Returns:
        dict: metadata configuration; empty dict if absent.
    """
    with open(self.data_file_path, "r") as handle:
        return json.load(handle).get("map", {}).get("meta_data", {})


def json_db_get_meta_data(self) -> dict[str, Any]:
    """Return the metadata config from the in-memory JSON database.

    Returns:
        dict: metadata configuration; empty dict if absent.
    """
    return self.data.get("meta_data", {})


def mongodb_db_get_meta_data(self) -> dict[str, Any]:
    """Return the metadata config from the MongoDB config document.

    Returns:
        dict: metadata configuration; empty dict if the config document or
        field is missing.
    """
    config_doc = self.db.config.find_one({"_id": "map_config"})
    return config_doc.get("meta_data", {}) if config_doc else {}


def get_meta_data(db):
    """Return the backend-specific get_meta_data function.

    Args:
        db: Database instance (must have a ``module_name`` attribute).
    """
    return globals()[f"{db.module_name}_get_meta_data"]
|
|
513
|
+
|
|
514
|
+
|
|
515
|
+
# ------------------------------------------------
|
|
516
|
+
# get_categories
|
|
517
|
+
|
|
518
|
+
|
|
519
|
+
def json_db_get_categories(self):
    """Return the category names from the in-memory db (dict keys view)."""
    return self.data["categories"].keys()


def json_file_db_get_categories(self):
    """Read the data file and return its category names (dict keys view)."""
    with open(self.data_file_path, "r") as handle:
        return json.load(handle)["map"]["categories"].keys()


def google_json_db_get_categories(self):
    """Return the category names from the GCS JSON blob (dict keys view)."""
    return self.data.get("map", {}).get("categories", {}).keys()


def mongodb_db_get_categories(self):
    """Return the category names from the MongoDB config document as a list."""
    config_doc = self.db.config.find_one({"_id": "map_config"})
    if not (config_doc and "categories" in config_doc):
        return []
    return list(config_doc["categories"])


def get_categories(db):
    """Return the backend-specific get_categories function."""
    return globals()[f"{db.module_name}_get_categories"]
|
|
541
|
+
|
|
542
|
+
|
|
543
|
+
# ------------------------------------------------
|
|
544
|
+
# get_category_data
|
|
545
|
+
|
|
546
|
+
|
|
547
|
+
def _category_payload_from(data, category_type):
    """Build the categories payload dict from a map-shaped *data* mapping.

    Deduplicates the payload shape that was previously copy-pasted into
    each backend, and uses ``.get`` defaults consistently, so a database
    without a "categories" key no longer raises KeyError in the json/file
    backends (the google backend already tolerated it).
    """
    if category_type:
        return {
            "categories": {category_type: data.get("categories", {}).get(category_type, [])},
            "categories_help": data.get("categories_help", []),
            "categories_options_help": {
                category_type: data.get("categories_options_help", {}).get(category_type, [])
            },
        }
    return {
        "categories": data.get("categories", {}),
        "categories_help": data.get("categories_help", []),
        "categories_options_help": data.get("categories_options_help", {}),
    }


def json_db_get_category_data(self, category_type=None):
    """Return category config from the in-memory db (optionally one category)."""
    return _category_payload_from(self.data, category_type)


def json_file_db_get_category_data(self, category_type=None):
    """Read the data file and return its category config (optionally one category)."""
    with open(self.data_file_path, "r") as handle:
        return _category_payload_from(json.load(handle)["map"], category_type)


def google_json_db_get_category_data(self, category_type=None):
    """Return category config from the GCS JSON blob (optionally one category)."""
    return _category_payload_from(self.data.get("map", {}), category_type)


def mongodb_db_get_category_data(self, category_type=None):
    """Return category config from the MongoDB config document (optionally one category)."""
    config_doc = self.db.config.find_one({"_id": "map_config"})
    if not config_doc:
        return {"categories": {}, "categories_help": [], "categories_options_help": {}}
    return _category_payload_from(config_doc, category_type)


def get_category_data(db):
    """Return the backend-specific get_category_data function."""
    return globals()[f"{db.module_name}_get_category_data"]
|
|
623
|
+
|
|
624
|
+
|
|
625
|
+
# ------------------------------------------------
|
|
626
|
+
# get_location
|
|
627
|
+
|
|
628
|
+
|
|
629
|
+
def get_location_from_raw_data(raw_data, uuid, location_model):
    """Find the point with *uuid* in raw map data and validate it, or None."""
    for point in raw_data["data"]:
        if point["uuid"] == uuid:
            return location_model.model_validate(point)
    return None


def google_json_db_get_location(self, uuid, location_model):
    """Look up a location by uuid in the GCS JSON blob."""
    return get_location_from_raw_data(self.data.get("map", {}), uuid, location_model)


def json_file_db_get_location(self, uuid, location_model):
    """Look up a location by uuid by reading the JSON data file."""
    with open(self.data_file_path, "r") as handle:
        raw = json.load(handle)["map"]
    return get_location_from_raw_data(raw, uuid, location_model)


def json_db_get_location(self, uuid, location_model):
    """Look up a location by uuid in the in-memory db."""
    return get_location_from_raw_data(self.data, uuid, location_model)


def mongodb_db_get_location(self, uuid, location_model):
    """Look up a location document by uuid in MongoDB."""
    doc = self.db.locations.find_one({"uuid": uuid}, {"_id": 0})
    return location_model.model_validate(doc) if doc else None


def get_location(db, location_model):
    """Bind *location_model* into the backend-specific get_location function."""
    return partial(globals()[f"{db.module_name}_get_location"], location_model=location_model)
|
|
655
|
+
|
|
656
|
+
|
|
657
|
+
# ------------------------------------------------
|
|
658
|
+
# get_locations
|
|
659
|
+
|
|
660
|
+
|
|
661
|
+
def get_locations_list_from_raw_data(map_data, query, location_model):
    """Filter raw map points by *query* and validate each into *location_model*."""
    matching = get_queried_data(map_data["data"], map_data["categories"], query)
    return [location_model.model_validate(point) for point in matching]


def google_json_db_get_locations(self, query, location_model):
    """Return queried locations from the GCS JSON blob."""
    return get_locations_list_from_raw_data(self.data.get("map", {}), query, location_model)


def json_file_db_get_locations(self, query, location_model):
    """Return queried locations by reading the JSON data file."""
    with open(self.data_file_path, "r") as handle:
        raw = json.load(handle)["map"]
    return get_locations_list_from_raw_data(raw, query, location_model)


def json_db_get_locations(self, query, location_model):
    """Return queried locations from the in-memory db."""
    return get_locations_list_from_raw_data(self.data, query, location_model)
|
|
677
|
+
|
|
678
|
+
|
|
679
|
+
def mongodb_db_get_locations(self, query, location_model):
    """Return queried locations from MongoDB as a generator of models.

    Fix: validates with the *location_model* parameter instead of the
    hard-coded ``LocationBase``, matching every other backend and the
    ``mongodb_db_get_location`` accessor.

    Args:
        query: Mapping of field name to list of allowed values; empty
            lists are skipped.
        location_model: Pydantic-style model with ``model_validate``.
    """
    mongo_query = {}
    for key, values in query.items():
        if values:
            mongo_query[key] = {"$in": values}

    # NOTE(review): projection keeps only uuid/position/remark — extended
    # location models relying on extra fields may need a wider projection.
    projection = {"_id": 0, "uuid": 1, "position": 1, "remark": 1}
    cursor = self.db.locations.find(mongo_query, projection)
    return (location_model.model_validate(doc) for doc in cursor)
|
|
688
|
+
|
|
689
|
+
|
|
690
|
+
def get_locations(db, location_model):
    """Bind *location_model* into the backend-specific get_locations function."""
    backend_fn = globals()[f"{db.module_name}_get_locations"]
    return partial(backend_fn, location_model=location_model)
|
|
692
|
+
|
|
693
|
+
|
|
694
|
+
def google_json_db_get_locations_paginated(self, query, location_model):
    """Paginated location listing for the GCS JSON backend."""
    raw = self.data.get("map", {})
    matches = list(get_locations_list_from_raw_data(raw, query, location_model))
    return PaginationHelper.create_paginated_response(matches, query)


def json_db_get_locations_paginated(self, query, location_model):
    """Paginated location listing for the in-memory JSON backend."""
    matches = list(get_locations_list_from_raw_data(self.data, query, location_model))
    return PaginationHelper.create_paginated_response(matches, query)


def json_file_db_get_locations_paginated(self, query, location_model):
    """Paginated location listing for the JSON-file backend."""
    raw = FileIOHelper.get_data_from_file(self.data_file_path)
    matches = list(get_locations_list_from_raw_data(raw, query, location_model))
    return PaginationHelper.create_paginated_response(matches, query)
|
|
715
|
+
|
|
716
|
+
|
|
717
|
+
def mongodb_db_get_locations_paginated(self, query, location_model):
    """Paginated location listing for MongoDB, pushed down into an aggregation."""
    page, per_page, sort_by, sort_order = __parse_pagination_params(query)

    # Translate the multi-value query into a $in match per non-empty field.
    mongo_query = {key: {"$in": values} for key, values in query.items() if values}

    total_count = self.db.locations.count_documents(mongo_query)

    # Build the aggregation pipeline: match, optional sort, optional page slice.
    pipeline = [{"$match": mongo_query}]
    if sort_by:
        pipeline.append({"$sort": {sort_by: -1 if sort_order == "desc" else 1}})
    if per_page:
        pipeline.append({"$skip": (page - 1) * per_page})
        pipeline.append({"$limit": per_page})
    # Strip the MongoDB-internal _id field from the output.
    pipeline.append({"$project": {"_id": 0}})

    validated = [
        location_model.model_validate(doc) for doc in self.db.locations.aggregate(pipeline)
    ]

    # Dump model objects to plain dicts for the response payload.
    if validated and hasattr(validated[0], "model_dump"):
        payload = [entry.model_dump() for entry in validated]
    else:
        payload = validated

    return __build_pagination_response(payload, total_count, page, per_page)
|
|
756
|
+
|
|
757
|
+
|
|
758
|
+
def get_locations_paginated(db, location_model):
    """Bind *location_model* into the backend-specific paginated listing function."""
    backend_fn = globals()[f"{db.module_name}_get_locations_paginated"]
    return partial(backend_fn, location_model=location_model)
|
|
762
|
+
|
|
763
|
+
|
|
764
|
+
# ------------------------------------------------
|
|
765
|
+
# add_location
|
|
766
|
+
|
|
767
|
+
|
|
768
|
+
def json_file_db_add_location(self, location_data, location_model):
    """Validate and append a new location to the JSON data file atomically.

    Raises:
        LocationAlreadyExistsError: if the uuid is already present.
    """
    location = location_model.model_validate(location_data)
    with open(self.data_file_path, "r") as handle:
        json_file = json.load(handle)

    map_data = json_file["map"].get("data", [])
    for point in map_data:
        if point.get("uuid") == location_data["uuid"]:
            raise LocationAlreadyExistsError(location_data["uuid"])

    map_data.append(location.model_dump())
    json_file["map"]["data"] = map_data

    json_file_atomic_dump(json_file, self.data_file_path)
|
|
784
|
+
|
|
785
|
+
|
|
786
|
+
def json_db_add_location(self, location_data, location_model):
    """Validate and append a new location to the in-memory db.

    Fix: uses ``setdefault`` so an empty database (no "data" key yet)
    works — the previous code searched with ``.get("data", [])`` but then
    appended to ``self.data["data"]``, raising KeyError on a fresh db.

    Raises:
        LocationAlreadyExistsError: if the uuid is already present.
    """
    location = location_model.model_validate(location_data)
    points = self.data.setdefault("data", [])
    if any(point.get("uuid") == location_data["uuid"] for point in points):
        raise LocationAlreadyExistsError(location_data["uuid"])
    points.append(location.model_dump())
|
|
799
|
+
|
|
800
|
+
|
|
801
|
+
def mongodb_db_add_location(self, location_data, location_model):
|
|
802
|
+
location = location_model.model_validate(location_data)
|
|
803
|
+
existing = self.db.locations.find_one({"uuid": location_data["uuid"]})
|
|
804
|
+
if existing:
|
|
805
|
+
raise LocationAlreadyExistsError(location_data["uuid"])
|
|
806
|
+
self.db.locations.insert_one(location.model_dump())
|
|
807
|
+
|
|
808
|
+
|
|
809
|
+
def add_location(db, location_data, location_model):
|
|
810
|
+
return globals()[f"{db.module_name}_add_location"](db, location_data, location_model)
|
|
811
|
+
|
|
812
|
+
|
|
813
|
+
# ------------------------------------------------
|
|
814
|
+
# update_location
|
|
815
|
+
|
|
816
|
+
|
|
817
|
+
def json_file_db_update_location(self, uuid, location_data, location_model):
    """Replace the location identified by *uuid* in the JSON file backend.

    Raises:
        LocationNotFoundError: if no point with that uuid exists.
    """
    validated = location_model.model_validate(location_data)
    with open(self.data_file_path, "r") as handle:
        document = json.load(handle)

    points = document["map"].get("data", [])
    position = None
    for i, point in enumerate(points):
        if point.get("uuid") == uuid:
            position = i
            break
    if position is None:
        raise LocationNotFoundError(uuid)

    points[position] = validated.model_dump()
    document["map"]["data"] = points

    json_file_atomic_dump(document, self.data_file_path)


def json_db_update_location(self, uuid, location_data, location_model):
    """Replace the location identified by *uuid* in the in-memory backend.

    Raises:
        LocationNotFoundError: if no point with that uuid exists.
    """
    validated = location_model.model_validate(location_data)
    for i, point in enumerate(self.data.get("data", [])):
        if point.get("uuid") == uuid:
            self.data["data"][i] = validated.model_dump()
            return
    raise LocationNotFoundError(uuid)


def mongodb_db_update_location(self, uuid, location_data, location_model):
    """Replace the location identified by *uuid* in the MongoDB backend.

    Raises:
        LocationNotFoundError: if no document with that uuid exists.
    """
    validated = location_model.model_validate(location_data)
    outcome = self.db.locations.update_one({"uuid": uuid}, {"$set": validated.model_dump()})
    if outcome.matched_count == 0:
        raise LocationNotFoundError(uuid)


def update_location(db, uuid, location_data, location_model):
    """Dispatch update-location to the backend implementation for *db*."""
    handler = globals()["{}_update_location".format(db.module_name)]
    return handler(db, uuid, location_data, location_model)
|
|
852
|
+
|
|
853
|
+
|
|
854
|
+
# ------------------------------------------------
|
|
855
|
+
# delete_location
|
|
856
|
+
|
|
857
|
+
|
|
858
|
+
def json_file_db_delete_location(self, uuid):
    """Remove the location identified by *uuid* from the JSON file backend.

    Raises:
        LocationNotFoundError: if no point with that uuid exists.
    """
    with open(self.data_file_path, "r") as handle:
        document = json.load(handle)

    points = document["map"].get("data", [])
    position = next((i for i, p in enumerate(points) if p.get("uuid") == uuid), None)
    if position is None:
        raise LocationNotFoundError(uuid)

    points.pop(position)
    document["map"]["data"] = points

    json_file_atomic_dump(document, self.data_file_path)


def json_db_delete_location(self, uuid):
    """Remove the location identified by *uuid* from the in-memory backend.

    Raises:
        LocationNotFoundError: if no point with that uuid exists.
    """
    for i, point in enumerate(self.data.get("data", [])):
        if point.get("uuid") == uuid:
            del self.data["data"][i]
            return
    raise LocationNotFoundError(uuid)


def mongodb_db_delete_location(self, uuid):
    """Remove the location identified by *uuid* from the MongoDB backend.

    Raises:
        LocationNotFoundError: if no document with that uuid exists.
    """
    outcome = self.db.locations.delete_one({"uuid": uuid})
    if not outcome.deleted_count:
        raise LocationNotFoundError(uuid)


def delete_location(db, uuid):
    """Dispatch delete-location to the backend implementation for *db*."""
    return globals()["{}_delete_location".format(db.module_name)](db, uuid)
|
|
890
|
+
|
|
891
|
+
|
|
892
|
+
# ------------------------------------------------
|
|
893
|
+
# add_suggestion
|
|
894
|
+
|
|
895
|
+
|
|
896
|
+
def json_db_add_suggestion(self, suggestion_data):
    """Store a new suggestion (initial status "pending") in the in-memory backend."""
    CRUDHelper.add_item_to_json_db(self.data, "suggestions", suggestion_data, "pending")


def json_file_db_add_suggestion(self, suggestion_data):
    """Store a new suggestion (initial status "pending") in the JSON file backend."""
    CRUDHelper.add_item_to_json_file_db(
        self.data_file_path, "suggestions", suggestion_data, "pending"
    )


def mongodb_db_add_suggestion(self, suggestion_data):
    """Store a new suggestion (initial status "pending") in the MongoDB backend."""
    CRUDHelper.add_item_to_mongodb(self.db.suggestions, suggestion_data, "Suggestion", "pending")


def google_json_db_add_suggestion(self, suggestion_data):
    """No-op: the Google JSON backend is read-only.

    Temporary workaround — a full implementation would write the suggestion
    back to Google Cloud Storage.
    """
    pass


def add_suggestion(db, suggestion_data):
    """Dispatch add-suggestion to the backend implementation for *db*."""
    handler = globals()["{}_add_suggestion".format(db.module_name)]
    return handler(db, suggestion_data)
|
|
918
|
+
|
|
919
|
+
|
|
920
|
+
# ------------------------------------------------
|
|
921
|
+
# get_suggestions
|
|
922
|
+
|
|
923
|
+
|
|
924
|
+
def json_db_get_suggestions(self, query_params):
    """Return suggestions from the in-memory backend, optionally filtered by status."""
    items = self.data.get("suggestions", [])
    wanted_statuses = query_params.get("status")
    if wanted_statuses:
        items = [item for item in items if item.get("status") in wanted_statuses]
    return items


def json_db_get_suggestions_paginated(self, query):
    """Paginated suggestions from the in-memory backend."""
    return PaginationHelper.create_paginated_response(self.data.get("suggestions", []), query)


def json_file_db_get_suggestions(self, query_params):
    """Return suggestions from the JSON file backend, optionally filtered by status."""
    with open(self.data_file_path, "r") as handle:
        document = json.load(handle)

    items = document["map"].get("suggestions", [])
    wanted_statuses = query_params.get("status")
    if wanted_statuses:
        items = [item for item in items if item.get("status") in wanted_statuses]
    return items


def json_file_db_get_suggestions_paginated(self, query):
    """Paginated suggestions from the JSON file backend."""
    with open(self.data_file_path, "r") as handle:
        document = json.load(handle)
    return PaginationHelper.create_paginated_response(
        document["map"].get("suggestions", []), query
    )


def mongodb_db_get_suggestions(self, query_params):
    """Return suggestions from MongoDB, optionally filtered by status."""
    criteria = {}
    wanted_statuses = query_params.get("status")
    if wanted_statuses:
        criteria["status"] = {"$in": wanted_statuses}
    return list(self.db.suggestions.find(criteria, {"_id": 0}))


def mongodb_db_get_suggestions_paginated(self, query):
    """Paginated suggestions from MongoDB with optional status filter and sorting."""
    page, per_page, sort_by, sort_order = __parse_pagination_params(query)

    criteria = {}
    wanted_statuses = query.get("status")
    if wanted_statuses:
        criteria["status"] = {"$in": wanted_statuses}

    total_count = self.db.suggestions.count_documents(criteria)

    stages = [{"$match": criteria}]
    if sort_by:
        stages.append({"$sort": {sort_by: -1 if sort_order == "desc" else 1}})
    if per_page:
        # Skip/limit only when a page size was requested.
        stages.append({"$skip": (page - 1) * per_page})  # type: ignore
        stages.append({"$limit": per_page})  # type: ignore
    # Strip the MongoDB-internal _id field from results.
    stages.append({"$project": {"_id": 0}})

    items = list(self.db.suggestions.aggregate(stages))
    return __build_pagination_response(items, total_count, page, per_page)


def google_json_db_get_suggestions(self, query_params):
    """Read-only backend: suggestions are not stored, so always empty."""
    return []


def google_json_db_get_suggestions_paginated(self, query):
    """Read-only backend: paginated empty suggestion list."""
    return PaginationHelper.create_paginated_response([], query)


def get_suggestions(db):
    """Resolve the backend-specific get_suggestions function for *db*."""
    return globals()["{}_get_suggestions".format(db.module_name)]


def get_suggestions_paginated(db):
    """Resolve the backend-specific paginated get_suggestions function for *db*."""
    return globals()["{}_get_suggestions_paginated".format(db.module_name)]
|
|
1022
|
+
|
|
1023
|
+
|
|
1024
|
+
# ------------------------------------------------
|
|
1025
|
+
# get_suggestion
|
|
1026
|
+
|
|
1027
|
+
|
|
1028
|
+
def json_db_get_suggestion(self, suggestion_id):
    """Return the suggestion with *suggestion_id* from memory, or None."""
    for item in self.data.get("suggestions", []):
        if item.get("uuid") == suggestion_id:
            return item
    return None


def json_file_db_get_suggestion(self, suggestion_id):
    """Return the suggestion with *suggestion_id* from the JSON file, or None."""
    with open(self.data_file_path, "r") as handle:
        document = json.load(handle)
    for item in document["map"].get("suggestions", []):
        if item.get("uuid") == suggestion_id:
            return item
    return None


def mongodb_db_get_suggestion(self, suggestion_id):
    """Return the suggestion with *suggestion_id* from MongoDB, or None."""
    return self.db.suggestions.find_one({"uuid": suggestion_id}, {"_id": 0})


def google_json_db_get_suggestion(self, suggestion_id):
    """Read-only backend: suggestions are never stored, so always None."""
    return None


def get_suggestion(db):
    """Resolve the backend-specific get_suggestion function for *db*."""
    return globals()["{}_get_suggestion".format(db.module_name)]
|
|
1053
|
+
|
|
1054
|
+
|
|
1055
|
+
# ------------------------------------------------
|
|
1056
|
+
# update_suggestion
|
|
1057
|
+
|
|
1058
|
+
|
|
1059
|
+
def json_db_update_suggestion(self, suggestion_id, status):
    """Set the status of the suggestion with *suggestion_id* in memory.

    Raises:
        ValueError: if no suggestion with that uuid exists.
    """
    for item in self.data.get("suggestions", []):
        if item.get("uuid") == suggestion_id:
            item["status"] = status
            return
    raise ValueError(f"Suggestion with uuid {suggestion_id} not found")


def json_file_db_update_suggestion(self, suggestion_id, status):
    """Set the status of the suggestion with *suggestion_id* in the JSON file.

    Raises:
        ValueError: if no suggestion with that uuid exists.
    """
    with open(self.data_file_path, "r") as handle:
        document = json.load(handle)

    items = document["map"].get("suggestions", [])
    target = next((item for item in items if item.get("uuid") == suggestion_id), None)
    if target is None:
        raise ValueError(f"Suggestion with uuid {suggestion_id} not found")
    target["status"] = status

    document["map"]["suggestions"] = items
    json_file_atomic_dump(document, self.data_file_path)


def mongodb_db_update_suggestion(self, suggestion_id, status):
    """Set the status of the suggestion with *suggestion_id* in MongoDB.

    Raises:
        ValueError: if no suggestion with that uuid exists.
    """
    outcome = self.db.suggestions.update_one(
        {"uuid": suggestion_id}, {"$set": {"status": status}}
    )
    if not outcome.matched_count:
        raise ValueError(f"Suggestion with uuid {suggestion_id} not found")


def google_json_db_update_suggestion(self, suggestion_id, status):
    """No-op: the Google JSON backend is read-only."""
    pass


def update_suggestion(db, suggestion_id, status):
    """Dispatch update-suggestion to the backend implementation for *db*."""
    handler = globals()["{}_update_suggestion".format(db.module_name)]
    return handler(db, suggestion_id, status)
|
|
1098
|
+
|
|
1099
|
+
|
|
1100
|
+
# ------------------------------------------------
|
|
1101
|
+
# delete_suggestion
|
|
1102
|
+
|
|
1103
|
+
|
|
1104
|
+
def json_db_delete_suggestion(self, suggestion_id):
    """Remove the suggestion with *suggestion_id* from memory.

    Raises:
        ValueError: if no suggestion with that uuid exists.
    """
    items = self.data.get("suggestions", [])
    for i, item in enumerate(items):
        if item.get("uuid") == suggestion_id:
            del items[i]
            return
    raise ValueError(f"Suggestion with uuid {suggestion_id} not found")


def json_file_db_delete_suggestion(self, suggestion_id):
    """Remove the suggestion with *suggestion_id* from the JSON file backend.

    Raises:
        ValueError: if no suggestion with that uuid exists.
    """
    with open(self.data_file_path, "r") as handle:
        document = json.load(handle)

    items = document["map"].get("suggestions", [])
    position = next(
        (i for i, item in enumerate(items) if item.get("uuid") == suggestion_id), None
    )
    if position is None:
        raise ValueError(f"Suggestion with uuid {suggestion_id} not found")

    items.pop(position)
    document["map"]["suggestions"] = items

    json_file_atomic_dump(document, self.data_file_path)


def mongodb_db_delete_suggestion(self, suggestion_id):
    """Remove the suggestion with *suggestion_id* from MongoDB.

    Raises:
        ValueError: if no suggestion with that uuid exists.
    """
    outcome = self.db.suggestions.delete_one({"uuid": suggestion_id})
    if not outcome.deleted_count:
        raise ValueError(f"Suggestion with uuid {suggestion_id} not found")


def google_json_db_delete_suggestion(self, suggestion_id):
    """No-op: the Google JSON backend is read-only."""
    pass


def delete_suggestion(db, suggestion_id):
    """Dispatch delete-suggestion to the backend implementation for *db*."""
    return globals()["{}_delete_suggestion".format(db.module_name)](db, suggestion_id)
|
|
1141
|
+
|
|
1142
|
+
|
|
1143
|
+
# ------------------------------------------------
|
|
1144
|
+
# add_report
|
|
1145
|
+
|
|
1146
|
+
|
|
1147
|
+
def json_db_add_report(self, report_data):
    """Append a new report to the in-memory backend.

    Raises:
        ValueError: if a report with the same uuid already exists.
    """
    reports = self.data.setdefault("reports", [])
    if any(item.get("uuid") == report_data.get("uuid") for item in reports):
        raise ValueError(f"Report with uuid {report_data['uuid']} already exists")
    reports.append(report_data)


def json_file_db_add_report(self, report_data):
    """Append a new report to the JSON file backend.

    Raises:
        ValueError: if a report with the same uuid already exists.
    """
    with open(self.data_file_path, "r") as handle:
        document = json.load(handle)

    reports = document["map"].get("reports", [])
    if any(item.get("uuid") == report_data.get("uuid") for item in reports):
        raise ValueError(f"Report with uuid {report_data['uuid']} already exists")

    reports.append(report_data)
    document["map"]["reports"] = reports

    json_file_atomic_dump(document, self.data_file_path)


def mongodb_db_add_report(self, report_data):
    """Insert a new report into MongoDB.

    Raises:
        ValueError: if a report with the same uuid already exists.
    """
    if self.db.reports.find_one({"uuid": report_data.get("uuid")}):
        raise ValueError(f"Report with uuid {report_data['uuid']} already exists")
    self.db.reports.insert_one(report_data)


def google_json_db_add_report(self, report_data):
    """No-op: the Google JSON backend is read-only.

    Temporary workaround — a full implementation would write the report
    back to Google Cloud Storage.
    """
    pass


def add_report(db, report_data):
    """Dispatch add-report to the backend implementation for *db*."""
    return globals()["{}_add_report".format(db.module_name)](db, report_data)
|
|
1185
|
+
|
|
1186
|
+
|
|
1187
|
+
# ------------------------------------------------
|
|
1188
|
+
# get_reports
|
|
1189
|
+
|
|
1190
|
+
|
|
1191
|
+
def json_db_get_reports(self, query_params):
    """Return reports from memory, optionally filtered by status and priority."""
    items = self.data.get("reports", [])

    wanted_statuses = query_params.get("status")
    if wanted_statuses:
        items = [item for item in items if item.get("status") in wanted_statuses]

    wanted_priorities = query_params.get("priority")
    if wanted_priorities:
        items = [item for item in items if item.get("priority") in wanted_priorities]

    return items


def json_db_get_reports_paginated(self, query):
    """Paginated reports from the in-memory backend."""
    return PaginationHelper.create_paginated_response(self.data.get("reports", []), query)


def json_file_db_get_reports(self, query_params):
    """Return reports from the JSON file, optionally filtered by status and priority."""
    with open(self.data_file_path, "r") as handle:
        document = json.load(handle)

    items = document["map"].get("reports", [])

    wanted_statuses = query_params.get("status")
    if wanted_statuses:
        items = [item for item in items if item.get("status") in wanted_statuses]

    wanted_priorities = query_params.get("priority")
    if wanted_priorities:
        items = [item for item in items if item.get("priority") in wanted_priorities]

    return items


def json_file_db_get_reports_paginated(self, query):
    """Paginated reports from the JSON file backend."""
    data = FileIOHelper.get_data_from_file(self.data_file_path)
    return PaginationHelper.create_paginated_response(data.get("reports", []), query)


def mongodb_db_get_reports(self, query_params):
    """Return reports from MongoDB, optionally filtered by status and priority."""
    criteria = {}

    wanted_statuses = query_params.get("status")
    if wanted_statuses:
        criteria["status"] = {"$in": wanted_statuses}

    wanted_priorities = query_params.get("priority")
    if wanted_priorities:
        criteria["priority"] = {"$in": wanted_priorities}

    return list(self.db.reports.find(criteria, {"_id": 0}))


def mongodb_db_get_reports_paginated(self, query):
    """Paginated reports from MongoDB with optional filters and sorting."""
    page, per_page, sort_by, sort_order = __parse_pagination_params(query)

    criteria = {}
    wanted_statuses = query.get("status")
    if wanted_statuses:
        criteria["status"] = {"$in": wanted_statuses}
    wanted_priorities = query.get("priority")
    if wanted_priorities:
        criteria["priority"] = {"$in": wanted_priorities}

    total_count = self.db.reports.count_documents(criteria)

    stages = [{"$match": criteria}]
    if sort_by:
        stages.append({"$sort": {sort_by: -1 if sort_order == "desc" else 1}})
    if per_page:
        # Skip/limit only when a page size was requested.
        stages.append({"$skip": (page - 1) * per_page})  # type: ignore
        stages.append({"$limit": per_page})  # type: ignore
    # Strip the MongoDB-internal _id field from results.
    stages.append({"$project": {"_id": 0}})

    items = list(self.db.reports.aggregate(stages))
    return __build_pagination_response(items, total_count, page, per_page)


def google_json_db_get_reports(self, query_params):
    """Read-only backend: reports are not stored, so always empty."""
    return []


def google_json_db_get_reports_paginated(self, query):
    """Read-only backend: paginated empty report list."""
    return PaginationHelper.create_paginated_response([], query)


def get_reports(db):
    """Resolve the backend-specific get_reports function for *db*."""
    return globals()["{}_get_reports".format(db.module_name)]


def get_reports_paginated(db):
    """Resolve the backend-specific paginated get_reports function for *db*."""
    return globals()["{}_get_reports_paginated".format(db.module_name)]
|
|
1305
|
+
|
|
1306
|
+
|
|
1307
|
+
# ------------------------------------------------
|
|
1308
|
+
# get_report
|
|
1309
|
+
|
|
1310
|
+
|
|
1311
|
+
def json_db_get_report(self, report_id):
    """Return the report with *report_id* from memory, or None."""
    for item in self.data.get("reports", []):
        if item.get("uuid") == report_id:
            return item
    return None


def json_file_db_get_report(self, report_id):
    """Return the report with *report_id* from the JSON file, or None."""
    with open(self.data_file_path, "r") as handle:
        document = json.load(handle)

    for item in document["map"].get("reports", []):
        if item.get("uuid") == report_id:
            return item
    return None


def mongodb_db_get_report(self, report_id):
    """Return the report with *report_id* from MongoDB, or None."""
    return self.db.reports.find_one({"uuid": report_id}, {"_id": 0})


def google_json_db_get_report(self, report_id):
    """Read-only backend: reports are never stored, so always None."""
    return None


def get_report(db):
    """Resolve the backend-specific get_report function for *db*."""
    return globals()["{}_get_report".format(db.module_name)]
|
|
1335
|
+
|
|
1336
|
+
|
|
1337
|
+
# ------------------------------------------------
|
|
1338
|
+
# update_report
|
|
1339
|
+
|
|
1340
|
+
|
|
1341
|
+
def json_db_update_report(self, report_id, status=None, priority=None):
    """Update status and/or priority of the report with *report_id* in memory.

    Only fields with truthy values are changed.

    Raises:
        ReportNotFoundError: if no report with that uuid exists.
    """
    for report in self.data.get("reports", []):
        if report.get("uuid") == report_id:
            if status:
                report["status"] = status
            if priority:
                report["priority"] = priority
            return
    raise ReportNotFoundError(report_id)


def json_file_db_update_report(self, report_id, status=None, priority=None):
    """Update status and/or priority of the report with *report_id* in the JSON file.

    Only fields with truthy values are changed.

    Raises:
        ReportNotFoundError: if no report with that uuid exists.
    """
    with open(self.data_file_path, "r") as file:
        json_file = json.load(file)

    reports = json_file["map"].get("reports", [])
    for report in reports:
        if report.get("uuid") == report_id:
            if status:
                report["status"] = status
            if priority:
                report["priority"] = priority
            break
    else:
        raise ReportNotFoundError(report_id)

    json_file["map"]["reports"] = reports

    json_file_atomic_dump(json_file, self.data_file_path)


def mongodb_db_update_report(self, report_id, status=None, priority=None):
    """Update status and/or priority of the report with *report_id* in MongoDB.

    Only fields with truthy values are changed.

    Raises:
        ReportNotFoundError: if no report with that uuid exists.
    """
    update_doc = {}
    if status:
        update_doc["status"] = status
    if priority:
        update_doc["priority"] = priority

    if update_doc:
        result = self.db.reports.update_one({"uuid": report_id}, {"$set": update_doc})
        if result.matched_count == 0:
            raise ReportNotFoundError(report_id)
    # Consistency fix: the JSON backends raise ReportNotFoundError for a
    # missing uuid even when no fields are supplied; previously this case
    # was silently skipped here.
    elif self.db.reports.find_one({"uuid": report_id}) is None:
        raise ReportNotFoundError(report_id)


def google_json_db_update_report(self, report_id, status=None, priority=None):
    """No-op: the Google JSON backend is read-only."""
    pass


def update_report(db, report_id, status=None, priority=None):
    """Dispatch update-report to the backend implementation for *db*."""
    return globals()[f"{db.module_name}_update_report"](db, report_id, status, priority)
|
|
1393
|
+
|
|
1394
|
+
|
|
1395
|
+
# ------------------------------------------------
|
|
1396
|
+
# delete_report
|
|
1397
|
+
|
|
1398
|
+
|
|
1399
|
+
def json_db_delete_report(self, report_id):
    """Remove the report with *report_id* from memory.

    Raises:
        ReportNotFoundError: if no report with that uuid exists.
    """
    reports = self.data.get("reports", [])
    for i, report in enumerate(reports):
        if report.get("uuid") == report_id:
            del reports[i]
            return
    raise ReportNotFoundError(report_id)


def json_file_db_delete_report(self, report_id):
    """Remove the report with *report_id* from the JSON file backend.

    Raises:
        ReportNotFoundError: if no report with that uuid exists.
    """
    with open(self.data_file_path, "r") as handle:
        document = json.load(handle)

    reports = document["map"].get("reports", [])
    position = next(
        (i for i, report in enumerate(reports) if report.get("uuid") == report_id), None
    )
    if position is None:
        raise ReportNotFoundError(report_id)

    reports.pop(position)
    document["map"]["reports"] = reports

    json_file_atomic_dump(document, self.data_file_path)


def mongodb_db_delete_report(self, report_id):
    """Remove the report with *report_id* from MongoDB.

    Raises:
        ReportNotFoundError: if no report with that uuid exists.
    """
    outcome = self.db.reports.delete_one({"uuid": report_id})
    if not outcome.deleted_count:
        raise ReportNotFoundError(report_id)


def google_json_db_delete_report(self, report_id):
    """No-op: the Google JSON backend is read-only."""
    pass


def delete_report(db, report_id):
    """Dispatch delete-report to the backend implementation for *db*."""
    return globals()["{}_delete_report".format(db.module_name)](db, report_id)
|
|
1435
|
+
|
|
1436
|
+
|
|
1437
|
+
# TODO extension function should be replaced with simple extend which would take a db plugin
|
|
1438
|
+
# it could look like that:
|
|
1439
|
+
# `db.extend(goodmap_db_plugin)` in plugin all those functions would be organized
|
|
1440
|
+
|
|
1441
|
+
|
|
1442
|
+
def extend_db_with_goodmap_queries(db, location_model):
    """Attach all goodmap CRUD/query operations to *db* and return it.

    For each operation name, the backend-specific callable is resolved
    (or partially bound to ``location_model``) and registered on the db
    object via ``db.extend``. Registration order matches the mapping's
    insertion order.
    """
    operations = {
        "get_data": get_data(db),
        "get_visible_data": get_visible_data(db),
        "get_meta_data": get_meta_data(db),
        "get_locations": get_locations(db, location_model),
        "get_locations_paginated": get_locations_paginated(db, location_model),
        "get_location": get_location(db, location_model),
        "add_location": partial(add_location, location_model=location_model),
        "update_location": partial(update_location, location_model=location_model),
        "delete_location": delete_location,
        "get_categories": get_categories(db),
        "get_category_data": get_category_data(db),
        "add_suggestion": add_suggestion,
        "get_suggestions": get_suggestions(db),
        "get_suggestions_paginated": get_suggestions_paginated(db),
        "get_suggestion": get_suggestion(db),
        "update_suggestion": update_suggestion,
        "delete_suggestion": delete_suggestion,
        "add_report": add_report,
        "get_reports": get_reports(db),
        "get_reports_paginated": get_reports_paginated(db),
        "get_report": get_report(db),
        "update_report": update_report,
        "delete_report": delete_report,
    }
    for name, operation in operations.items():
        db.extend(name, operation)
    return db
|