udata-10.2.1.dev34728-py2.py3-none-any.whl → udata-10.2.1.dev34767-py2.py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- udata/api/__init__.py +41 -8
- udata/api_fields.py +1 -5
- udata/core/contact_point/api.py +2 -6
- udata/core/dataservices/api.py +3 -10
- udata/core/dataset/api.py +2 -6
- udata/core/dataset/apiv2.py +1 -4
- udata/core/dataset/forms.py +1 -1
- udata/core/dataset/models.py +49 -23
- udata/core/dataset/rdf.py +17 -4
- udata/core/organization/apiv2.py +1 -5
- udata/core/reports/api.py +1 -6
- udata/core/reuse/api.py +1 -4
- udata/core/spatial/tests/test_api.py +56 -0
- udata/migrations/2025-03-20-save-quality-for-datasets.py +25 -0
- udata/mongo/errors.py +9 -2
- udata/static/chunks/{11.b6f741fcc366abfad9c4.js → 11.0f04e49a40a0a381bcce.js} +3 -3
- udata/static/chunks/{11.b6f741fcc366abfad9c4.js.map → 11.0f04e49a40a0a381bcce.js.map} +1 -1
- udata/static/chunks/{13.2d06442dd9a05d9777b5.js → 13.d9c1735d14038b94c17e.js} +2 -2
- udata/static/chunks/{13.2d06442dd9a05d9777b5.js.map → 13.d9c1735d14038b94c17e.js.map} +1 -1
- udata/static/chunks/{17.e8e4caaad5cb0cc0bacc.js → 17.81c57c0dedf812e43013.js} +2 -2
- udata/static/chunks/{17.e8e4caaad5cb0cc0bacc.js.map → 17.81c57c0dedf812e43013.js.map} +1 -1
- udata/static/chunks/{19.f03a102365af4315f9db.js → 19.8da42e8359d72afc2618.js} +3 -3
- udata/static/chunks/{19.f03a102365af4315f9db.js.map → 19.8da42e8359d72afc2618.js.map} +1 -1
- udata/static/chunks/{8.778091d55cd8ea39af6b.js → 8.494b003a94383b142c18.js} +2 -2
- udata/static/chunks/{8.778091d55cd8ea39af6b.js.map → 8.494b003a94383b142c18.js.map} +1 -1
- udata/static/common.js +1 -1
- udata/static/common.js.map +1 -1
- udata/tests/api/test_dataservices_api.py +5 -8
- udata/tests/apiv2/test_datasets.py +7 -1
- udata/tests/dataset/test_dataset_model.py +0 -10
- udata/tests/dataset/test_dataset_rdf.py +18 -0
- udata/tests/test_api_fields.py +2 -2
- {udata-10.2.1.dev34728.dist-info → udata-10.2.1.dev34767.dist-info}/METADATA +3 -1
- {udata-10.2.1.dev34728.dist-info → udata-10.2.1.dev34767.dist-info}/RECORD +38 -37
- {udata-10.2.1.dev34728.dist-info → udata-10.2.1.dev34767.dist-info}/LICENSE +0 -0
- {udata-10.2.1.dev34728.dist-info → udata-10.2.1.dev34767.dist-info}/WHEEL +0 -0
- {udata-10.2.1.dev34728.dist-info → udata-10.2.1.dev34767.dist-info}/entry_points.txt +0 -0
- {udata-10.2.1.dev34728.dist-info → udata-10.2.1.dev34767.dist-info}/top_level.txt +0 -0
udata/api/__init__.py CHANGED

@@ -4,6 +4,7 @@ import urllib.parse
 from functools import wraps
 from importlib import import_module
 
+import mongoengine
 from flask import (
     Blueprint,
     current_app,
@@ -22,7 +23,6 @@ from udata import entrypoints, tracking
 from udata.app import csrf
 from udata.auth import Permission, PermissionDenied, RoleNeed, current_user, login_user
 from udata.i18n import get_locale
-from udata.mongo.errors import FieldValidationError
 from udata.utils import safe_unicode
 
 from . import fields
@@ -258,17 +258,50 @@ def handle_unauthorized_file_type(error):
     return {"message": msg}, 400
 
 
-validation_error_fields = api.model(
+validation_error_fields = api.model(
+    "ValidationError",
+    {"errors": fields.Raw, "message": fields.String},
+)
 
+validation_error_fields_v2 = apiv2.inherit("ValidationError", validation_error_fields)
 
-
-
-def handle_validation_error(error: FieldValidationError):
-    """A validation error"""
+
+def convert_object_of_exceptions_to_object_of_strings(exceptions: dict):
     errors = {}
-
+    for key, exception in exceptions.items():
+        if isinstance(exception, Exception):
+            errors[key] = str(exception)
+        elif isinstance(exception, dict):
+            errors[key] = convert_object_of_exceptions_to_object_of_strings(exception)
+        elif isinstance(exception, str):
+            errors[key] = exception
+        else:
+            log.warning(
+                f"Unknown type in `convert_object_of_exceptions_to_object_of_strings`: {exception}"
+            )
+            errors[key] = str(exception)
+
+    return errors
+
+
+@api.errorhandler(mongoengine.errors.ValidationError)
+@api.marshal_with(validation_error_fields, code=400)
+def handle_validation_error(error: mongoengine.errors.ValidationError):
+    """Error returned when validation failed."""
+    return (
+        {
+            "errors": convert_object_of_exceptions_to_object_of_strings(error.errors),
+            "message": str(error),
+        },
+        400,
+    )
+
 
-
+@apiv2.errorhandler(mongoengine.errors.ValidationError)
+@apiv2.marshal_with(validation_error_fields_v2, code=400)
+def handle_validation_error_v2(error: mongoengine.errors.ValidationError):
+    """Error returned when validation failed."""
+    return handle_validation_error(error)
 
 
 class API(Resource):  # Avoid name collision as resource is a core model
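The net effect of this hunk: endpoints no longer catch `mongoengine.errors.ValidationError` themselves; the exception is handled globally and marshalled through the `ValidationError` model. Since mongoengine nests per-field errors (exceptions, dicts, or strings), the new helper recursively stringifies them. A standalone sketch of that behavior (a simplified copy for illustration, not imported from the package):

# Simplified re-statement of convert_object_of_exceptions_to_object_of_strings
def flatten_errors(exceptions: dict) -> dict:
    errors = {}
    for key, exception in exceptions.items():
        if isinstance(exception, dict):
            # embedded documents produce nested error dicts
            errors[key] = flatten_errors(exception)
        else:
            # Exception instances, strings and anything else become strings
            errors[key] = str(exception)
    return errors

nested = {"harvest": {"modified_at": ValueError("date is in the future")}, "title": "required"}
print(flatten_errors(nested))
# {'harvest': {'modified_at': 'date is in the future'}, 'title': 'required'}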
udata/api_fields.py CHANGED

@@ -567,11 +567,7 @@ def is_value_modified(old_value, new_value) -> bool:
 
 def patch_and_save(obj, request) -> type:
     obj = patch(obj, request)
-
-    try:
-        obj.save()
-    except mongoengine.errors.ValidationError as e:
-        api.abort(400, e.message)
+    obj.save()
 
     return obj
 
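`patch_and_save` no longer converts validation failures into `api.abort(400, ...)` itself; the exception is left to bubble up to the error handler registered in `udata/api/__init__.py` above. A minimal sketch of that flow with Flask-RESTX (a standalone toy app, not udata's actual wiring):

import mongoengine
from flask import Flask
from flask_restx import Api, Resource

app = Flask(__name__)
api = Api(app)

@api.errorhandler(mongoengine.errors.ValidationError)
def handle_validation_error(error):
    # the returned tuple becomes the JSON body and status of the response
    return {"errors": {k: str(v) for k, v in error.errors.items()}, "message": str(error)}, 400

@api.route("/things")
class Things(Resource):
    def post(self):
        # simulate a model whose save() fails validation
        raise mongoengine.errors.ValidationError("nope", errors={"name": ValueError("required")})

# POST /things now answers 400 with {"errors": {"name": "required"}, "message": "nope"}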
udata/core/contact_point/api.py CHANGED

@@ -1,5 +1,3 @@
-import mongoengine
-
 from udata.api import API, api
 from udata.api.parsers import ModelApiParser
 from udata.core.dataset.permissions import OwnablePermission
@@ -33,10 +31,8 @@ class ContactPointsListAPI(API):
     def post(self):
         """Creates a contact point"""
         form = api.validate(ContactPointForm)
-        try:
-            contact_point = form.save()
-        except mongoengine.errors.ValidationError as e:
-            api.abort(400, e.message)
+        contact_point = form.save()
+
         return contact_point, 201
 
 
udata/core/dataservices/api.py CHANGED

@@ -45,11 +45,7 @@ class DataservicesAPI(API):
         if not dataservice.owner and not dataservice.organization:
             dataservice.owner = current_user._get_current_object()
 
-        try:
-            dataservice.save()
-        except mongoengine.errors.ValidationError as e:
-            api.abort(400, e.message)
-
+        dataservice.save()
         return dataservice, 201
 
 
@@ -78,11 +74,8 @@ class DataserviceAPI(API):
         patch(dataservice, request)
         dataservice.metadata_modified_at = datetime.utcnow()
 
-        try:
-            dataservice.save()
-            return dataservice
-        except mongoengine.errors.ValidationError as e:
-            api.abort(400, e.message)
+        dataservice.save()
+        return dataservice
 
     @api.secure
     @api.doc("delete_dataservice")
udata/core/dataset/api.py CHANGED

@@ -264,12 +264,8 @@ class DatasetAPI(API):
         DatasetEditPermission(dataset).test()
         dataset.last_modified_internal = datetime.utcnow()
         form = api.validate(DatasetForm, dataset)
-
-
-        try:
-            return form.save()
-        except mongoengine.errors.ValidationError as e:
-            api.abort(400, e.message)
+
+        return form.save()
 
     @api.secure
     @api.doc("delete_dataset")
udata/core/dataset/apiv2.py CHANGED

@@ -340,10 +340,7 @@ class DatasetExtrasAPI(API):
             data.pop(key)
         # then update the extras with the remaining payload
         dataset.extras.update(data)
-        try:
-            dataset.save(signal_kwargs={"ignores": ["post_save"]})
-        except mongoengine.errors.ValidationError as e:
-            apiv2.abort(400, e.message)
+        dataset.save(signal_kwargs={"ignores": ["post_save"]})
         return dataset.extras
 
     @apiv2.secure
udata/core/dataset/forms.py CHANGED
udata/core/dataset/models.py CHANGED

@@ -384,7 +384,7 @@ class ResourceMixin(object):
             return to_naive_datetime(self.harvest.modified_at)
         if self.filetype == "remote" and self.extras.get("analysis:last-modified-at"):
             return to_naive_datetime(self.extras.get("analysis:last-modified-at"))
-        return self.last_modified_internal
+        return to_naive_datetime(self.last_modified_internal)
 
     def clean(self):
         super(ResourceMixin, self).clean()
@@ -565,6 +565,8 @@ class Dataset(WithMetrics, DatasetBadgeMixin, Owned, db.Document):
     extras = db.ExtrasField()
     harvest = db.EmbeddedDocumentField(HarvestDatasetMetadata)
 
+    quality_cached = db.DictField()
+
     featured = db.BooleanField(required=True, default=False)
 
     contact_points = db.ListField(db.ReferenceField("ContactPoint", reverse_delete_rule=db.PULL))
@@ -672,6 +674,8 @@ class Dataset(WithMetrics, DatasetBadgeMixin, Owned, db.Document):
         if len(set(res.id for res in self.resources)) != len(self.resources):
             raise MongoEngineValidationError(f"Duplicate resource ID in dataset #{self.id}.")
 
+        self.quality_cached = self.compute_quality()
+
         for key, value in self.extras.items():
             if not key.startswith("custom:"):
                 continue
@@ -763,13 +767,9 @@ class Dataset(WithMetrics, DatasetBadgeMixin, Owned, db.Document):
 
     @property
     def last_modified(self):
-        if (
-            self.harvest
-            and self.harvest.modified_at
-            and to_naive_datetime(self.harvest.modified_at) < datetime.utcnow()
-        ):
+        if self.harvest and self.harvest.modified_at:
             return to_naive_datetime(self.harvest.modified_at)
-        return self.last_modified_internal
+        return to_naive_datetime(self.last_modified_internal)
 
     @property
     def last_update(self):
@@ -824,8 +824,34 @@ class Dataset(WithMetrics, DatasetBadgeMixin, Owned, db.Document):
         else:
             return self.last_update + delta
 
-    @
+    @property
     def quality(self):
+        # `quality_cached` should always be set, except during the migration
+        # creating this property. We could remove `or self.compute_quality()`
+        # after the migration but since we need to keep the computed property for
+        # `update_fulfilled_in_time`, maybe we leave it here? Just in case?
+        quality = self.quality_cached or self.compute_quality()
+
+        # :UpdateFulfilledInTime
+        # `next_update_for_update_fulfilled_in_time` is only useful to compute the
+        # real `update_fulfilled_in_time` check, so we pop it to not polute the `quality`
+        # object for users.
+        next_update = quality.pop("next_update_for_update_fulfilled_in_time", None)
+        if next_update:
+            # Allow for being one day late on update.
+            # We may have up to one day delay due to harvesting for example
+            quality["update_fulfilled_in_time"] = (next_update - datetime.utcnow()).days >= -1
+        elif self.frequency in ["continuous", "irregular", "punctual"]:
+            # For these frequencies, we don't expect regular updates or can't quantify them.
+            # Thus we consider the update_fulfilled_in_time quality criterion to be true.
+            quality["update_fulfilled_in_time"] = True
+
+        # Since `update_fulfilled_in_time` cannot be precomputed, `score` cannot either.
+        quality["score"] = self.compute_quality_score(quality)
+
+        return quality
+
+    def compute_quality(self):
         """Return a dict filled with metrics related to the inner
 
         quality of the dataset:
@@ -835,25 +861,18 @@ class Dataset(WithMetrics, DatasetBadgeMixin, Owned, db.Document):
         * and so on
         """
         result = {}
-        if not self.id:
-            # Quality is only relevant on saved Datasets
-            return result
 
         result["license"] = True if self.license else False
        result["temporal_coverage"] = True if self.temporal_coverage else False
         result["spatial"] = True if self.spatial else False
 
         result["update_frequency"] = self.frequency and self.frequency != "unknown"
-
-
-
-
-
-
-        elif self.frequency in ["continuous", "irregular", "punctual"]:
-            # For these frequencies, we don't expect regular updates or can't quantify them.
-            # Thus we consider the update_fulfilled_in_time quality criterion to be true.
-            result["update_fulfilled_in_time"] = True
+
+        # We only save the next_update here because it is based on resources
+        # We cannot save the `update_fulfilled_in_time` because it is time
+        # sensitive (so setting it on save is not really useful…)
+        # See :UpdateFulfilledInTime
+        result["next_update_for_update_fulfilled_in_time"] = self.next_update
 
         result["dataset_description_quality"] = (
             True
@@ -876,7 +895,6 @@ class Dataset(WithMetrics, DatasetBadgeMixin, Owned, db.Document):
             resource_desc = True
         result["resources_documentation"] = resource_doc or resource_desc
 
-        result["score"] = self.compute_quality_score(result)
         return result
 
     @property
@@ -934,8 +952,16 @@ class Dataset(WithMetrics, DatasetBadgeMixin, Owned, db.Document):
         if resource.id in [r.id for r in self.resources]:
             raise MongoEngineValidationError("Cannot add resource with already existing ID")
 
+        self.resources.insert(0, resource)
         self.update(
-            __raw__={
+            __raw__={
+                "$set": {
+                    "quality_cached": self.compute_quality(),
+                },
+                "$push": {
+                    "resources": {"$each": [resource.to_mongo()], "$position": 0},
+                },
+            }
         )
         self.reload()
         self.on_resource_added.send(self.__class__, document=self, resource_id=resource.id)
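The design split here: everything derivable from the stored document is precomputed into `quality_cached` when the dataset is cleaned/saved, while the time-sensitive `update_fulfilled_in_time` check (and therefore `score`) is recomputed on every read. A generic sketch of the same precompute-on-save pattern (plain Python, not a MongoEngine document):

from datetime import datetime, timedelta

class Doc:
    def __init__(self, next_update):
        self.next_update = next_update
        self.quality_cached = {}

    def clean(self):  # what a save() would trigger
        self.quality_cached = self.compute_quality()

    def compute_quality(self):
        # stable facts only, plus the raw date needed for the time-sensitive check
        return {"license": True,
                "next_update_for_update_fulfilled_in_time": self.next_update}

    @property
    def quality(self):
        quality = dict(self.quality_cached)  # copy so the cache is not mutated
        next_update = quality.pop("next_update_for_update_fulfilled_in_time", None)
        if next_update:
            # same tolerance as the diff: up to one day late still counts
            quality["update_fulfilled_in_time"] = (next_update - datetime.utcnow()).days >= -1
        return quality

doc = Doc(next_update=datetime.utcnow() + timedelta(days=3))
doc.clean()
print(doc.quality)  # {'license': True, 'update_fulfilled_in_time': True}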
udata/core/dataset/rdf.py CHANGED

@@ -5,7 +5,7 @@ This module centralize dataset helpers for RDF/DCAT serialization and parsing
 import calendar
 import json
 import logging
-from datetime import date
+from datetime import date, datetime
 from typing import Optional
 
 from dateutil.parser import parse as parse_dt
@@ -50,7 +50,7 @@ from udata.rdf import (
     url_from_rdf,
 )
 from udata.uris import endpoint_for
-from udata.utils import get_by, safe_unicode
+from udata.utils import get_by, safe_unicode, to_naive_datetime
 
 from .constants import OGC_SERVICE_FORMATS, UPDATE_FREQUENCIES
 from .models import Checksum, Dataset, License, Resource
@@ -735,7 +735,14 @@ def resource_from_rdf(graph_or_distrib, dataset=None, is_additionnal=False):
     if not resource.harvest:
         resource.harvest = HarvestResourceMetadata()
     resource.harvest.created_at = created_at
-    resource.harvest.modified_at = modified_at
+
+    # In the past, we've encountered future `modified_at` during harvesting
+    # do not save it. :FutureHarvestModifiedAt
+    if modified_at and to_naive_datetime(modified_at) > datetime.utcnow():
+        log.warning(f"Future `DCT.modified` date '{modified_at}' in resource")
+    else:
+        resource.harvest.modified_at = modified_at
+
     resource.harvest.dct_identifier = identifier
     resource.harvest.uri = uri
 
@@ -836,7 +843,13 @@ def dataset_from_rdf(graph: Graph, dataset=None, node=None, remote_url_prefix: s
     dataset.harvest.uri = uri
     dataset.harvest.remote_url = remote_url
     dataset.harvest.created_at = created_at
-    dataset.harvest.modified_at = modified_at
+
+    # In the past, we've encountered future `modified_at` during harvesting
+    # do not save it. :FutureHarvestModifiedAt
+    if modified_at and to_naive_datetime(modified_at) > datetime.utcnow():
+        log.warning(f"Future `DCT.modified` date '{modified_at}' in dataset")
+    else:
+        dataset.harvest.modified_at = modified_at
 
     return dataset
 
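The guard itself is just a clock comparison after normalizing to a naive UTC datetime. A self-contained sketch, with `to_naive_datetime` reimplemented here under the assumption that it converts aware datetimes to UTC and strips tzinfo:

from datetime import datetime, timezone

def to_naive_datetime(value: datetime) -> datetime:
    # assumed behavior of udata.utils.to_naive_datetime
    if value.tzinfo is not None:
        value = value.astimezone(timezone.utc).replace(tzinfo=None)
    return value

def safe_modified_at(modified_at):
    # mirror of :FutureHarvestModifiedAt — drop dates from the future
    if modified_at and to_naive_datetime(modified_at) > datetime.utcnow():
        print(f"Future `DCT.modified` date '{modified_at}' ignored")
        return None
    return modified_at

print(safe_modified_at(datetime(2999, 1, 1)))                       # None, with a warning
print(safe_modified_at(datetime(2020, 1, 1, tzinfo=timezone.utc)))  # 2020-01-01 00:00:00+00:00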
udata/core/organization/apiv2.py CHANGED

@@ -1,4 +1,3 @@
-import mongoengine
 from flask import request
 
 from udata import search
@@ -65,10 +64,7 @@ class OrganizationExtrasAPI(API):
 
         # then update the extras with the remaining payload
         org.extras.update(data)
-        try:
-            org.save()
-        except mongoengine.errors.ValidationError as e:
-            apiv2.abort(400, e.message)
+        org.save()
         return org.extras
 
     @apiv2.secure
udata/core/reports/api.py CHANGED

@@ -1,4 +1,3 @@
-import mongoengine
 from flask import request
 from flask_login import current_user
 
@@ -31,11 +30,7 @@ class ReportsAPI(API):
         if current_user.is_authenticated:
             report.by = current_user._get_current_object()
 
-        try:
-            report.save()
-        except mongoengine.errors.ValidationError as e:
-            api.abort(400, e.message)
-
+        report.save()
         return report, 201
 
 
udata/core/reuse/api.py CHANGED

@@ -125,10 +125,7 @@ class ReuseListAPI(API):
         if not reuse.owner and not reuse.organization:
             reuse.owner = current_user._get_current_object()
 
-        try:
-            reuse.save()
-        except mongoengine.errors.ValidationError as e:
-            api.abort(400, e.message)
+        reuse.save()
 
         return patch_and_save(reuse, request), 201
 
udata/core/spatial/tests/test_api.py CHANGED

@@ -1,14 +1,17 @@
 from flask import url_for
 
 from udata.core.dataset.factories import DatasetFactory
+from udata.core.dataset.models import Dataset
 from udata.core.organization.factories import OrganizationFactory
 from udata.core.spatial.factories import (
     GeoLevelFactory,
     GeoZoneFactory,
     SpatialCoverageFactory,
 )
+from udata.core.spatial.models import spatial_granularities
 from udata.core.spatial.tasks import compute_geozones_metrics
 from udata.tests.api import APITestCase
+from udata.tests.api.test_datasets_api import SAMPLE_GEOM
 from udata.tests.features.territories import (
     TerritoriesSettings,
     create_geozones_fixtures,
@@ -286,3 +289,56 @@ class SpatialTerritoriesApiTest(APITestCase):
         self.assert200(response)
         # No dynamic datasets given that they are added by udata-front extension.
         self.assertEqual(len(response.json), 2)
+
+
+class DatasetsSpatialAPITest(APITestCase):
+    modules = []
+
+    def test_create_spatial_zones(self):
+        paca, _, _ = create_geozones_fixtures()
+        granularity = spatial_granularities[0][0]
+        data = DatasetFactory.as_dict()
+        data["spatial"] = {
+            "zones": [paca.id],
+            "granularity": granularity,
+        }
+        self.login()
+        response = self.post(url_for("api.datasets"), data)
+        self.assert201(response)
+        self.assertEqual(Dataset.objects.count(), 1)
+        dataset = Dataset.objects.first()
+        self.assertEqual([str(z) for z in dataset.spatial.zones], [paca.id])
+        self.assertEqual(dataset.spatial.geom, None)
+        self.assertEqual(dataset.spatial.granularity, granularity)
+
+    def test_create_spatial_geom(self):
+        granularity = spatial_granularities[0][0]
+        data = DatasetFactory.as_dict()
+        data["spatial"] = {
+            "geom": SAMPLE_GEOM,
+            "granularity": granularity,
+        }
+        self.login()
+        response = self.post(url_for("api.datasets"), data)
+        self.assert201(response)
+        self.assertEqual(Dataset.objects.count(), 1)
+        dataset = Dataset.objects.first()
+        self.assertEqual(dataset.spatial.zones, [])
+        self.assertEqual(dataset.spatial.geom, SAMPLE_GEOM)
+        self.assertEqual(dataset.spatial.granularity, granularity)
+
+    def test_cannot_create_both_geom_and_zones(self):
+        paca, _, _ = create_geozones_fixtures()
+
+        granularity = spatial_granularities[0][0]
+        data = DatasetFactory.as_dict()
+        data["spatial"] = {
+            "zones": [paca.id],
+            "geom": SAMPLE_GEOM,
+            "granularity": granularity,
+        }
+        self.login()
+
+        response = self.post(url_for("api.datasets"), data)
+        self.assert400(response)
+        self.assertEqual(Dataset.objects.count(), 0)
udata/migrations/2025-03-20-save-quality-for-datasets.py ADDED

@@ -0,0 +1,25 @@
+"""
+This migration keeps only the "Local authority" badge if the organization also has the "Public service" badge.
+"""
+
+import logging
+
+import click
+
+from udata.core.dataset.models import Dataset
+
+log = logging.getLogger(__name__)
+
+
+def migrate(db):
+    log.info("Saving all datasets")
+
+    count = Dataset.objects().count()
+    with click.progressbar(Dataset.objects(), length=count) as datasets:
+        for dataset in datasets:
+            try:
+                dataset.save()
+            except Exception as err:
+                log.error(f"Cannot save dataset {dataset.id} {err}")
+
+    log.info("Done")
udata/mongo/errors.py CHANGED

@@ -3,7 +3,14 @@ from mongoengine.errors import ValidationError
 
 class FieldValidationError(ValidationError):
     field: str
+    raw_message: str
 
-    def __init__(self, *args, field: str, **kwargs):
-        self.field = field
+    def __init__(self, message: str, *args, field: str, **kwargs):
         super().__init__(*args, **kwargs)
+
+        self.raw_message = message  # It's sad but ValidationError do some stuff with the message…
+        self.field = field
+        self.errors[self.field] = message
+
+    def __str__(self):
+        return str(self.raw_message)