udata 10.4.3.dev35680__py2.py3-none-any.whl → 10.4.3.dev35794__py2.py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- udata/core/dataset/api.py +0 -10
- udata/core/dataset/models.py +34 -15
- udata/core/dataset/rdf.py +1 -1
- udata/core/dataset/tasks.py +3 -2
- udata/core/spatial/models.py +9 -0
- udata/core/user/models.py +11 -5
- udata/harvest/tests/dcat/bnodes.xml +5 -0
- udata/harvest/tests/test_dcat_backend.py +1 -0
- udata/migrations/2025-06-18-clean-spatial-coverages.py +25 -0
- udata/static/chunks/{11.51d706fb9521c16976bc.js → 11.b6f741fcc366abfad9c4.js} +3 -3
- udata/static/chunks/{11.51d706fb9521c16976bc.js.map → 11.b6f741fcc366abfad9c4.js.map} +1 -1
- udata/static/chunks/{13.39e106d56f794ebd06a0.js → 13.2d06442dd9a05d9777b5.js} +2 -2
- udata/static/chunks/{13.39e106d56f794ebd06a0.js.map → 13.2d06442dd9a05d9777b5.js.map} +1 -1
- udata/static/chunks/{17.70cbb4a91b002338007e.js → 17.e8e4caaad5cb0cc0bacc.js} +2 -2
- udata/static/chunks/{17.70cbb4a91b002338007e.js.map → 17.e8e4caaad5cb0cc0bacc.js.map} +1 -1
- udata/static/chunks/{19.a348a5fff8fe2801e52a.js → 19.f03a102365af4315f9db.js} +3 -3
- udata/static/chunks/{19.a348a5fff8fe2801e52a.js.map → 19.f03a102365af4315f9db.js.map} +1 -1
- udata/static/chunks/{5.343ca020a2d38cec1a14.js → 5.0fa1408dae4e76b87b2e.js} +3 -3
- udata/static/chunks/{5.343ca020a2d38cec1a14.js.map → 5.0fa1408dae4e76b87b2e.js.map} +1 -1
- udata/static/chunks/{6.a3b07de9dd2ca2d24e85.js → 6.d663709d877baa44a71e.js} +3 -3
- udata/static/chunks/{6.a3b07de9dd2ca2d24e85.js.map → 6.d663709d877baa44a71e.js.map} +1 -1
- udata/static/chunks/{8.462bb3029de008497675.js → 8.778091d55cd8ea39af6b.js} +2 -2
- udata/static/chunks/{8.462bb3029de008497675.js.map → 8.778091d55cd8ea39af6b.js.map} +1 -1
- udata/static/common.js +1 -1
- udata/static/common.js.map +1 -1
- udata/translations/udata.pot +98 -38
- {udata-10.4.3.dev35680.dist-info → udata-10.4.3.dev35794.dist-info}/METADATA +4 -1
- {udata-10.4.3.dev35680.dist-info → udata-10.4.3.dev35794.dist-info}/RECORD +32 -31
- {udata-10.4.3.dev35680.dist-info → udata-10.4.3.dev35794.dist-info}/LICENSE +0 -0
- {udata-10.4.3.dev35680.dist-info → udata-10.4.3.dev35794.dist-info}/WHEEL +0 -0
- {udata-10.4.3.dev35680.dist-info → udata-10.4.3.dev35794.dist-info}/entry_points.txt +0 -0
- {udata-10.4.3.dev35680.dist-info → udata-10.4.3.dev35794.dist-info}/top_level.txt +0 -0
udata/core/dataset/api.py
CHANGED

@@ -503,8 +503,6 @@ class ResourcesAPI(API):
             api.abort(400, "This endpoint only supports remote resources")
         form.populate_obj(resource)
         dataset.add_resource(resource)
-        dataset.last_modified_internal = datetime.utcnow()
-        dataset.save()
         return resource, 201

     @api.secure
@@ -571,8 +569,6 @@ class UploadNewDatasetResource(UploadMixin, API):
         infos = self.handle_upload(dataset)
         resource = Resource(**infos)
         dataset.add_resource(resource)
-        dataset.last_modified_internal = datetime.utcnow()
-        dataset.save()
         return resource, 201


@@ -625,8 +621,6 @@ class UploadDatasetResource(ResourceMixin, UploadMixin, API):
         for k, v in infos.items():
             resource[k] = v
         dataset.update_resource(resource)
-        dataset.last_modified_internal = datetime.utcnow()
-        dataset.save()
         if fs_filename_to_remove is not None:
             storages.resources.delete(fs_filename_to_remove)
         return resource
@@ -699,8 +693,6 @@ class ResourceAPI(ResourceMixin, API):
         form.populate_obj(resource)
         resource.last_modified_internal = datetime.utcnow()
         dataset.update_resource(resource)
-        dataset.last_modified_internal = datetime.utcnow()
-        dataset.save()
         return resource

     @api.secure
@@ -710,8 +702,6 @@ class ResourceAPI(ResourceMixin, API):
         dataset.permissions["edit_resources"].test()
         resource = self.get_resource_or_404(dataset, rid)
         dataset.remove_resource(resource)
-        dataset.last_modified_internal = datetime.utcnow()
-        dataset.save()
         return "", 204

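All five hunks remove the same pattern: after calling a resource helper, each endpoint bumped the dataset timestamp and saved the whole document by hand. Those two statements now live inside the model helpers (see models.py below), so the calling code reduces to the helper call alone. A minimal before/after sketch, with `dataset` and `resource` standing in for the objects each endpoint already resolves:

    # Before: every mutation needed two follow-up writes.
    dataset.add_resource(resource)
    dataset.last_modified_internal = datetime.utcnow()  # now removed
    dataset.save()                                      # now removed

    # After: the helper performs one atomic update covering the resource
    # list, quality_cached and last_modified_internal.
    dataset.add_resource(resource)
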
udata/core/dataset/models.py
CHANGED

@@ -956,35 +956,54 @@ class Dataset(Auditable, WithMetrics, DatasetBadgeMixin, Owned, db.Document):
         obj = cls.objects(slug=id_or_slug).first()
         return obj or cls.objects.get_or_404(id=id_or_slug)

-    def add_resource(self, resource):
+    def add_resource(self, resource: Resource):
         """Perform an atomic prepend for a new resource"""
         resource.validate()
-        if resource.id in [r.id for r in self.resources]:
-            raise MongoEngineValidationError("Cannot add resource with already existing ID")

+        existing_resource = next((r for r in self.resources if r.id == resource.id), None)
+        if existing_resource:
+            raise MongoEngineValidationError(
+                f"Cannot add resource '{resource.title}'. A resource '{existing_resource.title}' already exists with ID '{existing_resource.id}'"
+            )
+
+        # only useful for compute_quality(), we will reload to have a clean object
         self.resources.insert(0, resource)
+
         self.update(
-            __raw__={
-                "$set": {
-                    "quality_cached": self.compute_quality(),
-                },
-                "$push": {
-                    "resources": {"$each": [resource.to_mongo()], "$position": 0},
-                },
-            }
+            set__quality_cached=self.compute_quality(),
+            push__resources={"$each": [resource.to_mongo()], "$position": 0},
+            set__last_modified_internal=datetime.utcnow(),
         )
+
         self.reload()
         self.on_resource_added.send(self.__class__, document=self, resource_id=resource.id)

     def update_resource(self, resource):
         """Perform an atomic update for an existing resource"""
-
-
-        self.
+
+        # only useful for compute_quality(), we will reload to have a clean object
+        index = next(i for i, r in enumerate(self.resources) if r.id == resource.id)
+        self.resources[index] = resource
+
+        Dataset.objects(id=self.id, resources__id=resource.id).update_one(
+            set__quality_cached=self.compute_quality(),
+            set__resources__S=resource,
+            set__last_modified_internal=datetime.utcnow(),
+        )
+
         self.reload()
         self.on_resource_updated.send(self.__class__, document=self, resource_id=resource.id)

     def remove_resource(self, resource):
+        # only useful for compute_quality(), we will reload to have a clean object
+        self.resources = [r for r in self.resources if r.id != resource.id]
+
+        self.update(
+            set__quality_cached=self.compute_quality(),
+            pull__resources__id=resource.id,
+            set__last_modified_internal=datetime.utcnow(),
+        )
+
         # Deletes resource's file from file storage
         if resource.fs_filename is not None:
             try:
@@ -994,7 +1013,7 @@ class Dataset(Auditable, WithMetrics, DatasetBadgeMixin, Owned, db.Document):
                 f"File not found while deleting resource #{resource.id} in dataset {self.id}: {e}"
             )

-        self.
+        self.reload()
         self.on_resource_removed.send(self.__class__, document=self, resource_id=resource.id)

     @property
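The rewritten helpers drop the raw update document in favor of MongoEngine's keyword operators. A sketch of the mappings used above, with `doc`, `q`, `r`, `rid`, and `resource` as placeholder names (not udata code):

    # doc.update(set__quality_cached=q)
    #     -> {"$set": {"quality_cached": q}}
    # doc.update(push__resources={"$each": [r], "$position": 0})
    #     -> {"$push": {"resources": {"$each": [r], "$position": 0}}}
    # doc.update(pull__resources__id=rid)
    #     -> {"$pull": {"resources": {"id": rid}}}
    #
    # update_one() on a queryset filtered down to the matching embedded
    # document uses __S, MongoDB's positional operator $:
    Dataset.objects(id=doc.id, resources__id=rid).update_one(
        set__resources__S=resource  # -> {"$set": {"resources.$": resource}}
    )
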
udata/core/dataset/rdf.py
CHANGED

@@ -536,7 +536,7 @@ def spatial_from_rdf(graph):
         spatial_coverage.clean()
         return spatial_coverage
     except ValidationError as e:
-        log.warning(f"Cannot save the spatial coverage {
+        log.warning(f"Cannot save the spatial coverage {polygons} (error was {e})")
         return None

udata/core/dataset/tasks.py
CHANGED

@@ -210,8 +210,9 @@ def export_csv_for_model(model, dataset):
         # add it to the dataset
         if created:
             dataset.add_resource(resource)
-        dataset.last_modified_internal = datetime.utcnow()
-        dataset.save()
+        else:
+            dataset.last_modified_internal = datetime.utcnow()
+            dataset.save()
     finally:
         csvfile.close()
         os.unlink(csvfile.name)
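Because `add_resource()` now stamps `last_modified_internal` itself, the export task keeps the manual stamp-and-save only for the branch where the CSV resource already existed and was updated in place. The resulting control flow, annotated (same names as the hunk above):

    if created:
        # add_resource() pushes the resource and stamps the dataset in one
        # atomic update; no explicit save() is needed.
        dataset.add_resource(resource)
    else:
        # The existing resource was modified in place, so stamp and save
        # the document manually.
        dataset.last_modified_internal = datetime.utcnow()
        dataset.save()
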
udata/core/spatial/models.py
CHANGED

@@ -1,3 +1,4 @@
+import geojson
 from flask import current_app
 from werkzeug.local import LocalProxy
 from werkzeug.utils import cached_property
@@ -174,3 +175,11 @@ class SpatialCoverage(db.EmbeddedDocument):
             raise db.ValidationError(
                 "The spatial coverage cannot contains a Geozone and a Geometry"
             )
+
+        if self.geom:
+            try:
+                geojson.loads(geojson.dumps(self.geom))
+            except (ValueError, TypeError) as err:
+                raise db.ValidationError(
+                    f"Invalid GeoJSON data `{self.geom}`: {err}.", field_name="geom"
+                )
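`clean()` now round-trips the geometry through the `geojson` package, so a value the GeoJSON encoder/decoder rejects (such as the `NaN` coordinate in the harvest fixture below) surfaces as a `ValidationError` instead of reaching MongoDB. A standalone sketch of the same round-trip check, assuming the `geojson` package is installed:

    import geojson

    good = {"type": "Point", "coordinates": [4.0, 45.0]}
    geojson.loads(geojson.dumps(good))  # round-trips cleanly

    bad = {"type": "Point", "coordinates": [4.0, float("nan")]}
    try:
        geojson.loads(geojson.dumps(bad))
    except (ValueError, TypeError) as err:
        print(f"rejected: {err}")  # non-finite coordinates are not valid GeoJSON
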
udata/core/user/models.py
CHANGED

@@ -1,4 +1,5 @@
 import json
+import logging
 from copy import copy
 from datetime import datetime
 from itertools import chain
@@ -26,6 +27,8 @@ from .constants import AVATAR_SIZES

 __all__ = ("User", "Role", "datastore")

+log = logging.getLogger(__name__)
+

 # TODO: use simple text for role
 class Role(db.Document, RoleMixin):
@@ -250,11 +253,14 @@ class User(WithMetrics, UserMixin, db.Document):

     def mark_as_deleted(self, notify: bool = True, delete_comments: bool = False):
         if self.avatar.filename is not None:
-            storage = storages.avatars
-            storage.delete(self.avatar.filename)
-            storage.delete(self.avatar.original)
-            for key, value in self.avatar.thumbnails.items():
-                storage.delete(value)
+            try:
+                storage = storages.avatars
+                storage.delete(self.avatar.filename)
+                storage.delete(self.avatar.original)
+                for key, value in self.avatar.thumbnails.items():
+                    storage.delete(value)
+            except FileNotFoundError as e:
+                log.error(f"File not found while deleting user #{self.id} avatar: {e}")

         copied_user = copy(self)
         self.email = "{}@deleted".format(self.id)
udata/harvest/tests/dcat/bnodes.xml
CHANGED

@@ -35,6 +35,11 @@
         <schema:endDate rdf:datatype="http://www.w3.org/2001/XMLSchema#dateTime">2016-12-05T00:00:00</schema:endDate>
       </dcterms:PeriodOfTime>
     </dct:temporal>
+    <dct:spatial>
+      <ogc:Polygon>
+        <locn:geometry rdf:datatype="https://www.iana.org/assignments/media-types/application/vnd.geo+json"><![CDATA[{"type":"Polygon","coordinates":[[[4, 45], [4, NaN], [4, 46], [4, 45], [4, 45]]]}]]></locn:geometry>
+      </ogc:Polygon>
+    </dct:spatial>
   </dcat:Dataset>
 </dcat:dataset>
 <dcterms:title>Sample DCAT Catalog</dcterms:title>
udata/harvest/tests/test_dcat_backend.py
CHANGED

@@ -334,6 +334,7 @@ class DcatBackendTest:
                 [[[159, -25.0], [159, -11], [212, -11], [212, -25.0], [159, -25.0]]],
             ],
         }
+        # dataset-3 has a spatial with NaN values…
         assert datasets["3"].spatial is None

     @pytest.mark.options(SCHEMA_CATALOG_URL="https://example.com/schemas")
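The fixture above is well-formed XML carrying illegal GeoJSON: `NaN` is not a valid JSON number, yet Python's standard parser accepts the token by default, which is how such a value can reach the database without the new `clean()` check. A quick demonstration:

    import json

    # json.loads accepts the non-standard NaN token out of the box:
    coords = json.loads("[[4, 45], [4, NaN], [4, 46]]")
    print(coords[1])  # [4, nan]

    # Rejecting it requires an explicit hook:
    def reject(constant):
        raise ValueError(f"non-compliant JSON constant: {constant}")

    try:
        json.loads("[4, NaN]", parse_constant=reject)
    except ValueError as err:
        print(err)  # non-compliant JSON constant: NaN
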
udata/migrations/2025-06-18-clean-spatial-coverages.py
ADDED

@@ -0,0 +1,25 @@
+"""
+This migration cleans up invalid spatial coverages, resetting them to None.
+"""
+
+import logging
+
+import click
+
+from udata.core.dataset.models import Dataset
+
+log = logging.getLogger(__name__)
+
+
+def migrate(db):
+    datasets = Dataset.objects(spatial__geom__exists=True)
+    count = Dataset.objects(spatial__geom__exists=True).count()
+
+    with click.progressbar(datasets, length=count) as datasets:
+        for dataset in datasets:
+            try:
+                dataset.spatial.clean()
+            except Exception as err:
+                log.error(f"Invalid spatial in dataset #{dataset.id} '{dataset.title}' {err}")
+                dataset.spatial = None
+                dataset.save()
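The explicit `length=count` presumably lets `click.progressbar` size the bar without materializing the lazy queryset first; on a standard deployment the migration itself would then be applied through udata's migration command (typically `udata db migrate`). A minimal standalone illustration of the progress bar idiom:

    import click

    # A lazy iterable that click cannot cheaply measure, like the queryset above:
    items = (n * n for n in range(1000))

    with click.progressbar(items, length=1000) as bar:
        for item in bar:
            pass  # process each item here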