udata 10.4.2.dev35475__py2.py3-none-any.whl → 10.4.3__py2.py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of udata might be problematic. Click here for more details.

Files changed (62) hide show
  1. udata/__init__.py +1 -1
  2. udata/api_fields.py +27 -2
  3. udata/commands/fixtures.py +11 -1
  4. udata/core/dataservices/api.py +12 -10
  5. udata/core/dataservices/apiv2.py +4 -1
  6. udata/core/dataservices/constants.py +19 -0
  7. udata/core/dataservices/models.py +54 -1
  8. udata/core/dataset/api.py +33 -27
  9. udata/core/dataset/api_fields.py +21 -0
  10. udata/core/dataset/apiv2.py +14 -11
  11. udata/core/dataset/models.py +60 -15
  12. udata/core/dataset/rdf.py +1 -1
  13. udata/core/dataset/tasks.py +3 -2
  14. udata/core/organization/api.py +11 -0
  15. udata/core/organization/models.py +29 -2
  16. udata/core/reuse/api.py +4 -5
  17. udata/core/reuse/api_fields.py +8 -0
  18. udata/core/reuse/apiv2.py +2 -0
  19. udata/core/reuse/models.py +18 -1
  20. udata/core/spatial/models.py +9 -0
  21. udata/core/user/models.py +11 -5
  22. udata/harvest/api.py +2 -1
  23. udata/harvest/tests/dcat/bnodes.xml +5 -0
  24. udata/harvest/tests/test_dcat_backend.py +1 -0
  25. udata/migrations/2025-06-18-clean-spatial-coverages.py +25 -0
  26. udata/static/chunks/{11.0f04e49a40a0a381bcce.js → 11.b6f741fcc366abfad9c4.js} +3 -3
  27. udata/static/chunks/{11.0f04e49a40a0a381bcce.js.map → 11.b6f741fcc366abfad9c4.js.map} +1 -1
  28. udata/static/chunks/{13.d9c1735d14038b94c17e.js → 13.2d06442dd9a05d9777b5.js} +2 -2
  29. udata/static/chunks/{13.d9c1735d14038b94c17e.js.map → 13.2d06442dd9a05d9777b5.js.map} +1 -1
  30. udata/static/chunks/{17.81c57c0dedf812e43013.js → 17.e8e4caaad5cb0cc0bacc.js} +2 -2
  31. udata/static/chunks/{17.81c57c0dedf812e43013.js.map → 17.e8e4caaad5cb0cc0bacc.js.map} +1 -1
  32. udata/static/chunks/{19.8da42e8359d72afc2618.js → 19.f03a102365af4315f9db.js} +3 -3
  33. udata/static/chunks/{19.8da42e8359d72afc2618.js.map → 19.f03a102365af4315f9db.js.map} +1 -1
  34. udata/static/chunks/{8.494b003a94383b142c18.js → 8.778091d55cd8ea39af6b.js} +2 -2
  35. udata/static/chunks/{8.494b003a94383b142c18.js.map → 8.778091d55cd8ea39af6b.js.map} +1 -1
  36. udata/static/common.js +1 -1
  37. udata/static/common.js.map +1 -1
  38. udata/tests/api/test_dataservices_api.py +78 -0
  39. udata/tests/api/test_follow_api.py +20 -0
  40. udata/tests/api/test_organizations_api.py +25 -0
  41. udata/tests/test_api_fields.py +35 -0
  42. udata/translations/ar/LC_MESSAGES/udata.mo +0 -0
  43. udata/translations/ar/LC_MESSAGES/udata.po +98 -38
  44. udata/translations/de/LC_MESSAGES/udata.mo +0 -0
  45. udata/translations/de/LC_MESSAGES/udata.po +98 -38
  46. udata/translations/es/LC_MESSAGES/udata.mo +0 -0
  47. udata/translations/es/LC_MESSAGES/udata.po +98 -38
  48. udata/translations/fr/LC_MESSAGES/udata.mo +0 -0
  49. udata/translations/fr/LC_MESSAGES/udata.po +98 -38
  50. udata/translations/it/LC_MESSAGES/udata.mo +0 -0
  51. udata/translations/it/LC_MESSAGES/udata.po +98 -38
  52. udata/translations/pt/LC_MESSAGES/udata.mo +0 -0
  53. udata/translations/pt/LC_MESSAGES/udata.po +98 -38
  54. udata/translations/sr/LC_MESSAGES/udata.mo +0 -0
  55. udata/translations/sr/LC_MESSAGES/udata.po +98 -38
  56. udata/translations/udata.pot +98 -38
  57. {udata-10.4.2.dev35475.dist-info → udata-10.4.3.dist-info}/METADATA +16 -4
  58. {udata-10.4.2.dev35475.dist-info → udata-10.4.3.dist-info}/RECORD +62 -61
  59. {udata-10.4.2.dev35475.dist-info → udata-10.4.3.dist-info}/LICENSE +0 -0
  60. {udata-10.4.2.dev35475.dist-info → udata-10.4.3.dist-info}/WHEEL +0 -0
  61. {udata-10.4.2.dev35475.dist-info → udata-10.4.3.dist-info}/entry_points.txt +0 -0
  62. {udata-10.4.2.dev35475.dist-info → udata-10.4.3.dist-info}/top_level.txt +0 -0
@@ -20,6 +20,7 @@ from .api_fields import (
20
20
  checksum_fields,
21
21
  dataset_harvest_fields,
22
22
  dataset_internal_fields,
23
+ dataset_permissions_fields,
23
24
  org_ref_fields,
24
25
  resource_fields,
25
26
  resource_harvest_fields,
@@ -31,7 +32,6 @@ from .api_fields import (
31
32
  )
32
33
  from .constants import DEFAULT_FREQUENCY, DEFAULT_LICENSE, FULL_OBJECTS_HEADER, UPDATE_FREQUENCIES
33
34
  from .models import CommunityResource, Dataset
34
- from .permissions import DatasetEditPermission, ResourceEditPermission
35
35
  from .search import DatasetSearch
36
36
 
37
37
  DEFAULT_PAGE_SIZE = 50
@@ -70,6 +70,7 @@ DEFAULT_MASK_APIV2 = ",".join(
70
70
  "internal",
71
71
  "contact_points",
72
72
  "featured",
73
+ "permissions",
73
74
  )
74
75
  )
75
76
 
@@ -224,6 +225,7 @@ dataset_fields = apiv2.model(
224
225
  required=False,
225
226
  description="The dataset contact points",
226
227
  ),
228
+ "permissions": fields.Nested(dataset_permissions_fields),
227
229
  },
228
230
  mask=DEFAULT_MASK_APIV2,
229
231
  )
@@ -269,6 +271,7 @@ apiv2.inherit("ResourceInternals", resource_internal_fields)
269
271
  apiv2.inherit("ContactPoint", contact_point_fields)
270
272
  apiv2.inherit("Schema", schema_fields)
271
273
  apiv2.inherit("CatalogSchema", catalog_schema_fields)
274
+ apiv2.inherit("DatasetPermissions", dataset_permissions_fields)
272
275
 
273
276
 
274
277
  @ns.route("/search/", endpoint="dataset_search")
@@ -318,7 +321,7 @@ class DatasetAPI(API):
318
321
  @apiv2.marshal_with(dataset_fields)
319
322
  def get(self, dataset):
320
323
  """Get a dataset given its identifier"""
321
- if not DatasetEditPermission(dataset).can():
324
+ if not dataset.permissions["edit"].can():
322
325
  if dataset.private:
323
326
  apiv2.abort(404)
324
327
  elif dataset.deleted:
@@ -335,7 +338,7 @@ class DatasetExtrasAPI(API):
335
338
  @apiv2.doc("get_dataset_extras")
336
339
  def get(self, dataset):
337
340
  """Get a dataset extras given its identifier"""
338
- if not DatasetEditPermission(dataset).can():
341
+ if not dataset.permissions["edit"].can():
339
342
  if dataset.private:
340
343
  apiv2.abort(404)
341
344
  elif dataset.deleted:
@@ -351,7 +354,7 @@ class DatasetExtrasAPI(API):
351
354
  apiv2.abort(400, "Wrong payload format, dict expected")
352
355
  if dataset.deleted:
353
356
  apiv2.abort(410, "Dataset has been deleted")
354
- DatasetEditPermission(dataset).test()
357
+ dataset.permissions["edit"].test()
355
358
  # first remove extras key associated to a None value in payload
356
359
  for key in [k for k in data if data[k] is None]:
357
360
  dataset.extras.pop(key, None)
@@ -370,7 +373,7 @@ class DatasetExtrasAPI(API):
370
373
  apiv2.abort(400, "Wrong payload format, list expected")
371
374
  if dataset.deleted:
372
375
  apiv2.abort(410, "Dataset has been deleted")
373
- DatasetEditPermission(dataset).test()
376
+ dataset.permissions["delete"].test()
374
377
  for key in data:
375
378
  try:
376
379
  del dataset.extras[key]
@@ -387,7 +390,7 @@ class ResourcesAPI(API):
387
390
  @apiv2.marshal_with(resource_page_fields)
388
391
  def get(self, dataset):
389
392
  """Get the given dataset resources, paginated."""
390
- if not DatasetEditPermission(dataset).can():
393
+ if not dataset.permissions["edit"].can():
391
394
  if dataset.private:
392
395
  apiv2.abort(404)
393
396
  elif dataset.deleted:
@@ -434,7 +437,7 @@ class DatasetSchemasAPI(API):
434
437
  @apiv2.marshal_with(schema_fields)
435
438
  def get(self, dataset):
436
439
  """Get a dataset schemas given its identifier"""
437
- if not DatasetEditPermission(dataset).can():
440
+ if not dataset.permissions["edit"].can():
438
441
  if dataset.private:
439
442
  apiv2.abort(404)
440
443
  elif dataset.deleted:
@@ -477,7 +480,7 @@ class ResourceAPI(API):
477
480
  def get(self, rid):
478
481
  dataset = Dataset.objects(resources__id=rid).first()
479
482
  if dataset:
480
- if not DatasetEditPermission(dataset).can():
483
+ if not dataset.permissions["edit"].can():
481
484
  if dataset.private:
482
485
  apiv2.abort(404)
483
486
  elif dataset.deleted:
@@ -508,7 +511,7 @@ class ResourceExtrasAPI(ResourceMixin, API):
508
511
  @apiv2.doc("get_resource_extras")
509
512
  def get(self, dataset, rid):
510
513
  """Get a resource extras given its identifier"""
511
- if not DatasetEditPermission(dataset).can():
514
+ if not dataset.permissions["edit"].can():
512
515
  if dataset.private:
513
516
  apiv2.abort(404)
514
517
  elif dataset.deleted:
@@ -525,7 +528,7 @@ class ResourceExtrasAPI(ResourceMixin, API):
525
528
  apiv2.abort(400, "Wrong payload format, dict expected")
526
529
  if dataset.deleted:
527
530
  apiv2.abort(410, "Dataset has been deleted")
528
- ResourceEditPermission(dataset).test()
531
+ dataset.permissions["edit_resources"].test()
529
532
  resource = self.get_resource_or_404(dataset, rid)
530
533
  # first remove extras key associated to a None value in payload
531
534
  for key in [k for k in data if data[k] is None]:
@@ -545,7 +548,7 @@ class ResourceExtrasAPI(ResourceMixin, API):
545
548
  apiv2.abort(400, "Wrong payload format, list expected")
546
549
  if dataset.deleted:
547
550
  apiv2.abort(410, "Dataset has been deleted")
548
- ResourceEditPermission(dataset).test()
551
+ dataset.permissions["edit_resources"].test()
549
552
  resource = self.get_resource_or_404(dataset, rid)
550
553
  try:
551
554
  for key in data:
@@ -20,6 +20,7 @@ from udata.api_fields import field
20
20
  from udata.app import cache
21
21
  from udata.core import storages
22
22
  from udata.core.activity.models import Auditable
23
+ from udata.core.metrics.helpers import get_stock_metrics
23
24
  from udata.core.owned import Owned, OwnedQuerySet
24
25
  from udata.frontend.markdown import mdstrip
25
26
  from udata.i18n import lazy_gettext as _
@@ -599,7 +600,9 @@ class Dataset(Auditable, WithMetrics, DatasetBadgeMixin, Owned, db.Document):
599
600
  __metrics_keys__ = [
600
601
  "discussions",
601
602
  "reuses",
603
+ "reuses_by_months",
602
604
  "followers",
605
+ "followers_by_months",
603
606
  "views",
604
607
  "resources_downloads",
605
608
  ]
@@ -706,6 +709,16 @@ class Dataset(Auditable, WithMetrics, DatasetBadgeMixin, Owned, db.Document):
706
709
  "Dataset's organization did not define the requested custom metadata."
707
710
  )
708
711
 
712
+ @property
713
+ def permissions(self):
714
+ from .permissions import DatasetEditPermission, ResourceEditPermission
715
+
716
+ return {
717
+ "delete": DatasetEditPermission(self),
718
+ "edit": DatasetEditPermission(self),
719
+ "edit_resources": ResourceEditPermission(self),
720
+ }
721
+
709
722
  def url_for(self, *args, **kwargs):
710
723
  return endpoint_for("datasets.show", "api.dataset", dataset=self, *args, **kwargs)
711
724
 
@@ -943,35 +956,54 @@ class Dataset(Auditable, WithMetrics, DatasetBadgeMixin, Owned, db.Document):
943
956
  obj = cls.objects(slug=id_or_slug).first()
944
957
  return obj or cls.objects.get_or_404(id=id_or_slug)
945
958
 
946
- def add_resource(self, resource):
959
+ def add_resource(self, resource: Resource):
947
960
  """Perform an atomic prepend for a new resource"""
948
961
  resource.validate()
949
- if resource.id in [r.id for r in self.resources]:
950
- raise MongoEngineValidationError("Cannot add resource with already existing ID")
951
962
 
963
+ existing_resource = next((r for r in self.resources if r.id == resource.id), None)
964
+ if existing_resource:
965
+ raise MongoEngineValidationError(
966
+ f"Cannot add resource '{resource.title}'. A resource '{existing_resource.title}' already exists with ID '{existing_resource.id}'"
967
+ )
968
+
969
+ # only useful for compute_quality(), we will reload to have a clean object
952
970
  self.resources.insert(0, resource)
971
+
953
972
  self.update(
954
- __raw__={
955
- "$set": {
956
- "quality_cached": self.compute_quality(),
957
- },
958
- "$push": {
959
- "resources": {"$each": [resource.to_mongo()], "$position": 0},
960
- },
961
- }
973
+ set__quality_cached=self.compute_quality(),
974
+ push__resources={"$each": [resource.to_mongo()], "$position": 0},
975
+ set__last_modified_internal=datetime.utcnow(),
962
976
  )
977
+
963
978
  self.reload()
964
979
  self.on_resource_added.send(self.__class__, document=self, resource_id=resource.id)
965
980
 
966
981
  def update_resource(self, resource):
967
982
  """Perform an atomic update for an existing resource"""
968
- index = self.resources.index(resource)
969
- data = {"resources__{index}".format(index=index): resource}
970
- self.update(**data)
983
+
984
+ # only useful for compute_quality(), we will reload to have a clean object
985
+ index = next(i for i, r in enumerate(self.resources) if r.id == resource.id)
986
+ self.resources[index] = resource
987
+
988
+ Dataset.objects(id=self.id, resources__id=resource.id).update_one(
989
+ set__quality_cached=self.compute_quality(),
990
+ set__resources__S=resource,
991
+ set__last_modified_internal=datetime.utcnow(),
992
+ )
993
+
971
994
  self.reload()
972
995
  self.on_resource_updated.send(self.__class__, document=self, resource_id=resource.id)
973
996
 
974
997
  def remove_resource(self, resource):
998
+ # only useful for compute_quality(), we will reload to have a clean object
999
+ self.resources = [r for r in self.resources if r.id != resource.id]
1000
+
1001
+ self.update(
1002
+ set__quality_cached=self.compute_quality(),
1003
+ pull__resources__id=resource.id,
1004
+ set__last_modified_internal=datetime.utcnow(),
1005
+ )
1006
+
975
1007
  # Deletes resource's file from file storage
976
1008
  if resource.fs_filename is not None:
977
1009
  try:
@@ -981,7 +1013,7 @@ class Dataset(Auditable, WithMetrics, DatasetBadgeMixin, Owned, db.Document):
981
1013
  f"File not found while deleting resource #{resource.id} in dataset {self.id}: {e}"
982
1014
  )
983
1015
 
984
- self.resources.remove(resource)
1016
+ self.reload()
985
1017
  self.on_resource_removed.send(self.__class__, document=self, resource_id=resource.id)
986
1018
 
987
1019
  @property
@@ -1053,12 +1085,16 @@ class Dataset(Auditable, WithMetrics, DatasetBadgeMixin, Owned, db.Document):
1053
1085
  from udata.models import Reuse
1054
1086
 
1055
1087
  self.metrics["reuses"] = Reuse.objects(datasets=self).visible().count()
1088
+ self.metrics["reuses_by_months"] = get_stock_metrics(Reuse.objects(datasets=self).visible())
1056
1089
  self.save(signal_kwargs={"ignores": ["post_save"]})
1057
1090
 
1058
1091
  def count_followers(self):
1059
1092
  from udata.models import Follow
1060
1093
 
1061
1094
  self.metrics["followers"] = Follow.objects(until=None).followers(self).count()
1095
+ self.metrics["followers_by_months"] = get_stock_metrics(
1096
+ Follow.objects(following=self), date_label="since"
1097
+ )
1062
1098
  self.save(signal_kwargs={"ignores": ["post_save"]})
1063
1099
 
1064
1100
 
@@ -1088,6 +1124,15 @@ class CommunityResource(ResourceMixin, WithMetrics, Owned, db.Document):
1088
1124
  def from_community(self):
1089
1125
  return True
1090
1126
 
1127
+ @property
1128
+ def permissions(self):
1129
+ from .permissions import ResourceEditPermission
1130
+
1131
+ return {
1132
+ "delete": ResourceEditPermission(self),
1133
+ "edit": ResourceEditPermission(self),
1134
+ }
1135
+
1091
1136
 
1092
1137
  class ResourceSchema(object):
1093
1138
  @staticmethod
udata/core/dataset/rdf.py CHANGED
@@ -536,7 +536,7 @@ def spatial_from_rdf(graph):
536
536
  spatial_coverage.clean()
537
537
  return spatial_coverage
538
538
  except ValidationError as e:
539
- log.warning(f"Cannot save the spatial coverage {coordinates} (error was {e})")
539
+ log.warning(f"Cannot save the spatial coverage {polygons} (error was {e})")
540
540
  return None
541
541
 
542
542
 
@@ -210,8 +210,9 @@ def export_csv_for_model(model, dataset):
210
210
  # add it to the dataset
211
211
  if created:
212
212
  dataset.add_resource(resource)
213
- dataset.last_modified_internal = datetime.utcnow()
214
- dataset.save()
213
+ else:
214
+ dataset.last_modified_internal = datetime.utcnow()
215
+ dataset.save()
215
216
  finally:
216
217
  csvfile.close()
217
218
  os.unlink(csvfile.name)
@@ -12,6 +12,7 @@ from udata.core.badges import api as badges_api
12
12
  from udata.core.badges.fields import badge_fields
13
13
  from udata.core.contact_point.api import ContactPointApiParser
14
14
  from udata.core.contact_point.api_fields import contact_point_page_fields
15
+ from udata.core.dataservices.csv import DataserviceCsvAdapter
15
16
  from udata.core.dataservices.models import Dataservice
16
17
  from udata.core.dataset.api import DatasetApiParser
17
18
  from udata.core.dataset.api_fields import dataset_page_fields
@@ -178,6 +179,16 @@ class DatasetsCsvAPI(API):
178
179
  return csv.stream(adapter, "{0}-datasets".format(org.slug))
179
180
 
180
181
 
182
+ @ns.route("/<org:org>/dataservices.csv", endpoint="organization_dataservices_csv")
183
+ @api.response(404, "Organization not found")
184
+ @api.response(410, "Organization has been deleted")
185
+ class DataservicesCsv(API):
186
+ def get(self, org):
187
+ dataservices = Dataservice.objects(organization=str(org.id)).visible()
188
+ adapter = DataserviceCsvAdapter(dataservices)
189
+ return csv.stream(adapter, "{0}-dataservices".format(org.slug))
190
+
191
+
181
192
  @ns.route("/<org:org>/discussions.csv", endpoint="organization_discussions_csv", doc=common_doc)
182
193
  @api.response(404, "Organization not found")
183
194
  @api.response(410, "Organization has been deleted")
@@ -8,6 +8,7 @@ from werkzeug.utils import cached_property
8
8
  from udata.api_fields import field
9
9
  from udata.core.activity.models import Auditable
10
10
  from udata.core.badges.models import Badge, BadgeMixin, BadgesList
11
+ from udata.core.metrics.helpers import get_stock_metrics
11
12
  from udata.core.metrics.models import WithMetrics
12
13
  from udata.core.storages import avatars, default_image_basename
13
14
  from udata.frontend.markdown import mdstrip
@@ -161,9 +162,16 @@ class Organization(Auditable, WithMetrics, OrganizationBadgeMixin, db.Datetimed,
161
162
  return self.name or ""
162
163
 
163
164
  __metrics_keys__ = [
165
+ "dataservices",
166
+ "dataservices_by_months",
164
167
  "datasets",
168
+ "datasets_by_months",
169
+ "datasets_followers_by_months",
170
+ "datasets_reuses_by_months",
165
171
  "members",
166
172
  "reuses",
173
+ "reuses_by_months",
174
+ "reuses_followers_by_months",
167
175
  "dataservices",
168
176
  "followers",
169
177
  "views",
@@ -299,21 +307,40 @@ class Organization(Auditable, WithMetrics, OrganizationBadgeMixin, db.Datetimed,
299
307
  self.save(signal_kwargs={"ignores": ["post_save"]})
300
308
 
301
309
  def count_datasets(self):
302
- from udata.models import Dataset
310
+ from udata.models import Dataset, Follow, Reuse
303
311
 
304
312
  self.metrics["datasets"] = Dataset.objects(organization=self).visible().count()
313
+ self.metrics["datasets_by_months"] = get_stock_metrics(
314
+ Dataset.objects(organization=self).visible(), date_label="created_at_internal"
315
+ )
316
+ self.metrics["datasets_followers_by_months"] = get_stock_metrics(
317
+ Follow.objects(following__in=Dataset.objects(organization=self)), date_label="since"
318
+ )
319
+ self.metrics["datasets_reuses_by_months"] = get_stock_metrics(
320
+ Reuse.objects(datasets__in=Dataset.objects(organization=self)).visible()
321
+ )
322
+
305
323
  self.save(signal_kwargs={"ignores": ["post_save"]})
306
324
 
307
325
  def count_reuses(self):
308
- from udata.models import Reuse
326
+ from udata.models import Follow, Reuse
309
327
 
310
328
  self.metrics["reuses"] = Reuse.objects(organization=self).visible().count()
329
+ self.metrics["reuses_by_months"] = get_stock_metrics(
330
+ Reuse.objects(organization=self).visible()
331
+ )
332
+ self.metrics["reuses_followers_by_months"] = get_stock_metrics(
333
+ Follow.objects(following__in=Reuse.objects(organization=self)), date_label="since"
334
+ )
311
335
  self.save(signal_kwargs={"ignores": ["post_save"]})
312
336
 
313
337
  def count_dataservices(self):
314
338
  from udata.models import Dataservice
315
339
 
316
340
  self.metrics["dataservices"] = Dataservice.objects(organization=self).visible().count()
341
+ self.metrics["dataservices_by_months"] = get_stock_metrics(
342
+ Dataservice.objects(organization=self).visible(), date_label="created_at"
343
+ )
317
344
  self.save(signal_kwargs={"ignores": ["post_save"]})
318
345
 
319
346
  def count_followers(self):
udata/core/reuse/api.py CHANGED
@@ -33,7 +33,6 @@ from .api_fields import (
33
33
  reuse_type_fields,
34
34
  )
35
35
  from .models import Reuse
36
- from .permissions import ReuseEditPermission
37
36
 
38
37
  DEFAULT_SORTING = "-created_at"
39
38
  SUGGEST_SORTING = "-metrics.followers"
@@ -179,7 +178,7 @@ class ReuseAPI(API):
179
178
  @api.marshal_with(Reuse.__read_fields__)
180
179
  def get(self, reuse):
181
180
  """Fetch a given reuse"""
182
- if not ReuseEditPermission(reuse).can():
181
+ if not reuse.permissions["edit"].can():
183
182
  if reuse.private:
184
183
  api.abort(404)
185
184
  elif reuse.deleted:
@@ -196,7 +195,7 @@ class ReuseAPI(API):
196
195
  request_deleted = request.json.get("deleted", True)
197
196
  if reuse.deleted and request_deleted is not None:
198
197
  api.abort(410, "This reuse has been deleted")
199
- ReuseEditPermission(reuse).test()
198
+ reuse.permissions["edit"].test()
200
199
 
201
200
  # This is a patch but old API acted like PATCH on PUT requests.
202
201
  return patch_and_save(reuse, request)
@@ -208,7 +207,7 @@ class ReuseAPI(API):
208
207
  """Delete a given reuse"""
209
208
  if reuse.deleted:
210
209
  api.abort(410, "This reuse has been deleted")
211
- ReuseEditPermission(reuse).test()
210
+ reuse.permissions["delete"].test()
212
211
  reuse.deleted = datetime.utcnow()
213
212
  reuse.save()
214
213
  return "", 204
@@ -335,7 +334,7 @@ class ReuseImageAPI(API):
335
334
  @api.marshal_with(uploaded_image_fields)
336
335
  def post(self, reuse):
337
336
  """Upload a new reuse image"""
338
- ReuseEditPermission(reuse).test()
337
+ reuse.permissions["edit"].test()
339
338
  parse_uploaded_image(reuse.image)
340
339
  reuse.save()
341
340
 
@@ -4,6 +4,14 @@ from .constants import IMAGE_SIZES
4
4
 
5
5
  BIGGEST_IMAGE_SIZE = IMAGE_SIZES[0]
6
6
 
7
+ reuse_permissions_fields = api.model(
8
+ "ReusePermissions",
9
+ {
10
+ "delete": fields.Permission(),
11
+ "edit": fields.Permission(),
12
+ },
13
+ )
14
+
7
15
  reuse_type_fields = api.model(
8
16
  "ReuseType",
9
17
  {
udata/core/reuse/apiv2.py CHANGED
@@ -5,8 +5,10 @@ from udata.api import API, apiv2
5
5
  from udata.core.reuse.models import Reuse
6
6
  from udata.utils import multi_to_dict
7
7
 
8
+ from .api_fields import reuse_permissions_fields
8
9
  from .search import ReuseSearch
9
10
 
11
+ apiv2.inherit("ReusePermissions", reuse_permissions_fields)
10
12
  apiv2.inherit("ReusePage", Reuse.__page_fields__)
11
13
  apiv2.inherit("Reuse (read)", Reuse.__read_fields__)
12
14
 
@@ -5,8 +5,9 @@ from werkzeug.utils import cached_property
5
5
  from udata.api_fields import field, function_field, generate_fields
6
6
  from udata.core.activity.models import Auditable
7
7
  from udata.core.dataset.api_fields import dataset_fields
8
+ from udata.core.metrics.helpers import get_stock_metrics
8
9
  from udata.core.owned import Owned, OwnedQuerySet
9
- from udata.core.reuse.api_fields import BIGGEST_IMAGE_SIZE
10
+ from udata.core.reuse.api_fields import BIGGEST_IMAGE_SIZE, reuse_permissions_fields
10
11
  from udata.core.storages import default_image_basename, images
11
12
  from udata.frontend.markdown import mdstrip
12
13
  from udata.i18n import lazy_gettext as _
@@ -151,6 +152,7 @@ class Reuse(db.Datetimed, Auditable, WithMetrics, ReuseBadgeMixin, Owned, db.Doc
151
152
  "discussions",
152
153
  "datasets",
153
154
  "followers",
155
+ "followers_by_months",
154
156
  "views",
155
157
  ]
156
158
 
@@ -200,6 +202,18 @@ class Reuse(db.Datetimed, Auditable, WithMetrics, ReuseBadgeMixin, Owned, db.Doc
200
202
  "reuses.show", reuse=self, _external=True, fallback_endpoint="api.reuse"
201
203
  )
202
204
 
205
+ @property
206
+ @function_field(
207
+ nested_fields=reuse_permissions_fields,
208
+ )
209
+ def permissions(self):
210
+ from .permissions import ReuseEditPermission
211
+
212
+ return {
213
+ "delete": ReuseEditPermission(self),
214
+ "edit": ReuseEditPermission(self),
215
+ }
216
+
203
217
  @property
204
218
  def is_visible(self):
205
219
  return not self.is_hidden
@@ -287,6 +301,9 @@ class Reuse(db.Datetimed, Auditable, WithMetrics, ReuseBadgeMixin, Owned, db.Doc
287
301
  from udata.models import Follow
288
302
 
289
303
  self.metrics["followers"] = Follow.objects(until=None).followers(self).count()
304
+ self.metrics["followers_by_months"] = get_stock_metrics(
305
+ Follow.objects(following=self), date_label="since"
306
+ )
290
307
  self.save(signal_kwargs={"ignores": ["post_save"]})
291
308
 
292
309
 
@@ -1,3 +1,4 @@
1
+ import geojson
1
2
  from flask import current_app
2
3
  from werkzeug.local import LocalProxy
3
4
  from werkzeug.utils import cached_property
@@ -174,3 +175,11 @@ class SpatialCoverage(db.EmbeddedDocument):
174
175
  raise db.ValidationError(
175
176
  "The spatial coverage cannot contains a Geozone and a Geometry"
176
177
  )
178
+
179
+ if self.geom:
180
+ try:
181
+ geojson.loads(geojson.dumps(self.geom))
182
+ except (ValueError, TypeError) as err:
183
+ raise db.ValidationError(
184
+ f"Invalid GeoJSON data `{self.geom}`: {err}.", field_name="geom"
185
+ )
udata/core/user/models.py CHANGED
@@ -1,4 +1,5 @@
1
1
  import json
2
+ import logging
2
3
  from copy import copy
3
4
  from datetime import datetime
4
5
  from itertools import chain
@@ -26,6 +27,8 @@ from .constants import AVATAR_SIZES
26
27
 
27
28
  __all__ = ("User", "Role", "datastore")
28
29
 
30
+ log = logging.getLogger(__name__)
31
+
29
32
 
30
33
  # TODO: use simple text for role
31
34
  class Role(db.Document, RoleMixin):
@@ -250,11 +253,14 @@ class User(WithMetrics, UserMixin, db.Document):
250
253
 
251
254
  def mark_as_deleted(self, notify: bool = True, delete_comments: bool = False):
252
255
  if self.avatar.filename is not None:
253
- storage = storages.avatars
254
- storage.delete(self.avatar.filename)
255
- storage.delete(self.avatar.original)
256
- for key, value in self.avatar.thumbnails.items():
257
- storage.delete(value)
256
+ try:
257
+ storage = storages.avatars
258
+ storage.delete(self.avatar.filename)
259
+ storage.delete(self.avatar.original)
260
+ for key, value in self.avatar.thumbnails.items():
261
+ storage.delete(value)
262
+ except FileNotFoundError as e:
263
+ log.error(f"File not found while deleting user #{self.id} avatar: {e}")
258
264
 
259
265
  copied_user = copy(self)
260
266
  self.email = "{}@deleted".format(self.id)
udata/harvest/api.py CHANGED
@@ -1,4 +1,5 @@
1
1
  from flask import current_app, request
2
+ from flask_login import current_user
2
3
  from werkzeug.exceptions import BadRequest
3
4
 
4
5
  from udata.api import API, api, fields
@@ -345,7 +346,7 @@ class RunSourceAPI(API):
345
346
  @api.marshal_with(source_fields)
346
347
  def post(self, ident):
347
348
  enabled = current_app.config.get("HARVEST_ENABLE_MANUAL_RUN")
348
- if not enabled:
349
+ if not enabled and not current_user.sysadmin:
349
350
  api.abort(
350
351
  400,
351
352
  "Cannot run source manually. Please contact the platform if you need to reschedule the harvester.",
@@ -35,6 +35,11 @@
35
35
  <schema:endDate rdf:datatype="http://www.w3.org/2001/XMLSchema#dateTime">2016-12-05T00:00:00</schema:endDate>
36
36
  </dcterms:PeriodOfTime>
37
37
  </dct:temporal>
38
+ <dct:spatial>
39
+ <ogc:Polygon>
40
+ <locn:geometry rdf:datatype="https://www.iana.org/assignments/media-types/application/vnd.geo+json"><![CDATA[{"type":"Polygon","coordinates":[[[4, 45], [4, NaN], [4, 46], [4, 45], [4, 45]]]}]]></locn:geometry>
41
+ </ogc:Polygon>
42
+ </dct:spatial>
38
43
  </dcat:Dataset>
39
44
  </dcat:dataset>
40
45
  <dcterms:title>Sample DCAT Catalog</dcterms:title>
@@ -334,6 +334,7 @@ class DcatBackendTest:
334
334
  [[[159, -25.0], [159, -11], [212, -11], [212, -25.0], [159, -25.0]]],
335
335
  ],
336
336
  }
337
+ # dataset-3 has a spatial with NaN values…
337
338
  assert datasets["3"].spatial is None
338
339
 
339
340
  @pytest.mark.options(SCHEMA_CATALOG_URL="https://example.com/schemas")
@@ -0,0 +1,25 @@
1
+ """
2
+ This migration re-validates datasets' spatial coverages and removes invalid ones (e.g. geometries with NaN coordinates).
3
+ """
4
+
5
+ import logging
6
+
7
+ import click
8
+
9
+ from udata.core.dataset.models import Dataset
10
+
11
+ log = logging.getLogger(__name__)
12
+
13
+
14
+ def migrate(db):
15
+ datasets = Dataset.objects(spatial__geom__exists=True)
16
+ count = Dataset.objects(spatial__geom__exists=True).count()
17
+
18
+ with click.progressbar(datasets, length=count) as datasets:
19
+ for dataset in datasets:
20
+ try:
21
+ dataset.spatial.clean()
22
+ except Exception as err:
23
+ log.error(f"Invalid spatial in dataset #{dataset.id} '{dataset.title}' {err}")
24
+ dataset.spatial = None
25
+ dataset.save()