udata 14.4.1.dev7__py3-none-any.whl → 14.5.1.dev9__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (50)
  1. udata/api/__init__.py +2 -0
  2. udata/auth/views.py +7 -3
  3. udata/commands/dcat.py +1 -1
  4. udata/core/dataservices/api.py +8 -1
  5. udata/core/dataservices/apiv2.py +2 -5
  6. udata/core/dataservices/models.py +4 -1
  7. udata/core/dataservices/rdf.py +2 -1
  8. udata/core/dataservices/tasks.py +6 -2
  9. udata/core/dataset/api.py +28 -4
  10. udata/core/dataset/api_fields.py +1 -1
  11. udata/core/dataset/apiv2.py +1 -1
  12. udata/core/dataset/models.py +4 -4
  13. udata/core/dataset/rdf.py +8 -2
  14. udata/core/dataset/tasks.py +6 -2
  15. udata/core/discussions/api.py +15 -1
  16. udata/core/discussions/models.py +5 -0
  17. udata/core/legal/__init__.py +0 -0
  18. udata/core/legal/mails.py +128 -0
  19. udata/core/organization/api.py +8 -0
  20. udata/core/organization/api_fields.py +3 -3
  21. udata/core/organization/apiv2.py +2 -3
  22. udata/core/organization/models.py +6 -1
  23. udata/core/reuse/api.py +8 -0
  24. udata/core/reuse/apiv2.py +2 -5
  25. udata/core/topic/models.py +8 -2
  26. udata/core/user/api.py +10 -3
  27. udata/core/user/api_fields.py +3 -3
  28. udata/core/user/models.py +7 -1
  29. udata/flask_mongoengine/pagination.py +1 -1
  30. udata/harvest/backends/dcat.py +4 -1
  31. udata/harvest/tests/test_dcat_backend.py +24 -0
  32. udata/mail.py +14 -0
  33. udata/rdf.py +20 -5
  34. udata/settings.py +4 -0
  35. udata/tests/api/test_datasets_api.py +44 -0
  36. udata/tests/apiv2/test_search.py +30 -0
  37. udata/tests/dataservice/test_dataservice_tasks.py +29 -0
  38. udata/tests/dataset/test_dataset_rdf.py +16 -0
  39. udata/tests/dataset/test_dataset_tasks.py +25 -0
  40. udata/tests/frontend/test_auth.py +34 -0
  41. udata/tests/helpers.py +6 -0
  42. udata/tests/search/test_search_integration.py +33 -0
  43. udata/tests/test_api_fields.py +10 -0
  44. udata/tests/test_legal_mails.py +359 -0
  45. {udata-14.4.1.dev7.dist-info → udata-14.5.1.dev9.dist-info}/METADATA +2 -2
  46. {udata-14.4.1.dev7.dist-info → udata-14.5.1.dev9.dist-info}/RECORD +50 -45
  47. {udata-14.4.1.dev7.dist-info → udata-14.5.1.dev9.dist-info}/WHEEL +0 -0
  48. {udata-14.4.1.dev7.dist-info → udata-14.5.1.dev9.dist-info}/entry_points.txt +0 -0
  49. {udata-14.4.1.dev7.dist-info → udata-14.5.1.dev9.dist-info}/licenses/LICENSE +0 -0
  50. {udata-14.4.1.dev7.dist-info → udata-14.5.1.dev9.dist-info}/top_level.txt +0 -0
udata/api/__init__.py CHANGED
@@ -121,6 +121,8 @@ class UDataApi(Api):
         if "application/json" not in request.headers.get("Content-Type", ""):
             errors = {"Content-Type": "expecting application/json"}
             self.abort(400, errors=errors)
+        if not isinstance(request.json, dict):
+            self.abort(400, errors={"request": "expecting a JSON object"})
         form = form_cls.from_json(request.json, obj=obj, instance=obj, meta={"csrf": False})
         if not form.validate():
             self.abort(400, errors=form.errors)
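
With this guard, a request whose JSON body is not an object (for example a bare list or string) is rejected before form validation instead of failing inside from_json. A minimal client-side sketch, assuming a locally running instance and the requests library (URL and payload are illustrative):

    import requests

    resp = requests.post(
        "http://localhost:7000/api/1/datasets/",       # illustrative endpoint
        json=["not", "an", "object"],                   # a JSON array, not an object
        headers={"Content-Type": "application/json"},
    )
    assert resp.status_code == 400                      # errors: {"request": "expecting a JSON object"}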
udata/auth/views.py CHANGED
@@ -57,11 +57,15 @@ def confirm_change_email_token_status(token):
         token, "confirm", get_within_delta("CONFIRM_EMAIL_WITHIN")
     )
     new_email = None
+    user = None
 
     if not invalid and token_data:
-        user, token_email_hash, new_email = token_data
-        user = _datastore.find_user(fs_uniquifier=user)
-        invalid = not verify_hash(token_email_hash, user.email)
+        user_uniquifier, token_email_hash, new_email = token_data
+        user = _datastore.find_user(fs_uniquifier=user_uniquifier)
+        if user is None:
+            invalid = True
+        else:
+            invalid = not verify_hash(token_email_hash, user.email)
 
     return expired, invalid, user, new_email
 
udata/commands/dcat.py CHANGED
@@ -85,7 +85,7 @@ def parse_url(url, csw, iso, quiet=False, rid=""):
         echo("Item kwargs: {}".format(yellow(item.kwargs)))
         node = backend.get_node_from_item(graph, item)
         dataset = MockDatasetFactory()
-        dataset = dataset_from_rdf(graph, dataset, node=node)
+        dataset = dataset_from_rdf(graph, dataset, node=node, dryrun=True)
         echo("")
         echo(green("Dataset found!"))
         echo("Title: {}".format(yellow(dataset)))
udata/core/dataservices/api.py CHANGED
@@ -12,6 +12,7 @@ from udata.auth import admin_permission
 from udata.core.access_type.constants import AccessType
 from udata.core.dataset.models import Dataset
 from udata.core.followers.api import FollowAPI
+from udata.core.legal.mails import add_send_legal_notice_argument, send_legal_notice_on_deletion
 from udata.frontend.markdown import md
 from udata.i18n import gettext as _
 from udata.rdf import RDF_EXTENSIONS, graph_response, negociate_content
@@ -88,6 +89,9 @@ class DataservicesAtomFeedAPI(API):
         return response
 
 
+dataservice_delete_parser = add_send_legal_notice_argument(api.parser())
+
+
 @ns.route("/<dataservice:dataservice>/", endpoint="dataservice")
 class DataserviceAPI(API):
     @api.doc("get_dataservice")
@@ -123,16 +127,19 @@ class DataserviceAPI(API):
 
     @api.secure
     @api.doc("delete_dataservice")
+    @api.expect(dataservice_delete_parser)
     @api.response(204, "dataservice deleted")
     def delete(self, dataservice):
+        args = dataservice_delete_parser.parse_args()
         if dataservice.deleted_at:
             api.abort(410, "dataservice has been deleted")
 
         dataservice.permissions["delete"].test()
+        send_legal_notice_on_deletion(dataservice, args)
+
         dataservice.deleted_at = datetime.utcnow()
         dataservice.metadata_modified_at = datetime.utcnow()
         dataservice.save()
-
         return "", 204
 
 
udata/core/dataservices/apiv2.py CHANGED
@@ -1,10 +1,7 @@
-from flask import request
-
 from udata import search
 from udata.api import API, apiv2
 from udata.core.access_type.models import AccessAudience
 from udata.core.dataservices.models import Dataservice, HarvestMetadata
-from udata.utils import multi_to_dict
 
 from .models import dataservice_permissions_fields
 from .search import DataserviceSearch
@@ -30,5 +27,5 @@ class DataserviceSearchAPI(API):
     @apiv2.marshal_with(Dataservice.__page_fields__)
     def get(self):
         """Search all dataservices"""
-        search_parser.parse_args()
-        return search.query(DataserviceSearch, **multi_to_dict(request.args))
+        args = search_parser.parse_args()
+        return search.query(DataserviceSearch, **args)
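
One behavioural consequence worth noting: the old code parsed the arguments but then forwarded the raw query string via multi_to_dict(request.args), so undeclared parameters reached search.query; now only arguments declared on search_parser are passed through. A standalone flask-restx sketch of that filtering effect (the parser, route, and parameter names here are illustrative, not udata's own):

    from flask import Flask
    from flask_restx import reqparse

    app = Flask(__name__)
    parser = reqparse.RequestParser()
    parser.add_argument("q", type=str, location="args")   # only declared args survive parsing

    with app.test_request_context("/search?q=api&unknown=x"):
        print(parser.parse_args())                          # {'q': 'api'} -- 'unknown' is dropped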
udata/core/dataservices/models.py CHANGED
@@ -201,7 +201,10 @@ class Dataservice(
         ),
         readonly=True,
     )
-    description = field(db.StringField(default=""), description="In markdown")
+    description = field(
+        db.StringField(default=""),
+        markdown=True,
+    )
     base_api_url = field(db.URLField(), sortable=True)
 
     machine_documentation_url = field(
udata/core/dataservices/rdf.py CHANGED
@@ -31,6 +31,7 @@ def dataservice_from_rdf(
     node,
     all_datasets: list[Dataset],
     remote_url_prefix: str | None = None,
+    dryrun: bool = False,
 ) -> Dataservice:
     """
     Create or update a dataservice from a RDF/DCAT graph
@@ -51,7 +52,7 @@ def dataservice_from_rdf(
     dataservice.machine_documentation_url = url_from_rdf(d, DCAT.endpointDescription)
 
     roles = [  # Imbricated list of contact points for each role
-        contact_points_from_rdf(d, rdf_entity, role, dataservice)
+        contact_points_from_rdf(d, rdf_entity, role, dataservice, dryrun=dryrun)
        for rdf_entity, role in CONTACT_POINT_ENTITY_TO_ROLE.items()
    ]
    dataservice.contact_points = [  # Flattened list of contact points
udata/core/dataservices/tasks.py CHANGED
@@ -23,8 +23,12 @@ def purge_dataservices(self):
         Follow.objects(following=dataservice).delete()
         # Remove discussions
         Discussion.objects(subject=dataservice).delete()
-        # Remove HarvestItem references
-        HarvestJob.objects(items__dataservice=dataservice).update(set__items__S__dataservice=None)
+        # Remove HarvestItem references (using update_many with array_filters to update all matching items)
+        HarvestJob._get_collection().update_many(
+            {"items.dataservice": dataservice.id},
+            {"$set": {"items.$[item].dataservice": None}},
+            array_filters=[{"item.dataservice": dataservice.id}],
+        )
         # Remove associated Transfers
         Transfer.objects(subject=dataservice).delete()
         # Remove dataservices references in Topics
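
The motivation for dropping the set__items__S__ form is that MongoDB's positional $ operator only rewrites the first matching element of the items array per document, whereas arrayFilters lets $set touch every matching element. A standalone PyMongo sketch of the same pattern, with illustrative connection, collection, and id values (the same change also appears in udata/core/dataset/tasks.py below):

    from bson import ObjectId
    from pymongo import MongoClient

    dataservice_id = ObjectId()                                   # placeholder id
    jobs = MongoClient().get_database("example").harvest_job      # illustrative collection

    jobs.update_many(
        {"items.dataservice": dataservice_id},                    # jobs referencing the dataservice
        {"$set": {"items.$[item].dataservice": None}},            # $[item] targets filtered elements
        array_filters=[{"item.dataservice": dataservice_id}],     # every matching item, not just the first
    )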
udata/core/dataset/api.py CHANGED
@@ -39,6 +39,7 @@ from udata.core.dataservices.models import Dataservice
 from udata.core.dataset.models import CHECKSUM_TYPES
 from udata.core.followers.api import FollowAPI
 from udata.core.followers.models import Follow
+from udata.core.legal.mails import add_send_legal_notice_argument, send_legal_notice_on_deletion
 from udata.core.organization.models import Organization
 from udata.core.reuse.models import Reuse
 from udata.core.storages.api import handle_upload, upload_parser
@@ -327,17 +328,33 @@ class DatasetListAPI(API):
 @ns.route("/recent.atom", endpoint="recent_datasets_atom_feed")
 class DatasetsAtomFeedAPI(API):
     @api.doc("recent_datasets_atom_feed")
+    @api.expect(dataset_parser.parser)
     def get(self):
+        args = dataset_parser.parse()
+        queryset = Dataset.objects.visible()
+        queryset = DatasetApiParser.parse_filters(queryset, args)
+
+        q = args.get("q").strip() if args.get("q") else ""
+        has_filters = any(
+            args.get(k)
+            for k in ["q", "tag", "license", "organization", "owner", "format", "badge", "topic"]
+        )
+
+        if q:
+            title = _("Datasets search: {q}").format(q=q)
+        elif has_filters:
+            title = _("Filtered datasets")
+        else:
+            title = _("Latest datasets")
+
         feed = Atom1Feed(
-            _("Latest datasets"),
+            title,
             description=None,
             feed_url=request.url,
             link=request.url_root,
         )
 
-        datasets: list[Dataset] = get_rss_feed_list(
-            Dataset.objects.visible(), "created_at_internal"
-        )
+        datasets: list[Dataset] = get_rss_feed_list(queryset, "created_at_internal")
 
         for dataset in datasets:
             author_name = None
@@ -364,6 +381,9 @@ class DatasetsAtomFeedAPI(API):
         return response
 
 
+dataset_delete_parser = add_send_legal_notice_argument(api.parser())
+
+
 @ns.route("/<dataset:dataset>/", endpoint="dataset", doc=common_doc)
 @api.response(404, "Dataset not found")
 @api.response(410, "Dataset has been deleted")
@@ -397,12 +417,16 @@ class DatasetAPI(API):
 
     @api.secure
     @api.doc("delete_dataset")
+    @api.expect(dataset_delete_parser)
     @api.response(204, "Dataset deleted")
     def delete(self, dataset):
         """Delete a dataset given its identifier"""
+        args = dataset_delete_parser.parse_args()
         if dataset.deleted:
             api.abort(410, "Dataset has been deleted")
         dataset.permissions["delete"].test()
+        send_legal_notice_on_deletion(dataset, args)
+
         dataset.deleted = datetime.utcnow()
         dataset.last_modified_internal = datetime.utcnow()
         dataset.save()
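
In practice this means the recent-datasets feed now honours the standard dataset list filters and adjusts its title accordingly. A hedged sketch of such a request (base URL and organization id are placeholders; "tag" and "organization" are among the filters checked above):

    import requests

    feed = requests.get(
        "https://demo.data.gouv.fr/api/1/datasets/recent.atom",   # illustrative base URL
        params={"tag": "transport", "organization": "<org-id>"},  # filters handled by dataset_parser
    )
    print(feed.status_code, feed.headers.get("Content-Type"))     # expects an Atom (XML) response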
udata/core/dataset/api_fields.py CHANGED
@@ -332,7 +332,7 @@ dataset_fields = api.model(
         "id": fields.String(description="The dataset identifier", readonly=True),
         "title": fields.String(description="The dataset title", required=True),
         "acronym": fields.String(description="An optional dataset acronym"),
-        "slug": fields.String(description="The dataset permalink string", required=True),
+        "slug": fields.String(description="The dataset permalink string", readonly=True),
         "description": fields.Markdown(
             description="The dataset description in markdown", required=True
         ),
udata/core/dataset/apiv2.py CHANGED
@@ -108,7 +108,7 @@ dataset_fields = apiv2.model(
         "id": fields.String(description="The dataset identifier", readonly=True),
         "title": fields.String(description="The dataset title", required=True),
         "acronym": fields.String(description="An optional dataset acronym"),
-        "slug": fields.String(description="The dataset permalink string", required=True),
+        "slug": fields.String(description="The dataset permalink string", readonly=True),
         "description": fields.Markdown(
             description="The dataset description in markdown", required=True
         ),
udata/core/dataset/models.py CHANGED
@@ -546,7 +546,10 @@ class Dataset(
         ),
         auditable=False,
     )
-    description = field(db.StringField(required=True, default=""))
+    description = field(
+        db.StringField(required=True, default=""),
+        markdown=True,
+    )
     description_short = field(db.StringField(max_length=DESCRIPTION_SHORT_SIZE_LIMIT))
     license = field(db.ReferenceField("License"))
 
@@ -1148,9 +1151,6 @@ class ResourceSchema(object):
         except requests.exceptions.RequestException as err:
             log.exception(f"Error while getting schema catalog from {endpoint}: {err}")
             schemas = cache.get(cache_key)
-        except requests.exceptions.JSONDecodeError as err:
-            log.exception(f"Error while getting schema catalog from {endpoint}: {err}")
-            schemas = cache.get(cache_key)
         else:
             schemas = data.get("schemas", [])
             cache.set(cache_key, schemas)
udata/core/dataset/rdf.py CHANGED
@@ -742,7 +742,13 @@ def resource_from_rdf(graph_or_distrib, dataset=None, is_additionnal=False):
     return resource
 
 
-def dataset_from_rdf(graph: Graph, dataset=None, node=None, remote_url_prefix: str | None = None):
+def dataset_from_rdf(
+    graph: Graph,
+    dataset=None,
+    node=None,
+    remote_url_prefix: str | None = None,
+    dryrun: bool = False,
+):
     """
     Create or update a dataset from a RDF/DCAT graph
     """
@@ -764,7 +770,7 @@ def dataset_from_rdf(graph: Graph, dataset=None, node=None, remote_url_prefix: s
     dataset.description = sanitize_html(description)
     dataset.frequency = frequency_from_rdf(d.value(DCT.accrualPeriodicity)) or dataset.frequency
     roles = [  # Imbricated list of contact points for each role
-        contact_points_from_rdf(d, rdf_entity, role, dataset)
+        contact_points_from_rdf(d, rdf_entity, role, dataset, dryrun=dryrun)
        for rdf_entity, role in CONTACT_POINT_ENTITY_TO_ROLE.items()
    ]
    dataset.contact_points = [  # Flattened list of contact points
udata/core/dataset/tasks.py CHANGED
@@ -54,8 +54,12 @@ def purge_datasets(self):
             datasets = dataservice.datasets
             datasets.remove(dataset)
             dataservice.update(datasets=datasets)
-        # Remove HarvestItem references
-        HarvestJob.objects(items__dataset=dataset).update(set__items__S__dataset=None)
+        # Remove HarvestItem references (using update_many with array_filters to update all matching items)
+        HarvestJob._get_collection().update_many(
+            {"items.dataset": dataset.id},
+            {"$set": {"items.$[item].dataset": None}},
+            array_filters=[{"item.dataset": dataset.id}],
+        )
         # Remove datasets in pages (mongoengine doesn't support updating a field in a generic embed)
         Page._get_collection().update_many(
             {"blocs.datasets": dataset.id},
udata/core/discussions/api.py CHANGED
@@ -7,6 +7,7 @@ from flask_security import current_user
 from udata.api import API, api, fields
 from udata.core.dataservices.models import Dataservice
 from udata.core.dataset.models import Dataset
+from udata.core.legal.mails import add_send_legal_notice_argument, send_legal_notice_on_deletion
 from udata.core.organization.api_fields import org_ref_fields
 from udata.core.organization.models import Organization
 from udata.core.reuse.models import Reuse
@@ -164,6 +165,9 @@ class DiscussionSpamAPI(SpamAPIMixin):
     model = Discussion
 
 
+discussion_delete_parser = add_send_legal_notice_argument(api.parser())
+
+
 @ns.route("/<id>/", endpoint="discussion")
 class DiscussionAPI(API):
     """
@@ -236,11 +240,14 @@ class DiscussionAPI(API):
         return discussion
 
     @api.doc("delete_discussion")
+    @api.expect(discussion_delete_parser)
     @api.response(403, "Not allowed to delete this discussion")
     def delete(self, id):
         """Delete a discussion given its ID"""
+        args = discussion_delete_parser.parse_args()
         discussion = Discussion.objects.get_or_404(id=id_or_404(id))
         discussion.permissions["delete"].test()
+        send_legal_notice_on_deletion(discussion, args)
 
         discussion.delete()
         on_discussion_deleted.send(discussion)
@@ -259,6 +266,9 @@ class DiscussionCommentSpamAPI(SpamAPIMixin):
         return discussion, discussion.discussion[cidx]
 
 
+message_delete_parser = add_send_legal_notice_argument(api.parser())
+
+
 @ns.route("/<id>/comments/<int:cidx>/", endpoint="discussion_comment")
 class DiscussionCommentAPI(API):
     """
@@ -286,16 +296,20 @@ class DiscussionCommentAPI(API):
         return discussion
 
     @api.doc("delete_discussion_comment")
+    @api.expect(message_delete_parser)
     @api.response(403, "Not allowed to delete this comment")
     def delete(self, id, cidx):
         """Delete a comment given its index"""
+        args = message_delete_parser.parse_args()
         discussion = Discussion.objects.get_or_404(id=id_or_404(id))
         if len(discussion.discussion) <= cidx:
             api.abort(404, "Comment does not exist")
         elif cidx == 0:
             api.abort(400, "You cannot delete the first comment of a discussion")
 
-        discussion.discussion[cidx].permissions["delete"].test()
+        message = discussion.discussion[cidx]
+        message.permissions["delete"].test()
+        send_legal_notice_on_deletion(message, args)
 
         discussion.discussion.pop(cidx)
         discussion.save()
udata/core/discussions/models.py CHANGED
@@ -6,6 +6,7 @@ from flask_login import current_user
 
 from udata.core.linkable import Linkable
 from udata.core.spam.models import SpamMixin, spam_protected
+from udata.i18n import lazy_gettext as _
 from udata.mongo import db
 
 from .signals import on_discussion_closed, on_new_discussion, on_new_discussion_comment
@@ -14,6 +15,8 @@ log = logging.getLogger(__name__)
 
 
 class Message(SpamMixin, db.EmbeddedDocument):
+    verbose_name = _("message")
+
     id = db.AutoUUIDField()
     content = db.StringField(required=True)
     posted_on = db.DateTimeField(default=datetime.utcnow, required=True)
@@ -70,6 +73,8 @@ class Message(SpamMixin, db.EmbeddedDocument):
 
 
 class Discussion(SpamMixin, Linkable, db.Document):
+    verbose_name = _("discussion")
+
     user = db.ReferenceField("User")
     organization = db.ReferenceField("Organization")
 
udata/core/legal/__init__.py ADDED
File without changes
udata/core/legal/mails.py ADDED
@@ -0,0 +1,128 @@
+from flask import current_app
+from flask_babel import LazyString
+from flask_login import current_user
+from flask_restx.inputs import boolean
+
+from udata.core.dataservices.models import Dataservice
+from udata.core.dataset.models import Dataset
+from udata.core.discussions.models import Discussion, Message
+from udata.core.organization.models import Organization
+from udata.core.reuse.models import Reuse
+from udata.core.user.models import User
+from udata.i18n import lazy_gettext as _
+from udata.mail import Link, MailMessage, ParagraphWithLinks
+
+DeletableObject = Dataset | Reuse | Dataservice | Organization | User | Discussion | Message
+
+
+def add_send_legal_notice_argument(parser):
+    """Add the send_legal_notice argument to a parser.
+
+    When send_legal_notice=true is passed by an admin, a formal legal notice email
+    is sent to the content owner. This email includes terms of use references and
+    information about how to contest the deletion (administrative appeal).
+    """
+    parser.add_argument(
+        "send_legal_notice",
+        type=boolean,
+        default=False,
+        location="args",
+        help="Send formal legal notice with appeal information to owner (admin only)",
+    )
+    return parser
+
+
+def _get_recipients_for_organization(org: Organization) -> list[User]:
+    return [m.user for m in org.by_role("admin")]
+
+
+def _get_recipients_for_owned_object(obj: Dataset | Reuse | Dataservice) -> list[User]:
+    if obj.owner:
+        return [obj.owner]
+    elif obj.organization:
+        return _get_recipients_for_organization(obj.organization)
+    return []
+
+
+def send_legal_notice_on_deletion(obj: DeletableObject, args: dict):
+    """Send a formal legal notice email when content is deleted by an admin.
+
+    The email is only sent if:
+    - send_legal_notice=true was passed in args
+    - The current user is a sysadmin
+    """
+    if not args.get("send_legal_notice") or not current_user.sysadmin:
+        return
+
+    if isinstance(obj, Organization):
+        recipients = _get_recipients_for_organization(obj)
+    elif isinstance(obj, User):
+        recipients = [obj]
+    elif isinstance(obj, Discussion):
+        recipients = [obj.user] if obj.user else []
+    elif isinstance(obj, Message):
+        recipients = [obj.posted_by] if obj.posted_by else []
+    else:
+        recipients = _get_recipients_for_owned_object(obj)
+
+    if recipients:
+        _content_deleted(obj.verbose_name).send(recipients)
+
+
+def _content_deleted(content_type_label: LazyString) -> MailMessage:
+    admin = current_user._get_current_object()
+    terms_of_use_url = current_app.config.get("TERMS_OF_USE_URL")
+    terms_of_use_deletion_article = current_app.config.get("TERMS_OF_USE_DELETION_ARTICLE")
+    telerecours_url = current_app.config.get("TELERECOURS_URL")
+
+    if terms_of_use_url and terms_of_use_deletion_article:
+        terms_paragraph = ParagraphWithLinks(
+            _(
+                'Our %(terms_link)s specify in point %(article)s that the platform is not "intended '
+                "to disseminate advertising content, promotions of private interests, content contrary "
+                "to public order, illegal content, spam and any contribution violating the applicable "
+                "legal framework. The Editor reserves the right, without prior notice, to remove or "
+                "make inaccessible content published on the Platform that has no connection with its "
+                'Purpose. The Editor does not carry out "a priori" control over publications. As soon '
+                "as the Editor becomes aware of content contrary to these terms of use, it acts quickly "
+                'to remove or make it inaccessible".',
+                terms_link=Link(_("terms of use"), terms_of_use_url),
+                article=terms_of_use_deletion_article,
+            )
+        )
+    else:
+        terms_paragraph = _(
+            'The platform is not "intended to disseminate advertising content, promotions of '
+            "private interests, content contrary to public order, illegal content, spam and any "
+            "contribution violating the applicable legal framework. The Editor reserves the right, "
+            "without prior notice, to remove or make inaccessible content published on the Platform "
+            'that has no connection with its Purpose. The Editor does not carry out "a priori" '
+            "control over publications. As soon as the Editor becomes aware of content contrary to "
+            'these terms of use, it acts quickly to remove or make it inaccessible".'
+        )
+
+    if telerecours_url:
+        appeal_paragraph = ParagraphWithLinks(
+            _(
+                "You may contest this decision within two months of its notification by filing "
+                "an administrative appeal (recours gracieux ou hiérarchique). You may also bring "
+                'the matter before the administrative court via the "%(telerecours_link)s" application.',
+                telerecours_link=Link(_("Télérecours citoyens"), telerecours_url),
+            )
+        )
+    else:
+        appeal_paragraph = _("You may contest this decision by contacting us.")
+
+    paragraphs = [
+        _("Your %(content_type)s has been deleted.", content_type=content_type_label),
+        terms_paragraph,
+        appeal_paragraph,
+        _("Best regards,"),
+        admin.fullname,
+        _("%(site)s team member", site=current_app.config.get("SITE_TITLE", "data.gouv.fr")),
+    ]
+
+    return MailMessage(
+        subject=_("Deletion of your %(content_type)s", content_type=content_type_label),
+        paragraphs=paragraphs,
+    )
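
For illustration, an admin-side deletion that requests the legal notice would look like the sketch below; the URL, identifier, and API key are placeholders, and per send_legal_notice_on_deletion the flag is silently ignored unless the caller is a sysadmin:

    import requests

    resp = requests.delete(
        "https://demo.data.gouv.fr/api/1/datasets/some-dataset-id/",  # placeholder URL and id
        params={"send_legal_notice": "true"},                          # parsed by flask_restx.inputs.boolean
        headers={"X-API-KEY": "<sysadmin-api-key>"},                   # placeholder credential
    )
    assert resp.status_code == 204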
udata/core/organization/api.py CHANGED
@@ -21,6 +21,7 @@ from udata.core.discussions.api import discussion_fields
 from udata.core.discussions.csv import DiscussionCsvAdapter
 from udata.core.discussions.models import Discussion
 from udata.core.followers.api import FollowAPI
+from udata.core.legal.mails import add_send_legal_notice_argument, send_legal_notice_on_deletion
 from udata.core.reuse.models import Reuse
 from udata.core.storages.api import (
     image_parser,
@@ -137,6 +138,9 @@ class OrganizationListAPI(API):
         return organization, 201
 
 
+org_delete_parser = add_send_legal_notice_argument(api.parser())
+
+
 @ns.route("/<org:org>/", endpoint="organization", doc=common_doc)
 @api.response(404, "Organization not found")
 @api.response(410, "Organization has been deleted")
@@ -170,12 +174,16 @@ class OrganizationAPI(API):
 
     @api.secure
     @api.doc("delete_organization")
+    @api.expect(org_delete_parser)
     @api.response(204, "Organization deleted")
     def delete(self, org):
         """Delete a organization given its identifier"""
+        args = org_delete_parser.parse_args()
         if org.deleted:
             api.abort(410, "Organization has been deleted")
         EditOrganizationPermission(org).test()
+        send_legal_notice_on_deletion(org, args)
+
         org.deleted = datetime.utcnow()
         org.save()
         return "", 204
udata/core/organization/api_fields.py CHANGED
@@ -14,7 +14,7 @@ org_ref_fields = api.inherit(
         "name": fields.String(description="The organization name", readonly=True),
         "acronym": fields.String(description="The organization acronym"),
         "slug": fields.String(
-            description="The organization string used as permalink", required=True
+            description="The organization string used as permalink", readonly=True
         ),
         "uri": fields.String(
             attribute=lambda o: o.self_api_url(),
@@ -122,12 +122,12 @@ member_fields = api.model(
 org_fields = api.model(
     "Organization",
     {
-        "id": fields.String(description="The organization identifier", required=True),
+        "id": fields.String(description="The organization identifier", readonly=True),
         "name": fields.String(description="The organization name", required=True),
         "acronym": fields.String(description="The organization acronym"),
         "url": fields.String(description="The organization website URL"),
         "slug": fields.String(
-            description="The organization string used as permalink", required=True
+            description="The organization string used as permalink", readonly=True
         ),
         "description": fields.Markdown(
             description="The organization description in Markdown", required=True
udata/core/organization/apiv2.py CHANGED
@@ -3,7 +3,6 @@ from flask import request
 from udata import search
 from udata.api import API, apiv2
 from udata.core.contact_point.api_fields import contact_point_fields
-from udata.utils import multi_to_dict
 
 from .api_fields import member_fields, org_fields, org_page_fields
 from .permissions import EditOrganizationPermission
@@ -30,8 +29,8 @@ class OrganizationSearchAPI(API):
     @apiv2.marshal_with(org_page_fields)
     def get(self):
         """Search all organizations"""
-        search_parser.parse_args()
-        return search.query(OrganizationSearch, **multi_to_dict(request.args))
+        args = search_parser.parse_args()
+        return search.query(OrganizationSearch, **args)
 
 
 @ns.route("/<org:org>/extras/", endpoint="organization_extras")
udata/core/organization/models.py CHANGED
@@ -126,7 +126,10 @@ class Organization(
         db.SlugField(max_length=255, required=True, populate_from="name", update=True, follow=True),
         auditable=False,
     )
-    description = field(db.StringField(required=True))
+    description = field(
+        db.StringField(required=True),
+        markdown=True,
+    )
     url = field(db.URLField())
     image_url = field(db.StringField())
     logo = field(
@@ -165,6 +168,8 @@ class Organization(
         "auto_create_index_on_save": True,
     }
 
+    verbose_name = _("organization")
+
     def __str__(self):
         return self.name or ""
 
udata/core/reuse/api.py CHANGED
@@ -15,6 +15,7 @@ from udata.core.badges.fields import badge_fields
 from udata.core.dataservices.models import Dataservice
 from udata.core.dataset.api_fields import dataset_ref_fields
 from udata.core.followers.api import FollowAPI
+from udata.core.legal.mails import add_send_legal_notice_argument, send_legal_notice_on_deletion
 from udata.core.organization.models import Organization
 from udata.core.reuse.constants import REUSE_TOPICS, REUSE_TYPES
 from udata.core.storages.api import (
@@ -170,6 +171,9 @@ class ReusesAtomFeedAPI(API):
         return response
 
 
+reuse_delete_parser = add_send_legal_notice_argument(api.parser())
+
+
 @ns.route("/<reuse:reuse>/", endpoint="reuse", doc=common_doc)
 @api.response(404, "Reuse not found")
 @api.response(410, "Reuse has been deleted")
@@ -202,12 +206,16 @@ class ReuseAPI(API):
 
     @api.secure
     @api.doc("delete_reuse")
+    @api.expect(reuse_delete_parser)
     @api.response(204, "Reuse deleted")
     def delete(self, reuse):
         """Delete a given reuse"""
+        args = reuse_delete_parser.parse_args()
        if reuse.deleted:
            api.abort(410, "This reuse has been deleted")
        reuse.permissions["delete"].test()
+        send_legal_notice_on_deletion(reuse, args)
+
        reuse.deleted = datetime.utcnow()
        reuse.save()
        return "", 204