udata 10.1.2.dev34172-py2.py3-none-any.whl → 10.1.3-py2.py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of udata might be problematic.

Files changed (59)
  1. udata/__init__.py +1 -1
  2. udata/commands/fixtures.py +1 -1
  3. udata/core/dataservices/constants.py +11 -0
  4. udata/core/dataservices/csv.py +3 -3
  5. udata/core/dataservices/models.py +27 -12
  6. udata/core/dataservices/rdf.py +5 -3
  7. udata/core/dataservices/search.py +13 -5
  8. udata/core/dataset/api.py +18 -3
  9. udata/core/dataset/forms.py +8 -4
  10. udata/core/dataset/models.py +6 -0
  11. udata/core/metrics/commands.py +20 -1
  12. udata/core/organization/api_fields.py +3 -1
  13. udata/core/user/api.py +8 -1
  14. udata/core/user/api_fields.py +5 -0
  15. udata/core/user/models.py +16 -11
  16. udata/core/user/tasks.py +81 -2
  17. udata/core/user/tests/test_user_model.py +29 -12
  18. udata/features/transfer/api.py +7 -4
  19. udata/harvest/actions.py +5 -0
  20. udata/harvest/backends/base.py +22 -2
  21. udata/harvest/models.py +19 -0
  22. udata/harvest/tests/test_actions.py +12 -0
  23. udata/harvest/tests/test_base_backend.py +74 -8
  24. udata/harvest/tests/test_dcat_backend.py +1 -1
  25. udata/migrations/2025-01-05-dataservices-fields-changes.py +136 -0
  26. udata/settings.py +5 -0
  27. udata/templates/mail/account_inactivity.html +29 -0
  28. udata/templates/mail/account_inactivity.txt +22 -0
  29. udata/templates/mail/inactive_account_deleted.html +5 -0
  30. udata/templates/mail/inactive_account_deleted.txt +6 -0
  31. udata/tests/api/test_dataservices_api.py +41 -2
  32. udata/tests/api/test_datasets_api.py +58 -0
  33. udata/tests/api/test_me_api.py +1 -1
  34. udata/tests/api/test_transfer_api.py +38 -0
  35. udata/tests/api/test_user_api.py +47 -8
  36. udata/tests/dataservice/test_csv_adapter.py +2 -0
  37. udata/tests/dataset/test_dataset_model.py +14 -0
  38. udata/tests/user/test_user_tasks.py +144 -0
  39. udata/translations/ar/LC_MESSAGES/udata.mo +0 -0
  40. udata/translations/ar/LC_MESSAGES/udata.po +88 -60
  41. udata/translations/de/LC_MESSAGES/udata.mo +0 -0
  42. udata/translations/de/LC_MESSAGES/udata.po +88 -60
  43. udata/translations/es/LC_MESSAGES/udata.mo +0 -0
  44. udata/translations/es/LC_MESSAGES/udata.po +88 -60
  45. udata/translations/fr/LC_MESSAGES/udata.mo +0 -0
  46. udata/translations/fr/LC_MESSAGES/udata.po +88 -60
  47. udata/translations/it/LC_MESSAGES/udata.mo +0 -0
  48. udata/translations/it/LC_MESSAGES/udata.po +88 -60
  49. udata/translations/pt/LC_MESSAGES/udata.mo +0 -0
  50. udata/translations/pt/LC_MESSAGES/udata.po +88 -60
  51. udata/translations/sr/LC_MESSAGES/udata.mo +0 -0
  52. udata/translations/sr/LC_MESSAGES/udata.po +88 -60
  53. udata/translations/udata.pot +83 -54
  54. {udata-10.1.2.dev34172.dist-info → udata-10.1.3.dist-info}/METADATA +15 -2
  55. {udata-10.1.2.dev34172.dist-info → udata-10.1.3.dist-info}/RECORD +59 -52
  56. {udata-10.1.2.dev34172.dist-info → udata-10.1.3.dist-info}/LICENSE +0 -0
  57. {udata-10.1.2.dev34172.dist-info → udata-10.1.3.dist-info}/WHEEL +0 -0
  58. {udata-10.1.2.dev34172.dist-info → udata-10.1.3.dist-info}/entry_points.txt +0 -0
  59. {udata-10.1.2.dev34172.dist-info → udata-10.1.3.dist-info}/top_level.txt +0 -0
udata/harvest/actions.py CHANGED
@@ -7,6 +7,7 @@ from bson import ObjectId
 from flask import current_app
 
 from udata.auth import current_user
+from udata.core.dataservices.models import Dataservice
 from udata.core.dataset.models import HarvestDatasetMetadata
 from udata.models import Dataset, Organization, PeriodicTask, User
 from udata.storage.s3 import delete_file
@@ -18,6 +19,7 @@ from .models import (
     VALIDATION_REFUSED,
     HarvestJob,
     HarvestSource,
+    archive_harvested_dataservice,
     archive_harvested_dataset,
 )
 from .tasks import harvest
@@ -161,6 +163,9 @@ def purge_sources():
         datasets = Dataset.objects.filter(harvest__source_id=str(source.id))
         for dataset in datasets:
             archive_harvested_dataset(dataset, reason="harvester-deleted", dryrun=False)
+        dataservices = Dataservice.objects.filter(harvest__source_id=str(source.id))
+        for dataservice in dataservices:
+            archive_harvested_dataservice(dataservice, reason="harvester-deleted", dryrun=False)
         source.delete()
     return count
 
udata/harvest/backends/base.py CHANGED
@@ -20,6 +20,7 @@ from ..models import (
     HarvestItem,
     HarvestJob,
     HarvestLog,
+    archive_harvested_dataservice,
     archive_harvested_dataset,
 )
 from ..signals import after_harvest_job, before_harvest_job
@@ -342,6 +343,7 @@ class BaseBackend(object):
         harvest.last_update = datetime.utcnow()
 
         harvest.archived_at = None
+        harvest.archived_reason = None
 
         return harvest
 
@@ -370,9 +372,10 @@ class BaseBackend(object):
             "harvest__remote_id__nin": remote_ids,
             "harvest__last_update__lt": limit_date,
         }
-        local_items_not_on_remote = Dataset.objects.filter(**q)
+        local_datasets_not_on_remote = Dataset.objects.filter(**q)
+        local_dataservices_not_on_remote = Dataservice.objects.filter(**q)
 
-        for dataset in local_items_not_on_remote:
+        for dataset in local_datasets_not_on_remote:
             if not dataset.harvest.archived_at:
                 archive_harvested_dataset(dataset, reason="not-on-remote", dryrun=self.dryrun)
             # add a HarvestItem to the job list (useful for report)
@@ -385,6 +388,23 @@ class BaseBackend(object):
 
         self.save_job()
 
+        for dataservice in local_dataservices_not_on_remote:
+            if not dataservice.harvest.archived_at:
+                archive_harvested_dataservice(
+                    dataservice, reason="not-on-remote", dryrun=self.dryrun
+                )
+            # add a HarvestItem to the job list (useful for report)
+            # even when archiving has already been done (useful for debug)
+            self.job.items.append(
+                HarvestItem(
+                    remote_id=str(dataservice.harvest.remote_id),
+                    dataservice=dataservice,
+                    status="archived",
+                )
+            )
+
+        self.save_job()
+
     def get_dataset(self, remote_id):
         """Get or create a dataset given its remote ID (and its source)
         We first try to match `source_id` to be source domain independent
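
Since the autoarchive criteria are plain MongoEngine keyword filters, the same dict drives both querysets, which is what makes the dataservice branch above nearly symmetric with the dataset one. A short illustrative sketch, with `remote_ids` and `limit_date` as in the hunk above (the raw-query equivalent is shown as a comment, for intuition only):

    q = {
        "harvest__remote_id__nin": remote_ids,   # remote_id NOT IN the ids seen this run
        "harvest__last_update__lt": limit_date,  # last harvested before the cutoff date
    }
    # Roughly the raw MongoDB filter MongoEngine builds from `q`:
    #   {"harvest.remote_id": {"$nin": remote_ids},
    #    "harvest.last_update": {"$lt": limit_date}}
    Dataset.objects.filter(**q)      # stale datasets
    Dataservice.objects.filter(**q)  # stale dataservices, same criteria
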
udata/harvest/models.py CHANGED
@@ -6,6 +6,7 @@ from urllib.parse import urlparse
 from werkzeug.utils import cached_property
 
 from udata.core.dataservices.models import Dataservice
+from udata.core.dataservices.models import HarvestMetadata as HarvestDataserviceMetadata
 from udata.core.dataset.models import HarvestDatasetMetadata
 from udata.core.owned import Owned, OwnedQuerySet
 from udata.i18n import lazy_gettext as _
@@ -203,3 +204,21 @@ def archive_harvested_dataset(dataset, reason, dryrun=False):
         dataset.validate()
     else:
         dataset.save()
+
+
+def archive_harvested_dataservice(dataservice, reason, dryrun=False):
+    """
+    Archive a harvested dataservice, setting extras accordingly.
+    If `dryrun` is True, the dataservice is not saved but validated only.
+    """
+    log.debug("Archiving dataservice %s", dataservice.id)
+    archival_date = datetime.utcnow()
+    dataservice.archived_at = archival_date
+    if not dataservice.harvest:
+        dataservice.harvest = HarvestDataserviceMetadata()
+    dataservice.harvest.archived_reason = reason
+    dataservice.harvest.archived_at = archival_date
+    if dryrun:
+        dataservice.validate()
+    else:
+        dataservice.save()
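
A minimal usage sketch of the new helper (identifiers as in the hunk above): with `dryrun=True` the archival fields are set in memory and validated, but nothing is persisted.

    from udata.harvest.models import archive_harvested_dataservice

    archive_harvested_dataservice(dataservice, reason="not-on-remote", dryrun=True)
    assert dataservice.archived_at is not None                     # set in memory
    assert dataservice.harvest.archived_reason == "not-on-remote"  # ditto
    # Only dataservice.validate() ran; re-run with dryrun=False to .save() for real
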
udata/harvest/tests/test_actions.py CHANGED
@@ -6,6 +6,8 @@ from tempfile import NamedTemporaryFile
 import pytest
 from mock import patch
 
+from udata.core.dataservices.factories import DataserviceFactory
+from udata.core.dataservices.models import HarvestMetadata as HarvestDataserviceMetadata
 from udata.core.dataset.factories import DatasetFactory
 from udata.core.dataset.models import HarvestDatasetMetadata
 from udata.core.organization.factories import OrganizationFactory
@@ -396,17 +398,27 @@ class HarvestActionsTest:
         dataset_to_archive = DatasetFactory(
             harvest=HarvestDatasetMetadata(source_id=str(to_delete[0].id))
         )
+        dataservice_to_archive = DataserviceFactory(
+            harvest=HarvestDataserviceMetadata(source_id=str(to_delete[0].id))
+        )
 
         result = actions.purge_sources()
         dataset_to_archive.reload()
+        dataservice_to_archive.reload()
 
         assert result == len(to_delete)
         assert len(HarvestSource.objects) == len(to_keep)
         assert PeriodicTask.objects.filter(id=periodic_task.id).count() == 0
         assert HarvestJob.objects(id=harvest_job.id).count() == 0
+
         assert dataset_to_archive.harvest.archived == "harvester-deleted"
+        assert_equal_dates(dataset_to_archive.harvest.archived_at, now)
         assert_equal_dates(dataset_to_archive.archived, now)
 
+        assert dataservice_to_archive.harvest.archived_reason == "harvester-deleted"
+        assert_equal_dates(dataservice_to_archive.harvest.archived_at, now)
+        assert_equal_dates(dataservice_to_archive.archived_at, now)
+
     @pytest.mark.options(HARVEST_JOBS_RETENTION_DAYS=2)
     def test_purge_jobs(self):
         now = datetime.utcnow()
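
The purge test above exercises the full path end to end; for reference, a minimal sketch of invoking the same action directly (the call is exactly the `actions.purge_sources()` used by the test):

    from udata.harvest import actions

    # Archives harvested datasets *and* dataservices of every source flagged as
    # deleted (reason="harvester-deleted"), then removes the sources themselves;
    # returns the number of sources purged.
    count = actions.purge_sources()
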
udata/harvest/tests/test_base_backend.py CHANGED
@@ -4,6 +4,8 @@ from urllib.parse import urlparse
 import pytest
 from voluptuous import Schema
 
+from udata.core.dataservices.factories import DataserviceFactory
+from udata.core.dataservices.models import Dataservice
 from udata.core.dataset import tasks
 from udata.core.dataset.factories import DatasetFactory
 from udata.harvest.models import HarvestItem
@@ -20,9 +22,9 @@ class Unknown:
     pass
 
 
-def gen_remote_IDs(num: int) -> list[str]:
+def gen_remote_IDs(num: int, prefix: str = "") -> list[str]:
     """Generate remote IDs."""
-    return [f"fake-{i}" for i in range(num)]
+    return [f"{prefix}fake-{i}" for i in range(num)]
 
 
 class FakeBackend(BaseBackend):
@@ -45,6 +47,11 @@ class FakeBackend(BaseBackend):
             if self.is_done():
                 return
 
+        for remote_id in self.source.config.get("dataservice_remote_ids", []):
+            self.process_dataservice(remote_id)
+            if self.is_done():
+                return
+
     def inner_process_dataset(self, item: HarvestItem):
         dataset = self.get_dataset(item.remote_id)
 
@@ -55,6 +62,16 @@ class FakeBackend(BaseBackend):
             dataset.last_modified_internal = self.source.config["last_modified"]
         return dataset
 
+    def inner_process_dataservice(self, item: HarvestItem):
+        dataservice = self.get_dataservice(item.remote_id)
+
+        for key, value in DataserviceFactory.as_dict().items():
+            if getattr(dataservice, key) is None:
+                setattr(dataservice, key, value)
+        if self.source.config.get("last_modified"):
+            dataservice.last_modified_internal = self.source.config["last_modified"]
+        return dataservice
+
 
 class HarvestFilterTest:
     @pytest.mark.parametrize("type,expected", HarvestFilter.TYPES.items())
@@ -210,7 +227,13 @@ class BaseBackendTest:
 
     def test_autoarchive(self, app):
         nb_datasets = 3
-        source = HarvestSourceFactory(config={"dataset_remote_ids": gen_remote_IDs(nb_datasets)})
+        nb_dataservices = 3
+        source = HarvestSourceFactory(
+            config={
+                "dataset_remote_ids": gen_remote_IDs(nb_datasets, "dataset-"),
+                "dataservice_remote_ids": gen_remote_IDs(nb_dataservices, "dataservice-"),
+            }
+        )
         backend = FakeBackend(source)
 
         # create a dangling dataset to be archived
@@ -220,7 +243,15 @@ class BaseBackendTest:
             harvest={
                 "domain": source.domain,
                 "source_id": str(source.id),
-                "remote_id": "not-on-remote",
+                "remote_id": "dataset-not-on-remote",
+                "last_update": last_update,
+            }
+        )
+        dataservice_arch = DataserviceFactory(
+            harvest={
+                "domain": source.domain,
+                "source_id": str(source.id),
+                "remote_id": "dataservice-not-on-remote",
                 "last_update": last_update,
             }
         )
@@ -232,7 +263,15 @@ class BaseBackendTest:
             harvest={
                 "domain": source.domain,
                 "source_id": str(source.id),
-                "remote_id": "not-on-remote-two",
+                "remote_id": "dataset-not-on-remote-two",
+                "last_update": last_update,
+            }
+        )
+        dataservice_no_arch = DataserviceFactory(
+            harvest={
+                "domain": source.domain,
+                "source_id": str(source.id),
+                "remote_id": "dataservice-not-on-remote-two",
                 "last_update": last_update,
             }
         )
@@ -240,13 +279,17 @@ class BaseBackendTest:
         job = backend.harvest()
 
         # all datasets except arch : 3 mocks + 1 manual (no_arch)
-        assert len(job.items) == nb_datasets + 1
+        assert len(job.items) == (nb_datasets + 1) + (nb_dataservices + 1)
         # all datasets : 3 mocks + 2 manuals (arch and no_arch)
         assert Dataset.objects.count() == nb_datasets + 2
+        assert Dataservice.objects.count() == nb_dataservices + 2
 
         archived_items = [i for i in job.items if i.status == "archived"]
-        assert len(archived_items) == 1
+        assert len(archived_items) == 2
         assert archived_items[0].dataset == dataset_arch
+        assert archived_items[0].dataservice is None
+        assert archived_items[1].dataset is None
+        assert archived_items[1].dataservice == dataservice_arch
 
         dataset_arch.reload()
         assert dataset_arch.archived is not None
@@ -258,18 +301,41 @@ class BaseBackendTest:
         assert "archived" not in dataset_no_arch.harvest
         assert "archived_at" not in dataset_no_arch.harvest
 
+        dataservice_arch.reload()
+        assert dataservice_arch.archived_at is not None
+        assert "archived_reason" in dataservice_arch.harvest
+        assert "archived_at" in dataservice_arch.harvest
+
+        dataservice_no_arch.reload()
+        assert dataservice_no_arch.archived_at is None
+        assert "archived_reason" not in dataservice_no_arch.harvest
+        assert "archived_at" not in dataservice_no_arch.harvest
+
         # test unarchive: archive manually then relaunch harvest
-        dataset = Dataset.objects.get(**{"harvest__remote_id": "fake-1"})
+        dataset = Dataset.objects.get(**{"harvest__remote_id": "dataset-fake-1"})
         dataset.archived = datetime.utcnow()
         dataset.harvest.archived = "not-on-remote"
         dataset.harvest.archived_at = datetime.utcnow()
         dataset.save()
+
+        dataservice = Dataservice.objects.get(**{"harvest__remote_id": "dataservice-fake-1"})
+        dataservice.archived_at = datetime.utcnow()
+        dataservice.harvest.archived_reason = "not-on-remote"
+        dataservice.harvest.archived_at = datetime.utcnow()
+        dataservice.save()
+
        backend.harvest()
+
        dataset.reload()
        assert dataset.archived is None
        assert "archived" not in dataset.harvest
        assert "archived_at" not in dataset.harvest
 
+        dataservice.reload()
+        assert dataservice.archived_at is None
+        assert "archived_reason" not in dataservice.harvest
+        assert "archived_at" not in dataservice.harvest
+
     def test_harvest_datasets_get_deleted(self):
         nb_datasets = 3
         source = HarvestSourceFactory(config={"dataset_remote_ids": gen_remote_IDs(nb_datasets)})
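
The new `prefix` argument keeps dataset and dataservice remote IDs disjoint, so the `harvest__remote_id` lookups in the assertions above cannot collide across the two collections. For example:

    >>> gen_remote_IDs(2, prefix="dataset-")
    ['dataset-fake-0', 'dataset-fake-1']
    >>> gen_remote_IDs(2, prefix="dataservice-")
    ['dataservice-fake-0', 'dataservice-fake-1']
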
udata/harvest/tests/test_dcat_backend.py CHANGED
@@ -179,7 +179,7 @@ class DcatBackendTest:
         assert dataservices[0].title == "Explore API v2"
         assert dataservices[0].base_api_url == "https://data.paris2024.org/api/explore/v2.1/"
         assert (
-            dataservices[0].endpoint_description_url
+            dataservices[0].machine_documentation_url
             == "https://data.paris2024.org/api/explore/v2.1/swagger.json"
         )
         assert (
udata/migrations/2025-01-05-dataservices-fields-changes.py ADDED
@@ -0,0 +1,136 @@
+"""
+This migration replaces the legacy `is_restricted`/`has_token` dataservice fields with `access_type`, and moves `endpoint_description_url` to `machine_documentation_url` or `technical_documentation_url`.
+"""
+
+import logging
+from typing import List
+
+from mongoengine.connection import get_db
+
+from udata.core.dataservices.constants import (
+    DATASERVICE_ACCESS_TYPE_OPEN,
+    DATASERVICE_ACCESS_TYPE_OPEN_WITH_ACCOUNT,
+    DATASERVICE_ACCESS_TYPE_RESTRICTED,
+)
+from udata.core.dataservices.models import Dataservice
+
+log = logging.getLogger(__name__)
+
+
+def migrate(db):
+    log.info("Preprocessing dataservices…")
+
+    count = get_db().dataservice.update_many(
+        filter={
+            "$or": [
+                {"is_restricted": None},
+                {"is_restricted": {"$exists": False}},
+            ]
+        },
+        update={"$set": {"is_restricted": False}},
+    )
+    log.info(
+        f"\tConverted {count.modified_count} dataservices from `is_restricted=None` to `is_restricted=False`"
+    )
+
+    count = get_db().dataservice.update_many(
+        filter={
+            "$or": [
+                {"has_token": None},
+                {"has_token": {"$exists": False}},
+            ]
+        },
+        update={"$set": {"has_token": False}},
+    )
+    log.info(
+        f"\tConverted {count.modified_count} dataservices from `has_token=None` to `has_token=False`"
+    )
+
+    for dataservice in get_db().dataservice.find({"is_restricted": True, "has_token": False}):
+        log.info(
+            f"\tDataservice #{dataservice['_id']} {dataservice['title']} is restricted but without token. (will be set to access_type={DATASERVICE_ACCESS_TYPE_RESTRICTED})"
+        )
+
+    log.info("Processing dataservices…")
+
+    count = get_db().dataservice.update_many(
+        filter={
+            "is_restricted": True,
+            # `has_token` could be True or False, we don't care
+        },
+        update={"$set": {"access_type": DATASERVICE_ACCESS_TYPE_RESTRICTED}},
+    )
+    log.info(
+        f"\t{count.modified_count} restricted dataservices to DATASERVICE_ACCESS_TYPE_RESTRICTED"
+    )
+
+    count = get_db().dataservice.update_many(
+        filter={
+            "is_restricted": False,
+            "has_token": True,
+        },
+        update={"$set": {"access_type": DATASERVICE_ACCESS_TYPE_OPEN_WITH_ACCOUNT}},
+    )
+    log.info(
+        f"\t{count.modified_count} dataservices not restricted but with token to DATASERVICE_ACCESS_TYPE_OPEN_WITH_ACCOUNT"
+    )
+
+    count = get_db().dataservice.update_many(
+        filter={
+            "is_restricted": False,
+            "has_token": False,
+        },
+        update={"$set": {"access_type": DATASERVICE_ACCESS_TYPE_OPEN}},
+    )
+    log.info(f"\t{count.modified_count} open dataservices to DATASERVICE_ACCESS_TYPE_OPEN")
+
+    dataservices: List[Dataservice] = get_db().dataservice.find()
+    for dataservice in dataservices:
+        if (
+            "endpoint_description_url" not in dataservice
+            or not dataservice["endpoint_description_url"]
+        ):
+            continue
+
+        to_set = {}
+        if (
+            dataservice["endpoint_description_url"].endswith(".json")
+            or dataservice["endpoint_description_url"].endswith(".yaml")
+            or dataservice["endpoint_description_url"].endswith(".yml")
+            or dataservice["endpoint_description_url"].endswith("?format=openapi-json")
+            or "getcapabilities" in dataservice["endpoint_description_url"].lower()
+            or "getresourcedescription" in dataservice["endpoint_description_url"].lower()
+            or dataservice["endpoint_description_url"].startswith(
+                "https://api.insee.fr/catalogue/api-docs/carbon.super"
+            )
+        ):
+            # log.info(f"[MACHINE] {dataservice['endpoint_description_url']}")
+            to_set["machine_documentation_url"] = dataservice["endpoint_description_url"]
+        else:
+            # log.info(f"[ HUMAN ] {dataservice['endpoint_description_url']}")
+            to_set["technical_documentation_url"] = dataservice["endpoint_description_url"]
+
+        result = get_db().dataservice.update_one(
+            filter={
+                "_id": dataservice["_id"],
+            },
+            update={"$set": to_set},
+        )
+        assert result.modified_count == 1
+        assert result.matched_count == 1
+
+    log.info("Postprocessing dataservices…")
+
+    count = get_db().dataservice.update_many(
+        {},
+        {
+            "$unset": {
+                "endpoint_description_url": "",
+                "is_restricted": "",
+                "has_token": "",
+            }
+        },
+    )
+    log.info(f"\tUnset legacy fields on {count.modified_count} dataservices")
+
+    log.info("Done")
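
In short, the migration collapses the two legacy booleans into a single enum field. A hypothetical helper (not part of udata, for illustration only) mirroring the three `update_many` calls above:

    from udata.core.dataservices.constants import (
        DATASERVICE_ACCESS_TYPE_OPEN,
        DATASERVICE_ACCESS_TYPE_OPEN_WITH_ACCOUNT,
        DATASERVICE_ACCESS_TYPE_RESTRICTED,
    )

    def legacy_to_access_type(is_restricted: bool, has_token: bool) -> str:
        """Illustration only: the mapping the migration applies."""
        if is_restricted:
            return DATASERVICE_ACCESS_TYPE_RESTRICTED       # token or not
        if has_token:
            return DATASERVICE_ACCESS_TYPE_OPEN_WITH_ACCOUNT
        return DATASERVICE_ACCESS_TYPE_OPEN
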
udata/settings.py CHANGED
@@ -115,6 +115,11 @@ class Defaults(object):
 
     SECURITY_RETURN_GENERIC_RESPONSES = False
 
+    # Inactive users settings
+    YEARS_OF_INACTIVITY_BEFORE_DELETION = None
+    DAYS_BEFORE_ACCOUNT_INACTIVITY_NOTIFY_DELAY = 30
+    MAX_NUMBER_OF_USER_INACTIVITY_NOTIFICATIONS = 200
+
     # Sentry configuration
     SENTRY_DSN = None
     SENTRY_TAGS = {}
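
The `None` default suggests account deletion ships disabled and a deployment must opt in from its local settings; the other two names suggest a grace period and a cap on warning mails. A hypothetical override (values illustrative, interpretations of the setting names are assumptions):

    # udata.cfg (hypothetical deployment settings)
    YEARS_OF_INACTIVITY_BEFORE_DELETION = 3            # None (default) presumably leaves deletion off
    DAYS_BEFORE_ACCOUNT_INACTIVITY_NOTIFY_DELAY = 30   # presumably the grace period after the warning mail
    MAX_NUMBER_OF_USER_INACTIVITY_NOTIFICATIONS = 200  # presumably a cap on warning mails per run
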
udata/templates/mail/account_inactivity.html ADDED
@@ -0,0 +1,29 @@
+{% extends 'mail/base.html' %}
+{% from 'mail/button.html' import mail_button %}
+
+{% block body %}
+<p style="margin: 0;padding: 0;">
+    {{ _(
+        'Your account (%(user_email)s) has been inactive for %(inactivity_years)d years or more.',
+        user_email=user.email,
+        inactivity_years=config.YEARS_OF_INACTIVITY_BEFORE_DEACTIVATION
+    )
+    }}
+</p>
+<br/>
+<p style="margin: 0;padding: 0;"><b>
+    {{ _(
+        'If you want to keep your account, please log in with your account on %(site)s.',
+        site=config.SITE_TITLE
+    )
+    }}
+</b></p>
+<br/>
+<p style="margin: 0;padding: 0;">
+    {{ _(
+        'Without logging in, your account will be deleted within %(notify_delay)d days.',
+        notify_delay=config.DAYS_BEFORE_ACCOUNT_INACTIVITY_NOTIFY_DELAY
+    )
+    }}
+</p>
+{% endblock %}
udata/templates/mail/account_inactivity.txt ADDED
@@ -0,0 +1,22 @@
+{% extends 'mail/base.txt' %}
+
+{% block body %}
+{{ _(
+    'Your account (%(user_email)s) has been inactive for %(inactivity_years)d years or more.',
+    user_email=user.email,
+    inactivity_years=config.YEARS_OF_INACTIVITY_BEFORE_DEACTIVATION
+)
+}}
+
+{{ _(
+    'If you want to keep your account, please log in with your account on %(site)s.',
+    site=config.SITE_TITLE
+)
+}}
+
+{{ _(
+    'Without logging in, your account will be deleted within %(notify_delay)d days.',
+    notify_delay=config.DAYS_BEFORE_ACCOUNT_INACTIVITY_NOTIFY_DELAY
+)
+}}
+{% endblock %}
udata/templates/mail/inactive_account_deleted.html ADDED
@@ -0,0 +1,5 @@
+{% extends 'mail/base.html' %}
+
+{% block body %}
+<p style="margin: 0;padding: 0;">{{ _('Your account on %(site)s has been deleted due to inactivity', site=config.SITE_TITLE) }}</p>
+{% endblock %}
udata/templates/mail/inactive_account_deleted.txt ADDED
@@ -0,0 +1,6 @@
+{% extends 'mail/base.txt' %}
+
+{% block body %}
+{{ _('Your account on %(site)s has been deleted due to inactivity', site=config.SITE_TITLE) }}.
+
+{% endblock %}
udata/tests/api/test_dataservices_api.py CHANGED
@@ -5,6 +5,11 @@ from flask import url_for
 from werkzeug.test import TestResponse
 
 import udata.core.organization.constants as org_constants
+from udata.core.dataservices.constants import (
+    DATASERVICE_ACCESS_TYPE_OPEN,
+    DATASERVICE_ACCESS_TYPE_OPEN_WITH_ACCOUNT,
+    DATASERVICE_ACCESS_TYPE_RESTRICTED,
+)
 from udata.core.dataservices.factories import DataserviceFactory
 from udata.core.dataservices.models import Dataservice
 from udata.core.dataset.factories import DatasetFactory, LicenseFactory
@@ -246,6 +251,7 @@ class DataserviceAPITest(APITestCase):
                 "title": "B",
                 "base_api_url": "https://example.org/B",
                 "datasets": [dataset_b.id],
+                "access_type": DATASERVICE_ACCESS_TYPE_OPEN,
             },
         )
         self.post(
@@ -254,6 +260,7 @@ class DataserviceAPITest(APITestCase):
                 "title": "C",
                 "base_api_url": "https://example.org/C",
                 "datasets": [dataset_a.id, dataset_b.id],
+                "access_type": DATASERVICE_ACCESS_TYPE_OPEN_WITH_ACCOUNT,
             },
         )
         self.post(
@@ -262,6 +269,7 @@ class DataserviceAPITest(APITestCase):
                 "title": "A",
                 "base_api_url": "https://example.org/A",
                 "datasets": [dataset_a.id],
+                "access_type": DATASERVICE_ACCESS_TYPE_RESTRICTED,
             },
         )
         self.post(
@@ -324,10 +332,27 @@ class DataserviceAPITest(APITestCase):
         self.assertEqual(response.json["data"][0]["title"], "A")
         self.assertEqual(response.json["data"][1]["title"], "C")
 
+        response = self.get(url_for("api.dataservices", access_type=DATASERVICE_ACCESS_TYPE_OPEN))
+        self.assert200(response)
+
+        print(response.json)
+        self.assertEqual(response.json["total"], 1)
+        self.assertEqual(response.json["data"][0]["title"], "B")
+
     def test_dataservice_api_index_with_sorts(self):
         DataserviceFactory(title="A", created_at="2024-03-01", metadata_modified_at="2024-03-01")
-        DataserviceFactory(title="B", created_at="2024-02-01", metadata_modified_at="2024-05-01")
-        DataserviceFactory(title="C", created_at="2024-05-01", metadata_modified_at="2024-04-01")
+        DataserviceFactory(
+            title="B",
+            created_at="2024-02-01",
+            metadata_modified_at="2024-05-01",
+            metrics={"views": 42},
+        )
+        DataserviceFactory(
+            title="C",
+            created_at="2024-05-01",
+            metadata_modified_at="2024-04-01",
+            metrics={"views": 1337},
+        )
         DataserviceFactory(title="D", created_at="2024-04-01", metadata_modified_at="2024-02-01")
 
         response = self.get(url_for("api.dataservices", sort="title"))
@@ -366,6 +391,20 @@ class DataserviceAPITest(APITestCase):
             [dataservice["title"] for dataservice in response.json["data"]], ["B", "C", "A", "D"]
         )
 
+        response = self.get(url_for("api.dataservices", sort="views"))
+        self.assert200(response)
+
+        self.assertEqual(
+            [dataservice["title"] for dataservice in response.json["data"]], ["A", "D", "B", "C"]
+        )
+
+        response = self.get(url_for("api.dataservices", sort="-views"))
+        self.assert200(response)
+
+        self.assertEqual(
+            [dataservice["title"] for dataservice in response.json["data"]], ["C", "B", "D", "A"]
+        )
+
     def test_dataservice_api_index_with_wrong_dataset_id(self):
         response = self.get(url_for("api.dataservices", sort="title", dataset=str("xxx")))
         self.assert400(response)
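
For intuition, the two index features exercised above translate to plain HTTP query parameters. A hypothetical client-side sketch (the instance URL, the `/api/1/` mount point, and the lowercase string serialization of the access-type constants are all assumptions here, not confirmed by the diff):

    import requests

    base = "https://example-udata-instance.org/api/1"  # hypothetical instance

    # Only dataservices whose access_type is "open"
    open_only = requests.get(f"{base}/dataservices/", params={"access_type": "open"})

    # Most-viewed dataservices first (the "-" prefix inverts the sort)
    most_viewed = requests.get(f"{base}/dataservices/", params={"sort": "-views"})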