udata 14.0.3.dev1__py3-none-any.whl → 14.7.3.dev4__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- udata/api/__init__.py +2 -0
- udata/api_fields.py +120 -19
- udata/app.py +18 -20
- udata/auth/__init__.py +4 -7
- udata/auth/forms.py +3 -3
- udata/auth/views.py +13 -6
- udata/commands/dcat.py +1 -1
- udata/commands/serve.py +3 -11
- udata/core/activity/api.py +5 -6
- udata/core/badges/tests/test_tasks.py +0 -2
- udata/core/csv.py +5 -0
- udata/core/dataservices/api.py +8 -1
- udata/core/dataservices/apiv2.py +3 -6
- udata/core/dataservices/models.py +5 -2
- udata/core/dataservices/rdf.py +2 -1
- udata/core/dataservices/tasks.py +6 -2
- udata/core/dataset/api.py +30 -4
- udata/core/dataset/api_fields.py +1 -1
- udata/core/dataset/apiv2.py +1 -1
- udata/core/dataset/constants.py +2 -9
- udata/core/dataset/models.py +21 -9
- udata/core/dataset/permissions.py +31 -0
- udata/core/dataset/rdf.py +18 -16
- udata/core/dataset/tasks.py +16 -7
- udata/core/discussions/api.py +15 -1
- udata/core/discussions/models.py +6 -0
- udata/core/legal/__init__.py +0 -0
- udata/core/legal/mails.py +128 -0
- udata/core/organization/api.py +16 -5
- udata/core/organization/api_fields.py +3 -3
- udata/core/organization/apiv2.py +3 -4
- udata/core/organization/mails.py +1 -1
- udata/core/organization/models.py +40 -7
- udata/core/organization/notifications.py +84 -0
- udata/core/organization/permissions.py +1 -1
- udata/core/organization/tasks.py +3 -0
- udata/core/pages/models.py +49 -0
- udata/core/pages/tests/test_api.py +165 -1
- udata/core/post/api.py +25 -70
- udata/core/post/constants.py +8 -0
- udata/core/post/models.py +109 -17
- udata/core/post/tests/test_api.py +140 -3
- udata/core/post/tests/test_models.py +24 -0
- udata/core/reports/api.py +18 -0
- udata/core/reports/models.py +42 -2
- udata/core/reuse/api.py +8 -0
- udata/core/reuse/apiv2.py +3 -6
- udata/core/reuse/models.py +1 -1
- udata/core/spatial/forms.py +2 -2
- udata/core/topic/models.py +8 -2
- udata/core/user/api.py +10 -3
- udata/core/user/api_fields.py +3 -3
- udata/core/user/models.py +33 -8
- udata/features/notifications/api.py +7 -18
- udata/features/notifications/models.py +59 -0
- udata/features/notifications/tasks.py +25 -0
- udata/features/transfer/actions.py +2 -0
- udata/features/transfer/models.py +17 -0
- udata/features/transfer/notifications.py +96 -0
- udata/flask_mongoengine/engine.py +0 -4
- udata/flask_mongoengine/pagination.py +1 -1
- udata/frontend/markdown.py +2 -1
- udata/harvest/actions.py +20 -0
- udata/harvest/api.py +24 -7
- udata/harvest/backends/base.py +27 -1
- udata/harvest/backends/ckan/harvesters.py +21 -4
- udata/harvest/backends/dcat.py +4 -1
- udata/harvest/commands.py +33 -0
- udata/harvest/filters.py +17 -6
- udata/harvest/models.py +16 -0
- udata/harvest/permissions.py +27 -0
- udata/harvest/tests/ckan/test_ckan_backend.py +33 -0
- udata/harvest/tests/test_actions.py +46 -2
- udata/harvest/tests/test_api.py +161 -6
- udata/harvest/tests/test_base_backend.py +86 -1
- udata/harvest/tests/test_dcat_backend.py +68 -3
- udata/harvest/tests/test_filters.py +6 -0
- udata/i18n.py +1 -4
- udata/mail.py +14 -0
- udata/migrations/2021-08-17-harvest-integrity.py +23 -16
- udata/migrations/2025-10-31-create-membership-request-notifications.py +55 -0
- udata/migrations/2025-12-04-add-uuid-to-discussion-messages.py +28 -0
- udata/migrations/2025-12-16-create-transfer-request-notifications.py +69 -0
- udata/migrations/2026-01-14-add-default-kind-to-posts.py +17 -0
- udata/mongo/slug_fields.py +1 -1
- udata/rdf.py +65 -11
- udata/routing.py +2 -2
- udata/settings.py +11 -0
- udata/tasks.py +2 -0
- udata/templates/mail/message.html +3 -1
- udata/tests/api/__init__.py +7 -17
- udata/tests/api/test_activities_api.py +36 -0
- udata/tests/api/test_datasets_api.py +69 -0
- udata/tests/api/test_organizations_api.py +0 -3
- udata/tests/api/test_reports_api.py +157 -0
- udata/tests/api/test_user_api.py +1 -1
- udata/tests/apiv2/test_dataservices.py +14 -0
- udata/tests/apiv2/test_organizations.py +9 -0
- udata/tests/apiv2/test_reuses.py +11 -0
- udata/tests/cli/test_cli_base.py +0 -1
- udata/tests/dataservice/test_dataservice_tasks.py +29 -0
- udata/tests/dataset/test_dataset_model.py +13 -1
- udata/tests/dataset/test_dataset_rdf.py +164 -5
- udata/tests/dataset/test_dataset_tasks.py +25 -0
- udata/tests/frontend/test_auth.py +58 -1
- udata/tests/frontend/test_csv.py +0 -3
- udata/tests/helpers.py +31 -27
- udata/tests/organization/test_notifications.py +67 -2
- udata/tests/search/test_search_integration.py +70 -0
- udata/tests/site/test_site_csv_exports.py +22 -10
- udata/tests/test_activity.py +9 -9
- udata/tests/test_api_fields.py +10 -0
- udata/tests/test_discussions.py +5 -5
- udata/tests/test_legal_mails.py +359 -0
- udata/tests/test_notifications.py +15 -57
- udata/tests/test_notifications_task.py +43 -0
- udata/tests/test_owned.py +81 -1
- udata/tests/test_transfer.py +181 -2
- udata/tests/test_uris.py +33 -0
- udata/translations/ar/LC_MESSAGES/udata.mo +0 -0
- udata/translations/ar/LC_MESSAGES/udata.po +309 -158
- udata/translations/de/LC_MESSAGES/udata.mo +0 -0
- udata/translations/de/LC_MESSAGES/udata.po +313 -160
- udata/translations/es/LC_MESSAGES/udata.mo +0 -0
- udata/translations/es/LC_MESSAGES/udata.po +312 -160
- udata/translations/fr/LC_MESSAGES/udata.mo +0 -0
- udata/translations/fr/LC_MESSAGES/udata.po +475 -202
- udata/translations/it/LC_MESSAGES/udata.mo +0 -0
- udata/translations/it/LC_MESSAGES/udata.po +317 -162
- udata/translations/pt/LC_MESSAGES/udata.mo +0 -0
- udata/translations/pt/LC_MESSAGES/udata.po +315 -161
- udata/translations/sr/LC_MESSAGES/udata.mo +0 -0
- udata/translations/sr/LC_MESSAGES/udata.po +323 -164
- udata/translations/udata.pot +169 -124
- udata/uris.py +0 -2
- udata/utils.py +23 -0
- udata-14.7.3.dev4.dist-info/METADATA +109 -0
- {udata-14.0.3.dev1.dist-info → udata-14.7.3.dev4.dist-info}/RECORD +142 -135
- udata/core/post/forms.py +0 -30
- udata/flask_mongoengine/json.py +0 -38
- udata/templates/mail/base.html +0 -105
- udata/templates/mail/base.txt +0 -6
- udata/templates/mail/button.html +0 -3
- udata/templates/mail/layouts/1-column.html +0 -19
- udata/templates/mail/layouts/2-columns.html +0 -20
- udata/templates/mail/layouts/center-panel.html +0 -16
- udata-14.0.3.dev1.dist-info/METADATA +0 -132
- {udata-14.0.3.dev1.dist-info → udata-14.7.3.dev4.dist-info}/WHEEL +0 -0
- {udata-14.0.3.dev1.dist-info → udata-14.7.3.dev4.dist-info}/entry_points.txt +0 -0
- {udata-14.0.3.dev1.dist-info → udata-14.7.3.dev4.dist-info}/licenses/LICENSE +0 -0
- {udata-14.0.3.dev1.dist-info → udata-14.7.3.dev4.dist-info}/top_level.txt +0 -0

udata/tests/api/test_reports_api.py
CHANGED

@@ -1,3 +1,5 @@
+from datetime import datetime
+
 from flask import url_for

 from udata.core.dataset.factories import DatasetFactory

@@ -148,6 +150,28 @@ class ReportsAPITest(APITestCase):

         self.assertEqual(payload["data"][1]["subject"]["id"], str(spam_reuse.id))

+    def test_reports_api_list_sort_by_reported_at(self):
+        user = UserFactory()
+
+        dataset1 = DatasetFactory.create(owner=user)
+        dataset2 = DatasetFactory.create(owner=user)
+        dataset3 = DatasetFactory.create(owner=user)
+
+        # Create reports with different reported_at times
+        report1 = Report(subject=dataset1, reason="spam", reported_at=datetime(2024, 1, 1)).save()
+        report2 = Report(subject=dataset2, reason="spam", reported_at=datetime(2024, 1, 3)).save()
+        report3 = Report(subject=dataset3, reason="spam", reported_at=datetime(2024, 1, 2)).save()
+
+        self.login(AdminFactory())
+
+        # Sort by -reported_at (most recent first)
+        response = self.get(url_for("api.reports", sort="-reported_at"))
+        self.assert200(response)
+        payload = response.json
+        self.assertEqual(payload["data"][0]["id"], str(report2.id))
+        self.assertEqual(payload["data"][1]["id"], str(report3.id))
+        self.assertEqual(payload["data"][2]["id"], str(report1.id))
+
     def test_reports_api_get(self):
         user = UserFactory()


@@ -165,3 +189,136 @@ class ReportsAPITest(APITestCase):

         payload = response.json
         self.assertEqual(payload["subject"]["id"], str(spam_dataset.id))
+
+    def test_reports_api_dismiss(self):
+        user = UserFactory()
+        admin = AdminFactory()
+
+        spam_dataset = DatasetFactory.create(owner=user)
+        report = Report(subject=spam_dataset, reason="spam").save()
+
+        dismiss_time = datetime.utcnow().isoformat()
+
+        # Should require admin
+        response = self.patch(url_for("api.report", report=report), {"dismissed_at": dismiss_time})
+        self.assert401(response)
+
+        self.login(user)
+        response = self.patch(url_for("api.report", report=report), {"dismissed_at": dismiss_time})
+        self.assert403(response)
+
+        self.login(admin)
+        response = self.patch(url_for("api.report", report=report), {"dismissed_at": dismiss_time})
+        self.assert200(response)
+
+        payload = response.json
+        self.assertIsNotNone(payload["dismissed_at"])
+        self.assertEqual(payload["dismissed_by"]["id"], str(admin.id))
+
+        report.reload()
+        self.assertIsNotNone(report.dismissed_at)
+        self.assertEqual(report.dismissed_by.id, admin.id)
+
+    def test_reports_api_undismiss(self):
+        user = UserFactory()
+        admin = AdminFactory()
+
+        spam_dataset = DatasetFactory.create(owner=user)
+        report = Report(
+            subject=spam_dataset,
+            reason="spam",
+            dismissed_at=datetime.utcnow(),
+            dismissed_by=admin,
+        ).save()
+
+        # Verify report is dismissed
+        self.assertIsNotNone(report.dismissed_at)
+        self.assertIsNotNone(report.dismissed_by)
+
+        # Should require admin
+        response = self.patch(url_for("api.report", report=report), {"dismissed_at": None})
+        self.assert401(response)
+
+        self.login(user)
+        response = self.patch(url_for("api.report", report=report), {"dismissed_at": None})
+        self.assert403(response)
+
+        self.login(admin)
+        response = self.patch(url_for("api.report", report=report), {"dismissed_at": None})
+        self.assert200(response)
+
+        payload = response.json
+        self.assertIsNone(payload["dismissed_at"])
+        self.assertIsNone(payload["dismissed_by"])
+
+        report.reload()
+        self.assertIsNone(report.dismissed_at)
+        self.assertIsNone(report.dismissed_by)
+
+    def test_reports_api_filter_by_handled(self):
+        user = UserFactory()
+        admin = AdminFactory()
+
+        dataset1 = DatasetFactory.create(owner=user)
+        dataset2 = DatasetFactory.create(owner=user)
+
+        # Unhandled report (not dismissed)
+        ongoing_report = Report(subject=dataset1, reason="spam").save()
+
+        # Handled report (dismissed)
+        dismissed_report = Report(
+            subject=dataset2, reason="spam", dismissed_at=datetime.utcnow(), dismissed_by=admin
+        ).save()
+
+        self.login(admin)
+
+        # Filter by unhandled
+        response = self.get(url_for("api.reports", handled="false"))
+        self.assert200(response)
+        payload = response.json
+        self.assertEqual(payload["total"], 1)
+        self.assertEqual(payload["data"][0]["id"], str(ongoing_report.id))
+
+        # Filter by handled
+        response = self.get(url_for("api.reports", handled="true"))
+        self.assert200(response)
+        payload = response.json
+        self.assertEqual(payload["total"], 1)
+        self.assertEqual(payload["data"][0]["id"], str(dismissed_report.id))
+
+        # No filter (all reports)
+        response = self.get(url_for("api.reports"))
+        self.assert200(response)
+        payload = response.json
+        self.assertEqual(payload["total"], 2)
+
+    def test_reports_api_filter_handled_with_deleted_subject(self):
+        """Reports with deleted subjects should appear when handled="true", not handled="false"."""
+        user = UserFactory()
+        admin = AdminFactory()
+
+        dataset1 = DatasetFactory.create(owner=user)
+        dataset2 = DatasetFactory.create(owner=user)
+
+        # Unhandled report (not dismissed, subject exists)
+        ongoing_report = Report(subject=dataset1, reason="spam").save()
+
+        # Report with deleted subject (should appear in "handled", not "unhandled")
+        deleted_subject_report = Report(subject=dataset2, reason="spam").save()
+        dataset2.delete()
+
+        self.login(admin)
+
+        # Filter by unhandled - should only return the report with existing subject
+        response = self.get(url_for("api.reports", handled="false"))
+        self.assert200(response)
+        payload = response.json
+        self.assertEqual(payload["total"], 1)
+        self.assertEqual(payload["data"][0]["id"], str(ongoing_report.id))
+
+        # Filter by handled - should return the report with deleted subject
+        response = self.get(url_for("api.reports", handled="true"))
+        self.assert200(response)
+        payload = response.json
+        self.assertEqual(payload["total"], 1)
+        self.assertEqual(payload["data"][0]["id"], str(deleted_subject_report.id))
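
The last two tests pin down the semantics of the `handled` filter: a report counts as handled once it has been dismissed or once its subject no longer exists. A minimal sketch of such a filter with MongoEngine Q objects follows; `subject_deleted_at` is an assumed field name used purely for illustration, since only `dismissed_at` appears explicitly in the tests above.

```python
from mongoengine import Q


def filter_reports_by_handled(queryset, handled: bool):
    # Assumed fields: dismissed_at (shown in the tests above) and
    # subject_deleted_at (illustrative stand-in for "the subject was deleted").
    handled_query = Q(dismissed_at__ne=None) | Q(subject_deleted_at__ne=None)
    if handled:
        return queryset.filter(handled_query)
    return queryset.filter(dismissed_at=None, subject_deleted_at=None)
```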

udata/tests/api/test_user_api.py
CHANGED

@@ -382,7 +382,7 @@ class UserAPITest(APITestCase):
         response = self.delete(url_for("api.user", user=user_to_delete))
         self.assertEqual(list(storages.avatars.list_files()), [])
         self.assert204(response)
-        self.
+        self.assertEqual(len(mails), 1)

         user_to_delete.reload()
         response = self.delete(url_for("api.user", user=user_to_delete))

udata/tests/apiv2/test_dataservices.py
ADDED

@@ -0,0 +1,14 @@
+from udata.core.dataservices.factories import DataserviceFactory
+from udata.tests.api import APITestCase
+
+
+class DataserviceSearchAPIV2Test(APITestCase):
+    def test_dataservice_search_with_model_query_param(self):
+        """Searching dataservices with 'model' as query param should not crash.
+
+        Regression test for: TypeError: query() got multiple values for argument 'model'
+        """
+        DataserviceFactory.create_batch(3)
+
+        response = self.get("/api/2/dataservices/search/?model=malicious")
+        self.assert200(response)
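
The `TypeError: query() got multiple values for argument 'model'` mentioned in the docstring is the usual collision between a positional argument and request parameters forwarded as keyword arguments. Below is a minimal reproduction and one way to guard against it; the function and parameter names are illustrative, not udata's search internals.

```python
def query(model, **filters):
    """Stand-in for a search entry point that takes the model positionally."""
    return {"model": model, "filters": filters}


request_args = {"q": "air quality", "model": "malicious"}  # raw query-string args

# Buggy call: the user-supplied "model" key collides with the positional argument.
# query("Dataservice", **request_args)
# -> TypeError: query() got multiple values for argument 'model'

# Guarded call: strip reserved names before forwarding request args.
RESERVED_PARAMS = {"model"}
safe_args = {k: v for k, v in request_args.items() if k not in RESERVED_PARAMS}
print(query("Dataservice", **safe_args))
```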

udata/tests/apiv2/test_organizations.py
CHANGED

@@ -4,6 +4,15 @@ from udata.core.organization.factories import Member, OrganizationFactory
 from udata.tests.api import APITestCase


+class OrganizationSearchAPIV2Test(APITestCase):
+    def test_organization_search_with_model_query_param(self):
+        """Searching organizations with 'model' as query param should not crash."""
+        OrganizationFactory.create_batch(3)
+
+        response = self.get("/api/2/organizations/search/?model=malicious")
+        self.assert200(response)
+
+
 class OrganizationExtrasAPITest(APITestCase):
     def setUp(self):
         self.login()

udata/tests/apiv2/test_reuses.py
ADDED

@@ -0,0 +1,11 @@
+from udata.core.reuse.factories import ReuseFactory
+from udata.tests.api import APITestCase
+
+
+class ReuseSearchAPIV2Test(APITestCase):
+    def test_reuse_search_with_model_query_param(self):
+        """Searching reuses with 'model' as query param should not crash."""
+        ReuseFactory.create_batch(3)
+
+        response = self.get("/api/2/reuses/search/?model=malicious")
+        self.assert200(response)
udata/tests/cli/test_cli_base.py
CHANGED

udata/tests/dataservice/test_dataservice_tasks.py
CHANGED

@@ -43,3 +43,32 @@ class DataserviceTasksTest(PytestOnlyDBTestCase):
         assert Discussion.objects.filter(id=discussion.id).count() == 0
         assert Follow.objects.filter(id=follower.id).count() == 0
         assert HarvestJob.objects.filter(items__dataservice=dataservices[0].id).count() == 0
+
+    def test_purge_dataservices_cleans_all_harvest_items_references(self):
+        """Test that purging dataservices cleans all HarvestItem references in a job.
+
+        The same dataservice can appear multiple times in a job's items (e.g. if the
+        harvest source has duplicates). The $ operator only updates the first match,
+        so we need to use $[] with array_filters to update all matches.
+        """
+        dataservice_to_delete = Dataservice.objects.create(
+            title="delete me", base_api_url="https://example.com/api", deleted_at="2016-01-01"
+        )
+        dataservice_keep = Dataservice.objects.create(
+            title="keep me", base_api_url="https://example.com/api"
+        )
+
+        job = HarvestJobFactory(
+            items=[
+                HarvestItem(dataservice=dataservice_to_delete, remote_id="1"),
+                HarvestItem(dataservice=dataservice_keep, remote_id="2"),
+                HarvestItem(dataservice=dataservice_to_delete, remote_id="3"),
+            ]
+        )
+
+        tasks.purge_dataservices()
+
+        job.reload()
+        assert job.items[0].dataservice is None
+        assert job.items[1].dataservice == dataservice_keep
+        assert job.items[2].dataservice is None
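
The docstring above contrasts MongoDB's two positional update operators. A short illustration with pymongo follows, outside of udata's models; the collection layout and field names are assumptions made only for this sketch.

```python
from pymongo import MongoClient

# Assumed layout: each harvest job document embeds an "items" array whose
# elements reference a dataservice id.
jobs = MongoClient().example_db.harvest_job
purged_id = "000000000000000000000000"  # placeholder dataservice id

# "$" only rewrites the FIRST array element matching the filter:
jobs.update_many(
    {"items.dataservice": purged_id},
    {"$set": {"items.$.dataservice": None}},
)

# "$[ref]" combined with array_filters rewrites EVERY matching element:
jobs.update_many(
    {"items.dataservice": purged_id},
    {"$set": {"items.$[ref].dataservice": None}},
    array_filters=[{"ref.dataservice": purged_id}],
)
```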

udata/tests/dataset/test_dataset_model.py
CHANGED

@@ -1,4 +1,4 @@
-from datetime import date, datetime, timedelta
+from datetime import date, datetime, timedelta, timezone
 from uuid import uuid4

 import pytest

@@ -206,6 +206,18 @@ class DatasetModelTest(PytestOnlyDBTestCase):
         assert dataset.quality["update_fulfilled_in_time"] is False
         assert dataset.quality["score"] == Dataset.normalize_score(1)

+    def test_quality_frequency_update_with_harvest_timezone_aware(self):
+        """Test that update_fulfilled_in_time works with timezone-aware harvest dates."""
+        dataset = DatasetFactory(
+            description="",
+            frequency=UpdateFrequency.DAILY,
+            harvest=HarvestDatasetMetadata(
+                modified_at=datetime.now(timezone.utc) - timedelta(hours=1),
+            ),
+        )
+        assert dataset.quality["update_frequency"] is True
+        assert dataset.quality["update_fulfilled_in_time"] is True
+
     def test_quality_description_length(self):
         dataset = DatasetFactory(
             description="a" * (current_app.config.get("QUALITY_DESCRIPTION_LENGTH") - 1)
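
The new test above feeds a timezone-aware harvest date into a quality computation that previously only saw naive datetimes; in Python, comparing naive and aware datetimes directly raises a TypeError, which is likely what this guards against. A minimal sketch of one way to normalize before comparing, under that assumption; this is an illustration, not udata's actual implementation.

```python
from datetime import datetime, timedelta, timezone


def as_naive_utc(dt: datetime) -> datetime:
    """Normalize a possibly timezone-aware datetime to naive UTC."""
    if dt.tzinfo is not None:
        return dt.astimezone(timezone.utc).replace(tzinfo=None)
    return dt


def fulfilled_in_time(last_modified: datetime, max_age: timedelta) -> bool:
    # Compare against a naive UTC "now" so harvested (aware) and local (naive)
    # timestamps can be mixed safely.
    return datetime.utcnow() - as_naive_utc(last_modified) <= max_age


print(fulfilled_in_time(datetime.now(timezone.utc) - timedelta(hours=1), timedelta(days=1)))  # True
```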

udata/tests/dataset/test_dataset_rdf.py
CHANGED

@@ -518,6 +518,22 @@ class RdfToDatasetTest(PytestOnlyDBTestCase):
         assert isinstance(dataset, Dataset)
         assert dataset.harvest.modified_at is None

+    def test_unparseable_modified_at(self):
+        """Regression test: template strings like {{modified:toISO}} should not crash parsing."""
+        node = BNode()
+        g = Graph()
+
+        g.add((node, RDF.type, DCAT.Dataset))
+        g.add((node, DCT.identifier, Literal(faker.uuid4())))
+        g.add((node, DCT.title, Literal(faker.sentence())))
+        g.add((node, DCT.modified, Literal("{{modified:toISO}}")))
+
+        dataset = dataset_from_rdf(g)
+        dataset.validate()
+
+        assert isinstance(dataset, Dataset)
+        assert dataset.harvest.modified_at is None
+
     def test_contact_point_individual_vcard(self):
         g = Graph()
         node = URIRef("https://test.org/dataset")
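
This test and its resource-level counterpart further down both expect unparseable date literals such as `{{modified:toISO}}` to be dropped rather than to abort the import. A minimal sketch of that defensive pattern; the helper name is an assumption, not udata's actual parser.

```python
from datetime import datetime
from typing import Optional

from dateutil.parser import parse as parse_date


def safe_parse_datetime(value) -> Optional[datetime]:
    """Return a datetime, or None when the literal cannot be parsed as a date."""
    if value is None:
        return None
    try:
        return parse_date(str(value))
    except (ValueError, OverflowError, TypeError):
        return None


print(safe_parse_datetime("2024-01-02T03:04:05"))   # 2024-01-02 03:04:05
print(safe_parse_datetime("{{modified:toISO}}"))    # None
```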

@@ -643,10 +659,10 @@ class RdfToDatasetTest(PytestOnlyDBTestCase):

         assert len(dataset.contact_points) == 1
         assert dataset.contact_points[0].role == "contact"
-        assert dataset.contact_points[0].name == "foo"
+        assert dataset.contact_points[0].name == "foo (bar)"
         assert dataset.contact_points[0].email == "foo@example.com"

-    def
+    def test_contact_point_organization_member_foaf_both_mails(self):
         g = Graph()
         node = URIRef("https://test.org/dataset")
         g.set((node, RDF.type, DCAT.Dataset))

@@ -673,10 +689,10 @@ class RdfToDatasetTest(PytestOnlyDBTestCase):

         assert len(dataset.contact_points) == 1
         assert dataset.contact_points[0].role == "creator"
-        assert dataset.contact_points[0].name == "foo"
+        assert dataset.contact_points[0].name == "foo (bar)"
         assert dataset.contact_points[0].email == "foo@example.com"

-    def
+    def test_contact_point_organization_member_foaf_no_org_mail(self):
         g = Graph()
         node = URIRef("https://test.org/dataset")
         g.set((node, RDF.type, DCAT.Dataset))

@@ -703,9 +719,39 @@ class RdfToDatasetTest(PytestOnlyDBTestCase):

         assert len(dataset.contact_points) == 1
         assert dataset.contact_points[0].role == "creator"
-        assert dataset.contact_points[0].name == "foo"
+        assert dataset.contact_points[0].name == "foo (bar)"
         assert dataset.contact_points[0].email == "foo@example.com"

+    def test_contact_point_organization_member_foaf_no_agent_mail(self):
+        g = Graph()
+        node = URIRef("https://test.org/dataset")
+        g.set((node, RDF.type, DCAT.Dataset))
+        g.set((node, DCT.identifier, Literal(faker.uuid4())))
+        g.set((node, DCT.title, Literal(faker.sentence())))
+
+        org = BNode()
+        g.add((org, RDF.type, FOAF.Organization))
+        g.add((org, FOAF.name, Literal("bar")))
+        g.add((org, FOAF.mbox, Literal("bar@example.com")))
+        contact = BNode()
+        g.add((contact, RDF.type, FOAF.Person))
+        g.add((contact, FOAF.name, Literal("foo")))
+        # no agent email
+        g.add((contact, ORG.memberOf, org))
+        g.add((node, DCT.creator, contact))
+
+        # Dataset needs an owner/organization for contact_points_from_rdf() to work
+        d = DatasetFactory.build()
+        d.organization = OrganizationFactory(name="organization")
+
+        dataset = dataset_from_rdf(g, d)
+        dataset.validate()
+
+        assert len(dataset.contact_points) == 1
+        assert dataset.contact_points[0].role == "creator"
+        assert dataset.contact_points[0].name == "foo (bar)"
+        assert dataset.contact_points[0].email == "bar@example.com"
+
     def test_theme_and_tags(self):
         node = BNode()
         g = Graph()
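
Taken together, the contact-point tests above describe a fallback scheme for agents that are members of a FOAF organization: the display name combines the agent and organization names, and the email falls back to the organization's mbox only when the agent has none. A tiny sketch of that logic; the function names are illustrative, not udata's helpers.

```python
from typing import Optional


def contact_point_name(agent_name: str, org_name: Optional[str]) -> str:
    return f"{agent_name} ({org_name})" if org_name else agent_name


def contact_point_email(agent_mbox: Optional[str], org_mbox: Optional[str]) -> Optional[str]:
    return agent_mbox or org_mbox


assert contact_point_name("foo", "bar") == "foo (bar)"
assert contact_point_email("foo@example.com", "bar@example.com") == "foo@example.com"
assert contact_point_email(None, "bar@example.com") == "bar@example.com"
```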

@@ -725,6 +771,22 @@ class RdfToDatasetTest(PytestOnlyDBTestCase):
         assert isinstance(dataset, Dataset)
         assert set(dataset.tags) == set(tags + themes)

+    def test_keyword_as_uriref(self):
+        """Regression test: keywords can be URIRef instead of Literal in some DCAT feeds."""
+        node = BNode()
+        g = Graph()
+
+        g.add((node, RDF.type, DCAT.Dataset))
+        g.add((node, DCT.title, Literal(faker.sentence())))
+        g.add((node, DCAT.keyword, Literal("literal-tag")))
+        g.add((node, DCAT.keyword, URIRef("http://example.org/keyword/uriref-tag")))
+
+        dataset = dataset_from_rdf(g)
+        dataset.validate()
+
+        assert isinstance(dataset, Dataset)
+        assert "literal-tag" in dataset.tags
+
     def test_parse_null_frequency(self):
         assert frequency_from_rdf(None) is None

@@ -818,6 +880,39 @@ class RdfToDatasetTest(PytestOnlyDBTestCase):
         assert resource.harvest.modified_at.date() == modified.date()
         assert resource.format == "csv"

+    def test_resource_future_modified_at(self):
+        node = BNode()
+        g = Graph()
+
+        modified = faker.future_datetime()
+
+        g.add((node, RDF.type, DCAT.Distribution))
+        g.add((node, DCT.title, Literal(faker.sentence())))
+        g.add((node, DCAT.downloadURL, Literal(faker.uri())))
+        g.add((node, DCT.modified, Literal(modified)))
+
+        resource = resource_from_rdf(g)
+        resource.validate()
+
+        assert isinstance(resource, Resource)
+        assert resource.harvest.modified_at is None
+
+    def test_resource_unparseable_modified_at(self):
+        """Regression test: template strings like {{modified:toISO}} should not crash parsing."""
+        node = BNode()
+        g = Graph()
+
+        g.add((node, RDF.type, DCAT.Distribution))
+        g.add((node, DCT.title, Literal(faker.sentence())))
+        g.add((node, DCAT.downloadURL, Literal(faker.uri())))
+        g.add((node, DCT.modified, Literal("{{modified:toISO}}")))
+
+        resource = resource_from_rdf(g)
+        resource.validate()
+
+        assert isinstance(resource, Resource)
+        assert resource.harvest.modified_at is None
+
     def test_download_url_over_access_url(self):
         node = BNode()
         g = Graph()

@@ -1364,6 +1459,70 @@ class DatasetRdfViewsTest(PytestOnlyAPITestCase):
         assert200(response)
         assert response.content_type == mime

+    @pytest.mark.parametrize(
+        "fmt,mime",
+        [
+            ("n3", "text/n3"),
+            ("nt", "application/n-triples"),
+            ("ttl", "application/x-turtle"),
+            ("xml", "application/rdf+xml"),
+            ("rdf", "application/rdf+xml"),
+            ("owl", "application/rdf+xml"),
+            ("trig", "application/trig"),
+        ],
+    )
+    def test_dont_fail_with_invalid_uri(self, client, fmt, mime):
+        """Invalid URIs (with spaces or curly brackets) shouldn't make rdf export fail in any format"""
+        invalid_uri_with_quote = 'https://test.org/dataset_with"quote"'
+        invalid_uri_with_curly_bracket = 'http://opendata-sig.saintdenis.re/datasets/identifiant.kml?outSR={"latestWkid":2975,"wkid":2975}'
+        invalid_uri_with_space = "https://catalogue.opendata-ligair.fr/geonetwork/srv/60678572-36e5-4e78-9af3-48f726670dfd fr-modelisation-sirane-vacarm_no2"
+        dataset = DatasetFactory(
+            resources=[
+                ResourceFactory(url=invalid_uri_with_quote),
+                ResourceFactory(url=invalid_uri_with_curly_bracket),
+            ],
+            harvest=HarvestDatasetMetadata(uri=invalid_uri_with_space),
+        )
+
+        url = url_for("api.dataset_rdf_format", dataset=dataset, _format=fmt)
+        response = client.get(url, headers={"Accept": mime})
+        assert200(response)
+
+    @pytest.mark.parametrize(
+        "fmt,mime",
+        [
+            ("n3", "text/n3"),
+            ("nt", "application/n-triples"),
+            ("ttl", "application/x-turtle"),
+            ("trig", "application/trig"),
+        ],
+    )
+    def test_invalid_uri_escape_in_n3_turtle_format(self, client, fmt, mime):
+        """Invalid URIs (with spaces or curly brackets) should be escaped in N3/turtle formats"""
+        invalid_uri_with_quote = 'https://test.org/dataset_with"quote"'
+        invalid_uri_with_curly_bracket = 'http://opendata-sig.saintdenis.re/datasets/identifiant.kml?outSR={"latestWkid":2975,"wkid":2975}'
+        invalid_uri_with_space = "https://catalogue.opendata-ligair.fr/geonetwork/srv/60678572-36e5-4e78-9af3-48f726670dfd fr-modelisation-sirane-vacarm_no2"
+        dataset = DatasetFactory(
+            resources=[
+                ResourceFactory(url=invalid_uri_with_quote),
+                ResourceFactory(url=invalid_uri_with_curly_bracket),
+            ],
+            harvest=HarvestDatasetMetadata(uri=invalid_uri_with_space),
+        )
+
+        url = url_for("api.dataset_rdf_format", dataset=dataset, _format=fmt)
+        response = client.get(url, headers={"Accept": mime})
+        assert200(response)
+        assert "https://test.org/dataset_with%22quote%22" in response.text
+        assert (
+            "http://opendata-sig.saintdenis.re/datasets/identifiant.kml?outSR=%7B%22latestWkid%22:2975,%22wkid%22:2975%7D"
+            in response.text
+        )
+        assert (
+            "https://catalogue.opendata-ligair.fr/geonetwork/srv/60678572-36e5-4e78-9af3-48f726670dfd%20fr-modelisation-sirane-vacarm_no2"
+            in response.text
+        )
+

 class DatasetFromRdfUtilsTest(PytestOnlyTestCase):
     def test_licenses_from_rdf(self):
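
The expected strings in the second test correspond to percent-encoding exactly the characters that make those URIs invalid in N3/Turtle output (double quotes, curly brackets, spaces) while leaving normal URL structure readable. A rough sketch of that kind of escaping with the standard library; the helper name and the exact safe-character set are assumptions, not udata's actual code.

```python
from urllib.parse import quote


def escape_invalid_uri(uri: str) -> str:
    # Percent-encode characters that are illegal in an IRI (quotes, spaces,
    # curly brackets, ...) while keeping common URL punctuation as-is.
    return quote(uri, safe=":/?&=,#[]@!$'()*+;%")


assert (
    escape_invalid_uri('https://test.org/dataset_with"quote"')
    == "https://test.org/dataset_with%22quote%22"
)
assert " " not in escape_invalid_uri("https://example.org/a b")
```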

udata/tests/dataset/test_dataset_tasks.py
CHANGED

@@ -60,6 +60,31 @@ class DatasetTasksTest(PytestOnlyDBTestCase):
         assert HarvestJob.objects.filter(items__dataset=datasets[0].id).count() == 0
         assert Dataservice.objects.filter(datasets=datasets[0].id).count() == 0

+    def test_purge_datasets_cleans_all_harvest_items_references(self):
+        """Test that purging datasets cleans all HarvestItem references in a job.
+
+        The same dataset can appear multiple times in a job's items (e.g. if the
+        harvest source has duplicates). The $ operator only updates the first match,
+        so we need to use $[] with array_filters to update all matches.
+        """
+        dataset_to_delete = Dataset.objects.create(title="delete me", deleted="2016-01-01")
+        dataset_keep = Dataset.objects.create(title="keep me")
+
+        job = HarvestJobFactory(
+            items=[
+                HarvestItem(dataset=dataset_to_delete, remote_id="1"),
+                HarvestItem(dataset=dataset_keep, remote_id="2"),
+                HarvestItem(dataset=dataset_to_delete, remote_id="3"),
+            ]
+        )
+
+        tasks.purge_datasets()
+
+        job.reload()
+        assert job.items[0].dataset is None
+        assert job.items[1].dataset == dataset_keep
+        assert job.items[2].dataset is None
+
     def test_purge_datasets_community(self):
         dataset = Dataset.objects.create(title="delete me", deleted="2016-01-01")
         community_resource1 = CommunityResourceFactory()

udata/tests/frontend/test_auth.py
CHANGED

@@ -1,7 +1,7 @@
 from flask import current_app, url_for
 from flask_security.utils import hash_data

-from udata.core.user.factories import AdminFactory
+from udata.core.user.factories import AdminFactory, UserFactory
 from udata.tests.api import APITestCase



@@ -22,3 +22,60 @@ class AuthTest(APITestCase):

         user.reload()
         assert user.email == new_email
+
+    def test_change_mail_already_taken(self):
+        """Should not allow changing email to one already taken by another user"""
+        user = self.login(AdminFactory())
+        original_email = user.email
+
+        # Create another user with the target email
+        existing_user = UserFactory(email="taken@example.com")
+        new_email = existing_user.email
+
+        security = current_app.extensions["security"]
+
+        data = [str(user.fs_uniquifier), hash_data(user.email), new_email]
+        token = security.confirm_serializer.dumps(data)
+        confirmation_link = url_for("security.confirm_change_email", token=token)
+
+        resp = self.get(confirmation_link)
+        assert resp.status_code == 302
+        assert "change_email_already_taken" in resp.location
+
+        # Email should not have changed
+        user.reload()
+        assert user.email == original_email
+
+    def test_change_mail_after_password_change(self):
+        """Changing password rotates fs_uniquifier and invalidates email change token"""
+        user = UserFactory(password="Password123")
+        self.login(user)
+        old_uniquifier = user.fs_uniquifier
+
+        new_email = "new@example.com"
+
+        security = current_app.extensions["security"]
+
+        data = [str(user.fs_uniquifier), hash_data(user.email), new_email]
+        token = security.confirm_serializer.dumps(data)
+        confirmation_link = url_for("security.confirm_change_email", token=token)
+
+        # Change password via API
+        resp = self.post(
+            url_for("security.change_password"),
+            {
+                "password": "Password123",
+                "new_password": "NewPassword456",
+                "new_password_confirm": "NewPassword456",
+                "submit": True,
+            },
+        )
+        assert resp.status_code == 200, f"Password change failed: {resp.data}"
+
+        user.reload()
+        assert user.fs_uniquifier != old_uniquifier, "fs_uniquifier should have changed"
+
+        # Now try to use the email change link - should fail
+        resp = self.get(confirmation_link)
+        assert resp.status_code == 302
+        assert "change_email_invalid" in resp.location
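
The second auth test relies on Flask-Security rotating `fs_uniquifier` on password change: the pending change-email token embeds the uniquifier that was current when it was issued, so the confirmation step can reject it afterwards. A simplified sketch of that check with itsdangerous; it mirrors the idea only and is not Flask-Security's exact implementation.

```python
from itsdangerous import BadSignature, URLSafeTimedSerializer

serializer = URLSafeTimedSerializer("not-a-real-secret", salt="change-email")


def issue_change_email_token(user, new_email: str) -> str:
    # The token captures the uniquifier valid at issue time.
    return serializer.dumps([str(user.fs_uniquifier), user.email, new_email])


def confirm_change_email(user, token: str) -> bool:
    try:
        uniquifier, _old_email, new_email = serializer.loads(token, max_age=3600)
    except BadSignature:
        return False
    if uniquifier != str(user.fs_uniquifier):
        # A password change rotated fs_uniquifier: the link is no longer valid.
        return False
    user.email = new_email
    return True
```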

udata/tests/frontend/test_csv.py
CHANGED

@@ -269,7 +269,6 @@ class CsvTest(APITestCase):

         self.assert200(response)
         self.assertEqual(response.mimetype, "text/csv")
-        self.assertEqual(response.charset, "utf-8")

         csvfile = StringIO(response.data.decode("utf8"))
         reader = csv.get_reader(csvfile)

@@ -327,7 +326,6 @@ class CsvTest(APITestCase):

         self.assert200(response)
         self.assertEqual(response.mimetype, "text/csv")
-        self.assertEqual(response.charset, "utf-8")

         csvfile = StringIO(response.data.decode("utf8"))
         reader = csv.get_reader(csvfile)

@@ -349,7 +347,6 @@ class CsvTest(APITestCase):

         self.assert200(response)
         self.assertEqual(response.mimetype, "text/csv")
-        self.assertEqual(response.charset, "utf-8")

         csvfile = StringIO(response.data.decode("utf8"))
         reader = csv.get_reader(csvfile)