udata 14.0.3.dev1__py3-none-any.whl → 14.7.3.dev4__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- udata/api/__init__.py +2 -0
- udata/api_fields.py +120 -19
- udata/app.py +18 -20
- udata/auth/__init__.py +4 -7
- udata/auth/forms.py +3 -3
- udata/auth/views.py +13 -6
- udata/commands/dcat.py +1 -1
- udata/commands/serve.py +3 -11
- udata/core/activity/api.py +5 -6
- udata/core/badges/tests/test_tasks.py +0 -2
- udata/core/csv.py +5 -0
- udata/core/dataservices/api.py +8 -1
- udata/core/dataservices/apiv2.py +3 -6
- udata/core/dataservices/models.py +5 -2
- udata/core/dataservices/rdf.py +2 -1
- udata/core/dataservices/tasks.py +6 -2
- udata/core/dataset/api.py +30 -4
- udata/core/dataset/api_fields.py +1 -1
- udata/core/dataset/apiv2.py +1 -1
- udata/core/dataset/constants.py +2 -9
- udata/core/dataset/models.py +21 -9
- udata/core/dataset/permissions.py +31 -0
- udata/core/dataset/rdf.py +18 -16
- udata/core/dataset/tasks.py +16 -7
- udata/core/discussions/api.py +15 -1
- udata/core/discussions/models.py +6 -0
- udata/core/legal/__init__.py +0 -0
- udata/core/legal/mails.py +128 -0
- udata/core/organization/api.py +16 -5
- udata/core/organization/api_fields.py +3 -3
- udata/core/organization/apiv2.py +3 -4
- udata/core/organization/mails.py +1 -1
- udata/core/organization/models.py +40 -7
- udata/core/organization/notifications.py +84 -0
- udata/core/organization/permissions.py +1 -1
- udata/core/organization/tasks.py +3 -0
- udata/core/pages/models.py +49 -0
- udata/core/pages/tests/test_api.py +165 -1
- udata/core/post/api.py +25 -70
- udata/core/post/constants.py +8 -0
- udata/core/post/models.py +109 -17
- udata/core/post/tests/test_api.py +140 -3
- udata/core/post/tests/test_models.py +24 -0
- udata/core/reports/api.py +18 -0
- udata/core/reports/models.py +42 -2
- udata/core/reuse/api.py +8 -0
- udata/core/reuse/apiv2.py +3 -6
- udata/core/reuse/models.py +1 -1
- udata/core/spatial/forms.py +2 -2
- udata/core/topic/models.py +8 -2
- udata/core/user/api.py +10 -3
- udata/core/user/api_fields.py +3 -3
- udata/core/user/models.py +33 -8
- udata/features/notifications/api.py +7 -18
- udata/features/notifications/models.py +59 -0
- udata/features/notifications/tasks.py +25 -0
- udata/features/transfer/actions.py +2 -0
- udata/features/transfer/models.py +17 -0
- udata/features/transfer/notifications.py +96 -0
- udata/flask_mongoengine/engine.py +0 -4
- udata/flask_mongoengine/pagination.py +1 -1
- udata/frontend/markdown.py +2 -1
- udata/harvest/actions.py +20 -0
- udata/harvest/api.py +24 -7
- udata/harvest/backends/base.py +27 -1
- udata/harvest/backends/ckan/harvesters.py +21 -4
- udata/harvest/backends/dcat.py +4 -1
- udata/harvest/commands.py +33 -0
- udata/harvest/filters.py +17 -6
- udata/harvest/models.py +16 -0
- udata/harvest/permissions.py +27 -0
- udata/harvest/tests/ckan/test_ckan_backend.py +33 -0
- udata/harvest/tests/test_actions.py +46 -2
- udata/harvest/tests/test_api.py +161 -6
- udata/harvest/tests/test_base_backend.py +86 -1
- udata/harvest/tests/test_dcat_backend.py +68 -3
- udata/harvest/tests/test_filters.py +6 -0
- udata/i18n.py +1 -4
- udata/mail.py +14 -0
- udata/migrations/2021-08-17-harvest-integrity.py +23 -16
- udata/migrations/2025-10-31-create-membership-request-notifications.py +55 -0
- udata/migrations/2025-12-04-add-uuid-to-discussion-messages.py +28 -0
- udata/migrations/2025-12-16-create-transfer-request-notifications.py +69 -0
- udata/migrations/2026-01-14-add-default-kind-to-posts.py +17 -0
- udata/mongo/slug_fields.py +1 -1
- udata/rdf.py +65 -11
- udata/routing.py +2 -2
- udata/settings.py +11 -0
- udata/tasks.py +2 -0
- udata/templates/mail/message.html +3 -1
- udata/tests/api/__init__.py +7 -17
- udata/tests/api/test_activities_api.py +36 -0
- udata/tests/api/test_datasets_api.py +69 -0
- udata/tests/api/test_organizations_api.py +0 -3
- udata/tests/api/test_reports_api.py +157 -0
- udata/tests/api/test_user_api.py +1 -1
- udata/tests/apiv2/test_dataservices.py +14 -0
- udata/tests/apiv2/test_organizations.py +9 -0
- udata/tests/apiv2/test_reuses.py +11 -0
- udata/tests/cli/test_cli_base.py +0 -1
- udata/tests/dataservice/test_dataservice_tasks.py +29 -0
- udata/tests/dataset/test_dataset_model.py +13 -1
- udata/tests/dataset/test_dataset_rdf.py +164 -5
- udata/tests/dataset/test_dataset_tasks.py +25 -0
- udata/tests/frontend/test_auth.py +58 -1
- udata/tests/frontend/test_csv.py +0 -3
- udata/tests/helpers.py +31 -27
- udata/tests/organization/test_notifications.py +67 -2
- udata/tests/search/test_search_integration.py +70 -0
- udata/tests/site/test_site_csv_exports.py +22 -10
- udata/tests/test_activity.py +9 -9
- udata/tests/test_api_fields.py +10 -0
- udata/tests/test_discussions.py +5 -5
- udata/tests/test_legal_mails.py +359 -0
- udata/tests/test_notifications.py +15 -57
- udata/tests/test_notifications_task.py +43 -0
- udata/tests/test_owned.py +81 -1
- udata/tests/test_transfer.py +181 -2
- udata/tests/test_uris.py +33 -0
- udata/translations/ar/LC_MESSAGES/udata.mo +0 -0
- udata/translations/ar/LC_MESSAGES/udata.po +309 -158
- udata/translations/de/LC_MESSAGES/udata.mo +0 -0
- udata/translations/de/LC_MESSAGES/udata.po +313 -160
- udata/translations/es/LC_MESSAGES/udata.mo +0 -0
- udata/translations/es/LC_MESSAGES/udata.po +312 -160
- udata/translations/fr/LC_MESSAGES/udata.mo +0 -0
- udata/translations/fr/LC_MESSAGES/udata.po +475 -202
- udata/translations/it/LC_MESSAGES/udata.mo +0 -0
- udata/translations/it/LC_MESSAGES/udata.po +317 -162
- udata/translations/pt/LC_MESSAGES/udata.mo +0 -0
- udata/translations/pt/LC_MESSAGES/udata.po +315 -161
- udata/translations/sr/LC_MESSAGES/udata.mo +0 -0
- udata/translations/sr/LC_MESSAGES/udata.po +323 -164
- udata/translations/udata.pot +169 -124
- udata/uris.py +0 -2
- udata/utils.py +23 -0
- udata-14.7.3.dev4.dist-info/METADATA +109 -0
- {udata-14.0.3.dev1.dist-info → udata-14.7.3.dev4.dist-info}/RECORD +142 -135
- udata/core/post/forms.py +0 -30
- udata/flask_mongoengine/json.py +0 -38
- udata/templates/mail/base.html +0 -105
- udata/templates/mail/base.txt +0 -6
- udata/templates/mail/button.html +0 -3
- udata/templates/mail/layouts/1-column.html +0 -19
- udata/templates/mail/layouts/2-columns.html +0 -20
- udata/templates/mail/layouts/center-panel.html +0 -16
- udata-14.0.3.dev1.dist-info/METADATA +0 -132
- {udata-14.0.3.dev1.dist-info → udata-14.7.3.dev4.dist-info}/WHEEL +0 -0
- {udata-14.0.3.dev1.dist-info → udata-14.7.3.dev4.dist-info}/entry_points.txt +0 -0
- {udata-14.0.3.dev1.dist-info → udata-14.7.3.dev4.dist-info}/licenses/LICENSE +0 -0
- {udata-14.0.3.dev1.dist-info → udata-14.7.3.dev4.dist-info}/top_level.txt +0 -0
udata/harvest/api.py
CHANGED
@@ -6,7 +6,6 @@ from udata.api import API, api, fields
 from udata.auth import admin_permission
 from udata.core.dataservices.models import Dataservice
 from udata.core.dataset.api_fields import dataset_fields, dataset_ref_fields
-from udata.core.dataset.permissions import OwnablePermission
 from udata.core.organization.api_fields import org_ref_fields
 from udata.core.organization.permissions import EditOrganizationPermission
 from udata.core.user.api_fields import user_ref_fields
@@ -55,6 +54,7 @@ item_fields = api.model(
     "HarvestItem",
     {
         "remote_id": fields.String(description="The item remote ID to process", required=True),
+        "remote_url": fields.String(description="The item remote url (if available)"),
         "dataset": fields.Nested(
             dataset_ref_fields, description="The processed dataset", allow_null=True
         ),
@@ -115,6 +115,18 @@ validation_fields = api.model(
     },
 )
 
+source_permissions_fields = api.model(
+    "HarvestSourcePermissions",
+    {
+        "edit": fields.Permission(),
+        "delete": fields.Permission(),
+        "run": fields.Permission(),
+        "preview": fields.Permission(),
+        "validate": fields.Permission(),
+        "schedule": fields.Permission(),
+    },
+)
+
 source_fields = api.model(
     "HarvestSource",
     {
@@ -153,6 +165,7 @@ source_fields = api.model(
         "schedule": fields.String(
             description="The source schedule (interval or cron expression)", readonly=True
         ),
+        "permissions": fields.Nested(source_permissions_fields, readonly=True),
     },
 )
 
@@ -313,7 +326,7 @@ class SourceAPI(API):
     @api.marshal_with(source_fields)
     def put(self, source: HarvestSource):
         """Update a harvest source"""
-
+        source.permissions["edit"].test()
         form = api.validate(HarvestSourceForm, source)
         source = actions.update_source(source, form.data)
         return source
@@ -322,18 +335,19 @@ class SourceAPI(API):
     @api.doc("delete_harvest_source")
     @api.marshal_with(source_fields)
     def delete(self, source: HarvestSource):
-
+        source.permissions["delete"].test()
         return actions.delete_source(source), 204
 
 
 @ns.route("/source/<harvest_source:source>/validate/", endpoint="validate_harvest_source")
 class ValidateSourceAPI(API):
     @api.doc("validate_harvest_source")
-    @api.secure
+    @api.secure
     @api.expect(validation_fields)
     @api.marshal_with(source_fields)
     def post(self, source: HarvestSource):
         """Validate or reject an harvest source"""
+        source.permissions["validate"].test()
         form = api.validate(HarvestSourceValidationForm)
         if form.state.data == VALIDATION_ACCEPTED:
             return actions.validate_source(source, form.comment.data)
@@ -354,7 +368,7 @@ class RunSourceAPI(API):
                 "Cannot run source manually. Please contact the platform if you need to reschedule the harvester.",
             )
 
-
+        source.permissions["run"].test()
 
         if source.validation.state != VALIDATION_ACCEPTED:
             api.abort(400, "Source is not validated. Please validate the source before running.")
@@ -367,11 +381,12 @@ class RunSourceAPI(API):
 @ns.route("/source/<harvest_source:source>/schedule/", endpoint="schedule_harvest_source")
 class ScheduleSourceAPI(API):
     @api.doc("schedule_harvest_source")
-    @api.secure
+    @api.secure
     @api.expect((str, "A cron expression"))
     @api.marshal_with(source_fields)
     def post(self, source: HarvestSource):
         """Schedule an harvest source"""
+        source.permissions["schedule"].test()
         # Handle both syntax: quoted and unquoted
         try:
             data = request.json
@@ -380,10 +395,11 @@ class ScheduleSourceAPI(API):
         return actions.schedule(source, data)
 
     @api.doc("unschedule_harvest_source")
-    @api.secure
+    @api.secure
     @api.marshal_with(source_fields)
     def delete(self, source: HarvestSource):
         """Unschedule an harvest source"""
+        source.permissions["schedule"].test()
         return actions.unschedule(source), 204
 
 
@@ -408,6 +424,7 @@ class PreviewSourceAPI(API):
     @api.marshal_with(preview_job_fields)
     def get(self, source: HarvestSource):
         """Preview a single harvest source given an ID or a slug"""
+        source.permissions["preview"].test()
        return actions.preview(source)
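Note (illustration, not part of the diff): the endpoints above now call source.permissions["<action>"].test() instead of instantiating permission classes in each view; .test() raises when the current identity lacks the required need, so the request fails before the action runs (the real test() reads the identity from the request context, the stand-in below takes the user explicitly). Forbidden, AllowOwner and Source are invented names, not udata classes.

# Standalone sketch of a model exposing named, testable permissions.
class Forbidden(Exception):
    pass


class AllowOwner:
    def __init__(self, owner, action):
        self.owner, self.action = owner, action

    def test(self, current_user):
        # Mirror Permission.test(): raise on failure instead of returning False.
        if current_user != self.owner:
            raise Forbidden(f"'{current_user}' may not {self.action} this source")


class Source:
    def __init__(self, owner):
        self.owner = owner

    @property
    def permissions(self):
        # One permission object per named action, as in HarvestSource.permissions.
        return {action: AllowOwner(self.owner, action) for action in ("edit", "delete", "run")}


source = Source(owner="alice")
source.permissions["edit"].test("alice")       # passes silently
try:
    source.permissions["delete"].test("bob")   # raises Forbidden
except Forbidden as e:
    print(e)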
udata/harvest/backends/base.py
CHANGED
@@ -166,6 +166,7 @@ class BaseBackend(object):
         log.debug(f"Starting harvesting {self.source.name} ({self.source.url})…")
         factory = HarvestJob if self.dryrun else HarvestJob.objects.create
         self.job = factory(status="initialized", started=datetime.utcnow(), source=self.source)
+        self.remote_ids = set()
 
         before_harvest_job.send(self)
         # Set harvest_activity_user on global context during the run
@@ -190,6 +191,7 @@ class BaseBackend(object):
 
             if any(i.status == "failed" for i in self.job.items):
                 self.job.status += "-errors"
+
         except HarvestValidationError as e:
             log.exception(
                 f'Harvesting validation failed for "{safe_unicode(self.source.name)}" ({self.source.backend})'
@@ -199,6 +201,15 @@ class BaseBackend(object):
 
             error = HarvestError(message=safe_unicode(e))
             self.job.errors.append(error)
+        except (requests.exceptions.ConnectionError, requests.exceptions.Timeout) as e:
+            log.warning(
+                f'Harvesting connection error for "{safe_unicode(self.source.name)}" ({self.source.backend}): {e}'
+            )
+
+            self.job.status = "failed"
+
+            error = HarvestError(message=safe_unicode(e), details=traceback.format_exc())
+            self.job.errors.append(error)
         except Exception as e:
             log.exception(
                 f'Harvesting failed for "{safe_unicode(self.source.name)}" ({self.source.backend})'
@@ -232,8 +243,13 @@ class BaseBackend(object):
 
             current_app.logger.addHandler(log_catcher)
             dataset = self.inner_process_dataset(item, **kwargs)
+            if dataset.harvest:
+                item.remote_url = dataset.harvest.remote_url
+
+            # Use `item.remote_id` from this point, because `inner_process_dataset` could have modified it.
+
+            self.ensure_unique_remote_id(item)
 
-            # Use `item.remote_id` because `inner_process_dataset` could have modified it.
             dataset.harvest = self.update_dataset_harvest_info(dataset.harvest, item.remote_id)
             dataset.archived = None
 
@@ -291,6 +307,10 @@ class BaseBackend(object):
                 raise HarvestSkipException("missing identifier")
 
             dataservice = self.inner_process_dataservice(item, **kwargs)
+            if dataservice.harvest:
+                item.remote_url = dataservice.harvest.remote_url
+
+            self.ensure_unique_remote_id(item)
 
             dataservice.harvest = self.update_dataservice_harvest_info(
                 dataservice.harvest, remote_id
@@ -325,6 +345,12 @@ class BaseBackend(object):
             item.ended = datetime.utcnow()
             self.save_job()
 
+    def ensure_unique_remote_id(self, item):
+        if item.remote_id in self.remote_ids:
+            raise HarvestValidationError(f"Identifier '{item.remote_id}' already exists")
+
+        self.remote_ids.add(item.remote_id)
+
     def update_dataset_harvest_info(self, harvest: HarvestDatasetMetadata | None, remote_id: int):
         if not harvest:
             harvest = HarvestDatasetMetadata()
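Note (illustration, not part of the diff): ensure_unique_remote_id() records every remote identifier seen during a harvest job in self.remote_ids and raises HarvestValidationError on a repeat, so one run cannot attach two items to the same remote id. Below is a standalone sketch of the same guard, with an invented DuplicateRemoteId exception standing in for HarvestValidationError.

class DuplicateRemoteId(Exception):
    pass


seen_remote_ids: set[str] = set()


def ensure_unique_remote_id(remote_id: str) -> None:
    # Reject an identifier the current job has already processed.
    if remote_id in seen_remote_ids:
        raise DuplicateRemoteId(f"Identifier '{remote_id}' already exists")
    seen_remote_ids.add(remote_id)


ensure_unique_remote_id("abc")
try:
    ensure_unique_remote_id("abc")
except DuplicateRemoteId as e:
    print(e)  # Identifier 'abc' already exists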
udata/harvest/backends/ckan/harvesters.py
CHANGED
@@ -3,6 +3,8 @@ import logging
 from urllib.parse import urljoin
 from uuid import UUID
 
+from dateutil.parser import ParserError
+
 from udata import uris
 from udata.core.dataset.constants import UpdateFrequency
 from udata.core.dataset.models import HarvestDatasetMetadata, HarvestResourceMetadata
@@ -173,7 +175,10 @@ class CkanBackend(BaseBackend):
                 continue
             elif key == "spatial":
                 # GeoJSON representation (Polygon or Point)
-
+                if isinstance(value, dict):
+                    spatial_geom = value
+                else:
+                    spatial_geom = json.loads(value)
             elif key == "spatial-text":
                 # Textual representation of the extent / location
                 qs = GeoZone.objects(db.Q(name=value) | db.Q(slug=value))
@@ -199,10 +204,16 @@ class CkanBackend(BaseBackend):
                     log.debug("frequency value not handled: %s", value)
             # Temporal coverage start
             elif key == "temporal_start":
-
+                try:
+                    temporal_start = daterange_start(value)
+                except ParserError:
+                    log.warning(f"Unparseable temporal_start value: '{value}'")
             # Temporal coverage end
             elif key == "temporal_end":
-
+                try:
+                    temporal_end = daterange_end(value)
+                except ParserError:
+                    log.warning(f"Unparseable temporal_end value: '{value}'")
             else:
                 dataset.extras[extra["key"]] = value
 
@@ -213,12 +224,17 @@ class CkanBackend(BaseBackend):
             dataset.spatial.zones = [spatial_zone]
 
         if spatial_geom:
+            if "type" not in spatial_geom:
+                raise HarvestException(f"Spatial geometry {spatial_geom} without `type`")
+
             if spatial_geom["type"] == "Polygon":
                 coordinates = [spatial_geom["coordinates"]]
             elif spatial_geom["type"] == "MultiPolygon":
                 coordinates = spatial_geom["coordinates"]
             else:
-                raise HarvestException(
+                raise HarvestException(
+                    f"Unsupported spatial geometry {spatial_geom['type']} in {spatial_geom}. (Supported types are `Polygon` and `MultiPolygon`)"
+                )
             dataset.spatial.geom = {"type": "MultiPolygon", "coordinates": coordinates}
 
         if temporal_start and temporal_end:
@@ -267,5 +283,6 @@ class CkanBackend(BaseBackend):
 
 class DkanBackend(CkanBackend):
     name = "dkan"
+    display_name = "DKAN"
     schema = dkan_schema
     filters = []
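Note (illustration, not part of the diff): the "spatial" handling above now accepts the GeoJSON extra either as a dict (as some CKAN instances return it) or as a JSON string, and rejects geometries without a type. A standalone sketch of that normalization follows; normalize_spatial is an invented helper, not a CkanBackend method.

import json


def normalize_spatial(value):
    # Accept a dict as-is, otherwise parse the JSON string.
    geom = value if isinstance(value, dict) else json.loads(value)
    if "type" not in geom:
        raise ValueError(f"Spatial geometry {geom} without `type`")
    return geom


as_dict = {"type": "Polygon", "coordinates": [[[0, 0], [0, 1], [1, 1], [0, 0]]]}
as_text = json.dumps(as_dict)
assert normalize_spatial(as_dict) == normalize_spatial(as_text)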
udata/harvest/backends/dcat.py
CHANGED
@@ -225,7 +225,9 @@ class DcatBackend(BaseBackend):
 
         dataset = self.get_dataset(item.remote_id)
         remote_url_prefix = self.get_extra_config_value("remote_url_prefix")
-        dataset = dataset_from_rdf(
+        dataset = dataset_from_rdf(
+            page, dataset, node=node, remote_url_prefix=remote_url_prefix, dryrun=self.dryrun
+        )
         if dataset.organization:
             dataset.organization.compute_aggregate_metrics = False
             self.organizations_to_update.add(dataset.organization)
@@ -242,6 +244,7 @@ class DcatBackend(BaseBackend):
             node,
             [item.dataset for item in self.job.items],
             remote_url_prefix=remote_url_prefix,
+            dryrun=self.dryrun,
         )
 
     def get_node_from_item(self, graph, item):
udata/harvest/commands.py
CHANGED
@@ -4,6 +4,7 @@ import click
 
 from udata.commands import KO, OK, cli, green, red
 from udata.harvest.backends import get_all_backends, is_backend_enabled
+from udata.models import Dataset
 
 from . import actions
 
@@ -156,3 +157,35 @@ def attach(domain, filename):
     log.info("Attaching datasets for domain %s", domain)
     result = actions.attach(domain, filename)
     log.info("Attached %s datasets to %s", result.success, domain)
+
+
+@grp.command()
+@click.argument("dataset_id")
+def detach(dataset_id):
+    """
+    Detach a dataset_id from its harvest source
+
+    The dataset will be cleaned from harvested information
+    """
+    log.info(f"Detaching dataset {dataset_id}")
+    dataset = Dataset.get(dataset_id)
+    actions.detach(dataset)
+    log.info("Done")
+
+
+@grp.command()
+@click.argument("identifier")
+def detach_all_from_source(identifier):
+    """
+    Detach all datasets from a harvest source
+
+    All the datasets will be cleaned from harvested information.
+    Make sure the harvest source won't create new duplicate datasets,
+    either by deactivating it or filtering its scope, etc.
+    """
+    log.info(f"Detaching datasets from harvest source {identifier}")
+    count = actions.detach_all_from_source(actions.get_source(identifier))
+    log.info(f"Detached {count} datasets")
+    log.warning(
+        "Make sure the harvest source won't create new duplicate datasets, either by deactivating it or filtering its scope, etc."
+    )
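Note (illustration, not part of the diff): the two new commands are thin wrappers around the harvest actions used above. The sketch below calls the same functions directly; it needs a configured udata application context and database, and the dataset id and source identifier are placeholders.

from udata.harvest import actions
from udata.models import Dataset

# Detach a single dataset from its harvest source (placeholder id or slug).
dataset = Dataset.get("some-dataset-id-or-slug")
actions.detach(dataset)  # clears dataset.harvest and each resource.harvest

# Detach every dataset attached to a given source (placeholder identifier).
source = actions.get_source("some-harvest-source")
count = actions.detach_all_from_source(source)
print(f"Detached {count} datasets")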
udata/harvest/filters.py
CHANGED
@@ -3,6 +3,9 @@ from voluptuous import Invalid
 
 from udata import tags, uris
 
+TRUTHY_STRINGS = ("on", "t", "true", "y", "yes", "1")
+FALSY_STRINGS = ("f", "false", "n", "no", "off", "0")
+
 
 def boolean(value):
     """
@@ -15,17 +18,25 @@ def boolean(value):
     if value is None or isinstance(value, bool):
         return value
 
-
-    return bool(
-
+    if isinstance(value, int):
+        return bool(value)
+
+    if isinstance(value, str):
         lower_value = value.strip().lower()
+
         if not lower_value:
             return None
-        if lower_value in
+        if lower_value in FALSY_STRINGS:
             return False
-        if lower_value in
+        if lower_value in TRUTHY_STRINGS:
             return True
-        raise Invalid(
+        raise Invalid(
+            f"Unable to parse string '{value}' as boolean. Supported values are {','.join(TRUTHY_STRINGS)} for `True` and {','.join(FALSY_STRINGS)} for `False`."
+        )
+
+    raise Invalid(
+        f"Cannot convert value {value} of type {type(value)} to boolean. Supported types are `bool`, `int` and `str`"
+    )
 
 
 def to_date(value):
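Note (illustration, not part of the diff): expected behaviour of the revised boolean() filter, inferred from the hunks above. Running it requires an environment with udata (and voluptuous) installed.

from voluptuous import Invalid

from udata.harvest.filters import boolean

assert boolean(None) is None
assert boolean(True) is True
assert boolean(1) is True            # ints are now coerced with bool()
assert boolean(" Yes ") is True      # stripped/lowercased, matches TRUTHY_STRINGS
assert boolean("off") is False       # matches FALSY_STRINGS
assert boolean("") is None           # empty string still yields None

try:
    boolean("maybe")                 # unknown string -> Invalid listing supported values
except Invalid as e:
    print(e)

try:
    boolean(3.14)                    # unsupported type -> Invalid naming bool/int/str
except Invalid as e:
    print(e)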
udata/harvest/models.py
CHANGED
@@ -66,6 +66,7 @@ class HarvestLog(db.EmbeddedDocument):
 
 class HarvestItem(db.EmbeddedDocument):
     remote_id = db.StringField()
+    remote_url = db.StringField()
     dataset = db.ReferenceField(Dataset)
     dataservice = db.ReferenceField(Dataservice)
     status = db.StringField(
@@ -172,6 +173,21 @@ class HarvestSource(Owned, db.Document):
     def __str__(self):
         return self.name or ""
 
+    @property
+    def permissions(self):
+        from udata.auth import admin_permission
+
+        from .permissions import HarvestSourceAdminPermission, HarvestSourcePermission
+
+        return {
+            "edit": HarvestSourceAdminPermission(self),
+            "delete": HarvestSourceAdminPermission(self),
+            "run": HarvestSourceAdminPermission(self),
+            "preview": HarvestSourcePermission(self),
+            "validate": admin_permission,
+            "schedule": admin_permission,
+        }
+
 
 class HarvestJob(db.Document):
     """Keep track of harvestings"""
udata/harvest/permissions.py
ADDED
@@ -0,0 +1,27 @@
+from udata.auth import Permission, UserNeed
+from udata.core.dataset.permissions import OwnablePermission
+from udata.core.organization.permissions import OrganizationAdminNeed
+
+
+class HarvestSourcePermission(OwnablePermission):
+    """Permission for basic harvest source operations (preview)
+    Allows organization admins, editors, or owner.
+    """
+
+    pass
+
+
+class HarvestSourceAdminPermission(Permission):
+    """Permission for sensitive harvest source operations (edit, delete, run)
+    Allows only organization admins or owner (not editors).
+    """
+
+    def __init__(self, source) -> None:
+        needs = []
+
+        if source.organization:
+            needs.append(OrganizationAdminNeed(source.organization.id))
+        elif source.owner:
+            needs.append(UserNeed(source.owner.fs_uniquifier))
+
+        super(HarvestSourceAdminPermission, self).__init__(*needs)
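Note (illustration, not part of the diff): HarvestSourceAdminPermission builds its needs from either the source's organization (an organization-admin need) or its owner (a user need). Assuming udata's Permission and UserNeed wrap flask_principal, as the names suggest, the minimal sketch below shows how such a permission evaluates against an identity; the identifier strings are made up.

from flask_principal import Identity, Permission, UserNeed

owner_need = UserNeed("owner-fs-uniquifier")  # hypothetical fs_uniquifier
permission = Permission(owner_need)

owner = Identity("owner-fs-uniquifier")
owner.provides.add(owner_need)
assert permission.allows(owner)         # the owner satisfies the need

stranger = Identity("someone-else")
stranger.provides.add(UserNeed("someone-else"))
assert not permission.allows(stranger)  # anyone else is rejected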
udata/harvest/tests/ckan/test_ckan_backend.py
CHANGED
@@ -200,6 +200,24 @@ def spatial_geom_multipolygon(resource_data):
     return data, {"multipolygon": multipolygon}
 
 
+@pytest.fixture
+def spatial_geom_polygon_as_dict(resource_data):
+    """
+    Test case where extra["value"] is already a dict in CKAN (e.g., datasud.fr).
+    In some CKAN instances, the spatial value is returned as a dict directly
+    instead of a JSON string, so json.loads() would fail.
+    """
+    polygon = faker.polygon()
+    data = {
+        "name": faker.unique_string(),
+        "title": faker.sentence(),
+        "notes": faker.paragraph(),
+        "resources": [resource_data],
+        "extras": [{"key": "spatial", "value": polygon}],
+    }
+    return data, {"polygon": polygon}
+
+
 @pytest.fixture
 def known_spatial_text_name(resource_data):
     zone = GeoZoneFactory()
@@ -422,6 +440,21 @@ class CkanBackendTest(PytestOnlyDBTestCase):
         dataset = dataset_for(result)
         assert dataset.spatial.geom == multipolygon
 
+    @pytest.mark.ckan_data("spatial_geom_polygon_as_dict")
+    def test_geospatial_geom_polygon_as_dict(self, result, kwargs):
+        """
+        Test that spatial geometry works when the value is already a dict.
+        Some CKAN instances (e.g., datasud.fr) return the spatial value as a dict
+        directly instead of a JSON string.
+        """
+        polygon = kwargs["polygon"]
+        dataset = dataset_for(result)
+
+        assert dataset.spatial.geom == {
+            "type": "MultiPolygon",
+            "coordinates": [polygon["coordinates"]],
+        }
+
     @pytest.mark.ckan_data("skipped_no_resources")
     def test_skip_no_resources(self, source, result):
         job = source.get_last_job()
udata/harvest/tests/test_actions.py
CHANGED
@@ -11,8 +11,8 @@ from udata.core.activity.models import new_activity
 from udata.core.dataservices.factories import DataserviceFactory
 from udata.core.dataservices.models import HarvestMetadata as HarvestDataserviceMetadata
 from udata.core.dataset.activities import UserCreatedDataset
-from udata.core.dataset.factories import DatasetFactory
-from udata.core.dataset.models import HarvestDatasetMetadata
+from udata.core.dataset.factories import DatasetFactory, ResourceFactory
+from udata.core.dataset.models import HarvestDatasetMetadata, HarvestResourceMetadata
 from udata.core.organization.factories import OrganizationFactory
 from udata.core.user.factories import UserFactory
 from udata.harvest.backends import get_enabled_backends
@@ -460,6 +460,50 @@ class HarvestActionsTest(MockBackendsMixin, PytestOnlyDBTestCase):
         assert result.success == len(datasets)
         assert result.errors == 1
 
+    def test_detach(self):
+        dataset = DatasetFactory(
+            harvest=HarvestDatasetMetadata(
+                source_id="source id", domain="test.org", remote_id="id"
+            ),
+            resources=[
+                ResourceFactory(
+                    harvest=HarvestResourceMetadata(issued_at=datetime.now(), uri="test.org")
+                )
+            ],
+        )
+
+        actions.detach(dataset)
+
+        dataset.reload()
+        assert dataset.harvest is None
+        for resource in dataset.resources:
+            assert resource.harvest is None
+
+    def test_detach_all(self):
+        source = HarvestSourceFactory()
+        datasets = [
+            DatasetFactory(
+                harvest=HarvestDatasetMetadata(
+                    source_id=str(source.id), domain="test.org", remote_id=str(i)
+                ),
+                resources=[
+                    ResourceFactory(
+                        harvest=HarvestResourceMetadata(issued_at=datetime.now(), uri="test.org")
+                    )
+                ],
+            )
+            for i in range(3)
+        ]
+
+        result = actions.detach_all_from_source(source)
+
+        assert result == len(datasets)
+        for dataset in datasets:
+            dataset.reload()
+            assert dataset.harvest is None
+            for resource in dataset.resources:
+                assert resource.harvest is None
+
 
 class ExecutionTestMixin(MockBackendsMixin, PytestOnlyDBTestCase):
     def action(self, *args, **kwargs):