udata 7.0.8.dev28841__py2.py3-none-any.whl → 9.0.1.dev29390__py2.py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- udata/__init__.py +1 -1
- udata/api/__init__.py +6 -4
- udata/api/oauth2.py +2 -1
- udata/api_fields.py +254 -0
- udata/commands/purge.py +8 -2
- udata/core/badges/models.py +2 -1
- udata/core/dataservices/__init__.py +0 -0
- udata/core/dataservices/api.py +92 -0
- udata/core/dataservices/models.py +142 -0
- udata/core/dataservices/permissions.py +7 -0
- udata/core/dataservices/tasks.py +25 -0
- udata/core/dataset/apiv2.py +2 -0
- udata/core/dataset/csv.py +8 -1
- udata/core/dataset/models.py +1 -0
- udata/core/dataset/rdf.py +77 -15
- udata/core/metrics/commands.py +18 -3
- udata/core/metrics/models.py +2 -3
- udata/core/organization/api_fields.py +28 -3
- udata/core/organization/csv.py +5 -3
- udata/core/organization/models.py +3 -1
- udata/core/owned.py +39 -2
- udata/core/reuse/csv.py +3 -0
- udata/core/site/api.py +4 -1
- udata/core/spatial/api.py +5 -10
- udata/core/spatial/models.py +7 -2
- udata/core/spatial/tasks.py +7 -0
- udata/core/spatial/tests/test_api.py +26 -0
- udata/core/user/api.py +11 -7
- udata/core/user/models.py +13 -2
- udata/harvest/backends/base.py +93 -103
- udata/harvest/backends/dcat.py +65 -90
- udata/harvest/tasks.py +3 -13
- udata/harvest/tests/dcat/bnodes.xml +10 -1
- udata/harvest/tests/dcat/catalog.xml +1 -0
- udata/harvest/tests/factories.py +13 -6
- udata/harvest/tests/test_actions.py +2 -2
- udata/harvest/tests/test_base_backend.py +9 -5
- udata/harvest/tests/test_dcat_backend.py +17 -1
- udata/rdf.py +4 -0
- udata/routing.py +6 -0
- udata/settings.py +4 -1
- udata/static/admin.css +2 -2
- udata/static/admin.css.map +1 -1
- udata/static/chunks/{0.6f1698738c9b0618b673.js → 0.93c3ae13b5b94753ee80.js} +3 -3
- udata/static/chunks/0.93c3ae13b5b94753ee80.js.map +1 -0
- udata/static/chunks/{14.f4037a917d5364cb564b.js → 14.e64890872b31c55fcdf7.js} +2 -2
- udata/static/chunks/14.e64890872b31c55fcdf7.js.map +1 -0
- udata/static/chunks/{2.7c89fae92899be371ed3.js → 2.614b3e73b072982fd9b1.js} +2 -2
- udata/static/chunks/2.614b3e73b072982fd9b1.js.map +1 -0
- udata/static/chunks/{5.3dc97ea195d251881552.js → 5.48417db6b33328fa9d6a.js} +2 -2
- udata/static/chunks/5.48417db6b33328fa9d6a.js.map +1 -0
- udata/static/common.js +1 -1
- udata/static/common.js.map +1 -1
- udata/tasks.py +1 -0
- udata/tests/api/__init__.py +3 -0
- udata/tests/api/test_dataservices_api.py +236 -0
- udata/tests/api/test_organizations_api.py +78 -5
- udata/tests/api/test_user_api.py +47 -13
- udata/tests/dataservice/test_dataservice_tasks.py +46 -0
- udata/tests/dataset/test_dataset_rdf.py +17 -2
- udata/tests/plugin.py +5 -0
- udata/tests/site/test_site_rdf.py +16 -0
- {udata-7.0.8.dev28841.dist-info → udata-9.0.1.dev29390.dist-info}/METADATA +27 -1
- {udata-7.0.8.dev28841.dist-info → udata-9.0.1.dev29390.dist-info}/RECORD +68 -60
- udata/core/metrics/api.py +0 -10
- udata/static/chunks/0.6f1698738c9b0618b673.js.map +0 -1
- udata/static/chunks/14.f4037a917d5364cb564b.js.map +0 -1
- udata/static/chunks/2.7c89fae92899be371ed3.js.map +0 -1
- udata/static/chunks/5.3dc97ea195d251881552.js.map +0 -1
- {udata-7.0.8.dev28841.dist-info → udata-9.0.1.dev29390.dist-info}/LICENSE +0 -0
- {udata-7.0.8.dev28841.dist-info → udata-9.0.1.dev29390.dist-info}/WHEEL +0 -0
- {udata-7.0.8.dev28841.dist-info → udata-9.0.1.dev29390.dist-info}/entry_points.txt +0 -0
- {udata-7.0.8.dev28841.dist-info → udata-9.0.1.dev29390.dist-info}/top_level.txt +0 -0
udata/core/dataset/csv.py
CHANGED
@@ -19,6 +19,9 @@ class DatasetCsvAdapter(csv.Adapter):
         ('url', 'external_url'),
         ('organization', 'organization.name'),
         ('organization_id', 'organization.id'),
+        ('owner', 'owner.slug'),  # in case it's owned by a user, or introduce 'owner_type'?
+        ('owner_id', 'owner.id'),
+        # 'contact_point',  # ?
         'description',
         'frequency',
         'license',
@@ -26,19 +29,20 @@ class DatasetCsvAdapter(csv.Adapter):
         'temporal_coverage.end',
         'spatial.granularity',
         ('spatial.zones', serialize_spatial_zones),
-        'private',
         ('featured', lambda o: o.featured or False),
         'created_at',
         'last_modified',
         ('tags', lambda o: ','.join(o.tags)),
         ('archived', lambda o: o.archived or False),
         ('resources_count', lambda o: len(o.resources)),
+        ('main_resources_count', lambda o: len([r for r in o.resources if r.type == 'main'])),
         'downloads',
         ('harvest.backend', lambda r: r.harvest and r.harvest.backend),
         ('harvest.domain', lambda r: r.harvest and r.harvest.domain),
         ('harvest.created_at', lambda r: r.harvest and r.harvest.created_at),
         ('harvest.modified_at', lambda r: r.harvest and r.harvest.modified_at),
         ('quality_score', lambda o: format(o.quality['score'], '.2f')),
+        # schema? what is the schema of a dataset?
     )
 
     def dynamic_fields(self):
@@ -85,6 +89,9 @@ class ResourcesCsvAdapter(csv.NestedAdapter):
         ('downloads', lambda o: int(o.metrics.get('views', 0))),
         ('harvest.created_at', lambda o: o.harvest and o.harvest.created_at),
         ('harvest.modified_at', lambda o: o.harvest and o.harvest.modified_at),
+        ('schema_name', 'schema.name'),
+        ('schema_version', 'schema.version'),
+        ('preview_url', lambda o: o.preview_url or False),
     )
     attribute = 'resources'
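For context on the entries added above: each adapter field is either a plain attribute path or a `(header, getter)` pair whose getter is a dotted path or a callable. A minimal sketch of how such a spec could be resolved against a dataset object (`resolve` is illustrative only, not udata's actual helper):

```python
def resolve(obj, spec):
    """Resolve a CSV field spec: a callable or a dotted attribute path."""
    if callable(spec):                  # e.g. lambda o: len(o.resources)
        return spec(obj)
    value = obj
    for part in spec.split('.'):        # e.g. 'owner.slug' -> obj.owner.slug
        if value is None:               # owner is unset on org-owned datasets
            return None
        value = getattr(value, part, None)
    return value

# toy usage
class Owner: slug = 'jane-doe'
class Dataset: owner = Owner()
assert resolve(Dataset(), 'owner.slug') == 'jane-doe'
```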
udata/core/dataset/models.py
CHANGED
udata/core/dataset/rdf.py
CHANGED
@@ -22,9 +22,10 @@ from udata.frontend.markdown import parse_html
 from udata.core.dataset.models import HarvestDatasetMetadata, HarvestResourceMetadata
 from udata.models import db, ContactPoint
 from udata.rdf import (
-    DCAT, DCT, FREQ, SCV, SKOS, SPDX, SCHEMA, EUFREQ, EUFORMAT, IANAFORMAT, VCARD, RDFS,
-    namespace_manager, schema_from_rdf, url_from_rdf
+    DCAT, DCATAP, DCT, FREQ, SCV, SKOS, SPDX, SCHEMA, EUFREQ, EUFORMAT, IANAFORMAT, VCARD, RDFS,
+    HVD_LEGISLATION, namespace_manager, schema_from_rdf, url_from_rdf
 )
+from udata.tags import slug as slugify_tag
 from udata.utils import get_by, safe_unicode
 from udata.uris import endpoint_for
 
@@ -76,6 +77,17 @@ EU_RDF_REQUENCIES = {
     EUFREQ.NEVER: 'punctual',
 }
 
+# Map High Value Datasets URIs to keyword categories
+EU_HVD_CATEGORIES = {
+    "http://data.europa.eu/bna/c_164e0bf5": "Météorologiques",
+    "http://data.europa.eu/bna/c_a9135398": "Entreprises et propriété d'entreprises",
+    "http://data.europa.eu/bna/c_ac64a52d": "Géospatiales",
+    "http://data.europa.eu/bna/c_b79e35eb": "Mobilité",
+    "http://data.europa.eu/bna/c_dd313021": "Observation de la terre et environnement",
+    "http://data.europa.eu/bna/c_e1da4e07": "Statistiques"
+}
+TAG_TO_EU_HVD_CATEGORIES = {slugify_tag(EU_HVD_CATEGORIES[uri]): uri for uri in EU_HVD_CATEGORIES}
+
 
 class HTMLDetector(HTMLParser):
     def __init__(self, *args, **kwargs):
@@ -131,7 +143,7 @@ def owner_to_rdf(dataset, graph=None):
         return
 
 
-def resource_to_rdf(resource, dataset=None, graph=None):
+def resource_to_rdf(resource, dataset=None, graph=None, is_hvd=False):
     '''
     Map a Resource domain model to a DCAT/RDF graph
     '''
@@ -170,6 +182,9 @@ def resource_to_rdf(resource, dataset=None, graph=None):
         checksum.add(SPDX.algorithm, getattr(SPDX, algorithm))
         checksum.add(SPDX.checksumValue, Literal(resource.checksum.value))
         r.add(SPDX.checksum, checksum)
+    if is_hvd:
+        # DCAT-AP HVD applicable legislation is also expected at the distribution level
+        r.add(DCATAP.applicableLegislation, URIRef(HVD_LEGISLATION))
     return r
 
 
@@ -204,11 +219,20 @@ def dataset_to_rdf(dataset, graph=None):
     if dataset.acronym:
         d.set(SKOS.altLabel, Literal(dataset.acronym))
 
+    # Add DCAT-AP HVD properties if the dataset is tagged hvd.
+    # See https://semiceu.github.io/DCAT-AP/releases/2.2.0-hvd/
+    is_hvd = current_app.config['HVD_SUPPORT'] and 'hvd' in dataset.tags
+    if is_hvd:
+        d.add(DCATAP.applicableLegislation, URIRef(HVD_LEGISLATION))
+
     for tag in dataset.tags:
         d.add(DCAT.keyword, Literal(tag))
+        # Add HVD category if this dataset is tagged HVD
+        if is_hvd and tag in TAG_TO_EU_HVD_CATEGORIES:
+            d.add(DCATAP.hvdCategory, URIRef(TAG_TO_EU_HVD_CATEGORIES[tag]))
 
     for resource in dataset.resources:
-        d.add(DCAT.distribution, resource_to_rdf(resource, dataset, graph))
+        d.add(DCAT.distribution, resource_to_rdf(resource, dataset, graph, is_hvd))
 
     if dataset.temporal_coverage:
         d.set(DCT.temporal, temporal_to_rdf(dataset.temporal_coverage, graph))
@@ -371,23 +395,51 @@ def spatial_from_rdf(graph):
             else:
                 continue
 
-            if geojson['type'] == 'Polygon':
-                geojson['type'] = 'MultiPolygon'
-                geojson['coordinates'] = [geojson['coordinates']]
-
             geojsons.append(geojson)
         except Exception as e:
             log.exception(f"Exception during `spatial_from_rdf` for term {term}: {e}", stack_info=True)
 
+    if not geojsons:
+        return None
+
+    # We first try to build a big MultiPolygon with all the spatial coverages found in RDF.
+    # We deduplicate the coordinates because some backend provides the same coordinates multiple
+    # times in different format. We only support in this first pass Polygons and MultiPolygons. Not sure
+    # if there are other types of spatial coverage worth integrating (points? line strings?). But these other
+    # formats are not compatible to be merged in the unique stored representation in MongoDB, we'll deal with them in a second pass.
+    # The merging lose the properties and other information inside the GeoJSON…
+    # Note that having multiple `Polygon` is not really the DCAT way of doing things, the standard require that you use
+    # a `MultiPolygon` in this case. We support this right now, and wait and see if it raises problems in the future for
+    # people following the standard. (see https://github.com/datagouv/data.gouv.fr/issues/1362#issuecomment-2112774115)
+    polygons = []
     for geojson in geojsons:
-
-
-
-
-
+        if geojson['type'] == 'Polygon':
+            if geojson['coordinates'] not in polygons:
+                polygons.append(geojson['coordinates'])
+        elif geojson['type'] == 'MultiPolygon':
+            for coordinates in geojson['coordinates']:
+                if coordinates not in polygons:
+                    polygons.append(coordinates)
+        else:
+            log.warning(f"Unsupported GeoJSON type '{geojson['type']}'")
             continue
 
-
+    if not polygons:
+        log.warning(f"No supported types found in the GeoJSON data.")
+        return None
+
+    spatial_coverage = SpatialCoverage(geom={
+        'type': 'MultiPolygon',
+        'coordinates': polygons,
+    })
+
+    try:
+        spatial_coverage.clean()
+        return spatial_coverage
+    except ValidationError as e:
+        log.warning(f"Cannot save the spatial coverage {coordinates} (error was {e})")
+        return None
+
 
 def frequency_from_rdf(term):
     if isinstance(term, str):
@@ -469,9 +521,19 @@ def remote_url_from_rdf(rdf):
 
 
 def theme_labels_from_rdf(rdf):
+    '''
+    Get theme labels to use as keywords.
+    Map HVD keywords from known URIs resources if HVD support is activated.
+    '''
     for theme in rdf.objects(DCAT.theme):
         if isinstance(theme, RdfResource):
-            label = rdf_value(theme, SKOS.prefLabel)
+            uri = theme.identifier.toPython()
+            if current_app.config['HVD_SUPPORT'] and uri in EU_HVD_CATEGORIES:
+                label = EU_HVD_CATEGORIES[uri]
+                # Additionnally yield hvd keyword
+                yield 'hvd'
+            else:
+                label = rdf_value(theme, SKOS.prefLabel)
         else:
             label = theme.toPython()
         if label:
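To make the new mapping concrete: `TAG_TO_EU_HVD_CATEGORIES` inverts `EU_HVD_CATEGORIES` through udata's tag slugifier, so a dataset tag such as `statistiques` can be matched back to its europa.eu category URI at export time. A minimal standalone sketch, with a simplified stand-in for `udata.tags.slug` (which this diff does not show):

```python
# Sketch of the tag -> HVD category lookup added above; slugify_tag below is
# a simplified stand-in for udata.tags.slug.
import unicodedata

EU_HVD_CATEGORIES = {
    "http://data.europa.eu/bna/c_164e0bf5": "Météorologiques",
    "http://data.europa.eu/bna/c_e1da4e07": "Statistiques",
}

def slugify_tag(value):
    # strip accents and lowercase; the real slugifier handles more cases
    value = unicodedata.normalize('NFKD', value).encode('ascii', 'ignore').decode()
    return value.lower().replace(' ', '-')

TAG_TO_EU_HVD_CATEGORIES = {
    slugify_tag(label): uri for uri, label in EU_HVD_CATEGORIES.items()
}

# A dataset tagged 'hvd' and 'statistiques' would get
# dcatap:hvdCategory <http://data.europa.eu/bna/c_e1da4e07> on export.
assert TAG_TO_EU_HVD_CATEGORIES['statistiques'] == "http://data.europa.eu/bna/c_e1da4e07"
```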
udata/core/metrics/commands.py
CHANGED
@@ -5,7 +5,7 @@ import click
 from flask import current_app
 
 from udata.commands import cli, success
-from udata.models import User, Dataset, Reuse, Organization, Site
+from udata.models import User, Dataset, Reuse, Organization, Site, GeoZone
 
 log = logging.getLogger(__name__)
 
@@ -24,11 +24,12 @@ def grp():
               help='Compute datasets metrics')
 @click.option('-r', '--reuses', is_flag=True, help='Compute reuses metrics')
 @click.option('-u', '--users', is_flag=True, help='Compute users metrics')
+@click.option('-g', '--geozones', is_flag=True, help='Compute geo levels metrics')
 @click.option('--drop', is_flag=True, help='Clear old metrics before computing new ones')
 def update(site=False, organizations=False, users=False, datasets=False,
-           reuses=False, drop=False):
+           reuses=False, geozones = False, drop=False):
     '''Update all metrics for the current date'''
-    do_all = not any((site, organizations, users, datasets, reuses))
+    do_all = not any((site, organizations, users, datasets, reuses, geozones))
 
     if do_all or site:
         log.info('Update site metrics')
@@ -114,4 +115,18 @@ def update(site=False, organizations=False, users=False, datasets=False,
             except Exception as e:
                 log.info(f'Error during update: {e}')
                 continue
+
+    if do_all or geozones:
+        log.info('Update GeoZone metrics')
+        all_geozones = GeoZone.objects.timeout(False)
+        with click.progressbar(all_geozones, length=GeoZone.objects.count()) as geozones_bar:
+            for geozone in geozones_bar:
+                try:
+                    if drop:
+                        geozone.metrics.clear()
+                    geozone.count_datasets()
+                except Exception as e:
+                    log.info(f'Error during update: {e}')
+                    continue
+
     success('All metrics have been updated')
udata/core/metrics/models.py
CHANGED
@@ -1,5 +1,4 @@
-from
-
+from udata.api_fields import field
 from udata.mongo import db
 
 
@@ -7,7 +6,7 @@ __all__ = ('WithMetrics',)
 
 
 class WithMetrics(object):
-    metrics = db.DictField()
+    metrics = field(db.DictField())
 
     __metrics_keys__ = []
udata/core/organization/api_fields.py
CHANGED
@@ -1,5 +1,8 @@
+from flask import request
+
 from udata.api import api, fields, base_reference
 from udata.core.badges.fields import badge_fields
+from udata.core.organization.permissions import OrganizationPrivatePermission
 
 from .constants import ORG_ROLES, DEFAULT_ROLE, MEMBERSHIP_STATUS, BIGGEST_LOGO_SIZE
 
@@ -27,9 +30,29 @@ org_ref_fields = api.inherit('OrganizationReference', base_reference, {
 
 from udata.core.user.api_fields import user_ref_fields  # noqa: required
 
+def check_can_access_email():
+    # This endpoint is secure, only organization member has access.
+    if request.endpoint == 'api.request_membership':
+        return True
+
+    if request.endpoint != 'api.organization':
+        return False
+
+    org = request.view_args.get('org')
+    if org is None:
+        return False
+
+    return OrganizationPrivatePermission(org).can()
+
+member_user_with_email_fields = api.inherit('MemberUserWithEmail', user_ref_fields, {
+    'email': fields.Raw(
+        attribute=lambda o: o.email if check_can_access_email() else None,
+        description='The user email (only present on show organization endpoint if the current user has edit permission on the org)', readonly=True),
+})
+
 request_fields = api.model('MembershipRequest', {
     'id': fields.String(readonly=True),
-    'user': fields.Nested(user_ref_fields),
+    'user': fields.Nested(member_user_with_email_fields),
     'created': fields.ISODateTime(
         description='The request creation date', readonly=True),
     'status': fields.String(
@@ -40,10 +63,12 @@ request_fields = api.model('MembershipRequest', {
 })
 
 member_fields = api.model('Member', {
-    'user': fields.Nested(user_ref_fields),
+    'user': fields.Nested(member_user_with_email_fields),
     'role': fields.String(
         description='The member role in the organization', required=True,
-        enum=list(ORG_ROLES), default=DEFAULT_ROLE)
+        enum=list(ORG_ROLES), default=DEFAULT_ROLE),
+    'since': fields.ISODateTime(
+        description='The date the user joined the organization', readonly=True),
 })
 
 org_fields = api.model('Organization', {
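The email masking above relies on flask-restx evaluating a field's `attribute` callable against each marshalled object, so the value collapses to `None` whenever the permission check fails. A self-contained illustration of that pattern (the permission check is stubbed out; this is not udata's actual model):

```python
# Illustration of conditional marshalling via an `attribute` callable;
# can_see_email() is a stand-in for check_can_access_email() above.
from flask_restx import fields, marshal

def can_see_email():
    return False  # pretend the caller lacks edit permission on the org

user_fields = {
    'slug': fields.String,
    'email': fields.Raw(attribute=lambda o: o['email'] if can_see_email() else None),
}

print(marshal({'slug': 'jane', 'email': 'jane@example.org'}, user_fields))
# slug survives, email is masked to None
```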
udata/core/organization/csv.py
CHANGED
@@ -15,18 +15,20 @@ class OrganizationCsvAdapter(csv.Adapter):
         ('url', 'external_url'),
         'description',
         ('logo', lambda o: o.logo(external=True)),
-        ('badges', lambda o: [badge.kind for badge in o.badges]),
+        ('badges', lambda o: ','.join([badge.kind for badge in o.badges])),
         'created_at',
         'last_modified',
+        'business_number_id',
+        ('members_count', lambda o: len(o.members)),
     )
 
     def dynamic_fields(self):
         return csv.metric_fields(Organization) + self.get_dynamic_field_downloads()
-
+
     def get_dynamic_field_downloads(self):
         downloads_counts = self.get_downloads_counts()
         return [('downloads', lambda o: downloads_counts.get(str(o.id), 0))]
-
+
     def get_downloads_counts(self):
         '''
         Prefetch all the resources' downloads for all selected organization into memory
udata/core/organization/models.py
CHANGED
@@ -5,9 +5,11 @@ from blinker import Signal
 from mongoengine.signals import pre_save, post_save
 from werkzeug.utils import cached_property
 
+from udata.core.badges.models import BadgeMixin
+from udata.core.metrics.models import WithMetrics
 from udata.core.storages import avatars, default_image_basename
 from udata.frontend.markdown import mdstrip
-from udata.models import db, BadgeMixin, WithMetrics
+from udata.mongo import db
 from udata.i18n import lazy_gettext as _
 from udata.uris import endpoint_for
 from .constants import ASSOCIATION, CERTIFIED, COMPANY, LOCAL_AUTHORITY, LOGO_SIZES, ORG_BID_SIZE_LIMIT, ORG_ROLES, DEFAULT_ROLE, MEMBERSHIP_STATUS, LOGO_MAX_SIZE, PUBLIC_SERVICE
udata/core/owned.py
CHANGED
@@ -4,7 +4,15 @@ from blinker import signal
 from mongoengine import NULLIFY, Q, post_save
 from mongoengine.fields import ReferenceField
 
+from udata.api_fields import field
+from udata.core.organization.models import Organization
+from udata.core.user.models import User
 from udata.mongo.queryset import UDataQuerySet
+from udata.core.user.api_fields import user_ref_fields
+from udata.core.organization.api_fields import org_ref_fields
+from udata.core.organization.permissions import OrganizationPrivatePermission
+from udata.mongo.errors import FieldValidationError
+from udata.i18n import lazy_gettext as _
 
 log = logging.getLogger(__name__)
 
@@ -15,14 +23,42 @@ class OwnedQuerySet(UDataQuerySet):
         for owner in owners:
             qs |= Q(owner=owner) | Q(organization=owner)
         return self(qs)
+
+def check_owner_is_current_user(owner):
+    from udata.auth import current_user, admin_permission
+    if current_user.is_authenticated and owner and not admin_permission and current_user.id != owner:
+        raise FieldValidationError(_('You can only set yourself as owner'), field="owner")
+
+def check_organization_is_valid_for_current_user(organization):
+    from udata.auth import current_user
+    from udata.models import Organization
+
+    org = Organization.objects(id=organization).first()
+    if org is None:
+        raise FieldValidationError(_("Unknown organization"), field="organization")
+
+    if current_user.is_authenticated and org and not OrganizationPrivatePermission(org).can():
+        raise FieldValidationError(_("Permission denied for this organization"), field="organization")
 
 
 class Owned(object):
     '''
     A mixin to factorize owning behvaior between users and organizations.
     '''
-    owner =
-
+    owner = field(
+        ReferenceField(User, reverse_delete_rule=NULLIFY),
+        nested_fields=user_ref_fields,
+        description="Only present if organization is not set. Can only be set to the current authenticated user.",
+        check=check_owner_is_current_user,
+        allow_null=True,
+    )
+    organization = field(
+        ReferenceField(Organization, reverse_delete_rule=NULLIFY),
+        nested_fields=org_ref_fields,
+        description="Only present if owner is not set. Can only be set to an organization of the current authenticated user.",
+        check=check_organization_is_valid_for_current_user,
+        allow_null=True,
+    )
 
     on_owner_change = signal('Owned.on_owner_change')
 
@@ -38,6 +74,7 @@ class Owned(object):
         '''
         Verify owner consistency and fetch original owner before the new one erase it.
         '''
+
         changed_fields = self._get_changed_fields()
         if 'organization' in changed_fields and 'owner' in changed_fields:
             # Ownership changes (org to owner or the other way around) have already been made
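`field()` comes from the new `udata/api_fields.py` module (+254 lines in this release, not shown in full in this diff). Judging from the call sites above, it decorates a mongoengine field with API metadata; a hypothetical minimal version, where the attribute name and behavior are assumptions:

```python
# Hypothetical minimal version of udata.api_fields.field; the real module is
# new in this release and not shown here, so the details below are assumed
# from the call sites above.
def field(inner, **info):
    """Attach API metadata (description, nested_fields, check, allow_null, ...)
    to a mongoengine field so API models and parsers can be derived from it."""
    inner.__additional_field_info__ = info
    return inner
```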
udata/core/reuse/csv.py
CHANGED
@@ -15,10 +15,13 @@ class ReuseCsvAdapter(csv.Adapter):
         ('remote_url', 'url'),
         ('organization', 'organization.name'),
         ('organization_id', 'organization.id'),
+        ('owner', 'owner.slug'),  # in case it's owned by a user
+        ('owner_id', 'owner.id'),
         ('image', lambda r: r.image(external=True)),
         ('featured', lambda r: r.featured or False),
         'created_at',
         'last_modified',
+        'topic',
         ('tags', lambda r: ','.join(r.tags)),
         ('datasets', lambda r: ','.join([str(d.id) for d in r.datasets])),
     )
udata/core/site/api.py
CHANGED
@@ -105,7 +105,10 @@ class SiteRdfCatalogFormat(API):
         params = multi_to_dict(request.args)
         page = int(params.get('page', 1))
         page_size = int(params.get('page_size', 100))
-        datasets = Dataset.objects.visible().paginate(page, page_size)
+        datasets = Dataset.objects.visible()
+        if 'tag' in params:
+            datasets = datasets.filter(tags=params.get('tag', ''))
+        datasets = datasets.paginate(page, page_size)
         catalog = build_catalog(current_site, datasets, format=format)
         # bypass flask-restplus make_response, since graph_response
         # is handling the content negociation directly
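This lets harvesters pull a filtered catalog, e.g. only `hvd`-tagged datasets. A usage sketch (the base URL is an example; the route and parameter names follow the handler above):

```python
# Fetch a tag-filtered RDF catalog page; the base URL is an example.
import requests

resp = requests.get(
    'https://www.data.gouv.fr/catalog.rdf',
    params={'tag': 'hvd', 'page': 1, 'page_size': 100},
)
resp.raise_for_status()
print(resp.text[:200])  # RDF/XML, limited to datasets tagged 'hvd'
```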
udata/core/spatial/api.py
CHANGED
@@ -85,7 +85,7 @@ dataset_parser.add_argument(
     location='args', default=25)
 
 
-@ns.route('/zones/<
+@ns.route('/zones/<list:ids>/', endpoint='zones')
 class ZonesAPI(API):
     @api.doc('spatial_zones',
              params={'ids': 'A zone identifiers list (comma separated)'})
@@ -101,7 +101,7 @@ class ZonesAPI(API):
     }
 
 
-@ns.route('/zone/<
+@ns.route('/zone/<id>/datasets/', endpoint='zone_datasets')
 class ZoneDatasetsAPI(API):
     @api.doc('spatial_zone_datasets', params={'id': 'A zone identifier'})
     @api.expect(dataset_parser)
@@ -118,7 +118,7 @@ class ZoneDatasetsAPI(API):
         return datasets
 
 
-@ns.route('/zone/<
+@ns.route('/zone/<id>/', endpoint='zone')
 class ZoneAPI(API):
     @api.doc('spatial_zone', params={'id': 'A zone identifier'})
     def get(self, id):
@@ -152,7 +152,7 @@ class SpatialGranularitiesAPI(API):
     } for id, name in spatial_granularities]
 
 
-@ns.route('/coverage/<
+@ns.route('/coverage/<level>/', endpoint='spatial_coverage')
 class SpatialCoverageAPI(API):
     @api.doc('spatial_coverage')
     @api.marshal_list_with(feature_collection_fields)
@@ -162,11 +162,6 @@ class SpatialCoverageAPI(API):
         features = []
 
         for zone in GeoZone.objects(level=level.id):
-            # fetch nested levels IDs
-            ids = []
-            ids.append(zone.id)
-            # Count datasets in zone
-            nb_datasets = Dataset.objects(spatial__zones__in=ids).count()
             features.append({
                 'id': zone.id,
                 'type': 'Feature',
@@ -174,7 +169,7 @@ class SpatialCoverageAPI(API):
                     'name': _(zone.name),
                     'code': zone.code,
                     'uri': zone.uri,
-                    'datasets': nb_datasets,
+                    'datasets': zone.metrics.get('datasets', 0)
                 }
             })
udata/core/spatial/models.py
CHANGED
@@ -3,6 +3,7 @@ from werkzeug.local import LocalProxy
 from werkzeug.utils import cached_property
 
 from udata.app import cache
+from udata.core.metrics.models import WithMetrics
 from udata.uris import endpoint_for
 from udata.i18n import _, get_locale, language
 from udata.mongo import db
@@ -21,7 +22,6 @@ class GeoLevel(db.Document):
         max_value=ADMIN_LEVEL_MAX,
         default=100)
 
-
 class GeoZoneQuerySet(db.BaseQuerySet):
 
     def resolve(self, geoid, id_only=False):
@@ -40,7 +40,7 @@ class GeoZoneQuerySet(db.BaseQuerySet):
         return result.id if id_only and result else result
 
 
-class GeoZone(db.Document):
+class GeoZone(WithMetrics, db.Document):
     SEPARATOR = ':'
 
     id = db.StringField(primary_key=True)
@@ -101,6 +101,11 @@ class GeoZone(db.Document):
     def external_url(self):
         return endpoint_for('territories.territory', territory=self, _external=True)
 
+    def count_datasets(self):
+        from udata.models import Dataset
+        self.metrics['datasets'] = Dataset.objects(spatial__zones=self.id).visible().count()
+        self.save()
+
     def toGeoJSON(self):
         return {
             'id': self.id,
udata/core/spatial/tests/test_api.py
CHANGED
@@ -10,6 +10,7 @@ from udata.core.dataset.factories import DatasetFactory
 from udata.core.spatial.factories import (
     SpatialCoverageFactory, GeoZoneFactory, GeoLevelFactory
 )
+from udata.core.spatial.tasks import compute_geozones_metrics
 
 
 class SpatialApiTest(APITestCase):
@@ -229,6 +230,31 @@ class SpatialApiTest(APITestCase):
             'features': [],
         })
 
+    def test_coverage_datasets_count(self):
+        GeoLevelFactory(id='fr:commune')
+        paris = GeoZoneFactory(
+            id='fr:commune:75056', level='fr:commune',
+            name='Paris', code='75056')
+        arles = GeoZoneFactory(
+            id='fr:commune:13004', level='fr:commune',
+            name='Arles', code='13004')
+
+        for _ in range(3):
+            DatasetFactory(
+                spatial=SpatialCoverageFactory(zones=[paris.id]))
+        for _ in range(2):
+            DatasetFactory(
+                spatial=SpatialCoverageFactory(zones=[arles.id]))
+
+        compute_geozones_metrics()
+
+        response = self.get(url_for('api.spatial_coverage', level='fr:commune'))
+        self.assert200(response)
+        self.assertEqual(response.json['features'][0]['id'], 'fr:commune:13004')
+        self.assertEqual(response.json['features'][0]['properties']['datasets'], 2)
+        self.assertEqual(response.json['features'][1]['id'], 'fr:commune:75056')
+        self.assertEqual(response.json['features'][1]['properties']['datasets'], 3)
+
 
 class SpatialTerritoriesApiTest(APITestCase):
     modules = []
udata/core/user/api.py
CHANGED
@@ -226,6 +226,7 @@ class UserListAPI(API):
     fields = user_fields
     form = UserProfileForm
 
+    @api.secure(admin_permission)
    @api.doc('list_users')
     @api.expect(user_parser.parser)
     @api.marshal_with(user_page_fields)
@@ -269,6 +270,12 @@ class UserAvatarAPI(API):
         return {'image': user.avatar}
 
 
+
+delete_parser = api.parser()
+delete_parser.add_argument(
+    'no_mail', type=bool, help='Do not send a mail to notify the user of the deletion',
+    location='args', default=False)
+
 @ns.route('/<user:user>/', endpoint='user')
 @api.response(404, 'User not found')
 @api.response(410, 'User is not active or has been deleted')
@@ -297,22 +304,19 @@ class UserAPI(API):
 
     @api.secure(admin_permission)
     @api.doc('delete_user')
+    @api.expect(delete_parser)
     @api.response(204, 'Object deleted')
     @api.response(403, 'When trying to delete yourself')
     def delete(self, user):
         '''Delete a user given its identifier'''
+        args = delete_parser.parse_args()
         if user.deleted:
             api.abort(410, 'User has already been deleted')
         if user == current_user._get_current_object():
             api.abort(403, 'You cannot delete yourself with this API. ' +
                       'Use the "me" API instead.')
-
-
-        storage.delete(user.avatar.filename)
-        storage.delete(user.avatar.original)
-        for key, value in user.avatar.thumbnails.items():
-            storage.delete(value)
-        user.mark_as_deleted()
+
+        user.mark_as_deleted(notify=not args['no_mail'])
         return '', 204
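Taken together, listing users and deleting a user are now admin-only, and deletion can skip the notification email. A client-side sketch of the new flag (base URL, user id, and API key are placeholders):

```python
# Delete a user without sending the notification email; the base URL,
# user id, and API key below are placeholders.
import requests

resp = requests.delete(
    'https://demo.data.gouv.fr/api/1/users/<user-id>/',
    params={'no_mail': True},            # new flag added above
    headers={'X-API-KEY': '<admin-api-key>'},
)
assert resp.status_code == 204
```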