udata 11.1.2.dev6__py3-none-any.whl → 11.1.2.dev8__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- udata/commands/db.py +2 -3
- udata/core/dataservices/api.py +1 -2
- udata/core/dataservices/models.py +2 -3
- udata/core/dataset/api.py +1 -2
- udata/core/dataset/preview.py +3 -3
- udata/core/dataset/rdf.py +9 -12
- udata/core/metrics/helpers.py +6 -7
- udata/core/metrics/tasks.py +3 -6
- udata/core/post/api.py +1 -2
- udata/core/reuse/api.py +1 -2
- udata/core/user/api.py +1 -3
- udata/migrations/2025-01-05-dataservices-fields-changes.py +1 -2
- udata/static/chunks/{11.51d706fb9521c16976bc.js → 11.b6f741fcc366abfad9c4.js} +3 -3
- udata/static/chunks/{11.51d706fb9521c16976bc.js.map → 11.b6f741fcc366abfad9c4.js.map} +1 -1
- udata/static/chunks/{13.39e106d56f794ebd06a0.js → 13.2d06442dd9a05d9777b5.js} +2 -2
- udata/static/chunks/{13.39e106d56f794ebd06a0.js.map → 13.2d06442dd9a05d9777b5.js.map} +1 -1
- udata/static/chunks/{17.70cbb4a91b002338007e.js → 17.e8e4caaad5cb0cc0bacc.js} +2 -2
- udata/static/chunks/{17.70cbb4a91b002338007e.js.map → 17.e8e4caaad5cb0cc0bacc.js.map} +1 -1
- udata/static/chunks/{19.a348a5fff8fe2801e52a.js → 19.f03a102365af4315f9db.js} +3 -3
- udata/static/chunks/{19.a348a5fff8fe2801e52a.js.map → 19.f03a102365af4315f9db.js.map} +1 -1
- udata/static/chunks/{5.343ca020a2d38cec1a14.js → 5.0fa1408dae4e76b87b2e.js} +3 -3
- udata/static/chunks/{5.343ca020a2d38cec1a14.js.map → 5.0fa1408dae4e76b87b2e.js.map} +1 -1
- udata/static/chunks/{6.a3b07de9dd2ca2d24e85.js → 6.d663709d877baa44a71e.js} +3 -3
- udata/static/chunks/{6.a3b07de9dd2ca2d24e85.js.map → 6.d663709d877baa44a71e.js.map} +1 -1
- udata/static/chunks/{8.462bb3029de008497675.js → 8.778091d55cd8ea39af6b.js} +2 -2
- udata/static/chunks/{8.462bb3029de008497675.js.map → 8.778091d55cd8ea39af6b.js.map} +1 -1
- udata/static/common.js +1 -1
- udata/static/common.js.map +1 -1
- udata/uris.py +1 -2
- {udata-11.1.2.dev6.dist-info → udata-11.1.2.dev8.dist-info}/METADATA +3 -4
- {udata-11.1.2.dev6.dist-info → udata-11.1.2.dev8.dist-info}/RECORD +35 -35
- {udata-11.1.2.dev6.dist-info → udata-11.1.2.dev8.dist-info}/WHEEL +0 -0
- {udata-11.1.2.dev6.dist-info → udata-11.1.2.dev8.dist-info}/entry_points.txt +0 -0
- {udata-11.1.2.dev6.dist-info → udata-11.1.2.dev8.dist-info}/licenses/LICENSE +0 -0
- {udata-11.1.2.dev6.dist-info → udata-11.1.2.dev8.dist-info}/top_level.txt +0 -0
udata/commands/db.py
CHANGED
@@ -5,7 +5,6 @@ import os
 import sys
 import traceback
 from itertools import groupby
-from typing import Optional
 from uuid import uuid4

 import click
@@ -434,8 +433,8 @@ def check_integrity(models):
 def check_duplicate_resources_ids(
     skip_duplicates_inside_dataset: bool,
     skip_duplicates_outside_dataset: bool,
-    exclude_org: Optional[str],
-    only_org: Optional[str],
+    exclude_org: str | None,
+    only_org: str | None,
     fix: bool,
 ):
     resources = {}
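The change above is representative of the whole release: every removed `from typing import …` line below pairs with annotations rewritten from `Optional[X]`, `List[X]` and `Dict[K, V]` to the PEP 604 / PEP 585 forms `X | None`, `list[X]` and `dict[K, V]`. Most of these modules do not use `from __future__ import annotations` (udata/core/dataset/preview.py does), so the new annotations are evaluated at import time and assume Python 3.10 or newer. A minimal illustration of the equivalence, not taken from the udata codebase:

    from typing import List, Optional

    # Pre-PEP 604/585 style, as removed in this release:
    def old_style(exclude_org: Optional[str], tags: List[str]) -> Optional[int]:
        return len(tags) if exclude_org is None else None

    # Equivalent modern style (built-in generics since 3.9, "X | None" unions since 3.10):
    def new_style(exclude_org: str | None, tags: list[str]) -> int | None:
        return len(tags) if exclude_org is None else None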
udata/core/dataservices/api.py
CHANGED
@@ -1,5 +1,4 @@
 from datetime import datetime
-from typing import List

 import mongoengine
 from bson import ObjectId
@@ -63,7 +62,7 @@ class DataservicesAtomFeedAPI(API):
             _("Latest APIs"), description=None, feed_url=request.url, link=request.url_root
         )

-        dataservices: List[Dataservice] = (
+        dataservices: list[Dataservice] = (
             Dataservice.objects.visible().order_by("-created_at").limit(current_site.feed_size)
         )
         for dataservice in dataservices:
udata/core/dataservices/models.py
CHANGED
@@ -144,9 +144,8 @@ def filter_by_topic(base_query, filter_value):
     else:
         return base_query.filter(
             id__in=[
-                elt.element.id
-                for elt in topic.elements
-                if elt.element.__class__.__name__ == "Dataservice"
+                element.element.id
+                for element in topic.elements.filter(__raw__={"element._cls": "Dataservice"})
             ]
         )

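Besides the annotation cleanup, the filter_by_topic hunk above swaps a Python-side class-name check over every topic element for a raw MongoDB filter on the persisted class marker. A self-contained sketch of the same technique with mongoengine's __raw__ operator; the Topic and TopicElement models and the database name are illustrative assumptions, not udata's schema:

    import mongoengine as me

    me.connect("example_db")  # assumed local MongoDB; the queryset below is built lazily


    class TopicElement(me.EmbeddedDocument):
        element = me.GenericReferenceField()  # stored as {"_cls": ..., "_ref": ...}


    class Topic(me.Document):
        elements = me.EmbeddedDocumentListField(TopicElement)


    # Filter on the persisted class name instead of loading every element and
    # checking element.__class__.__name__ in Python.
    dataservice_topics = Topic.objects(__raw__={"elements.element._cls": "Dataservice"})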
udata/core/dataset/api.py
CHANGED
@@ -20,7 +20,6 @@ These changes might lead to backward compatibility breakage meaning:
 import logging
 import os
 from datetime import datetime
-from typing import List

 import mongoengine
 from bson.objectid import ObjectId
@@ -332,7 +331,7 @@ class DatasetsAtomFeedAPI(API):
             link=request.url_root,
         )

-        datasets: List[Dataset] = (
+        datasets: list[Dataset] = (
             Dataset.objects.visible().order_by("-created_at_internal").limit(current_site.feed_size)
         )
         for dataset in datasets:
udata/core/dataset/preview.py
CHANGED
@@ -1,7 +1,7 @@
 from __future__ import annotations

 from abc import ABC, abstractmethod
-from typing import TYPE_CHECKING, Optional
+from typing import TYPE_CHECKING

 from flask import current_app

@@ -12,12 +12,12 @@ if TYPE_CHECKING:
 # Define an abstract class
 class Preview(ABC):
     @abstractmethod
-    def preview_url(self, resource: Resource) -> Optional[str]:
+    def preview_url(self, resource: Resource) -> str | None:
         return None


 class TabularAPIPreview(Preview):
-    def preview_url(self, resource: Resource) -> Optional[str]:
+    def preview_url(self, resource: Resource) -> str | None:
         preview_base_url = current_app.config["TABULAR_EXPLORE_URL"]
         if not preview_base_url:
             return None
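The Preview abstract base keeps a single hook, preview_url(resource), now annotated as returning str | None. A standalone sketch of implementing that interface; CSVExternalPreview and csv-viewer.example.com are hypothetical, and the resource attributes (format, url) are assumed to match udata's Resource fields:

    from __future__ import annotations

    from abc import ABC, abstractmethod


    class Preview(ABC):
        @abstractmethod
        def preview_url(self, resource) -> str | None:
            return None


    class CSVExternalPreview(Preview):
        """Hypothetical plugin: only previews CSV resources on an assumed external viewer."""

        def preview_url(self, resource) -> str | None:
            if getattr(resource, "format", "") != "csv":
                return None
            return f"https://csv-viewer.example.com/?url={resource.url}"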
udata/core/dataset/rdf.py
CHANGED
@@ -6,7 +6,6 @@ import calendar
 import json
 import logging
 from datetime import date, datetime
-from typing import Optional

 from dateutil.parser import parse as parse_dt
 from flask import current_app
@@ -102,9 +101,7 @@ EU_RDF_REQUENCIES = {
 }


-def temporal_to_rdf(
-    daterange: db.DateRange, graph: Optional[Graph] = None
-) -> Optional[RdfResource]:
+def temporal_to_rdf(daterange: db.DateRange, graph: Graph | None = None) -> RdfResource | None:
     if not daterange:
         return
     graph = graph or Graph(namespace_manager=namespace_manager)
@@ -117,13 +114,13 @@ def temporal_to_rdf(
     return pot


-def frequency_to_rdf(frequency: str, graph: Optional[Graph] = None) -> Optional[str]:
+def frequency_to_rdf(frequency: str, graph: Graph | None = None) -> str | None:
     if not frequency:
         return
     return RDF_FREQUENCIES.get(frequency, getattr(FREQ, frequency))


-def owner_to_rdf(dataset: Dataset, graph: Optional[Graph] = None) -> Optional[RdfResource]:
+def owner_to_rdf(dataset: Dataset, graph: Graph | None = None) -> RdfResource | None:
     from udata.core.organization.rdf import organization_to_rdf
     from udata.core.user.rdf import user_to_rdf

@@ -134,7 +131,7 @@ def owner_to_rdf(dataset: Dataset, graph: Optional[Graph] = None) -> Optional[Rd
     return


-def detect_ogc_service(resource: Resource) -> Optional[str]:
+def detect_ogc_service(resource: Resource) -> str | None:
     """
     Detect if the resource points towards an OGC Service based on either
     * a known OGC Service format
@@ -153,8 +150,8 @@ def detect_ogc_service(resource: Resource) -> Optional[str]:
 def ogc_service_to_rdf(
     dataset: Dataset,
     resource: Resource,
-    ogc_service_type: Optional[str] = None,
-    graph: Optional[Graph] = None,
+    ogc_service_type: str | None = None,
+    graph: Graph | None = None,
     is_hvd: bool = False,
 ) -> RdfResource:
     """
@@ -196,8 +193,8 @@ def ogc_service_to_rdf(

 def resource_to_rdf(
     resource: Resource,
-    dataset: Optional[Dataset] = None,
-    graph: Optional[Graph] = None,
+    dataset: Dataset | None = None,
+    graph: Graph | None = None,
     is_hvd: bool = False,
 ) -> RdfResource:
     """
@@ -261,7 +258,7 @@ def dataset_to_graph_id(dataset: Dataset) -> URIRef | BNode:
     return BNode()


-def dataset_to_rdf(dataset: Dataset, graph: Optional[Graph] = None) -> RdfResource:
+def dataset_to_rdf(dataset: Dataset, graph: Graph | None = None) -> RdfResource:
     """
     Map a dataset domain model to a DCAT/RDF graph
     """
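All of the *_to_rdf helpers above keep the same convention: accept an existing rdflib Graph or lazily create one, and return an RDF resource or None. A loose, self-contained sketch of that convention (the period_to_rdf name and the DCT/DCAT properties chosen here are illustrative, not udata's implementation, which also plugs in its own namespace_manager):

    from rdflib import RDF, BNode, Graph, Literal, Namespace

    DCT = Namespace("http://purl.org/dc/terms/")
    DCAT = Namespace("http://www.w3.org/ns/dcat#")


    def period_to_rdf(start: str, end: str, graph: Graph | None = None) -> Graph:
        # Reuse the caller's graph when one is passed, otherwise start a fresh one.
        graph = graph or Graph()
        node = BNode()
        graph.add((node, RDF.type, DCT.PeriodOfTime))
        graph.add((node, DCAT.startDate, Literal(start)))
        graph.add((node, DCAT.endDate, Literal(end)))
        return graph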
udata/core/metrics/helpers.py
CHANGED
@@ -1,7 +1,6 @@
 import logging
 from collections import OrderedDict
 from datetime import datetime, timedelta
-from typing import Dict, List, Union
 from urllib.parse import urlencode

 import requests
@@ -14,13 +13,13 @@ from pymongo.command_cursor import CommandCursor
 log = logging.getLogger(__name__)


-def get_last_13_months() -> List[str]:
+def get_last_13_months() -> list[str]:
     dstart = datetime.today().replace(day=1) - timedelta(days=365)
     months = rrule(freq=MONTHLY, count=13, dtstart=dstart)
     return [month.strftime("%Y-%m") for month in months]


-def compute_monthly_metrics(metrics_data: List[Dict], metrics_labels: List[str]) -> OrderedDict:
+def compute_monthly_metrics(metrics_data: list[dict], metrics_labels: list[str]) -> OrderedDict:
     # Initialize default monthly_metrics
     monthly_metrics = OrderedDict(
         (month, {label: 0 for label in metrics_labels}) for month in get_last_13_months()
@@ -35,7 +34,7 @@ def compute_monthly_metrics(metrics_data: List[Dict], metrics_labels: List[str])
     return monthly_metrics


-def metrics_by_label(monthly_metrics: Dict, metrics_labels: List[str]) -> List[OrderedDict]:
+def metrics_by_label(monthly_metrics: dict, metrics_labels: list[str]) -> list[OrderedDict]:
     metrics_by_label = []
     for label in metrics_labels:
         metrics_by_label.append(
@@ -45,8 +44,8 @@ def metrics_by_label(monthly_metrics: Dict, metrics_labels: List[str]) -> List[O


 def get_metrics_for_model(
-    model: str, id: Union[str, ObjectId, None], metrics_labels: List[str]
-) -> List[OrderedDict]:
+    model: str, id: str | ObjectId | None, metrics_labels: list[str]
+) -> list[OrderedDict]:
     """
     Get distant metrics for a particular model object
     """
@@ -69,7 +68,7 @@ def get_metrics_for_model(
         return [{} for _ in range(len(metrics_labels))]


-def get_download_url(model: str, id: Union[str, ObjectId, None]) -> str:
+def get_download_url(model: str, id: str | ObjectId | None) -> str:
     api_namespace = model + "s" if model != "site" else model
     base_url = f"{current_app.config['METRICS_API']}/{api_namespace}/data/csv/"
     args = {"metric_month__sort": "asc"}
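get_last_13_months(), shown in full above, anchors the metrics window on dateutil's rrule. Reproducing the same computation standalone:

    from datetime import datetime, timedelta

    from dateutil.rrule import MONTHLY, rrule

    # Same computation as get_last_13_months(): start near the first day of the
    # month one year ago, then take 13 monthly occurrences formatted as "YYYY-MM".
    dstart = datetime.today().replace(day=1) - timedelta(days=365)
    months = [month.strftime("%Y-%m") for month in rrule(freq=MONTHLY, count=13, dtstart=dstart)]
    print(months)  # 13 entries, e.g. ["2024-07", "2024-08", ..., "2025-07"], depending on today's date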
udata/core/metrics/tasks.py
CHANGED
@@ -1,7 +1,6 @@
 import logging
 import time
 from functools import wraps
-from typing import Dict, List

 import requests
 from flask import current_app
@@ -17,9 +16,7 @@ log = logging.getLogger(__name__)
 def log_timing(func):
     @wraps(func)
     def timeit_wrapper(*args, **kwargs):
-
-        name = func.__name__
-        model = name.removeprefix("update_") if hasattr(name, "removeprefix") else name
+        model = func.__name__.removeprefix("update_")

         log.info(f"Processing {model}…")
         start_time = time.perf_counter()
@@ -31,7 +28,7 @@ def log_timing(func):
     return timeit_wrapper


-def save_model(model: db.Document, model_id: str, metrics: Dict[str, int]) -> None:
+def save_model(model: db.Document, model_id: str, metrics: dict[str, int]) -> None:
     try:
         result = model.objects(id=model_id).update(
             **{f"set__metrics__{key}": value for key, value in metrics.items()}
@@ -43,7 +40,7 @@ def save_model(model: db.Document, model_id: str, metrics: Dict[str, int]) -> No
         log.exception(e)


-def iterate_on_metrics(target: str, value_keys: List[str], page_size: int = 50) -> dict:
+def iterate_on_metrics(target: str, value_keys: list[str], page_size: int = 50) -> dict:
     """
     Yield all elements with not zero values for the keys inside `value_keys`.
     If you pass ['visit', 'download_resource'], it will do a `OR` and get
udata/core/post/api.py
CHANGED
@@ -1,5 +1,4 @@
 from datetime import datetime
-from typing import List

 from feedgenerator.django.utils.feedgenerator import Atom1Feed
 from flask import make_response, request
@@ -122,7 +121,7 @@ class PostsAtomFeedAPI(API):
             link=request.url_root,
         )

-        posts: List[Post] = Post.objects().published().order_by("-published").limit(15)
+        posts: list[Post] = Post.objects().published().order_by("-published").limit(15)
         for post in posts:
             feed.add_item(
                 post.name,
udata/core/reuse/api.py
CHANGED
@@ -1,5 +1,4 @@
 from datetime import datetime
-from typing import List

 import mongoengine
 from bson.objectid import ObjectId
@@ -144,7 +143,7 @@ class ReusesAtomFeedAPI(API):
             link=request.url_root,
         )

-        reuses: List[Reuse] = Reuse.objects.visible().order_by("-created_at").limit(15)
+        reuses: list[Reuse] = Reuse.objects.visible().order_by("-created_at").limit(15)
         for reuse in reuses:
             author_name = None
             author_uri = None
udata/core/user/api.py
CHANGED
@@ -1,5 +1,3 @@
-from typing import Optional
-
 from flask_security import current_user, logout_user
 from slugify import slugify

@@ -370,7 +368,7 @@ suggest_parser.add_argument(
 )


-def suggest_size(value: str) -> Optional[int]:
+def suggest_size(value: str) -> int | None:
     """Parse an integer that must be between 1 and 20."""
     help_message = "The size must be an integer between 1 and 20."
     try:
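The suggest_size hunk stops at the try: line, so only the contract is visible (an int between 1 and 20, or None). A minimal sketch of a validator honouring that contract, an assumption rather than udata's exact body:

    def suggest_size(value: str) -> int | None:
        """Parse an integer that must be between 1 and 20."""
        help_message = "The size must be an integer between 1 and 20."
        if not value:
            return None  # assumption: an empty value falls back to the API default
        try:
            size = int(value)
        except ValueError:
            raise ValueError(help_message)
        if not 1 <= size <= 20:
            raise ValueError(help_message)
        return size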
udata/migrations/2025-01-05-dataservices-fields-changes.py
CHANGED
@@ -3,7 +3,6 @@ This migration keeps only the "Local authority" badge if the organization also h
 """

 import logging
-from typing import List

 from mongoengine.connection import get_db

@@ -84,7 +83,7 @@ def migrate(db):
     )
     log.info(f"\t{count.modified_count} open dataservices to DATASERVICE_ACCESS_TYPE_OPEN")

-    dataservices: List[Dataservice] = get_db().dataservice.find()
+    dataservices: list[Dataservice] = get_db().dataservice.find()
     for dataservice in dataservices:
         if (
             "endpoint_description_url" not in dataservice