udata 10.0.6__py2.py3-none-any.whl → 10.0.6.dev32994__py2.py3-none-any.whl
- udata/__init__.py +1 -1
- udata/core/dataservices/rdf.py +6 -4
- udata/core/dataservices/tasks.py +4 -2
- udata/core/dataset/tasks.py +1 -10
- udata/core/organization/models.py +1 -1
- udata/core/post/api.py +5 -15
- udata/core/post/tests/test_api.py +2 -16
- udata/harvest/tasks.py +2 -3
- udata/static/chunks/{10.8ca60413647062717b1e.js → 10.471164b2a9fe15614797.js} +3 -3
- udata/static/chunks/{10.8ca60413647062717b1e.js.map → 10.471164b2a9fe15614797.js.map} +1 -1
- udata/static/chunks/{11.b6f741fcc366abfad9c4.js → 11.51d706fb9521c16976bc.js} +3 -3
- udata/static/chunks/{11.b6f741fcc366abfad9c4.js.map → 11.51d706fb9521c16976bc.js.map} +1 -1
- udata/static/chunks/{13.2d06442dd9a05d9777b5.js → 13.f29411b06be1883356a3.js} +2 -2
- udata/static/chunks/{13.2d06442dd9a05d9777b5.js.map → 13.f29411b06be1883356a3.js.map} +1 -1
- udata/static/chunks/{17.e8e4caaad5cb0cc0bacc.js → 17.3bd0340930d4a314ce9c.js} +2 -2
- udata/static/chunks/{17.e8e4caaad5cb0cc0bacc.js.map → 17.3bd0340930d4a314ce9c.js.map} +1 -1
- udata/static/chunks/{19.f03a102365af4315f9db.js → 19.8da42e8359d72afc2618.js} +3 -3
- udata/static/chunks/{19.f03a102365af4315f9db.js.map → 19.8da42e8359d72afc2618.js.map} +1 -1
- udata/static/chunks/{8.778091d55cd8ea39af6b.js → 8.54e44b102164ae5e7a67.js} +2 -2
- udata/static/chunks/{8.778091d55cd8ea39af6b.js.map → 8.54e44b102164ae5e7a67.js.map} +1 -1
- udata/static/chunks/{9.033d7e190ca9e226a5d0.js → 9.07515e5187f475bce828.js} +3 -3
- udata/static/chunks/{9.033d7e190ca9e226a5d0.js.map → 9.07515e5187f475bce828.js.map} +1 -1
- udata/static/common.js +1 -1
- udata/static/common.js.map +1 -1
- udata/tests/dataservice/test_dataservice_tasks.py +0 -5
- udata/tests/dataset/test_dataset_tasks.py +3 -19
- udata/tests/organization/test_organization_model.py +0 -6
- {udata-10.0.6.dist-info → udata-10.0.6.dev32994.dist-info}/METADATA +2 -9
- {udata-10.0.6.dist-info → udata-10.0.6.dev32994.dist-info}/RECORD +33 -33
- {udata-10.0.6.dist-info → udata-10.0.6.dev32994.dist-info}/LICENSE +0 -0
- {udata-10.0.6.dist-info → udata-10.0.6.dev32994.dist-info}/WHEEL +0 -0
- {udata-10.0.6.dist-info → udata-10.0.6.dev32994.dist-info}/entry_points.txt +0 -0
- {udata-10.0.6.dist-info → udata-10.0.6.dev32994.dist-info}/top_level.txt +0 -0
udata/__init__.py
CHANGED
udata/core/dataservices/rdf.py
CHANGED
@@ -156,10 +156,12 @@ def dataservice_to_rdf(dataservice: Dataservice, graph=None):
 
     if is_hvd:
         # We also want to automatically add any HVD category tags of the dataservice's datasets.
-
-
-
-
+        for dataset in dataservice.datasets:
+            if "hvd" not in dataset.tags:  # Only check HVD datasets for their categories.
+                continue
+            for tag in dataset.tags:
+                if tag in TAG_TO_EU_HVD_CATEGORIES:
+                    hvd_category_tags.add(tag)
     for tag in hvd_category_tags:
         d.add(DCATAP.hvdCategory, URIRef(TAG_TO_EU_HVD_CATEGORIES[tag]))
 
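The new loop only inspects datasets that are themselves tagged `hvd` and collects any of their tags that appear in the tag-to-category mapping. Below is a minimal sketch of that filtering logic, using plain dicts as stand-ins for udata's Dataservice/Dataset documents and a hypothetical two-entry mapping with placeholder URIs (the real `TAG_TO_EU_HVD_CATEGORIES` points at the EU HVD vocabulary):

```python
# Hypothetical stand-in for TAG_TO_EU_HVD_CATEGORIES (tag slug -> category URI);
# the URIs here are placeholders, not the real EU vocabulary entries.
TAG_TO_EU_HVD_CATEGORIES = {
    "hvd-category-meteorological": "https://example.org/hvd/meteorological",
    "hvd-category-mobility": "https://example.org/hvd/mobility",
}

# Plain-dict stand-ins for the dataservice and its datasets.
dataservice = {
    "datasets": [
        {"tags": ["hvd", "hvd-category-meteorological", "weather"]},
        {"tags": ["transport", "hvd-category-mobility"]},  # not tagged "hvd": skipped
    ]
}

hvd_category_tags = set()
for dataset in dataservice["datasets"]:
    if "hvd" not in dataset["tags"]:  # Only check HVD datasets for their categories.
        continue
    for tag in dataset["tags"]:
        if tag in TAG_TO_EU_HVD_CATEGORIES:
            hvd_category_tags.add(tag)

# Each collected tag would then be emitted as a dcatap:hvdCategory triple.
print(hvd_category_tags)  # {'hvd-category-meteorological'}
```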
udata/core/dataservices/tasks.py
CHANGED
@@ -1,7 +1,8 @@
 from celery.utils.log import get_task_logger
 
 from udata.core.dataservices.models import Dataservice
-
+
+# from udata.harvest.models import HarvestJob
 from udata.models import Discussion, Follow, Transfer
 from udata.tasks import job
 

@@ -17,7 +18,8 @@ def purge_dataservices(self):
         # Remove discussions
         Discussion.objects(subject=dataservice).delete()
         # Remove HarvestItem references
-
+        # TODO: uncomment when adding dataservice harvest
+        # HarvestJob.objects(items__dataservice=dataservice).update(set__items__S__dataservice=None)
         # Remove associated Transfers
         Transfer.objects(subject=dataservice).delete()
         # Remove dataservice
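The purge task mirrors the cleanup seen in the dataset purge task: delete the objects that reference the dataservice (discussions, transfers), then the dataservice itself, with the HarvestJob step left commented out until dataservice harvesting exists. A rough sketch of that cascade, with in-memory lists standing in for the MongoDB collections and a hypothetical `purge_dataservice` helper:

```python
# In-memory stand-ins for the Discussion / Transfer / Dataservice collections;
# the real task works on MongoEngine querysets.
discussions = [{"subject": "dataservice-1"}, {"subject": "dataset-9"}]
transfers = [{"subject": "dataservice-1"}]
dataservices = [{"id": "dataservice-1", "deleted_at": "2024-01-01"}]


def purge_dataservice(dataservice_id):
    """Delete everything that points at the dataservice, then the dataservice."""
    global discussions, transfers, dataservices
    # Remove discussions
    discussions = [d for d in discussions if d["subject"] != dataservice_id]
    # Remove associated transfers
    transfers = [t for t in transfers if t["subject"] != dataservice_id]
    # TODO: also detach HarvestItem references once dataservice harvesting lands
    # Remove the dataservice itself
    dataservices = [ds for ds in dataservices if ds["id"] != dataservice_id]


purge_dataservice("dataservice-1")
print(len(discussions), len(transfers), len(dataservices))  # 1 0 0
```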
udata/core/dataset/tasks.py
CHANGED
@@ -48,11 +48,6 @@ def purge_datasets(self):
             datasets = topic.datasets
             datasets.remove(dataset)
             topic.update(datasets=datasets)
-        # Remove dataservices related dataset
-        for dataservice in Dataservice.objects(datasets=dataset):
-            datasets = dataservice.datasets
-            datasets.remove(dataset)
-            dataservice.update(datasets=datasets)
         # Remove HarvestItem references
         HarvestJob.objects(items__dataset=dataset).update(set__items__S__dataset=None)
         # Remove associated Transfers

@@ -128,11 +123,7 @@ def send_frequency_reminder(self):
 def update_datasets_reuses_metrics(self):
     all_datasets = Dataset.objects.visible().timeout(False)
     for dataset in all_datasets:
-        try:
-            dataset.count_reuses()
-        except Exception as e:
-            log.error(f"Error for dataset {dataset} during reuses metrics update: {e}")
-            continue
+        dataset.count_reuses()
 
 
 def get_queryset(model_cls):
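In `update_datasets_reuses_metrics`, the per-dataset try/except is dropped, so a failing `count_reuses()` now propagates out of the loop instead of being logged and skipped. A toy illustration of the difference, using a stand-in class rather than udata's `Dataset` model:

```python
class FakeDataset:
    """Stand-in for udata's Dataset; count_reuses() may raise."""

    def __init__(self, name, broken=False):
        self.name = name
        self.broken = broken

    def count_reuses(self):
        if self.broken:
            raise RuntimeError(f"cannot count reuses for {self.name}")


datasets = [FakeDataset("a"), FakeDataset("b", broken=True), FakeDataset("c")]

# Old behaviour (removed): swallow the error, log it, keep going.
for dataset in datasets:
    try:
        dataset.count_reuses()
    except Exception as e:
        print(f"Error for dataset {dataset.name} during reuses metrics update: {e}")
        continue

# New behaviour: the exception bubbles up to the task runner.
try:
    for dataset in datasets:
        dataset.count_reuses()
except RuntimeError as e:
    print(f"task fails at: {e}")
```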
udata/core/organization/models.py
CHANGED

@@ -117,7 +117,7 @@ class Organization(WithMetrics, OrganizationBadgeMixin, db.Datetimed, db.Document
         max_length=255, required=True, populate_from="name", update=True, follow=True
     )
     description = db.StringField(required=True)
-    url = db.
+    url = db.StringField()
     image_url = db.StringField()
     logo = db.ImageField(
         fs=avatars, basename=default_image_basename, max_size=LOGO_MAX_SIZE, thumbnails=LOGO_SIZES
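The organization's `url` becomes a plain string field (the removed line is truncated in this view, but if it was a URL-validated field, the practical effect is that values a URL validator would reject no longer fail document validation). A small sketch of that difference with vanilla MongoEngine fields; udata actually goes through its `db.*` wrappers, so treat this as an approximation:

```python
from mongoengine import Document, StringField, URLField, ValidationError


class StrictOrg(Document):
    """Roughly the stricter shape: url validated as a URL."""
    url = URLField()


class RelaxedOrg(Document):
    """Roughly the new shape: url is just a string."""
    url = StringField()


# Field-level validation does not need a database connection.
try:
    StrictOrg(url="not a url").validate()
except ValidationError as e:
    print("URLField rejects it:", e)

RelaxedOrg(url="not a url").validate()  # passes
print("StringField accepts it")
```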
udata/core/post/api.py
CHANGED
@@ -1,7 +1,6 @@
 from datetime import datetime
 
 from udata.api import API, api, fields
-from udata.auth import Permission as AdminPermission
 from udata.auth import admin_permission
 from udata.core.dataset.api_fields import dataset_fields
 from udata.core.reuse.models import Reuse

@@ -61,13 +60,6 @@ parser = api.page_parser()
 parser.add_argument(
     "sort", type=str, default="-created_at", location="args", help="The sorting attribute"
 )
-parser.add_argument(
-    "with_drafts",
-    type=bool,
-    default=False,
-    location="args",
-    help="`True` also returns the unpublished posts (only for super-admins)",
-)
 
 
 @ns.route("/", endpoint="posts")

@@ -78,13 +70,11 @@ class PostsAPI(API):
     def get(self):
         """List all posts"""
         args = parser.parse_args()
-
-
-
-
-
-
-        return posts.order_by(args["sort"]).paginate(args["page"], args["page_size"])
+        return (
+            Post.objects.published()
+            .order_by(args["sort"])
+            .paginate(args["page"], args["page_size"])
+        )
 
     @api.doc("create_post")
     @api.secure(admin_permission)
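With the `with_drafts` flag removed, the listing endpoint always goes through the published-posts queryset, sorted and paginated from the request arguments. A plain-Python sketch of the resulting behaviour, with dicts instead of `Post` documents and a naive sort/paginate in place of the MongoEngine queryset methods:

```python
from datetime import datetime

posts = [
    {"id": "1", "created_at": datetime(2024, 1, 1), "published": datetime(2024, 1, 2)},
    {"id": "2", "created_at": datetime(2024, 2, 1), "published": datetime(2024, 2, 2)},
    {"id": "3", "created_at": datetime(2024, 3, 1), "published": None},  # draft
]


def list_posts(page=1, page_size=20, sort="-created_at"):
    """Published posts only, newest first by default, paginated."""
    published = [p for p in posts if p["published"] is not None]
    key = sort.lstrip("-")
    published.sort(key=lambda p: p[key], reverse=sort.startswith("-"))
    start = (page - 1) * page_size
    return published[start:start + page_size]


print([p["id"] for p in list_posts()])  # ['2', '1'] -- the draft never shows up
```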
udata/core/post/tests/test_api.py
CHANGED

@@ -15,28 +15,14 @@ class PostsAPITest:
 
     def test_post_api_list(self, api):
         """It should fetch a post list from the API"""
-        PostFactory.create_batch(3)
-
+        posts = PostFactory.create_batch(3)
+        posts.append(PostFactory(published=None))
 
         response = api.get(url_for("api.posts"))
         assert200(response)
         # Response should not contain the unpublished post
         assert len(response.json["data"]) == 3
 
-        api.login(AdminFactory())
-
-        response = api.get(url_for("api.posts"))
-        assert200(response)
-
-        assert len(response.json["data"]) == 3
-        assert str(draft.id) not in [post["id"] for post in response.json["data"]]
-
-        response = api.get(url_for("api.posts", with_drafts=True))
-        assert200(response)
-
-        assert len(response.json["data"]) == 4
-        assert str(draft.id) in [post["id"] for post in response.json["data"]]
-
     def test_post_api_get(self, api):
         """It should fetch a post from the API"""
         post = PostFactory()
udata/harvest/tasks.py
CHANGED
@@ -13,9 +13,8 @@ def harvest(self, ident):
     log.info('Launching harvest job for source "%s"', ident)
 
     source = HarvestSource.get(ident)
-    if source.deleted
-
-        return  # Ignore deleted and inactive sources
+    if source.deleted:
+        return  # Ignore deleted sources
     Backend = backends.get(current_app, source.backend)
     backend = Backend(source)
 
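The harvest task now bails out with a simple early return when the source has been deleted (the old check, truncated in this view, also mentioned inactive sources). A minimal sketch of the guard, with a stand-in object rather than udata's `HarvestSource`:

```python
import logging

log = logging.getLogger(__name__)


class FakeSource:
    """Stand-in for HarvestSource: just enough state for the guard."""

    def __init__(self, ident, deleted=False, backend="dummy"):
        self.ident = ident
        self.deleted = deleted
        self.backend = backend


def harvest(source):
    log.info('Launching harvest job for source "%s"', source.ident)
    if source.deleted:
        return  # Ignore deleted sources
    # ...backend lookup and the actual harvesting would happen here...
    return f"harvested with {source.backend}"


print(harvest(FakeSource("s1", deleted=True)))  # None
print(harvest(FakeSource("s2")))                # harvested with dummy
```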