udata 14.5.1.dev9__py3-none-any.whl → 14.7.3.dev4__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- udata/api_fields.py +85 -15
- udata/auth/forms.py +1 -1
- udata/core/badges/tests/test_tasks.py +0 -2
- udata/core/dataservices/apiv2.py +1 -1
- udata/core/dataset/models.py +15 -3
- udata/core/dataset/rdf.py +10 -14
- udata/core/organization/apiv2.py +1 -1
- udata/core/organization/models.py +25 -5
- udata/core/pages/models.py +49 -0
- udata/core/pages/tests/test_api.py +165 -1
- udata/core/post/api.py +1 -1
- udata/core/post/constants.py +8 -0
- udata/core/post/models.py +27 -3
- udata/core/post/tests/test_api.py +116 -2
- udata/core/post/tests/test_models.py +24 -0
- udata/core/reuse/apiv2.py +1 -1
- udata/core/user/models.py +21 -6
- udata/features/notifications/models.py +4 -1
- udata/features/transfer/actions.py +2 -0
- udata/features/transfer/models.py +17 -0
- udata/features/transfer/notifications.py +96 -0
- udata/harvest/backends/ckan/harvesters.py +10 -2
- udata/migrations/2021-08-17-harvest-integrity.py +23 -16
- udata/migrations/2025-12-16-create-transfer-request-notifications.py +69 -0
- udata/migrations/2026-01-14-add-default-kind-to-posts.py +17 -0
- udata/tasks.py +1 -0
- udata/tests/apiv2/test_dataservices.py +14 -0
- udata/tests/apiv2/test_organizations.py +9 -0
- udata/tests/apiv2/test_reuses.py +11 -0
- udata/tests/dataset/test_dataset_rdf.py +49 -0
- udata/tests/search/test_search_integration.py +37 -0
- udata/tests/test_transfer.py +181 -2
- udata/translations/ar/LC_MESSAGES/udata.mo +0 -0
- udata/translations/ar/LC_MESSAGES/udata.po +310 -158
- udata/translations/de/LC_MESSAGES/udata.mo +0 -0
- udata/translations/de/LC_MESSAGES/udata.po +314 -160
- udata/translations/es/LC_MESSAGES/udata.mo +0 -0
- udata/translations/es/LC_MESSAGES/udata.po +313 -160
- udata/translations/fr/LC_MESSAGES/udata.mo +0 -0
- udata/translations/fr/LC_MESSAGES/udata.po +476 -202
- udata/translations/it/LC_MESSAGES/udata.mo +0 -0
- udata/translations/it/LC_MESSAGES/udata.po +318 -162
- udata/translations/pt/LC_MESSAGES/udata.mo +0 -0
- udata/translations/pt/LC_MESSAGES/udata.po +316 -161
- udata/translations/sr/LC_MESSAGES/udata.mo +0 -0
- udata/translations/sr/LC_MESSAGES/udata.po +324 -164
- udata/translations/udata.pot +169 -124
- udata/utils.py +23 -0
- {udata-14.5.1.dev9.dist-info → udata-14.7.3.dev4.dist-info}/METADATA +2 -2
- {udata-14.5.1.dev9.dist-info → udata-14.7.3.dev4.dist-info}/RECORD +54 -50
- udata/tests/apiv2/test_search.py +0 -30
- {udata-14.5.1.dev9.dist-info → udata-14.7.3.dev4.dist-info}/WHEEL +0 -0
- {udata-14.5.1.dev9.dist-info → udata-14.7.3.dev4.dist-info}/entry_points.txt +0 -0
- {udata-14.5.1.dev9.dist-info → udata-14.7.3.dev4.dist-info}/licenses/LICENSE +0 -0
- {udata-14.5.1.dev9.dist-info → udata-14.7.3.dev4.dist-info}/top_level.txt +0 -0
udata/core/post/models.py
CHANGED
@@ -3,12 +3,14 @@ from flask import url_for
 from udata.api_fields import field, generate_fields
 from udata.core.dataset.api_fields import dataset_fields
 from udata.core.linkable import Linkable
+from udata.core.pages.models import Page
 from udata.core.storages import default_image_basename, images
+from udata.core.user.api_fields import user_ref_fields
 from udata.i18n import lazy_gettext as _
 from udata.mongo import db
 from udata.uris import cdata_url

-from .constants import BODY_TYPES, IMAGE_SIZES
+from .constants import BODY_TYPES, IMAGE_SIZES, POST_KINDS

 __all__ = ("Post",)

@@ -41,9 +43,15 @@ class Post(db.Datetimed, Linkable, db.Document):
         sortable=True,
     )
     content = field(
-        db.StringField(
+        db.StringField(),
         markdown=True,
     )
+    content_as_page = field(
+        db.ReferenceField("Page", reverse_delete_rule=db.DENY),
+        nested_fields=Page.__read_fields__,
+        allow_null=True,
+        description="Reference to a Page when body_type is 'blocs'",
+    )
     image_url = field(
         db.StringField(),
     )
@@ -82,6 +90,7 @@ class Post(db.Datetimed, Linkable, db.Document):

     owner = field(
         db.ReferenceField("User"),
+        nested_fields=user_ref_fields,
         readonly=True,
         allow_null=True,
         description="The owner user",
@@ -95,7 +104,12 @@ class Post(db.Datetimed, Linkable, db.Document):

     body_type = field(
         db.StringField(choices=list(BODY_TYPES), default="markdown", required=False),
-
+    )
+
+    kind = field(
+        db.StringField(choices=list(POST_KINDS), default="news", required=False),
+        filterable={},
+        description="Post kind (news or page)",
     )

     meta = {
@@ -114,6 +128,16 @@ class Post(db.Datetimed, Linkable, db.Document):

     verbose_name = _("post")

+    def clean(self):
+        if self.body_type == "blocs":
+            if not self.content_as_page:
+                raise db.ValidationError("content_as_page is required when body_type is 'blocs'")
+        else:
+            if not self.content:
+                raise db.ValidationError(
+                    "content is required when body_type is 'markdown' or 'html'"
+                )
+
     def __str__(self):
         return self.name or ""

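The new Post.clean() hook makes the two content sources conditionally required: content_as_page for block-based posts, content otherwise. A minimal standalone sketch of the same conditional-validation pattern, written with plain mongoengine and a hypothetical ArticleSketch model rather than udata's actual Post:

# Standalone sketch of the conditional-required pattern used by Post.clean().
# ArticleSketch is a hypothetical model for illustration, not part of udata.
from mongoengine import Document, StringField, ValidationError

class ArticleSketch(Document):
    body_type = StringField(choices=("markdown", "html", "blocs"), default="markdown")
    content = StringField()
    content_as_page = StringField()  # stands in for the ReferenceField("Page")

    def clean(self):
        # Called by validate()/save(); enforce exactly one of the two content sources.
        if self.body_type == "blocs":
            if not self.content_as_page:
                raise ValidationError("content_as_page is required when body_type is 'blocs'")
        elif not self.content:
            raise ValidationError("content is required when body_type is 'markdown' or 'html'")

# No database connection is needed to exercise validation:
try:
    ArticleSketch(body_type="blocs").validate()
except ValidationError as e:
    print("rejected as expected:", e)

ArticleSketch(body_type="markdown", content="Hello").validate()  # passes

Because clean() runs inside validate() and save(), invalid combinations are rejected before anything is written.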
udata/core/post/tests/test_api.py
CHANGED

@@ -1,12 +1,14 @@
 from flask import url_for

 from udata.core.dataset.factories import DatasetFactory
+from udata.core.pages.factories import PageFactory
+from udata.core.pages.models import DatasetsListBloc
 from udata.core.post.factories import PostFactory
 from udata.core.post.models import Post
 from udata.core.reuse.factories import ReuseFactory
 from udata.core.user.factories import AdminFactory, UserFactory
 from udata.tests.api import APITestCase
-from udata.tests.helpers import assert200, assert201, assert204
+from udata.tests.helpers import assert200, assert201, assert204, assert400


 class PostsAPITest(APITestCase):
@@ -56,9 +58,13 @@

     def test_post_api_get(self):
         """It should fetch a post from the API"""
-
+        admin = AdminFactory()
+        post = PostFactory(owner=admin)
         response = self.get(url_for("api.post", post=post))
         assert200(response)
+        owner = response.json["owner"]
+        assert isinstance(owner, dict)
+        assert owner["id"] == str(admin.id)

     def test_post_api_create(self):
         """It should create a post from the API"""
@@ -159,3 +165,111 @@
         response = self.get(url_for("api.posts", with_drafts=True))
         assert200(response)
         assert len(response.json["data"]) == 3
+
+    def test_post_api_create_with_blocs_body_type_and_page(self):
+        """It should create a post with body_type='blocs' when content_as_page is provided"""
+        page = PageFactory()
+        data = PostFactory.as_dict()
+        data["datasets"] = [str(d.id) for d in data["datasets"]]
+        data["reuses"] = [str(r.id) for r in data["reuses"]]
+        data["body_type"] = "blocs"
+        data["content_as_page"] = str(page.id)
+        self.login(AdminFactory())
+        response = self.post(url_for("api.posts"), data)
+        assert201(response)
+        assert Post.objects.count() == 1
+        post = Post.objects.first()
+        assert post.body_type == "blocs"
+        assert post.content_as_page.id == page.id
+
+    def test_post_api_create_with_blocs_body_type_without_page(self):
+        """It should fail to create a post with body_type='blocs' without content_as_page"""
+        data = PostFactory.as_dict()
+        data["datasets"] = [str(d.id) for d in data["datasets"]]
+        data["reuses"] = [str(r.id) for r in data["reuses"]]
+        data["body_type"] = "blocs"
+        self.login(AdminFactory())
+        response = self.post(url_for("api.posts"), data)
+        assert400(response)
+
+    def test_post_api_get_with_blocs_returns_page_blocs(self):
+        """It should return blocs from the associated page when fetching a post"""
+        datasets = DatasetFactory.create_batch(2)
+        bloc = DatasetsListBloc(title="Featured datasets", datasets=datasets)
+        page = PageFactory(blocs=[bloc])
+        post = PostFactory(body_type="blocs", content_as_page=page)
+        response = self.get(url_for("api.post", post=post))
+        assert200(response)
+        assert response.json["body_type"] == "blocs"
+        assert "content_as_page" in response.json
+        page_data = response.json["content_as_page"]
+        assert "blocs" in page_data
+        assert len(page_data["blocs"]) == 1
+        assert page_data["blocs"][0]["class"] == "DatasetsListBloc"
+        assert page_data["blocs"][0]["title"] == "Featured datasets"
+        assert len(page_data["blocs"][0]["datasets"]) == 2
+
+    def test_post_api_update_to_blocs_without_content_as_page(self):
+        """It should fail to update body_type to 'blocs' without providing content_as_page"""
+        post = PostFactory(body_type="markdown")
+        self.login(AdminFactory())
+        response = self.put(url_for("api.post", post=post), {"body_type": "blocs"})
+        assert400(response)
+
+    def test_post_api_update_to_blocs_with_content_as_page(self):
+        """It should update body_type to 'blocs' when content_as_page is provided"""
+        post = PostFactory(body_type="markdown")
+        page = PageFactory()
+        self.login(AdminFactory())
+        response = self.put(
+            url_for("api.post", post=post), {"body_type": "blocs", "content_as_page": str(page.id)}
+        )
+        assert200(response)
+        post.reload()
+        assert post.body_type == "blocs"
+        assert post.content_as_page.id == page.id
+
+    def test_post_api_update_remove_content_as_page_from_blocs_post(self):
+        """It should fail to remove content_as_page from a post with body_type='blocs'"""
+        page = PageFactory()
+        post = PostFactory(body_type="blocs", content_as_page=page)
+        self.login(AdminFactory())
+        response = self.put(url_for("api.post", post=post), {"content_as_page": None})
+        assert400(response)
+
+    def test_post_api_update_body_type_preserves_content_as_page(self):
+        """Switching from 'blocs' to 'markdown' preserves content_as_page so user can switch back"""
+        page = PageFactory()
+        post = PostFactory(body_type="blocs", content_as_page=page)
+        self.login(AdminFactory())
+        response = self.put(url_for("api.post", post=post), {"body_type": "markdown"})
+        assert200(response)
+        post.reload()
+        assert post.body_type == "markdown"
+        assert post.content_as_page.id == page.id
+
+    def test_post_api_filter_by_kind(self):
+        """It should filter posts by kind"""
+        news_post = PostFactory(kind="news")
+        page_post = PostFactory(kind="page")
+
+        response = self.get(url_for("api.posts", kind="news"))
+        assert200(response)
+        assert len(response.json["data"]) == 1
+        assert response.json["data"][0]["id"] == str(news_post.id)
+
+        response = self.get(url_for("api.posts", kind="page"))
+        assert200(response)
+        assert len(response.json["data"]) == 1
+        assert response.json["data"][0]["id"] == str(page_post.id)
+
+    def test_rss_feed_only_returns_news(self):
+        """RSS feed should only return posts with kind=news"""
+        news_post = PostFactory(kind="news")
+        page_post = PostFactory(kind="page")
+
+        response = self.get(url_for("api.recent_posts_atom_feed"))
+        assert200(response)
+        content = response.data.decode("utf-8")
+        assert news_post.name in content
+        assert page_post.name not in content
udata/core/post/tests/test_models.py
ADDED

@@ -0,0 +1,24 @@
+import mongoengine
+import pytest
+
+from udata.core.pages.factories import PageFactory
+from udata.core.pages.models import Page
+from udata.core.post.factories import PostFactory
+from udata.tests.api import PytestOnlyDBTestCase
+
+
+class PostTest(PytestOnlyDBTestCase):
+    def test_page_deletion_raises_if_reference_still_exists(self):
+        page = PageFactory()
+        post = PostFactory(body_type="blocs", content_as_page=page)
+
+        assert Page.objects().count() == 1
+
+        with pytest.raises(mongoengine.errors.OperationError):
+            page.delete()
+
+        # Delete the post referencing the page before being able to delete the page itself
+        post.delete()
+        page.delete()
+
+        assert Page.objects().count() == 0
udata/core/reuse/apiv2.py
CHANGED
@@ -11,7 +11,7 @@ apiv2.inherit("Reuse (read)", Reuse.__read_fields__)

 ns = apiv2.namespace("reuses", "Reuse related operations")

-search_parser = ReuseSearch.as_request_parser()
+search_parser = ReuseSearch.as_request_parser(store_missing=False)

 DEFAULT_SORTING = "-created_at"

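For context on the store_missing=False change: with flask-restx's reqparse, store_missing=False keeps arguments the client did not send out of the parsed result instead of injecting them as None, so the search layer only sees filters that were actually supplied. udata's as_request_parser presumably forwards the flag to each generated argument; the sketch below uses a plain RequestParser with illustrative argument names:

# Sketch of flask-restx's store_missing behaviour, independent of udata.
# Assumes flask and flask-restx are installed; "q" and "page" are illustrative.
from flask import Flask
from flask_restx import reqparse

app = Flask(__name__)

parser = reqparse.RequestParser()
parser.add_argument("q", type=str, store_missing=False)
parser.add_argument("page", type=int, store_missing=False)

with app.test_request_context("/reuses/search/?q=velo"):
    args = parser.parse_args()
    # Only parameters actually present in the request are kept,
    # roughly {'q': 'velo'}; there is no 'page': None entry.
    print(args)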
udata/core/user/models.py
CHANGED
@@ -12,7 +12,7 @@ from flask_security import MongoEngineUserDatastore, RoleMixin, UserMixin
 from mongoengine.signals import post_save, pre_save
 from werkzeug.utils import cached_property

-from udata.api_fields import field
+from udata.api_fields import field, generate_fields
 from udata.core import storages
 from udata.core.discussions.models import Discussion
 from udata.core.linkable import Linkable
@@ -23,7 +23,7 @@ from udata.models import Follow, WithMetrics, db
 from udata.uris import cdata_url

 from . import mails
-from .constants import AVATAR_SIZES
+from .constants import AVATAR_SIZES, BIGGEST_AVATAR_SIZE

 __all__ = ("User", "Role", "datastore")

@@ -45,9 +45,12 @@ class UserSettings(db.EmbeddedDocument):
     prefered_language = db.StringField()


+@generate_fields()
 class User(WithMetrics, UserMixin, Linkable, db.Document):
     slug = field(
-        db.SlugField(max_length=255, required=True, populate_from="fullname"),
+        db.SlugField(max_length=255, required=True, populate_from="fullname"),
+        auditable=False,
+        show_as_ref=True,
     )
     email = field(db.StringField(max_length=255, required=True, unique=True))
     password = field(db.StringField())
@@ -55,12 +58,16 @@ class User(WithMetrics, UserMixin, Linkable, db.Document):
     fs_uniquifier = field(db.StringField(max_length=64, unique=True, sparse=True))
     roles = field(db.ListField(db.ReferenceField(Role), default=[]))

-    first_name = field(db.StringField(max_length=255, required=True))
-    last_name = field(db.StringField(max_length=255, required=True))
+    first_name = field(db.StringField(max_length=255, required=True), show_as_ref=True)
+    last_name = field(db.StringField(max_length=255, required=True), show_as_ref=True)

     avatar_url = field(db.URLField())
     avatar = field(
-        db.ImageField(fs=avatars, basename=default_image_basename, thumbnails=AVATAR_SIZES)
+        db.ImageField(fs=avatars, basename=default_image_basename, thumbnails=AVATAR_SIZES),
+        show_as_ref=True,
+        thumbnail_info={
+            "size": BIGGEST_AVATAR_SIZE,
+        },
     )
     website = field(db.URLField())
     about = field(
@@ -199,6 +206,14 @@ class User(WithMetrics, UserMixin, Linkable, db.Document):
         """Return the number of followers of the user."""
         return self.metrics.get("followers", 0)

+    @field(description="Link to the API endpoint for this user", show_as_ref=True)
+    def uri(self, *args, **kwargs):
+        return self.self_api_url(*args, **kwargs)
+
+    @field(description="Link to the udata web page for this user", show_as_ref=True)
+    def page(self, *args, **kwargs):
+        return self.self_web_url(*args, **kwargs)
+
     def generate_api_key(self):
         payload = {
             "user": str(self.id),
udata/features/notifications/models.py
CHANGED

@@ -5,6 +5,7 @@ from udata.api_fields import field, generate_fields
 from udata.core.organization.notifications import MembershipRequestNotificationDetails
 from udata.core.user.api_fields import user_ref_fields
 from udata.core.user.models import User
+from udata.features.transfer.notifications import TransferRequestNotificationDetails
 from udata.models import db
 from udata.mongo.datetime_fields import Datetimed
 from udata.mongo.queryset import UDataQuerySet
@@ -51,6 +52,8 @@ class Notification(Datetimed, db.Document):
         filterable={},
     )
     details = field(
-        db.GenericEmbeddedDocumentField(
+        db.GenericEmbeddedDocumentField(
+            choices=(MembershipRequestNotificationDetails, TransferRequestNotificationDetails)
+        ),
         generic=True,
     )
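The details field now enumerates the embedded-document classes it accepts. In mongoengine, a GenericEmbeddedDocumentField with a choices tuple rejects any other embedded document at validation time; a standalone sketch with hypothetical InboxItem, NoteDetails and SpamDetails classes (not udata's models):

# Standalone sketch of GenericEmbeddedDocumentField(choices=...) validation.
# All class names below are hypothetical, for illustration only.
from mongoengine import Document, EmbeddedDocument, StringField
from mongoengine import GenericEmbeddedDocumentField, ValidationError

class NoteDetails(EmbeddedDocument):
    text = StringField()

class SpamDetails(EmbeddedDocument):
    text = StringField()

class InboxItem(Document):
    # Only NoteDetails is accepted; any other embedded document fails validation.
    details = GenericEmbeddedDocumentField(choices=(NoteDetails,))

InboxItem(details=NoteDetails(text="ok")).validate()        # passes
try:
    InboxItem(details=SpamDetails(text="nope")).validate()  # rejected
except ValidationError as e:
    print("rejected:", e)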
udata/features/transfer/actions.py
CHANGED

@@ -36,6 +36,7 @@ def accept_transfer(transfer, comment=None):
     transfer.status = "accepted"
     transfer.response_comment = comment
     transfer.save()
+    Transfer.after_handle.send(transfer)

     subject = transfer.subject
     recipient = transfer.recipient
@@ -59,5 +60,6 @@ def refuse_transfer(transfer, comment=None):
     transfer.status = "refused"
     transfer.response_comment = comment
     transfer.save()
+    Transfer.after_handle.send(transfer)

     return transfer
udata/features/transfer/models.py
CHANGED

@@ -1,6 +1,9 @@
 import logging
 from datetime import datetime

+from blinker import Signal
+from mongoengine.signals import post_save
+
 from udata.i18n import lazy_gettext as _
 from udata.mongo import db

@@ -30,6 +33,9 @@ class Transfer(db.Document):
     responder = db.ReferenceField("User")
     response_comment = db.StringField()

+    on_create = Signal()
+    after_handle = Signal()
+
     meta = {
         "indexes": [
             "owner",
@@ -38,3 +44,14 @@
             "status",
         ]
     }
+
+    @classmethod
+    def post_save(cls, sender, document, **kwargs):
+        """Handle post save signal for Transfer documents."""
+        # Only trigger on_create signal on creation, not on every save
+        if kwargs.get("created"):
+            cls.on_create.send(document)
+
+
+# Connect the post_save signal
+post_save.connect(Transfer.post_save, sender=Transfer)
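Transfer now carries two class-level blinker signals: on_create is sent from the mongoengine post_save hook only when kwargs.get("created") is true, and after_handle is sent from accept_transfer/refuse_transfer once a request has been processed. A standalone sketch of the same publish/subscribe pattern, with a hypothetical Order class standing in for Transfer:

# Standalone sketch of class-level blinker signals, mirroring Transfer.on_create
# and Transfer.after_handle. Order and its receivers are hypothetical examples.
from blinker import Signal

class Order:
    on_create = Signal()
    after_handle = Signal()

    def __init__(self, ref):
        self.ref = ref
        # In udata the on_create send happens in a mongoengine post_save hook,
        # guarded by kwargs.get("created") so it only fires for new documents.
        Order.on_create.send(self)

    def handle(self):
        Order.after_handle.send(self)

@Order.on_create.connect
def notify_created(order, **kwargs):
    print(f"order {order.ref} created")

@Order.after_handle.connect
def notify_handled(order, **kwargs):
    print(f"order {order.ref} handled")

order = Order("A-42")   # -> "order A-42 created"
order.handle()          # -> "order A-42 handled"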
udata/features/transfer/notifications.py
CHANGED

@@ -1,11 +1,107 @@
 import logging
+from datetime import datetime

+from udata.api_fields import field, generate_fields
+from udata.core.dataservices.models import Dataservice
+from udata.core.dataset.models import Dataset
+from udata.core.organization.models import Organization
+from udata.core.reuse.models import Reuse
+from udata.core.user.models import User
 from udata.features.notifications.actions import notifier
 from udata.models import Transfer
+from udata.mongo import db

 log = logging.getLogger(__name__)


+@generate_fields()
+class TransferRequestNotificationDetails(db.EmbeddedDocument):
+    transfer_owner = field(
+        db.GenericReferenceField(choices=(User, Organization), required=True),
+        readonly=True,
+        auditable=False,
+        allow_null=True,
+        filterable={},
+    )
+    transfer_recipient = field(
+        db.GenericReferenceField(choices=(User, Organization), required=True),
+        readonly=True,
+        auditable=False,
+        allow_null=True,
+        filterable={},
+    )
+    transfer_subject = field(
+        db.GenericReferenceField(choices=(Dataset, Dataservice, Reuse), required=True),
+        readonly=True,
+        auditable=False,
+        allow_null=True,
+        filterable={},
+    )
+
+
+@Transfer.on_create.connect
+def on_transfer_created(transfer, **kwargs):
+    """Create notification when a new transfer request is created"""
+
+    from udata.features.notifications.models import Notification
+
+    recipient = transfer.recipient
+    owner = transfer.owner
+    users = []
+
+    if isinstance(recipient, User):
+        users = [recipient]
+    elif isinstance(recipient, Organization):
+        users = [member.user for member in recipient.members if member.role == "admin"]
+
+    for user in users:
+        try:
+            # we don't want notifications for the same transfer, if the previous one is stil no handled
+            existing = Notification.objects(
+                user=user,
+                details__transfer_recipient=recipient,
+                details__transfer_owner=owner,
+                details__transfer_subject=transfer.subject,
+                handled_at=None,
+            ).first()
+
+            if not existing:
+                notification = Notification(
+                    user=user,
+                    details=TransferRequestNotificationDetails(
+                        transfer_owner=owner,
+                        transfer_recipient=recipient,
+                        transfer_subject=transfer.subject,
+                    ),
+                )
+                notification.created_at = transfer.created
+                notification.save()
+        except Exception as e:
+            log.error(
+                f"Error creating notification for admin user {user.id} "
+                f"and recipient {recipient.id}: {e}"
+            )
+
+
+@Transfer.after_handle.connect
+def on_handle_transfer(transfer, **kwargs):
+    """Update handled_at timestamp on related notifications when a transfer is handled"""
+    from udata.features.notifications.models import Notification
+
+    # Find all non handled notifications related to this transfer
+    notifications = Notification.objects(
+        details__transfer_subject=transfer.subject,
+        details__transfer_owner=transfer.owner,
+        details__transfer_recipient=transfer.recipient,
+        handled_at=None,
+    )
+
+    # Update handled_at for all matching notifications
+    for notification in notifications:
+        notification.handled_at = datetime.utcnow()
+        notification.save()
+
+
 @notifier("transfer_request")
 def transfer_request_notifications(user):
     """Notify user about pending transfer requests"""
udata/harvest/backends/ckan/harvesters.py
CHANGED

@@ -3,6 +3,8 @@ import logging
 from urllib.parse import urljoin
 from uuid import UUID

+from dateutil.parser import ParserError
+
 from udata import uris
 from udata.core.dataset.constants import UpdateFrequency
 from udata.core.dataset.models import HarvestDatasetMetadata, HarvestResourceMetadata
@@ -202,10 +204,16 @@ class CkanBackend(BaseBackend):
                 log.debug("frequency value not handled: %s", value)
             # Temporal coverage start
             elif key == "temporal_start":
-
+                try:
+                    temporal_start = daterange_start(value)
+                except ParserError:
+                    log.warning(f"Unparseable temporal_start value: '{value}'")
             # Temporal coverage end
             elif key == "temporal_end":
-
+                try:
+                    temporal_end = daterange_end(value)
+                except ParserError:
+                    log.warning(f"Unparseable temporal_end value: '{value}'")
             else:
                 dataset.extras[extra["key"]] = value

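dateutil's ParserError (a ValueError subclass available since python-dateutil 2.8.1) is what its parser raises on input it cannot understand; the harvester now logs a warning instead of letting a bad temporal value abort the item. A standalone sketch of the pattern using dateutil.parser.parse directly, since udata's daterange_start/daterange_end helpers are not shown here:

# Standalone sketch of tolerant date parsing with dateutil's ParserError.
# Uses dateutil.parser.parse directly; the udata helpers wrap comparable parsing.
import logging

from dateutil.parser import ParserError, parse

logging.basicConfig(level=logging.INFO)
log = logging.getLogger(__name__)

def safe_parse(value):
    """Return a date or None, warning instead of raising on bad input."""
    try:
        return parse(value).date()
    except ParserError:
        log.warning(f"Unparseable temporal value: '{value}'")
        return None

print(safe_parse("2021-08-17"))   # 2021-08-17
print(safe_parse("not-a-date"))   # None (plus a warning)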
udata/migrations/2021-08-17-harvest-integrity.py
CHANGED

@@ -5,6 +5,7 @@ Remove Harvest db integrity problems

 import logging

+import click
 import mongoengine

 from udata.core.jobs.models import PeriodicTask
@@ -16,29 +17,35 @@ log = logging.getLogger(__name__)
 def migrate(db):
     log.info("Processing HarvestJob source references.")

-    harvest_jobs = HarvestJob.objects().no_cache()
+    harvest_jobs = HarvestJob.objects().no_cache()
+    total = harvest_jobs.count()
     count = 0
-
-
-
-
-
-
+    with click.progressbar(harvest_jobs, length=total, label="Checking sources refs") as jobs:
+        for harvest_job in jobs:
+            try:
+                if harvest_job.source is None:
+                    raise mongoengine.errors.DoesNotExist()
+                harvest_job.source.id
+            except mongoengine.errors.DoesNotExist:
+                count += 1
+                harvest_job.delete()

     log.info(f"Completed, removed {count} HarvestJob objects")

     log.info("Processing HarvestJob items references.")

-    harvest_jobs = HarvestJob.objects.filter(items__0__exists=True).no_cache()
+    harvest_jobs = HarvestJob.objects.filter(items__0__exists=True).no_cache()
+    total = harvest_jobs.count()
     count = 0
-
-
-
-
-
-
-
-
+    with click.progressbar(harvest_jobs, length=total, label="Checking items refs") as jobs:
+        for harvest_job in jobs:
+            for item in harvest_job.items:
+                try:
+                    item.dataset and item.dataset.id
+                except mongoengine.errors.DoesNotExist:
+                    count += 1
+                    item.dataset = None
+            harvest_job.save()

     log.info(f"Completed, modified {count} HarvestJob objects")

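Both loops in this migration now run through click.progressbar, which wraps an iterable and draws a progress bar while iterating; passing length explicitly lets click size the bar without having to count the iterable up front, which matters for a no_cache() queryset. A standalone sketch of the pattern, with a plain list standing in for the queryset:

# Standalone sketch of click.progressbar with an explicit length and label,
# mirroring the migration above; a plain list stands in for the queryset.
import time

import click

items = [f"job-{i}" for i in range(25)]

def process(job):
    time.sleep(0.01)  # placeholder for the real per-document work

with click.progressbar(items, length=len(items), label="Checking sources refs") as bar:
    for job in bar:
        process(job)

click.echo("done")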
udata/migrations/2025-12-16-create-transfer-request-notifications.py
ADDED

@@ -0,0 +1,69 @@
+"""
+Create TransferRequestNotification for all pending transfer requests
+"""
+
+import logging
+
+import click
+
+from udata.features.notifications.models import Notification
+from udata.features.transfer.models import Transfer
+from udata.features.transfer.notifications import TransferRequestNotificationDetails
+
+log = logging.getLogger(__name__)
+
+
+def migrate(db):
+    log.info("Processing pending transfer requests...")
+
+    created_count = 0
+
+    # Get all pending transfers
+    transfers = Transfer.objects(status="pending")
+
+    with click.progressbar(transfers, length=transfers.count()) as transfer_list:
+        for transfer in transfer_list:
+            try:
+                # Get the recipient (could be a user or an organization)
+                recipient = transfer.recipient
+
+                # For organizations, we need to find admins who should receive notifications
+                if recipient._cls == "Organization":
+                    # Get all admin users for this organization
+                    recipient_users = [
+                        member.user for member in recipient.members if member.role == "admin"
+                    ]
+                else:
+                    # For users, just use the recipient directly
+                    recipient_users = [recipient]
+
+                # Create a notification for each recipient user
+                for recipient_user in recipient_users:
+                    try:
+                        # Check if notification already exists
+                        existing = Notification.objects(
+                            user=recipient_user,
+                            details__transfer_recipient=recipient,
+                            details__transfer_owner=transfer.owner,
+                            details__transfer_subject=transfer.subject,
+                        ).first()
+                        if not existing:
+                            notification = Notification(user=recipient_user)
+                            notification.details = TransferRequestNotificationDetails(
+                                transfer_owner=transfer.owner,
+                                transfer_recipient=recipient,
+                                transfer_subject=transfer.subject,
+                            )
+                            # Set the created_at to match the transfer creation date
+                            notification.created_at = transfer.created
+                            notification.save()
+                            created_count += 1
+                    except Exception as e:
+                        log.error(
+                            f"Error creating notification for user {recipient_user.id} "
+                            f"and transfer {transfer.id}: {e}"
+                        )
+            except Exception as e:
+                log.error(f"Error creating notification for transfer {transfer.id}: {e}")
+
+    log.info(f"Created {created_count} TransferRequestNotifications")
udata/migrations/2026-01-14-add-default-kind-to-posts.py
ADDED

@@ -0,0 +1,17 @@
+"""
+This migration sets Post.kind to "news" for posts that don't have a kind.
+This is necessary because the default value is only applied to new documents,
+not existing ones.
+"""
+
+import logging
+
+from udata.models import Post
+
+log = logging.getLogger(__name__)
+
+
+def migrate(db):
+    log.info("Processing posts without kind...")
+    count = Post.objects(kind__exists=False).update(kind="news")
+    log.info(f"\tSet kind='news' for {count} posts")
udata/tasks.py
CHANGED
@@ -161,6 +161,7 @@ def init_app(app):
     import udata.core.metrics.tasks  # noqa
     import udata.core.tags.tasks  # noqa
     import udata.core.activity.tasks  # noqa
+    import udata.core.dataservices.tasks  # noqa
    import udata.core.dataset.tasks  # noqa
    import udata.core.dataset.transport  # noqa
    import udata.core.dataset.recommendations  # noqa