udata 10.0.9.dev33787__py2.py3-none-any.whl → 10.0.9.dev33847__py2.py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of udata might be problematic.

Files changed (51)
  1. udata/commands/fixtures.py +42 -8
  2. udata/commands/tests/test_fixtures.py +12 -2
  3. udata/core/contact_point/api.py +13 -1
  4. udata/core/contact_point/api_fields.py +10 -1
  5. udata/core/contact_point/factories.py +3 -1
  6. udata/core/contact_point/forms.py +1 -0
  7. udata/core/contact_point/models.py +9 -1
  8. udata/core/dataservices/models.py +11 -4
  9. udata/core/dataservices/rdf.py +12 -6
  10. udata/core/dataset/api.py +1 -0
  11. udata/core/dataset/api_fields.py +3 -3
  12. udata/core/dataset/apiv2.py +5 -3
  13. udata/core/dataset/forms.py +4 -4
  14. udata/core/dataset/models.py +1 -1
  15. udata/core/dataset/rdf.py +23 -13
  16. udata/forms/fields.py +1 -4
  17. udata/harvest/api.py +5 -1
  18. udata/harvest/tests/dcat/evian.json +1 -1
  19. udata/harvest/tests/test_dcat_backend.py +5 -5
  20. udata/migrations/2024-12-05-contact-point-is-now-a-list.py +33 -0
  21. udata/rdf.py +75 -44
  22. udata/static/chunks/{10.471164b2a9fe15614797.js → 10.8ca60413647062717b1e.js} +3 -3
  23. udata/static/chunks/{10.471164b2a9fe15614797.js.map → 10.8ca60413647062717b1e.js.map} +1 -1
  24. udata/static/chunks/{11.51d706fb9521c16976bc.js → 11.0f04e49a40a0a381bcce.js} +3 -3
  25. udata/static/chunks/{11.51d706fb9521c16976bc.js.map → 11.0f04e49a40a0a381bcce.js.map} +1 -1
  26. udata/static/chunks/{13.f29411b06be1883356a3.js → 13.d9c1735d14038b94c17e.js} +2 -2
  27. udata/static/chunks/{13.f29411b06be1883356a3.js.map → 13.d9c1735d14038b94c17e.js.map} +1 -1
  28. udata/static/chunks/{17.3bd0340930d4a314ce9c.js → 17.81c57c0dedf812e43013.js} +2 -2
  29. udata/static/chunks/{17.3bd0340930d4a314ce9c.js.map → 17.81c57c0dedf812e43013.js.map} +1 -1
  30. udata/static/chunks/{8.54e44b102164ae5e7a67.js → 8.494b003a94383b142c18.js} +2 -2
  31. udata/static/chunks/{8.54e44b102164ae5e7a67.js.map → 8.494b003a94383b142c18.js.map} +1 -1
  32. udata/static/chunks/{9.07515e5187f475bce828.js → 9.033d7e190ca9e226a5d0.js} +3 -3
  33. udata/static/chunks/{9.07515e5187f475bce828.js.map → 9.033d7e190ca9e226a5d0.js.map} +1 -1
  34. udata/static/common.js +1 -1
  35. udata/static/common.js.map +1 -1
  36. udata/tests/api/test_contact_points.py +36 -5
  37. udata/tests/api/test_dataservices_api.py +2 -2
  38. udata/tests/api/test_datasets_api.py +8 -6
  39. udata/tests/api/test_organizations_api.py +1 -0
  40. udata/tests/api/test_user_api.py +1 -0
  41. udata/tests/contact_point/test_contact_point_models.py +19 -0
  42. udata/tests/dataset/test_dataset_rdf.py +30 -3
  43. udata/tests/organization/test_organization_tasks.py +1 -0
  44. udata/tests/test_rdf.py +41 -7
  45. udata/translations/udata.pot +23 -7
  46. {udata-10.0.9.dev33787.dist-info → udata-10.0.9.dev33847.dist-info}/METADATA +3 -1
  47. {udata-10.0.9.dev33787.dist-info → udata-10.0.9.dev33847.dist-info}/RECORD +51 -49
  48. {udata-10.0.9.dev33787.dist-info → udata-10.0.9.dev33847.dist-info}/LICENSE +0 -0
  49. {udata-10.0.9.dev33787.dist-info → udata-10.0.9.dev33847.dist-info}/WHEEL +0 -0
  50. {udata-10.0.9.dev33787.dist-info → udata-10.0.9.dev33847.dist-info}/entry_points.txt +0 -0
  51. {udata-10.0.9.dev33787.dist-info → udata-10.0.9.dev33847.dist-info}/top_level.txt +0 -0
udata/commands/fixtures.py CHANGED
@@ -39,7 +39,7 @@ COMMUNITY_RES_URL = "/api/1/datasets/community_resources"
  DISCUSSION_URL = "/api/1/discussions"


- DEFAULT_FIXTURE_FILE_TAG: str = "v4.0.0"
+ DEFAULT_FIXTURE_FILE_TAG: str = "v5.0.0"
  DEFAULT_FIXTURE_FILE: str = f"https://raw.githubusercontent.com/opendatateam/udata-fixtures/{DEFAULT_FIXTURE_FILE_TAG}/results.json" # noqa

  DEFAULT_FIXTURES_RESULTS_FILENAME: str = "results.json"
@@ -113,14 +113,24 @@ def generate_fixtures_file(data_source: str, results_filename: str) -> None:
  for slug in bar:
  json_fixture = {}

- json_dataset = requests.get(f"{data_source}{DATASET_URL}/{slug}/").json()
+ url = f"{data_source}{DATASET_URL}/{slug}/"
+ response = requests.get(url)
+ if not response.ok:
+ print(f"Got a status code {response.status_code} while getting {url}, skipping")
+ continue
+ json_dataset = response.json()
+ json_dataset = remove_unwanted_keys(json_dataset, "dataset")
  json_resources = json_dataset.pop("resources")
+ json_resources = remove_unwanted_keys(json_resources, "resources")
  if json_dataset["organization"] is None:
  json_owner = json_dataset.pop("owner")
- json_dataset["owner"] = json_owner["id"]
+ if json_owner:
+ json_owner = remove_unwanted_keys(json_owner, "user")
+ json_dataset["owner"] = json_owner["id"]
  else:
  json_org = json_dataset.pop("organization")
  json_org = requests.get(f"{data_source}{ORG_URL}/{json_org['id']}/").json()
+ json_org = remove_unwanted_keys(json_org, "organization")
  json_fixture["organization"] = json_org
  json_fixture["resources"] = json_resources
  json_fixture["dataset"] = json_dataset
@@ -128,21 +138,29 @@ def generate_fixtures_file(data_source: str, results_filename: str) -> None:
  json_reuses = requests.get(
  f"{data_source}{REUSE_URL}/?dataset={json_dataset['id']}"
  ).json()["data"]
+ for reuse in json_reuses:
+ reuse = remove_unwanted_keys(reuse, "reuse")
  json_fixture["reuses"] = json_reuses

  json_community = requests.get(
  f"{data_source}{COMMUNITY_RES_URL}/?dataset={json_dataset['id']}"
  ).json()["data"]
+ for community_resource in json_community:
+ community_resource = remove_unwanted_keys(community_resource, "community")
  json_fixture["community_resources"] = json_community

  json_discussion = requests.get(
  f"{data_source}{DISCUSSION_URL}/?for={json_dataset['id']}"
  ).json()["data"]
+ for discussion in json_discussion:
+ discussion = remove_unwanted_keys(discussion, "discussion")
  json_fixture["discussions"] = json_discussion

  json_dataservices = requests.get(
  f"{data_source}{DATASERVICES_URL}/?dataset={json_dataset['id']}"
  ).json()["data"]
+ for dataservice in json_dataservices:
+ dataservice = remove_unwanted_keys(dataservice, "dataservice")
  json_fixture["dataservices"] = json_dataservices

  json_result.append(json_fixture)
@@ -153,7 +171,7 @@ def generate_fixtures_file(data_source: str, results_filename: str) -> None:


  def get_or_create(data, key, model, factory):
- """Try getting the object. If it doesn't exist yet, create it with the provided factory."""
+ """Try getting the object from data[key]. If it doesn't exist yet, create it with the provided factory."""
  if key not in data or data[key] is None:
  return
  data[key] = remove_unwanted_keys(data[key], key)
@@ -175,6 +193,17 @@ def get_or_create_user(data):
  return get_or_create(data, "user", User, UserFactory)


+ def get_or_create_contact_point(data):
+ obj = ContactPoint.objects(id=data["id"]).first()
+ if not obj:
+ if not data.get("role"):
+ data["role"] = (
+ "contact" if (data.get("email") or data.get("contact_form")) else "creator"
+ )
+ obj = ContactPointFactory(**data)
+ return obj
+
+
  @cli.command()
  @click.argument("source", default=DEFAULT_FIXTURE_FILE)
  def import_fixtures(source):
@@ -192,7 +221,11 @@ def import_fixtures(source):
  user = UserFactory()
  dataset = fixture["dataset"]
  dataset = remove_unwanted_keys(dataset, "dataset")
- if fixture["organization"]:
+ contact_points = []
+ for contact_point in dataset.get("contact_points") or []:
+ contact_points.append(get_or_create_contact_point(contact_point))
+ dataset["contact_points"] = contact_points
+ if fixture.get("organization"):
  organization = fixture["organization"]
  organization["members"] = [
  Member(user=get_or_create_user(member), role=member["role"])
@@ -229,8 +262,9 @@ def import_fixtures(source):
  DiscussionFactory(**discussion, subject=dataset)
  for dataservice in fixture["dataservices"]:
  dataservice = remove_unwanted_keys(dataservice, "dataservice")
- dataservice["contact_point"] = get_or_create(
- dataservice, "contact_point", ContactPoint, ContactPointFactory
- )
+ contact_points = []
+ for contact_point in dataservice.get("contact_points") or []:
+ contact_points.append(get_or_create_contact_point(contact_point))
+ dataservice["contact_points"] = contact_points
  dataservice["organization"] = get_or_create_organization(dataservice)
  DataserviceFactory(**dataservice, datasets=[dataset])
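A side note on the new get_or_create_contact_point helper above: when an imported contact point carries no role, it is assigned "contact" if it has an email or a contact form, and "creator" otherwise. A minimal standalone sketch of that fallback (plain Python, not the udata helper itself):

    # Sketch of the role fallback used in get_or_create_contact_point() above.
    def default_role(data: dict) -> str:
        return "contact" if (data.get("email") or data.get("contact_form")) else "creator"

    assert default_role({"email": "contact@example.org"}) == "contact"
    assert default_role({"name": "No email, no form"}) == "creator"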
udata/commands/tests/test_fixtures.py CHANGED
@@ -6,6 +6,7 @@ from werkzeug.wrappers.response import Response

  import udata.commands.fixtures
  from udata import models
+ from udata.core.contact_point.factories import ContactPointFactory
  from udata.core.dataservices.factories import DataserviceFactory
  from udata.core.dataset.factories import (
  CommunityResourceFactory,
@@ -31,9 +32,12 @@ class FixturesTest:
  org = OrganizationFactory(
  members=[Member(user=user, role="editor"), Member(user=admin, role="admin")]
  )
+ contact_point = ContactPointFactory(role="contact")
  # Set the same slug we're 'exporting' from the FIXTURE_DATASET_SLUG config, see the
  # @pytest.mark.options above.
- dataset = DatasetFactory(slug="some-test-dataset-slug", organization=org)
+ dataset = DatasetFactory(
+ slug="some-test-dataset-slug", organization=org, contact_points=[contact_point]
+ )
  res = ResourceFactory()
  dataset.add_resource(res)
  ReuseFactory(datasets=[dataset], owner=user)
@@ -48,12 +52,13 @@ class FixturesTest:
  ],
  closed_by=admin,
  )
- DataserviceFactory(datasets=[dataset], organization=org)
+ DataserviceFactory(datasets=[dataset], organization=org, contact_points=[contact_point])

  with NamedTemporaryFile(mode="w+", delete=True) as fixtures_fd:
  # Get the fixtures from the local instance.
  monkeypatch.setattr(requests, "get", lambda url: api.get(url))
  monkeypatch.setattr(Response, "json", Response.get_json)
+ Response.ok = True
  result = cli("generate-fixtures-file", "", fixtures_fd.name)
  fixtures_fd.flush()
  assert "Fixtures saved to file " in result.output
@@ -65,6 +70,7 @@ class FixturesTest:
  models.CommunityResource.drop_collection()
  models.User.drop_collection()
  models.Dataservice.drop_collection()
+ models.ContactPoint.drop_collection()

  assert models.Organization.objects(slug=org.slug).count() == 0
  assert models.Dataset.objects.count() == 0
@@ -72,6 +78,7 @@ class FixturesTest:
  assert models.CommunityResource.objects.count() == 0
  assert models.User.objects.count() == 0
  assert models.Dataservice.objects.count() == 0
+ assert models.ContactPoint.objects.count() == 0

  # Then load them in the database to make sure they're correct.
  result = cli("import-fixtures", fixtures_fd.name)
@@ -82,6 +89,8 @@ class FixturesTest:
  assert result_org.members[1].user.id == admin.id
  assert result_org.members[1].role == "admin"
  assert models.Dataset.objects.count() > 0
+ result_dataset = models.Dataset.objects.first()
+ assert result_dataset.contact_points == [contact_point]
  assert models.Discussion.objects.count() > 0
  result_discussion = models.Discussion.objects.first()
  assert result_discussion.user.id == user.id
@@ -95,6 +104,7 @@ class FixturesTest:
  # Make sure we also import the dataservice organization
  result_dataservice = models.Dataservice.objects.first()
  assert result_dataservice.organization == org
+ assert result_dataservice.contact_points == [contact_point]

  def test_import_fixtures_from_default_file(self, cli):
  """Test importing fixtures from udata.commands.fixture.DEFAULT_FIXTURE_FILE."""
udata/core/contact_point/api.py CHANGED
@@ -4,8 +4,9 @@ from udata.api import API, api
  from udata.api.parsers import ModelApiParser
  from udata.core.dataset.permissions import OwnablePermission

- from .api_fields import contact_point_fields
+ from .api_fields import contact_point_fields, contact_point_roles_fields
  from .forms import ContactPointForm
+ from .models import CONTACT_ROLES


  class ContactPointApiParser(ModelApiParser):
@@ -69,3 +70,14 @@ class ContactPointAPI(API):

  contact_point.delete()
  return "", 204
+
+
+ @ns.route("/roles/", endpoint="contact_point_roles")
+ class ContactPointRolesAPI(API):
+ """Contact point roles endpoint"""
+
+ @api.doc("contact_point_roles")
+ @api.marshal_list_with(contact_point_roles_fields)
+ def get(self):
+ """List all contact point roles"""
+ return [{"id": id, "label": label} for id, label in CONTACT_ROLES.items()]
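For reference, the new ContactPointRolesAPI endpoint above simply serializes CONTACT_ROLES (added to udata/core/contact_point/models.py further down in this diff). Assuming those three roles, the marshalled payload should look roughly like the sketch below; the labels are lazily translated strings in udata, shown here as plain English:

    # Expected shape of the roles listing, assuming CONTACT_ROLES from models.py below.
    CONTACT_ROLES = {"contact": "Contact", "creator": "Creator", "publisher": "Publisher"}

    roles = [{"id": id, "label": label} for id, label in CONTACT_ROLES.items()]
    assert roles == [
        {"id": "contact", "label": "Contact"},
        {"id": "creator", "label": "Creator"},
        {"id": "publisher", "label": "Publisher"},
    ]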
udata/core/contact_point/api_fields.py CHANGED
@@ -2,7 +2,15 @@ from udata.api import api, fields
  from udata.core.organization.api_fields import org_ref_fields
  from udata.core.user.api_fields import user_ref_fields

- DEFAULT_MASK = ",".join(("id", "name", "email", "contact_form"))
+ DEFAULT_MASK = ",".join(("id", "name", "email", "contact_form", "role"))
+
+ contact_point_roles_fields = api.model(
+ "ContactPointRoles",
+ {
+ "id": fields.String(description="The contact role identifier"),
+ "label": fields.String(description="The contact role display name"),
+ },
+ )

  contact_point_fields = api.model(
  "ContactPoint",
@@ -17,6 +25,7 @@ contact_point_fields = api.model(
  "owner": fields.Nested(
  user_ref_fields, allow_null=True, description="The user information"
  ),
+ "role": fields.String(description="The role of the contact", required=True),
  },
  mask=DEFAULT_MASK,
  )
udata/core/contact_point/factories.py CHANGED
@@ -1,8 +1,9 @@
  import factory
+ import factory.fuzzy

  from udata.factories import ModelFactory

- from .models import ContactPoint
+ from .models import CONTACT_ROLES, ContactPoint


  class ContactPointFactory(ModelFactory):
@@ -12,3 +13,4 @@ class ContactPointFactory(ModelFactory):
  name = factory.Faker("name")
  contact_form = factory.Faker("url")
  email = factory.Sequence(lambda n: "contact_point{}@example.com".format(n))
+ role = factory.fuzzy.FuzzyChoice(CONTACT_ROLES.keys())
udata/core/contact_point/forms.py CHANGED
@@ -18,3 +18,4 @@ class ContactPointForm(ModelForm):
  )
  owner = fields.CurrentUserField()
  organization = fields.PublishAsField(_("Publish as"))
+ role = fields.StringField(_("Role"))
udata/core/contact_point/models.py CHANGED
@@ -5,15 +5,23 @@ from udata.mongo import db
  __all__ = ("ContactPoint",)


+ CONTACT_ROLES = {
+ "contact": _("Contact"),
+ "creator": _("Creator"),
+ "publisher": _("Publisher"),
+ }
+
+
  class ContactPoint(db.Document, Owned):
  name = db.StringField(max_length=255, required=True)
  email = db.StringField(max_length=255)
  contact_form = db.URLField()
+ role = db.StringField(required=True, choices=list(CONTACT_ROLES))

  meta = {"queryset_class": OwnedQuerySet}

  def validate(self, clean=True):
- if not self.email and not self.contact_form:
+ if self.role == "contact" and not self.email and not self.contact_form:
  raise db.ValidationError(
  _("At least an email or a contact form is required for a contact point")
  )
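The new validation rule above only constrains contact points with the "contact" role: they must still provide an email or a contact form, while "creator" and "publisher" entries may omit both. A standalone sketch of that predicate (plain Python, not the actual ContactPoint model):

    from typing import Optional

    # Mirrors the condition in ContactPoint.validate() above.
    def is_valid(role: str, email: Optional[str], contact_form: Optional[str]) -> bool:
        return not (role == "contact" and not email and not contact_form)

    assert is_valid("contact", "data@example.org", None)
    assert not is_valid("contact", None, None)  # a reachable contact is required
    assert is_valid("publisher", None, None)    # creator/publisher need neither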
udata/core/dataservices/models.py CHANGED
@@ -162,10 +162,17 @@ class Dataservice(WithMetrics, Owned, db.Document):

  extras = field(db.ExtrasField())

- contact_point = field(
- db.ReferenceField("ContactPoint", reverse_delete_rule=db.NULLIFY),
- nested_fields=contact_api_fields.contact_point_fields,
- allow_null=True,
+ contact_points = field(
+ db.ListField(
+ field(
+ db.ReferenceField("ContactPoint", reverse_delete_rule=db.PULL),
+ nested_fields=contact_api_fields.contact_point_fields,
+ allow_null=True,
+ ),
+ ),
+ filterable={
+ "key": "contact_point",
+ },
  )

  created_at = field(
udata/core/dataservices/rdf.py CHANGED
@@ -6,13 +6,14 @@ from udata.core.dataservices.models import HarvestMetadata as HarvestDataservice
  from udata.core.dataset.models import Dataset, License
  from udata.core.dataset.rdf import dataset_to_graph_id, sanitize_html
  from udata.rdf import (
+ CONTACT_POINT_ENTITY_TO_ROLE,
  DCAT,
  DCATAP,
  DCT,
  HVD_LEGISLATION,
  TAG_TO_EU_HVD_CATEGORIES,
- contact_point_from_rdf,
- contact_point_to_rdf,
+ contact_points_from_rdf,
+ contact_points_to_rdf,
  namespace_manager,
  rdf_value,
  remote_url_from_rdf,
@@ -43,7 +44,13 @@ def dataservice_from_rdf(
  dataservice.base_api_url = url_from_rdf(d, DCAT.endpointURL)
  dataservice.endpoint_description_url = url_from_rdf(d, DCAT.endpointDescription)

- dataservice.contact_point = contact_point_from_rdf(d, dataservice) or dataservice.contact_point
+ roles = [ # Imbricated list of contact points for each role
+ contact_points_from_rdf(d, rdf_entity, role, dataservice)
+ for rdf_entity, role in CONTACT_POINT_ENTITY_TO_ROLE.items()
+ ]
+ dataservice.contact_points = [ # Flattened list of contact points
+ contact_point for role in roles for contact_point in role
+ ] or dataservice.contact_points

  datasets = []
  for dataset_node in d.objects(DCAT.servesDataset):
@@ -176,9 +183,8 @@ def dataservice_to_rdf(dataservice: Dataservice, graph=None):
  for dataset in dataservice.datasets:
  d.add(DCAT.servesDataset, dataset_to_graph_id(dataset))

- contact_point = contact_point_to_rdf(dataservice.contact_point, graph)
- if contact_point:
- d.set(DCAT.contactPoint, contact_point)
+ for contact_point, predicate in contact_points_to_rdf(dataservice.contact_points, graph):
+ d.set(predicate, contact_point)

  return d

udata/core/dataset/api.py CHANGED
@@ -108,6 +108,7 @@ class DatasetApiParser(ModelApiParser):
  self.parser.add_argument("schema", type=str, location="args")
  self.parser.add_argument("schema_version", type=str, location="args")
  self.parser.add_argument("topic", type=str, location="args")
+ self.parser.add_argument("credit", type=str, location="args")
  self.parser.add_argument("dataservice", type=str, location="args")

  @staticmethod
udata/core/dataset/api_fields.py CHANGED
@@ -272,7 +272,7 @@ DEFAULT_MASK = ",".join(
  "archived",
  "quality",
  "internal",
- "contact_point",
+ "contact_points",
  )
  )

@@ -386,8 +386,8 @@ dataset_fields = api.model(
  readonly=True,
  description="Site internal and specific object's data",
  ),
- "contact_point": fields.Nested(
- contact_point_fields, allow_null=True, description="The dataset's contact points"
+ "contact_points": fields.List(
+ fields.Nested(contact_point_fields, description="The dataset contact points"),
  ),
  },
  mask=DEFAULT_MASK,
udata/core/dataset/apiv2.py CHANGED
@@ -66,7 +66,7 @@ DEFAULT_MASK_APIV2 = ",".join(
  "quality",
  "harvest",
  "internal",
- "contact_point",
+ "contact_points",
  )
  )

@@ -206,8 +206,10 @@ dataset_fields = apiv2.model(
  readonly=True,
  description="Site internal and specific object's data",
  ),
- "contact_point": fields.Nested(
- contact_point_fields, allow_null=True, description="The dataset's contact point"
+ "contact_points": fields.List(
+ fields.Nested(contact_point_fields),
+ required=False,
+ description="The dataset contact points",
  ),
  },
  mask=DEFAULT_MASK_APIV2,
udata/core/dataset/forms.py CHANGED
@@ -122,13 +122,13 @@ def validate_contact_point(form, field):
  """Validates contact point with dataset's org or owner"""
  from udata.models import ContactPoint

- if field.data:
+ for contact_point in field.data or []:
  if form.organization.data:
  contact_point = ContactPoint.objects(
- id=field.data.id, organization=form.organization.data
+ id=contact_point.id, organization=form.organization.data
  ).first()
  elif form.owner.data:
- contact_point = ContactPoint.objects(id=field.data.id, owner=form.owner.data).first()
+ contact_point = ContactPoint.objects(id=contact_point.id, owner=form.owner.data).first()
  if not contact_point:
  raise validators.ValidationError(
  _("Wrong contact point id or contact point ownership mismatch")
@@ -175,7 +175,7 @@ class DatasetForm(ModelForm):
  organization = fields.PublishAsField(_("Publish as"))
  extras = fields.ExtrasField()
  resources = fields.NestedModelList(ResourceForm)
- contact_point = fields.ContactPointField(validators=[validate_contact_point])
+ contact_points = fields.ContactPointListField(validators=[validate_contact_point])


  class ResourcesListForm(ModelForm):
udata/core/dataset/models.py CHANGED
@@ -563,7 +563,7 @@ class Dataset(WithMetrics, DatasetBadgeMixin, Owned, db.Document):

  featured = db.BooleanField(required=True, default=False)

- contact_point = db.ReferenceField("ContactPoint", reverse_delete_rule=db.NULLIFY)
+ contact_points = db.ListField(db.ReferenceField("ContactPoint", reverse_delete_rule=db.PULL))

  created_at_internal = DateTimeField(
  verbose_name=_("Creation date"), default=datetime.utcnow, required=True
udata/core/dataset/rdf.py CHANGED
@@ -23,12 +23,14 @@ from udata.harvest.exceptions import HarvestSkipException
  from udata.models import db
  from udata.rdf import (
  ADMS,
+ CONTACT_POINT_ENTITY_TO_ROLE,
  DCAT,
  DCATAP,
  DCT,
  EUFORMAT,
  EUFREQ,
  FREQ,
+ GEODCAT,
  HVD_LEGISLATION,
  IANAFORMAT,
  SCHEMA,
@@ -36,8 +38,8 @@ from udata.rdf import (
  SKOS,
  SPDX,
  TAG_TO_EU_HVD_CATEGORIES,
- contact_point_from_rdf,
- contact_point_to_rdf,
+ contact_points_from_rdf,
+ contact_points_to_rdf,
  namespace_manager,
  rdf_unique_values,
  rdf_value,
@@ -174,10 +176,9 @@ def ogc_service_to_rdf(
  if dataset.license.url:
  service.add(DCT.license, URIRef(dataset.license.url))

- if dataset and dataset.contact_point:
- contact_point = contact_point_to_rdf(dataset.contact_point, graph)
- if contact_point:
- service.set(DCAT.contactPoint, contact_point)
+ if dataset and dataset.contact_points:
+ for contact_point, predicate in contact_points_to_rdf(dataset.contact_points, graph):
+ service.set(predicate, contact_point)

  if is_hvd:
  # DCAT-AP HVD applicable legislation is also expected at the distribution > accessService level
@@ -361,13 +362,16 @@ def dataset_to_rdf(dataset: Dataset, graph: Optional[Graph] = None) -> RdfResour
  if frequency:
  d.set(DCT.accrualPeriodicity, frequency)

- publisher = owner_to_rdf(dataset, graph)
- if publisher:
- d.set(DCT.publisher, publisher)
+ owner_role = DCT.publisher
+ if any(contact_point.role == "publisher" for contact_point in dataset.contact_points):
+ # There's already a publisher, so the owner should instead be a distributor.
+ owner_role = GEODCAT.distributor
+ owner = owner_to_rdf(dataset, graph)
+ if owner:
+ d.set(owner_role, owner)

- contact_point = contact_point_to_rdf(dataset.contact_point, graph)
- if contact_point:
- d.set(DCAT.contactPoint, contact_point)
+ for contact_point, predicate in contact_points_to_rdf(dataset.contact_points, graph):
+ d.set(predicate, contact_point)

  return d

@@ -748,7 +752,13 @@ def dataset_from_rdf(graph: Graph, dataset=None, node=None, remote_url_prefix: s
  description = d.value(DCT.description) or d.value(DCT.abstract)
  dataset.description = sanitize_html(description)
  dataset.frequency = frequency_from_rdf(d.value(DCT.accrualPeriodicity))
- dataset.contact_point = contact_point_from_rdf(d, dataset) or dataset.contact_point
+ roles = [ # Imbricated list of contact points for each role
+ contact_points_from_rdf(d, rdf_entity, role, dataset)
+ for rdf_entity, role in CONTACT_POINT_ENTITY_TO_ROLE.items()
+ ]
+ dataset.contact_points = [ # Flattened list of contact points
+ contact_point for role in roles for contact_point in role
+ ] or dataset.contact_points
  schema = schema_from_rdf(d)
  if schema:
  dataset.schema = schema
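The roles/flatten pattern used in dataset_from_rdf above (and in dataservice_from_rdf earlier) collects one list of contact points per RDF predicate and role, flattens them, and keeps the previously stored value only when nothing was harvested. A small sketch of the idiom with placeholder values:

    # Placeholder values standing in for what contact_points_from_rdf() would return.
    existing = ["previously-harvested contact"]
    roles = [["alice (contact)"], [], ["bob (publisher)"]]  # one list per role

    contact_points = [cp for role in roles for cp in role] or existing
    assert contact_points == ["alice (contact)", "bob (publisher)"]

    # If no role yields anything, the empty flattened list is falsy and the existing value is kept.
    assert ([cp for role in [[], []] for cp in role] or existing) == existing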
udata/forms/fields.py CHANGED
@@ -787,12 +787,9 @@ class PublishAsField(ModelFieldMixin, Field):
  return True


- class ContactPointField(ModelFieldMixin, Field):
+ class ContactPointListField(ModelList, Field):
  model = ContactPoint

- def __init__(self, *args, **kwargs):
- super(ContactPointField, self).__init__(*args, **kwargs)
-

  def field_parse(cls, value, *args, **kwargs):
  kwargs["_form"] = WTForm()
udata/harvest/api.py CHANGED
@@ -36,7 +36,11 @@ error_fields = api.model(
  description="The error creation date", required=True, readonly=True
  ),
  "message": fields.String(description="The error short message", required=True),
- "details": fields.String(description="Optional details (ie. stacktrace)"),
+ "details": fields.Raw(
+ attribute=lambda o: o.details if admin_permission else None,
+ description="Optional details (only for super-admins)",
+ readonly=True,
+ ),
  },
  )

udata/harvest/tests/dcat/evian.json CHANGED
@@ -461,4 +461,4 @@
  ]
  }
  ]
- }
+ }
udata/harvest/tests/test_dcat_backend.py CHANGED
@@ -517,9 +517,9 @@ class DcatBackendTest:
  assert dataset.temporal_coverage is not None
  assert dataset.temporal_coverage.start == date(2016, 1, 1)
  assert dataset.temporal_coverage.end == date(2016, 12, 5)
- assert dataset.contact_point.email == "hello@its.me"
- assert dataset.contact_point.name == "Organization contact"
- assert dataset.contact_point.contact_form == "https://data.support.com"
+ assert dataset.contact_points[0].email == "hello@its.me"
+ assert dataset.contact_points[0].name == "Organization contact"
+ assert dataset.contact_points[0].contact_form == "https://data.support.com"
  assert dataset.frequency is None
  # test dct:license nested in distribution
  assert dataset.license.id == "lov1"
@@ -866,10 +866,10 @@ class CswIso19139DcatBackendTest:
  ],
  }
  assert (
- dataset.contact_point.name
+ dataset.contact_points[0].name
  == "DDTM 80 (Direction Départementale des Territoires et de la Mer de la Somme)"
  )
- assert dataset.contact_point.email == "ddtm-sap-bsig@somme.gouv.fr"
+ assert dataset.contact_points[0].email == "ddtm-sap-bsig@somme.gouv.fr"

  # License is not properly mapped in XSLT conversion
  assert dataset.license is None
udata/migrations/2024-12-05-contact-point-is-now-a-list.py CHANGED
@@ -0,0 +1,33 @@
+ """
+ The purpose here is to change the contact_point reference field
+ to a list of reference field and rename it to contact_points
+ """
+
+ import logging
+
+ from mongoengine.connection import get_db
+
+ log = logging.getLogger(__name__)
+
+
+ def migrate(db):
+ log.info("Processing Contact Point references.")
+
+ db = get_db()
+
+ # Add a `contact` role to each existing contact point.
+ db.contact_point.update_many({}, {"$set": {"role": "contact"}})
+
+ count = 0
+ for collection in [db.dataset, db.dataservice]:
+ for obj in collection.find({"contact_point": {"$exists": True}}):
+ # Change `contact_point` to be a list of contact points.
+ collection.update_one(
+ {"_id": obj["_id"]}, {"$set": {"contact_point": [obj["contact_point"]]}}
+ )
+ # If we rename after updating the field to be a list, then we can re-run the migration.
+ count += collection.update_many(
+ {}, {"$rename": {"contact_point": "contact_points"}}
+ ).modified_count
+
+ log.info(f"Completed {count} objects")