udata-10.0.6-py2.py3-none-any.whl → udata-10.0.6.dev32964-py2.py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of udata might be problematic; review the file changes below for details.

Files changed (35) hide show
  1. udata/__init__.py +1 -1
  2. udata/commands/dcat.py +4 -13
  3. udata/core/dataservices/rdf.py +6 -4
  4. udata/core/dataservices/tasks.py +4 -2
  5. udata/core/dataset/tasks.py +1 -10
  6. udata/core/organization/models.py +1 -1
  7. udata/core/post/api.py +5 -15
  8. udata/core/post/tests/test_api.py +2 -16
  9. udata/harvest/tasks.py +2 -3
  10. udata/static/chunks/{10.8ca60413647062717b1e.js → 10.471164b2a9fe15614797.js} +3 -3
  11. udata/static/chunks/{10.8ca60413647062717b1e.js.map → 10.471164b2a9fe15614797.js.map} +1 -1
  12. udata/static/chunks/{11.b6f741fcc366abfad9c4.js → 11.51d706fb9521c16976bc.js} +3 -3
  13. udata/static/chunks/{11.b6f741fcc366abfad9c4.js.map → 11.51d706fb9521c16976bc.js.map} +1 -1
  14. udata/static/chunks/{13.2d06442dd9a05d9777b5.js → 13.f29411b06be1883356a3.js} +2 -2
  15. udata/static/chunks/{13.2d06442dd9a05d9777b5.js.map → 13.f29411b06be1883356a3.js.map} +1 -1
  16. udata/static/chunks/{17.e8e4caaad5cb0cc0bacc.js → 17.3bd0340930d4a314ce9c.js} +2 -2
  17. udata/static/chunks/{17.e8e4caaad5cb0cc0bacc.js.map → 17.3bd0340930d4a314ce9c.js.map} +1 -1
  18. udata/static/chunks/{19.f03a102365af4315f9db.js → 19.8da42e8359d72afc2618.js} +3 -3
  19. udata/static/chunks/{19.f03a102365af4315f9db.js.map → 19.8da42e8359d72afc2618.js.map} +1 -1
  20. udata/static/chunks/{8.778091d55cd8ea39af6b.js → 8.54e44b102164ae5e7a67.js} +2 -2
  21. udata/static/chunks/{8.778091d55cd8ea39af6b.js.map → 8.54e44b102164ae5e7a67.js.map} +1 -1
  22. udata/static/chunks/{9.033d7e190ca9e226a5d0.js → 9.07515e5187f475bce828.js} +3 -3
  23. udata/static/chunks/{9.033d7e190ca9e226a5d0.js.map → 9.07515e5187f475bce828.js.map} +1 -1
  24. udata/static/common.js +1 -1
  25. udata/static/common.js.map +1 -1
  26. udata/tests/dataservice/test_dataservice_tasks.py +0 -5
  27. udata/tests/dataset/test_dataset_tasks.py +3 -19
  28. udata/tests/organization/test_organization_model.py +0 -6
  29. {udata-10.0.6.dist-info → udata-10.0.6.dev32964.dist-info}/METADATA +3 -10
  30. {udata-10.0.6.dist-info → udata-10.0.6.dev32964.dist-info}/RECORD +34 -35
  31. udata/tests/test_dcat_commands.py +0 -26
  32. {udata-10.0.6.dist-info → udata-10.0.6.dev32964.dist-info}/LICENSE +0 -0
  33. {udata-10.0.6.dist-info → udata-10.0.6.dev32964.dist-info}/WHEEL +0 -0
  34. {udata-10.0.6.dist-info → udata-10.0.6.dev32964.dist-info}/entry_points.txt +0 -0
  35. {udata-10.0.6.dist-info → udata-10.0.6.dev32964.dist-info}/top_level.txt +0 -0
udata/__init__.py CHANGED
@@ -4,5 +4,5 @@
4
4
  udata
5
5
  """
6
6
 
7
- __version__ = "10.0.6"
7
+ __version__ = "10.0.6.dev"
8
8
  __description__ = "Open data portal"
udata/commands/dcat.py CHANGED
@@ -2,8 +2,7 @@ import logging
2
2
 
3
3
  import click
4
4
  import mongoengine
5
- from rdflib import Graph, URIRef
6
- from rdflib.namespace import RDF
5
+ from rdflib import Graph
7
6
 
8
7
  from udata.commands import cli, cyan, echo, green, magenta, yellow
9
8
  from udata.core.dataset.factories import DatasetFactory
@@ -13,8 +12,7 @@ from udata.harvest.backends.dcat import (
13
12
  CswIso19139DcatBackend,
14
13
  DcatBackend,
15
14
  )
16
- from udata.harvest.models import HarvestItem
17
- from udata.rdf import DCAT, DCT, namespace_manager
15
+ from udata.rdf import namespace_manager
18
16
 
19
17
  log = logging.getLogger(__name__)
20
18
 
@@ -63,22 +61,15 @@ def parse_url(url, csw, iso, quiet=False, rid=""):
63
61
  backend.job = MockJob()
64
62
  format = backend.get_format()
65
63
  echo(yellow("Detected format: {}".format(format)))
66
- graphs = backend.walk_graph(url, format)
64
+ graphs = backend.parse_graph(url, format)
67
65
 
68
66
  # serialize/unserialize graph like in the job mechanism
69
67
  graph = Graph(namespace_manager=namespace_manager)
70
- for page_number, subgraph in graphs:
68
+ for subgraph in graphs:
71
69
  serialized = subgraph.serialize(format=format, indent=None)
72
70
  _subgraph = Graph(namespace_manager=namespace_manager)
73
71
  graph += _subgraph.parse(data=serialized, format=format)
74
72
 
75
- for node in subgraph.subjects(RDF.type, DCAT.Dataset):
76
- identifier = subgraph.value(node, DCT.identifier)
77
- kwargs = {"nid": str(node), "page": page_number}
78
- kwargs["type"] = "uriref" if isinstance(node, URIRef) else "blank"
79
- item = HarvestItem(remote_id=str(identifier), kwargs=kwargs)
80
- backend.job.items.append(item)
81
-
82
73
  for item in backend.job.items:
83
74
  if not rid or rid in item.remote_id:
84
75
  echo(magenta("Processing item {}".format(item.remote_id)))
@@ -156,10 +156,12 @@ def dataservice_to_rdf(dataservice: Dataservice, graph=None):
156
156
 
157
157
  if is_hvd:
158
158
  # We also want to automatically add any HVD category tags of the dataservice's datasets.
159
- dataset_ids = [dat.id for dat in dataservice.datasets]
160
- for tag in TAG_TO_EU_HVD_CATEGORIES:
161
- if Dataset.objects(id__in=dataset_ids, tags="hvd").filter(tags=tag).first():
162
- hvd_category_tags.add(tag)
159
+ for dataset in dataservice.datasets:
160
+ if "hvd" not in dataset.tags: # Only check HVD datasets for their categories.
161
+ continue
162
+ for tag in dataset.tags:
163
+ if tag in TAG_TO_EU_HVD_CATEGORIES:
164
+ hvd_category_tags.add(tag)
163
165
  for tag in hvd_category_tags:
164
166
  d.add(DCATAP.hvdCategory, URIRef(TAG_TO_EU_HVD_CATEGORIES[tag]))
165
167
 
@@ -1,7 +1,8 @@
1
1
  from celery.utils.log import get_task_logger
2
2
 
3
3
  from udata.core.dataservices.models import Dataservice
4
- from udata.harvest.models import HarvestJob
4
+
5
+ # from udata.harvest.models import HarvestJob
5
6
  from udata.models import Discussion, Follow, Transfer
6
7
  from udata.tasks import job
7
8
 
@@ -17,7 +18,8 @@ def purge_dataservices(self):
17
18
  # Remove discussions
18
19
  Discussion.objects(subject=dataservice).delete()
19
20
  # Remove HarvestItem references
20
- HarvestJob.objects(items__dataservice=dataservice).update(set__items__S__dataservice=None)
21
+ # TODO: uncomment when adding dataservice harvest
22
+ # HarvestJob.objects(items__dataservice=dataservice).update(set__items__S__dataservice=None)
21
23
  # Remove associated Transfers
22
24
  Transfer.objects(subject=dataservice).delete()
23
25
  # Remove dataservice
@@ -48,11 +48,6 @@ def purge_datasets(self):
48
48
  datasets = topic.datasets
49
49
  datasets.remove(dataset)
50
50
  topic.update(datasets=datasets)
51
- # Remove dataservices related dataset
52
- for dataservice in Dataservice.objects(datasets=dataset):
53
- datasets = dataservice.datasets
54
- datasets.remove(dataset)
55
- dataservice.update(datasets=datasets)
56
51
  # Remove HarvestItem references
57
52
  HarvestJob.objects(items__dataset=dataset).update(set__items__S__dataset=None)
58
53
  # Remove associated Transfers
@@ -128,11 +123,7 @@ def send_frequency_reminder(self):
128
123
  def update_datasets_reuses_metrics(self):
129
124
  all_datasets = Dataset.objects.visible().timeout(False)
130
125
  for dataset in all_datasets:
131
- try:
132
- dataset.count_reuses()
133
- except Exception as e:
134
- log.error(f"Error for dataset {dataset} during reuses metrics update: {e}")
135
- continue
126
+ dataset.count_reuses()
136
127
 
137
128
 
138
129
  def get_queryset(model_cls):
@@ -117,7 +117,7 @@ class Organization(WithMetrics, OrganizationBadgeMixin, db.Datetimed, db.Documen
117
117
  max_length=255, required=True, populate_from="name", update=True, follow=True
118
118
  )
119
119
  description = db.StringField(required=True)
120
- url = db.URLField()
120
+ url = db.StringField()
121
121
  image_url = db.StringField()
122
122
  logo = db.ImageField(
123
123
  fs=avatars, basename=default_image_basename, max_size=LOGO_MAX_SIZE, thumbnails=LOGO_SIZES
udata/core/post/api.py CHANGED
@@ -1,7 +1,6 @@
1
1
  from datetime import datetime
2
2
 
3
3
  from udata.api import API, api, fields
4
- from udata.auth import Permission as AdminPermission
5
4
  from udata.auth import admin_permission
6
5
  from udata.core.dataset.api_fields import dataset_fields
7
6
  from udata.core.reuse.models import Reuse
@@ -61,13 +60,6 @@ parser = api.page_parser()
61
60
  parser.add_argument(
62
61
  "sort", type=str, default="-created_at", location="args", help="The sorting attribute"
63
62
  )
64
- parser.add_argument(
65
- "with_drafts",
66
- type=bool,
67
- default=False,
68
- location="args",
69
- help="`True` also returns the unpublished posts (only for super-admins)",
70
- )
71
63
 
72
64
 
73
65
  @ns.route("/", endpoint="posts")
@@ -78,13 +70,11 @@ class PostsAPI(API):
78
70
  def get(self):
79
71
  """List all posts"""
80
72
  args = parser.parse_args()
81
-
82
- posts = Post.objects()
83
-
84
- if not (AdminPermission().can() and args["with_drafts"]):
85
- posts = posts.published()
86
-
87
- return posts.order_by(args["sort"]).paginate(args["page"], args["page_size"])
73
+ return (
74
+ Post.objects.published()
75
+ .order_by(args["sort"])
76
+ .paginate(args["page"], args["page_size"])
77
+ )
88
78
 
89
79
  @api.doc("create_post")
90
80
  @api.secure(admin_permission)
@@ -15,28 +15,14 @@ class PostsAPITest:
15
15
 
16
16
  def test_post_api_list(self, api):
17
17
  """It should fetch a post list from the API"""
18
- PostFactory.create_batch(3)
19
- draft = PostFactory(published=None)
18
+ posts = PostFactory.create_batch(3)
19
+ posts.append(PostFactory(published=None))
20
20
 
21
21
  response = api.get(url_for("api.posts"))
22
22
  assert200(response)
23
23
  # Response should not contain the unpublished post
24
24
  assert len(response.json["data"]) == 3
25
25
 
26
- api.login(AdminFactory())
27
-
28
- response = api.get(url_for("api.posts"))
29
- assert200(response)
30
-
31
- assert len(response.json["data"]) == 3
32
- assert str(draft.id) not in [post["id"] for post in response.json["data"]]
33
-
34
- response = api.get(url_for("api.posts", with_drafts=True))
35
- assert200(response)
36
-
37
- assert len(response.json["data"]) == 4
38
- assert str(draft.id) in [post["id"] for post in response.json["data"]]
39
-
40
26
  def test_post_api_get(self, api):
41
27
  """It should fetch a post from the API"""
42
28
  post = PostFactory()
udata/harvest/tasks.py CHANGED
@@ -13,9 +13,8 @@ def harvest(self, ident):
13
13
  log.info('Launching harvest job for source "%s"', ident)
14
14
 
15
15
  source = HarvestSource.get(ident)
16
- if source.deleted or not source.active:
17
- log.info('Ignoring inactive or deleted source "%s"', ident)
18
- return # Ignore deleted and inactive sources
16
+ if source.deleted:
17
+ return # Ignore deleted sources
19
18
  Backend = backends.get(current_app, source.backend)
20
19
  backend = Backend(source)
21
20