udata 10.8.3.dev37191__py2.py3-none-any.whl → 10.8.3.dev37212__py2.py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of udata might be problematic; consult the package registry's advisory page for details.

Files changed (24)
  1. udata/core/organization/models.py +14 -9
  2. udata/harvest/backends/dcat.py +13 -1
  3. udata/static/chunks/{10.471164b2a9fe15614797.js → 10.8ca60413647062717b1e.js} +3 -3
  4. udata/static/chunks/{10.471164b2a9fe15614797.js.map → 10.8ca60413647062717b1e.js.map} +1 -1
  5. udata/static/chunks/{11.51d706fb9521c16976bc.js → 11.b6f741fcc366abfad9c4.js} +3 -3
  6. udata/static/chunks/{11.51d706fb9521c16976bc.js.map → 11.b6f741fcc366abfad9c4.js.map} +1 -1
  7. udata/static/chunks/{13.f29411b06be1883356a3.js → 13.2d06442dd9a05d9777b5.js} +2 -2
  8. udata/static/chunks/{13.f29411b06be1883356a3.js.map → 13.2d06442dd9a05d9777b5.js.map} +1 -1
  9. udata/static/chunks/{17.3bd0340930d4a314ce9c.js → 17.e8e4caaad5cb0cc0bacc.js} +2 -2
  10. udata/static/chunks/{17.3bd0340930d4a314ce9c.js.map → 17.e8e4caaad5cb0cc0bacc.js.map} +1 -1
  11. udata/static/chunks/{19.8da42e8359d72afc2618.js → 19.f03a102365af4315f9db.js} +3 -3
  12. udata/static/chunks/{19.8da42e8359d72afc2618.js.map → 19.f03a102365af4315f9db.js.map} +1 -1
  13. udata/static/chunks/{8.54e44b102164ae5e7a67.js → 8.778091d55cd8ea39af6b.js} +2 -2
  14. udata/static/chunks/{8.54e44b102164ae5e7a67.js.map → 8.778091d55cd8ea39af6b.js.map} +1 -1
  15. udata/static/chunks/{9.07515e5187f475bce828.js → 9.033d7e190ca9e226a5d0.js} +3 -3
  16. udata/static/chunks/{9.07515e5187f475bce828.js.map → 9.033d7e190ca9e226a5d0.js.map} +1 -1
  17. udata/static/common.js +1 -1
  18. udata/static/common.js.map +1 -1
  19. {udata-10.8.3.dev37191.dist-info → udata-10.8.3.dev37212.dist-info}/METADATA +2 -1
  20. {udata-10.8.3.dev37191.dist-info → udata-10.8.3.dev37212.dist-info}/RECORD +24 -24
  21. {udata-10.8.3.dev37191.dist-info → udata-10.8.3.dev37212.dist-info}/LICENSE +0 -0
  22. {udata-10.8.3.dev37191.dist-info → udata-10.8.3.dev37212.dist-info}/WHEEL +0 -0
  23. {udata-10.8.3.dev37191.dist-info → udata-10.8.3.dev37212.dist-info}/entry_points.txt +0 -0
  24. {udata-10.8.3.dev37191.dist-info → udata-10.8.3.dev37212.dist-info}/top_level.txt +0 -0
@@ -188,6 +188,10 @@ class Organization(
188
188
  after_delete = Signal()
189
189
  on_delete = Signal()
190
190
 
191
+ def __init__(self, *args, **kwargs):
192
+ super().__init__(*args, **kwargs)
193
+ self.compute_aggregate_metrics = True
194
+
191
195
  @classmethod
192
196
  def pre_save(cls, sender, document, **kwargs):
193
197
  cls.before_save.send(document)
@@ -307,15 +311,16 @@ class Organization(
307
311
  from udata.models import Dataset, Follow, Reuse
308
312
 
309
313
  self.metrics["datasets"] = Dataset.objects(organization=self).visible().count()
310
- self.metrics["datasets_by_months"] = get_stock_metrics(
311
- Dataset.objects(organization=self).visible(), date_label="created_at_internal"
312
- )
313
- self.metrics["datasets_followers_by_months"] = get_stock_metrics(
314
- Follow.objects(following__in=Dataset.objects(organization=self)), date_label="since"
315
- )
316
- self.metrics["datasets_reuses_by_months"] = get_stock_metrics(
317
- Reuse.objects(datasets__in=Dataset.objects(organization=self)).visible()
318
- )
314
+ if self.compute_aggregate_metrics:
315
+ self.metrics["datasets_by_months"] = get_stock_metrics(
316
+ Dataset.objects(organization=self).visible(), date_label="created_at_internal"
317
+ )
318
+ self.metrics["datasets_followers_by_months"] = get_stock_metrics(
319
+ Follow.objects(following__in=Dataset.objects(organization=self)), date_label="since"
320
+ )
321
+ self.metrics["datasets_reuses_by_months"] = get_stock_metrics(
322
+ Reuse.objects(datasets__in=Dataset.objects(organization=self)).visible()
323
+ )
319
324
 
320
325
  self.save(signal_kwargs={"ignores": ["post_save"]})
321
326
 
@@ -66,6 +66,10 @@ def extract_graph(source, target, node, specs):
66
66
  class DcatBackend(BaseBackend):
67
67
  display_name = "DCAT"
68
68
 
69
+ def __init__(self, *args, **kwargs):
70
+ super().__init__(*args, **kwargs)
71
+ self.organizations_to_update = set()
72
+
69
73
  def inner_harvest(self):
70
74
  fmt = self.get_format()
71
75
  self.job.data = {"format": fmt}
@@ -76,6 +80,10 @@ class DcatBackend(BaseBackend):
76
80
  self.process_one_datasets_page(page_number, page)
77
81
  pages.append((page_number, page))
78
82
 
83
+ for org in self.organizations_to_update:
84
+ org.compute_aggregate_metrics = True
85
+ org.count_datasets()
86
+
79
87
  # We do a second pass to have all datasets in memory and attach datasets
80
88
  # to dataservices. It could be better to be one pass of graph walking and
81
89
  # then one pass of attaching datasets to dataservices.
@@ -217,7 +225,11 @@ class DcatBackend(BaseBackend):
217
225
 
218
226
  dataset = self.get_dataset(item.remote_id)
219
227
  remote_url_prefix = self.get_extra_config_value("remote_url_prefix")
220
- return dataset_from_rdf(page, dataset, node=node, remote_url_prefix=remote_url_prefix)
228
+ dataset = dataset_from_rdf(page, dataset, node=node, remote_url_prefix=remote_url_prefix)
229
+ if dataset.organization:
230
+ dataset.organization.compute_aggregate_metrics = False
231
+ self.organizations_to_update.add(dataset.organization)
232
+ return dataset
221
233
 
222
234
  def inner_process_dataservice(self, item: HarvestItem, page_number: int, page: Graph, node):
223
235
  item.kwargs["page_number"] = page_number