udata 10.4.1.dev35201-py2.py3-none-any.whl → 10.4.2-py2.py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of udata has been flagged as potentially problematic; further details are available on the registry page.

Files changed (50)
  1. udata/__init__.py +1 -1
  2. udata/core/activity/__init__.py +2 -0
  3. udata/core/activity/api.py +10 -2
  4. udata/core/activity/models.py +28 -1
  5. udata/core/activity/tasks.py +19 -4
  6. udata/core/dataservices/activities.py +53 -0
  7. udata/core/dataservices/api.py +43 -0
  8. udata/core/dataservices/models.py +16 -20
  9. udata/core/dataset/activities.py +52 -5
  10. udata/core/dataset/api.py +44 -0
  11. udata/core/dataset/csv.py +0 -1
  12. udata/core/dataset/models.py +49 -47
  13. udata/core/dataset/rdf.py +1 -1
  14. udata/core/metrics/commands.py +1 -0
  15. udata/core/metrics/helpers.py +102 -0
  16. udata/core/metrics/models.py +1 -0
  17. udata/core/metrics/tasks.py +1 -0
  18. udata/core/organization/activities.py +3 -2
  19. udata/core/organization/api.py +11 -0
  20. udata/core/organization/api_fields.py +6 -5
  21. udata/core/organization/models.py +31 -31
  22. udata/core/owned.py +1 -1
  23. udata/core/post/api.py +34 -0
  24. udata/core/reuse/activities.py +6 -5
  25. udata/core/reuse/api.py +42 -1
  26. udata/core/reuse/models.py +8 -16
  27. udata/core/site/models.py +33 -0
  28. udata/core/topic/activities.py +36 -0
  29. udata/core/topic/models.py +23 -15
  30. udata/core/user/activities.py +17 -6
  31. udata/core/user/api.py +1 -0
  32. udata/core/user/api_fields.py +6 -1
  33. udata/core/user/models.py +39 -32
  34. udata/migrations/2025-05-22-purge-duplicate-activities.py +101 -0
  35. udata/mongo/datetime_fields.py +1 -0
  36. udata/settings.py +4 -0
  37. udata/tests/api/test_activities_api.py +29 -1
  38. udata/tests/api/test_dataservices_api.py +53 -0
  39. udata/tests/api/test_datasets_api.py +61 -0
  40. udata/tests/api/test_organizations_api.py +27 -2
  41. udata/tests/api/test_reuses_api.py +54 -0
  42. udata/tests/dataset/test_csv_adapter.py +6 -3
  43. udata/tests/dataset/test_dataset_model.py +49 -0
  44. udata/tests/test_topics.py +19 -0
  45. {udata-10.4.1.dev35201.dist-info → udata-10.4.2.dist-info}/METADATA +17 -2
  46. {udata-10.4.1.dev35201.dist-info → udata-10.4.2.dist-info}/RECORD +50 -46
  47. {udata-10.4.1.dev35201.dist-info → udata-10.4.2.dist-info}/LICENSE +0 -0
  48. {udata-10.4.1.dev35201.dist-info → udata-10.4.2.dist-info}/WHEEL +0 -0
  49. {udata-10.4.1.dev35201.dist-info → udata-10.4.2.dist-info}/entry_points.txt +0 -0
  50. {udata-10.4.1.dev35201.dist-info → udata-10.4.2.dist-info}/top_level.txt +0 -0
udata/core/dataset/models.py CHANGED
@@ -19,6 +19,7 @@ from werkzeug.utils import cached_property
  from udata.api_fields import field
  from udata.app import cache
  from udata.core import storages
+ from udata.core.activity.models import Auditable
  from udata.core.owned import Owned, OwnedQuerySet
  from udata.frontend.markdown import mdstrip
  from udata.i18n import lazy_gettext as _
@@ -540,45 +541,57 @@ class DatasetBadgeMixin(BadgeMixin):
      __badges__ = BADGES


- class Dataset(WithMetrics, DatasetBadgeMixin, Owned, db.Document):
-     title = db.StringField(required=True)
-     acronym = db.StringField(max_length=128)
+ class Dataset(Auditable, WithMetrics, DatasetBadgeMixin, Owned, db.Document):
+     title = field(db.StringField(required=True))
+     acronym = field(db.StringField(max_length=128))
      # /!\ do not set directly the slug when creating or updating a dataset
      # this will break the search indexation
-     slug = db.SlugField(
-         max_length=255, required=True, populate_from="title", update=True, follow=True
+     slug = field(
+         db.SlugField(
+             max_length=255, required=True, populate_from="title", update=True, follow=True
+         ),
+         auditable=False,
      )
-     description = db.StringField(required=True, default="")
-     license = db.ReferenceField("License")
+     description = field(db.StringField(required=True, default=""))
+     license = field(db.ReferenceField("License"))

-     tags = db.TagListField()
-     resources = db.ListField(db.EmbeddedDocumentField(Resource))
+     tags = field(db.TagListField())
+     resources = field(db.ListField(db.EmbeddedDocumentField(Resource)), auditable=False)

-     private = db.BooleanField(default=False)
-     frequency = db.StringField(choices=list(UPDATE_FREQUENCIES.keys()))
-     frequency_date = db.DateTimeField(verbose_name=_("Future date of update"))
-     temporal_coverage = db.EmbeddedDocumentField(db.DateRange)
-     spatial = db.EmbeddedDocumentField(SpatialCoverage)
-     schema = db.EmbeddedDocumentField(Schema)
+     private = field(db.BooleanField(default=False))
+     frequency = field(db.StringField(choices=list(UPDATE_FREQUENCIES.keys())))
+     frequency_date = field(db.DateTimeField(verbose_name=_("Future date of update")))
+     temporal_coverage = field(db.EmbeddedDocumentField(db.DateRange))
+     spatial = field(db.EmbeddedDocumentField(SpatialCoverage))
+     schema = field(db.EmbeddedDocumentField(Schema))

-     ext = db.MapField(db.GenericEmbeddedDocumentField())
-     extras = db.ExtrasField()
-     harvest = db.EmbeddedDocumentField(HarvestDatasetMetadata)
+     ext = field(db.MapField(db.GenericEmbeddedDocumentField()), auditable=False)
+     extras = field(db.ExtrasField(), auditable=False)
+     harvest = field(db.EmbeddedDocumentField(HarvestDatasetMetadata), auditable=False)

-     quality_cached = db.DictField()
+     quality_cached = field(db.DictField(), auditable=False)

-     featured = db.BooleanField(required=True, default=False)
+     featured = field(
+         db.BooleanField(required=True, default=False),
+         auditable=False,
+     )

-     contact_points = db.ListField(db.ReferenceField("ContactPoint", reverse_delete_rule=db.PULL))
+     contact_points = field(
+         db.ListField(db.ReferenceField("ContactPoint", reverse_delete_rule=db.PULL))
+     )

-     created_at_internal = DateTimeField(
-         verbose_name=_("Creation date"), default=datetime.utcnow, required=True
+     created_at_internal = field(
+         DateTimeField(verbose_name=_("Creation date"), default=datetime.utcnow, required=True),
+         auditable=False,
      )
-     last_modified_internal = DateTimeField(
-         verbose_name=_("Last modification date"), default=datetime.utcnow, required=True
+     last_modified_internal = field(
+         DateTimeField(
+             verbose_name=_("Last modification date"), default=datetime.utcnow, required=True
+         ),
+         auditable=False,
      )
-     deleted = db.DateTimeField()
-     archived = db.DateTimeField()
+     deleted = field(db.DateTimeField(), auditable=False)
+     archived = field(db.DateTimeField())

      def __str__(self):
          return self.title or ""
@@ -654,18 +667,6 @@ class Dataset(WithMetrics, DatasetBadgeMixin, Owned, db.Document):
      def pre_save(cls, sender, document, **kwargs):
          cls.before_save.send(document)

-     @classmethod
-     def post_save(cls, sender, document, **kwargs):
-         if "post_save" in kwargs.get("ignores", []):
-             return
-         cls.after_save.send(document)
-         if kwargs.get("created"):
-             cls.on_create.send(document)
-         else:
-             cls.on_update.send(document)
-         if document.deleted:
-             cls.on_delete.send(document)
-
      def clean(self):
          super(Dataset, self).clean()
          if self.frequency in LEGACY_FREQUENCIES:
@@ -897,10 +898,6 @@ class Dataset(WithMetrics, DatasetBadgeMixin, Owned, db.Document):

          return result

-     @property
-     def downloads(self):
-         return sum(resource.metrics.get("views", 0) for resource in self.resources)
-
      @staticmethod
      def normalize_score(score):
          """
@@ -977,7 +974,12 @@ class Dataset(WithMetrics, DatasetBadgeMixin, Owned, db.Document):
      def remove_resource(self, resource):
          # Deletes resource's file from file storage
          if resource.fs_filename is not None:
-             storages.resources.delete(resource.fs_filename)
+             try:
+                 storages.resources.delete(resource.fs_filename)
+             except FileNotFoundError as e:
+                 log.error(
+                     f"File not found while deleting resource #{resource.id} in dataset {self.id}: {e}"
+                 )

          self.resources.remove(resource)
          self.on_resource_removed.send(self.__class__, document=self, resource_id=resource.id)
@@ -1045,19 +1047,19 @@ class Dataset(WithMetrics, DatasetBadgeMixin, Owned, db.Document):
          from udata.models import Discussion

          self.metrics["discussions"] = Discussion.objects(subject=self, closed=None).count()
-         self.save()
+         self.save(signal_kwargs={"ignores": ["post_save"]})

      def count_reuses(self):
          from udata.models import Reuse

          self.metrics["reuses"] = Reuse.objects(datasets=self).visible().count()
-         self.save()
+         self.save(signal_kwargs={"ignores": ["post_save"]})

      def count_followers(self):
          from udata.models import Follow

          self.metrics["followers"] = Follow.objects(until=None).followers(self).count()
-         self.save()
+         self.save(signal_kwargs={"ignores": ["post_save"]})


  pre_init.connect(Dataset.pre_init, sender=Dataset)
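The removed `Dataset.post_save` hook used to bail out when `"post_save"` appeared in `kwargs.get("ignores", [])`, and the `count_*` helpers above now save with `signal_kwargs={"ignores": ["post_save"]}` so that metric-only saves do not re-trigger creation/update signals. Below is a minimal, self-contained sketch of that mechanism; the `Article` model and handler are illustrative stand-ins, not udata code.

from mongoengine import DictField, Document, StringField, connect, signals

connect("demo_db")  # assumes a local MongoDB instance


class Article(Document):  # illustrative stand-in for Dataset
    title = StringField()
    metrics = DictField()


def post_save_handler(sender, document, **kwargs):
    # signal_kwargs passed to save() show up here as extra keyword arguments
    if "post_save" in kwargs.get("ignores", []):
        return  # metric-only save: skip activity/audit emission
    print(f"auditing {document.title}")


signals.post_save.connect(post_save_handler, sender=Article)

article = Article(title="demo")
article.save()  # handler runs
article.metrics["views"] = 1
article.save(signal_kwargs={"ignores": ["post_save"]})  # handler returns early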
udata/core/dataset/rdf.py CHANGED
@@ -769,7 +769,7 @@ def dataset_from_rdf(graph: Graph, dataset=None, node=None, remote_url_prefix: s
      # Support dct:abstract if dct:description is missing (sometimes used instead)
      description = d.value(DCT.description) or d.value(DCT.abstract)
      dataset.description = sanitize_html(description)
-     dataset.frequency = frequency_from_rdf(d.value(DCT.accrualPeriodicity))
+     dataset.frequency = frequency_from_rdf(d.value(DCT.accrualPeriodicity)) or dataset.frequency
      roles = [  # Imbricated list of contact points for each role
          contact_points_from_rdf(d, rdf_entity, role, dataset)
          for rdf_entity, role in CONTACT_POINT_ENTITY_TO_ROLE.items()
udata/core/metrics/commands.py CHANGED
@@ -60,6 +60,7 @@ def update(
          site.count_max_org_followers()
          site.count_max_org_reuses()
          site.count_max_org_datasets()
+         site.count_stock_metrics()
      except Exception as e:
          log.info(f"Error during update: {e}")

udata/core/metrics/helpers.py ADDED
@@ -0,0 +1,102 @@
+ import logging
+ from collections import OrderedDict
+ from datetime import datetime, timedelta
+ from typing import Dict, List, Union
+
+ import requests
+ from bson import ObjectId
+ from dateutil.rrule import MONTHLY, rrule
+ from flask import current_app
+ from mongoengine import QuerySet
+ from pymongo.command_cursor import CommandCursor
+
+ log = logging.getLogger(__name__)
+
+
+ def get_last_13_months() -> List[str]:
+     dstart = datetime.today().replace(day=1) - timedelta(days=365)
+     months = rrule(freq=MONTHLY, count=13, dtstart=dstart)
+     return [month.strftime("%Y-%m") for month in months]
+
+
+ def compute_monthly_metrics(metrics_data: List[Dict], metrics_labels: List[str]) -> OrderedDict:
+     # Initialize default monthly_metrics
+     monthly_metrics = OrderedDict(
+         (month, {label: 0 for label in metrics_labels}) for month in get_last_13_months()
+     )
+     # Update monthly_metrics with metrics_data values
+     for entry in metrics_data:
+         entry_month = entry["metric_month"]
+         if entry_month in monthly_metrics:
+             for metric_label in metrics_labels:
+                 label = f"monthly_{metric_label}"
+                 monthly_metrics[entry_month][metric_label] = entry.get(label) or 0
+     return monthly_metrics
+
+
+ def metrics_by_label(monthly_metrics: Dict, metrics_labels: List[str]) -> List[OrderedDict]:
+     metrics_by_label = []
+     for label in metrics_labels:
+         metrics_by_label.append(
+             OrderedDict((month, monthly_metrics[month][label]) for month in monthly_metrics)
+         )
+     return metrics_by_label
+
+
+ def get_metrics_for_model(
+     model: str, id: Union[str, ObjectId, None], metrics_labels: List[str]
+ ) -> List[OrderedDict]:
+     """
+     Get distant metrics for a particular model object
+     """
+     if not current_app.config["METRICS_API"]:
+         # TODO: How to best deal with no METRICS_API, prevent calling or return empty?
+         # raise ValueError("missing config METRICS_API to use this function")
+         return [{} for _ in range(len(metrics_labels))]
+     models = model + "s" if id else model  # TODO: not clean of a hack
+     model_metrics_api = f"{current_app.config['METRICS_API']}/{models}/data/"
+     try:
+         params = {"metric_month__sort": "desc"}
+         if id:
+             params[f"{model}_id__exact"] = id
+         res = requests.get(model_metrics_api, params)
+         res.raise_for_status()
+         monthly_metrics = compute_monthly_metrics(res.json()["data"], metrics_labels)
+         return metrics_by_label(monthly_metrics, metrics_labels)
+     except requests.exceptions.RequestException as e:
+         log.exception(f"Error while getting metrics for {model}({id}): {e}")
+         return [{} for _ in range(len(metrics_labels))]
+
+
+ def compute_monthly_aggregated_metrics(aggregation_res: CommandCursor) -> OrderedDict:
+     monthly_metrics = OrderedDict((month, 0) for month in get_last_13_months())
+     for monthly_count in aggregation_res:
+         year, month = monthly_count["_id"].split("-")
+         monthly_label = year + "-" + month.zfill(2)
+         if monthly_label in monthly_metrics:
+             monthly_metrics[monthly_label] = monthly_count["count"]
+     return monthly_metrics
+
+
+ def get_stock_metrics(objects: QuerySet, date_label: str = "created_at") -> OrderedDict:
+     """
+     Get stock metrics for a particular model object
+     """
+     pipeline = [
+         {"$match": {date_label: {"$gte": datetime.now() - timedelta(days=365)}}},
+         {
+             "$group": {
+                 "_id": {
+                     "$concat": [
+                         {"$substr": [{"$year": f"${date_label}"}, 0, 4]},
+                         "-",
+                         {"$substr": [{"$month": f"${date_label}"}, 0, 12]},
+                     ]
+                 },
+                 "count": {"$sum": 1},
+             }
+         },
+     ]
+     aggregation_res = objects.aggregate(*pipeline)
+
+     return compute_monthly_aggregated_metrics(aggregation_res)
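A sketch of how these helpers might be consumed; the metric labels and the queryset filter are illustrative, and get_metrics_for_model() only does real work inside a Flask app context with METRICS_API configured (otherwise it returns empty dicts, as shown above).

from bson import ObjectId

from udata.core.metrics.helpers import get_metrics_for_model, get_stock_metrics
from udata.models import Dataset

# Remote, per-object metrics: one OrderedDict per label, keyed by "YYYY-MM"
dataset_id = ObjectId()  # placeholder id
visit, download = get_metrics_for_model(
    "dataset", dataset_id, metrics_labels=["visit", "download_resource"]
)

# Local stock metrics: monthly creation counts over the last 13 months,
# computed with the MongoDB aggregation pipeline above
created_per_month = get_stock_metrics(
    Dataset.objects(deleted=None), date_label="created_at_internal"
)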
udata/core/metrics/models.py CHANGED
@@ -8,6 +8,7 @@ class WithMetrics(object):
      metrics = field(
          db.DictField(),
          readonly=True,
+         auditable=False,
      )

      __metrics_keys__ = []
udata/core/metrics/tasks.py CHANGED
@@ -24,5 +24,6 @@ def compute_site_metrics(self):
      site.count_max_org_followers()
      site.count_max_org_reuses()
      site.count_max_org_datasets()
+     site.count_stock_metrics()
      # Sending signal
      on_site_metrics_computed.send(site)
udata/core/organization/activities.py CHANGED
@@ -32,6 +32,7 @@ def on_user_created_organization(organization):


  @Organization.on_update.connect
- def on_user_updated_organization(organization):
+ def on_user_updated_organization(organization, **kwargs):
+     changed_fields = kwargs.get("changed_fields", [])
      if current_user and current_user.is_authenticated:
-         UserUpdatedOrganization.emit(organization, organization)
+         UserUpdatedOrganization.emit(organization, organization, changed_fields)
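The activity handlers now accept `**kwargs` and read a `changed_fields` list. The emitting side is not part of this diff (it presumably lives in the new `Auditable` mixin); the blinker sketch below only illustrates the calling convention the handlers expect.

from blinker import Namespace

signals = Namespace()
on_update = signals.signal("illustrative-on-update")


@on_update.connect
def handle_update(document, **kwargs):
    # Handlers tolerate senders that do or do not pass changed_fields
    changed_fields = kwargs.get("changed_fields", [])
    print(f"{document!r} changed: {changed_fields}")


on_update.send("some-document", changed_fields=["description", "tags"])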
udata/core/organization/api.py CHANGED
@@ -12,6 +12,7 @@ from udata.core.badges import api as badges_api
  from udata.core.badges.fields import badge_fields
  from udata.core.contact_point.api import ContactPointApiParser
  from udata.core.contact_point.api_fields import contact_point_page_fields
+ from udata.core.dataservices.csv import DataserviceCsvAdapter
  from udata.core.dataservices.models import Dataservice
  from udata.core.dataset.api import DatasetApiParser
  from udata.core.dataset.api_fields import dataset_page_fields
@@ -178,6 +179,16 @@ class DatasetsCsvAPI(API):
          return csv.stream(adapter, "{0}-datasets".format(org.slug))


+ @ns.route("/<org:org>/dataservices.csv", endpoint="organization_dataservices_csv")
+ @api.response(404, "Organization not found")
+ @api.response(410, "Organization has been deleted")
+ class DataservicesCsv(API):
+     def get(self, org):
+         dataservices = Dataservice.objects(organization=str(org.id)).visible()
+         adapter = DataserviceCsvAdapter(dataservices)
+         return csv.stream(adapter, "{0}-dataservices".format(org.slug))
+
+
  @ns.route("/<org:org>/discussions.csv", endpoint="organization_discussions_csv", doc=common_doc)
  @api.response(404, "Organization not found")
  @api.response(410, "Organization has been deleted")
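The new route mirrors the existing per-organization datasets.csv export. Assuming the API is mounted under udata's default /api/1 prefix, and using a made-up host and organization slug, the export could be fetched like this:

import requests

# Host, prefix and slug are assumptions for illustration only
url = "https://demo.data.gouv.fr/api/1/organizations/some-organization/dataservices.csv"
resp = requests.get(url)
resp.raise_for_status()
print(resp.text.splitlines()[0])  # CSV header row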
udata/core/organization/api_fields.py CHANGED
@@ -42,9 +42,6 @@ org_ref_fields = api.inherit(
      },
  )

- # This import is not at the top of the file to avoid circular imports
- from udata.core.user.api_fields import user_ref_fields  # noqa
-

  def check_can_access_user_private_info():
      # This endpoint is secure, only organization member has access.
@@ -64,14 +61,18 @@ def check_can_access_user_private_info():
  def member_email_with_visibility_check(email):
      if current_user_is_admin_or_self():
          return email
+     name, domain = email.split("@")
      if check_can_access_user_private_info():
          # Obfuscate email partially for other members
-         name, domain = email.split("@")
          name = name[:2] + "*" * (len(name) - 2)
          return f"{name}@{domain}"
-     return None
+     # Return only domain for other users
+     return f"***@{domain}"


+ # This import is not at the top of the file to avoid circular imports
+ from udata.core.user.api_fields import user_ref_fields  # noqa
+
  member_user_with_email_fields = api.inherit(
      "MemberUserWithEmail",
      user_ref_fields,
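With this change non-members get a domain-only address instead of None. A worked example of the masking rule from the hunk above (the address is made up; admins and the member themselves still see the full address):

name, domain = "jane.doe@example.org".split("@")
assert name[:2] + "*" * (len(name) - 2) == "ja******"  # other organization members
assert f"***@{domain}" == "***@example.org"            # everyone else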
udata/core/organization/models.py CHANGED
@@ -6,6 +6,7 @@ from mongoengine.signals import post_save, pre_save
  from werkzeug.utils import cached_property

  from udata.api_fields import field
+ from udata.core.activity.models import Auditable
  from udata.core.badges.models import Badge, BadgeMixin, BadgesList
  from udata.core.metrics.models import WithMetrics
  from udata.core.storages import avatars, default_image_basename
@@ -110,29 +111,35 @@ class OrganizationBadgeMixin(BadgeMixin):
      __badges__ = BADGES


- class Organization(WithMetrics, OrganizationBadgeMixin, db.Datetimed, db.Document):
-     name = db.StringField(required=True)
-     acronym = db.StringField(max_length=128)
-     slug = db.SlugField(
-         max_length=255, required=True, populate_from="name", update=True, follow=True
+ class Organization(Auditable, WithMetrics, OrganizationBadgeMixin, db.Datetimed, db.Document):
+     name = field(db.StringField(required=True))
+     acronym = field(db.StringField(max_length=128))
+     slug = field(
+         db.SlugField(max_length=255, required=True, populate_from="name", update=True, follow=True),
+         auditable=False,
      )
-     description = db.StringField(required=True)
-     url = db.URLField()
-     image_url = db.StringField()
-     logo = db.ImageField(
-         fs=avatars, basename=default_image_basename, max_size=LOGO_MAX_SIZE, thumbnails=LOGO_SIZES
+     description = field(db.StringField(required=True))
+     url = field(db.URLField())
+     image_url = field(db.StringField())
+     logo = field(
+         db.ImageField(
+             fs=avatars,
+             basename=default_image_basename,
+             max_size=LOGO_MAX_SIZE,
+             thumbnails=LOGO_SIZES,
+         )
      )
-     business_number_id = db.StringField(max_length=ORG_BID_SIZE_LIMIT)
+     business_number_id = field(db.StringField(max_length=ORG_BID_SIZE_LIMIT))

-     members = db.ListField(db.EmbeddedDocumentField(Member))
-     teams = db.ListField(db.EmbeddedDocumentField(Team))
-     requests = db.ListField(db.EmbeddedDocumentField(MembershipRequest))
+     members = field(db.ListField(db.EmbeddedDocumentField(Member)))
+     teams = field(db.ListField(db.EmbeddedDocumentField(Team)))
+     requests = field(db.ListField(db.EmbeddedDocumentField(MembershipRequest)))

-     ext = db.MapField(db.GenericEmbeddedDocumentField())
-     zone = db.StringField()
-     extras = db.OrganizationExtrasField()
+     ext = field(db.MapField(db.GenericEmbeddedDocumentField()))
+     zone = field(db.StringField())
+     extras = field(db.OrganizationExtrasField(), auditable=False)

-     deleted = db.DateTimeField()
+     deleted = field(db.DateTimeField())

      meta = {
          "indexes": [
@@ -168,19 +175,12 @@ class Organization(WithMetrics, OrganizationBadgeMixin, db.Datetimed, db.Documen
      on_update = Signal()
      before_delete = Signal()
      after_delete = Signal()
+     on_delete = Signal()

      @classmethod
      def pre_save(cls, sender, document, **kwargs):
          cls.before_save.send(document)

-     @classmethod
-     def post_save(cls, sender, document, **kwargs):
-         cls.after_save.send(document)
-         if kwargs.get("created"):
-             cls.on_create.send(document)
-         else:
-             cls.on_update.send(document)
-
      def url_for(self, *args, **kwargs):
          return endpoint_for("organizations.show", "api.organization", org=self, *args, **kwargs)

@@ -296,31 +296,31 @@ class Organization(WithMetrics, OrganizationBadgeMixin, db.Datetimed, db.Documen

      def count_members(self):
          self.metrics["members"] = len(self.members)
-         self.save()
+         self.save(signal_kwargs={"ignores": ["post_save"]})

      def count_datasets(self):
          from udata.models import Dataset

          self.metrics["datasets"] = Dataset.objects(organization=self).visible().count()
-         self.save()
+         self.save(signal_kwargs={"ignores": ["post_save"]})

      def count_reuses(self):
          from udata.models import Reuse

          self.metrics["reuses"] = Reuse.objects(organization=self).visible().count()
-         self.save()
+         self.save(signal_kwargs={"ignores": ["post_save"]})

      def count_dataservices(self):
          from udata.models import Dataservice

          self.metrics["dataservices"] = Dataservice.objects(organization=self).visible().count()
-         self.save()
+         self.save(signal_kwargs={"ignores": ["post_save"]})

      def count_followers(self):
          from udata.models import Follow

          self.metrics["followers"] = Follow.objects(until=None).followers(self).count()
-         self.save()
+         self.save(signal_kwargs={"ignores": ["post_save"]})


  pre_save.connect(Organization.pre_save, sender=Organization)
udata/core/owned.py CHANGED
@@ -80,7 +80,7 @@ def check_organization_is_valid_for_current_user(organization, **_kwargs):

  class Owned(object):
      """
-     A mixin to factorize owning behvaior between users and organizations.
+     A mixin to factorize owning behavior between users and organizations.
      """

      owner = field(
udata/core/post/api.py CHANGED
@@ -1,4 +1,8 @@
  from datetime import datetime
+ from typing import List
+
+ from feedgenerator.django.utils.feedgenerator import Atom1Feed
+ from flask import make_response, request

  from udata.api import API, api, fields
  from udata.auth import Permission as AdminPermission
@@ -11,6 +15,8 @@ from udata.core.storages.api import (
      uploaded_image_fields,
  )
  from udata.core.user.api_fields import user_ref_fields
+ from udata.frontend.markdown import md
+ from udata.i18n import gettext as _

  from .forms import PostForm
  from .models import Post
@@ -105,6 +111,34 @@ class PostsAPI(API):
          return form.save(), 201


+ @ns.route("/recent.atom", endpoint="recent_posts_atom_feed")
+ class PostsAtomFeedAPI(API):
+     @api.doc("recent_posts_atom_feed")
+     def get(self):
+         feed = Atom1Feed(
+             _("Latests posts"),
+             description=None,
+             feed_url=request.url,
+             link=request.url_root,
+         )
+
+         posts: List[Post] = Post.objects().published().order_by("-published").limit(15)
+         for post in posts:
+             feed.add_item(
+                 post.name,
+                 unique_id=post.id,
+                 description=post.headline,
+                 content=md(post.content),
+                 author_name="data.gouv.fr",
+                 link=post.external_url,
+                 updateddate=post.last_modified,
+                 pubdate=post.published,
+             )
+         response = make_response(feed.writeString("utf-8"))
+         response.headers["Content-Type"] = "application/atom+xml"
+         return response
+
+
  @ns.route("/<post:post>/", endpoint="post")
  @api.response(404, "Object not found")
  @api.param("post", "The post ID or slug")
udata/core/reuse/activities.py CHANGED
@@ -33,17 +33,18 @@ class UserDeletedReuse(ReuseRelatedActivity, Activity):

  @Reuse.on_create.connect
  def on_user_created_reuse(reuse):
-     if not reuse.private and current_user and current_user.is_authenticated:
+     if current_user and current_user.is_authenticated:
          UserCreatedReuse.emit(reuse, reuse.organization)


  @Reuse.on_update.connect
- def on_user_updated_reuse(reuse):
-     if not reuse.private and current_user and current_user.is_authenticated:
-         UserUpdatedReuse.emit(reuse, reuse.organization)
+ def on_user_updated_reuse(reuse, **kwargs):
+     changed_fields = kwargs.get("changed_fields", [])
+     if current_user and current_user.is_authenticated:
+         UserUpdatedReuse.emit(reuse, reuse.organization, changed_fields)


  @Reuse.on_delete.connect
  def on_user_deleted_reuse(reuse):
-     if not reuse.private and current_user and current_user.is_authenticated:
+     if current_user and current_user.is_authenticated:
          UserDeletedReuse.emit(reuse, reuse.organization)
udata/core/reuse/api.py CHANGED
@@ -1,8 +1,10 @@
  from datetime import datetime
+ from typing import List

  import mongoengine
  from bson.objectid import ObjectId
- from flask import request
+ from feedgenerator.django.utils.feedgenerator import Atom1Feed
+ from flask import make_response, request
  from flask_login import current_user

  from udata.api import API, api, errors
@@ -20,6 +22,8 @@ from udata.core.storages.api import (
      parse_uploaded_image,
      uploaded_image_fields,
  )
+ from udata.frontend.markdown import md
+ from udata.i18n import gettext as _
  from udata.models import Dataset
  from udata.utils import id_or_404

@@ -130,6 +134,43 @@ class ReuseListAPI(API):
          return patch_and_save(reuse, request), 201


+ @ns.route("/recent.atom", endpoint="recent_reuses_atom_feed")
+ class ReusesAtomFeedAPI(API):
+     @api.doc("recent_reuses_atom_feed")
+     def get(self):
+         feed = Atom1Feed(
+             _("Latests reuses"),
+             description=None,
+             feed_url=request.url,
+             link=request.url_root,
+         )
+
+         reuses: List[Reuse] = Reuse.objects.visible().order_by("-created_at").limit(15)
+         for reuse in reuses:
+             author_name = None
+             author_uri = None
+             if reuse.organization:
+                 author_name = reuse.organization.name
+                 author_uri = reuse.organization.external_url
+             elif reuse.owner:
+                 author_name = reuse.owner.fullname
+                 author_uri = reuse.owner.external_url
+             feed.add_item(
+                 reuse.title,
+                 unique_id=reuse.id,
+                 description=reuse.description,
+                 content=md(reuse.description),
+                 author_name=author_name,
+                 author_link=author_uri,
+                 link=reuse.external_url,
+                 updateddate=reuse.last_modified,
+                 pubdate=reuse.created_at,
+             )
+         response = make_response(feed.writeString("utf-8"))
+         response.headers["Content-Type"] = "application/atom+xml"
+         return response
+
+
  @ns.route("/<reuse:reuse>/", endpoint="reuse", doc=common_doc)
  @api.response(404, "Reuse not found")
  @api.response(410, "Reuse has been deleted")
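Both new endpoints (posts and reuses) return an Atom feed served as application/atom+xml. A hypothetical consumer, assuming udata's default /api/1 mount point and an example host:

import requests

# Host and /api/1 prefix are assumptions (udata's default API mount point)
for path in ("/api/1/posts/recent.atom", "/api/1/reuses/recent.atom"):
    resp = requests.get(f"https://demo.data.gouv.fr{path}")
    resp.raise_for_status()
    assert resp.headers["Content-Type"].startswith("application/atom+xml")
    print(path, "->", len(resp.content), "bytes")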