udata 10.8.3.dev37152__py2.py3-none-any.whl → 10.8.3.dev37185__py2.py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of udata might be problematic. Click here for more details.

Files changed (29)
  1. udata/core/dataset/api_fields.py +2 -0
  2. udata/core/dataset/apiv2.py +4 -0
  3. udata/core/dataset/constants.py +1 -0
  4. udata/core/dataset/csv.py +1 -0
  5. udata/core/dataset/forms.py +6 -0
  6. udata/core/dataset/models.py +2 -0
  7. udata/harvest/backends/dcat.py +5 -3
  8. udata/static/chunks/{10.8ca60413647062717b1e.js → 10.471164b2a9fe15614797.js} +3 -3
  9. udata/static/chunks/{10.8ca60413647062717b1e.js.map → 10.471164b2a9fe15614797.js.map} +1 -1
  10. udata/static/chunks/{11.b6f741fcc366abfad9c4.js → 11.51d706fb9521c16976bc.js} +3 -3
  11. udata/static/chunks/{11.b6f741fcc366abfad9c4.js.map → 11.51d706fb9521c16976bc.js.map} +1 -1
  12. udata/static/chunks/{13.2d06442dd9a05d9777b5.js → 13.f29411b06be1883356a3.js} +2 -2
  13. udata/static/chunks/{13.2d06442dd9a05d9777b5.js.map → 13.f29411b06be1883356a3.js.map} +1 -1
  14. udata/static/chunks/{17.e8e4caaad5cb0cc0bacc.js → 17.3bd0340930d4a314ce9c.js} +2 -2
  15. udata/static/chunks/{17.e8e4caaad5cb0cc0bacc.js.map → 17.3bd0340930d4a314ce9c.js.map} +1 -1
  16. udata/static/chunks/{19.f03a102365af4315f9db.js → 19.8da42e8359d72afc2618.js} +3 -3
  17. udata/static/chunks/{19.f03a102365af4315f9db.js.map → 19.8da42e8359d72afc2618.js.map} +1 -1
  18. udata/static/chunks/{8.778091d55cd8ea39af6b.js → 8.54e44b102164ae5e7a67.js} +2 -2
  19. udata/static/chunks/{8.778091d55cd8ea39af6b.js.map → 8.54e44b102164ae5e7a67.js.map} +1 -1
  20. udata/static/chunks/{9.033d7e190ca9e226a5d0.js → 9.07515e5187f475bce828.js} +3 -3
  21. udata/static/chunks/{9.033d7e190ca9e226a5d0.js.map → 9.07515e5187f475bce828.js.map} +1 -1
  22. udata/static/common.js +1 -1
  23. udata/static/common.js.map +1 -1
  24. {udata-10.8.3.dev37152.dist-info → udata-10.8.3.dev37185.dist-info}/METADATA +4 -2
  25. {udata-10.8.3.dev37152.dist-info → udata-10.8.3.dev37185.dist-info}/RECORD +29 -29
  26. {udata-10.8.3.dev37152.dist-info → udata-10.8.3.dev37185.dist-info}/LICENSE +0 -0
  27. {udata-10.8.3.dev37152.dist-info → udata-10.8.3.dev37185.dist-info}/WHEEL +0 -0
  28. {udata-10.8.3.dev37152.dist-info → udata-10.8.3.dev37185.dist-info}/entry_points.txt +0 -0
  29. {udata-10.8.3.dev37152.dist-info → udata-10.8.3.dev37185.dist-info}/top_level.txt +0 -0
@@ -266,6 +266,7 @@ DEFAULT_MASK = ",".join(
  "acronym",
  "slug",
  "description",
+ "description_short",
  "created_at",
  "last_modified",
  "deleted",
@@ -327,6 +328,7 @@ dataset_fields = api.model(
  "description": fields.Markdown(
  description="The dataset description in markdown", required=True
  ),
+ "description_short": fields.String(description="The dataset short description"),
  "created_at": fields.ISODateTime(
  description="This date is computed between harvested creation date if any and site's internal creation date",
  required=True,
@@ -44,6 +44,7 @@ DEFAULT_MASK_APIV2 = ",".join(
  "acronym",
  "slug",
  "description",
+ "description_short",
  "created_at",
  "last_modified",
  "deleted",
@@ -105,6 +106,9 @@ dataset_fields = apiv2.model(
  "description": fields.Markdown(
  description="The dataset description in markdown", required=True
  ),
+ "description_short": fields.String(
+ description="The dataset short description", required=False
+ ),
  "created_at": fields.ISODateTime(
  description="The dataset creation date", required=True, readonly=True
  ),
@@ -89,5 +89,6 @@ SCHEMA_CACHE_DURATION = 60 * 5 # In seconds
 
  TITLE_SIZE_LIMIT = 350
  DESCRIPTION_SIZE_LIMIT = 100000
+ DESCRIPTION_SHORT_SIZE_LIMIT = 200
 
  FULL_OBJECTS_HEADER = "X-Get-Datasets-Full-Objects"
udata/core/dataset/csv.py CHANGED
@@ -26,6 +26,7 @@ class DatasetCsvAdapter(csv.Adapter):
  ("owner_id", "owner.id"),
  # 'contact_point', # ?
  "description",
+ "description_short",
  "frequency",
  "license",
  "temporal_coverage.start",
@@ -7,6 +7,7 @@ from udata.mongo.errors import FieldValidationError
  from .constants import (
  CHECKSUM_TYPES,
  DEFAULT_FREQUENCY,
+ DESCRIPTION_SHORT_SIZE_LIMIT,
  DESCRIPTION_SIZE_LIMIT,
  LEGACY_FREQUENCIES,
  RESOURCE_FILETYPES,
@@ -151,6 +152,11 @@ class DatasetForm(ModelForm):
  [validators.DataRequired(), validators.Length(max=DESCRIPTION_SIZE_LIMIT)],
  description=_("The details about the dataset (collection process, specifics...)."),
  )
+ description_short = fields.StringField(
+ _("Short description"),
+ [validators.Length(max=DESCRIPTION_SHORT_SIZE_LIMIT)],
+ description=_("A short description of the dataset."),
+ )
  license = fields.ModelSelectField(_("License"), model=License, allow_blank=True)
  frequency = fields.SelectField(
  _("Update frequency"),
@@ -35,6 +35,7 @@ from .constants import (
  CHECKSUM_TYPES,
  CLOSED_FORMATS,
  DEFAULT_LICENSE,
+ DESCRIPTION_SHORT_SIZE_LIMIT,
  LEGACY_FREQUENCIES,
  MAX_DISTANCE,
  PIVOTAL_DATA,
@@ -560,6 +561,7 @@ class Dataset(Auditable, WithMetrics, DatasetBadgeMixin, Owned, Linkable, db.Doc
  auditable=False,
  )
  description = field(db.StringField(required=True, default=""))
+ description_short = field(db.StringField(max_length=DESCRIPTION_SHORT_SIZE_LIMIT))
  license = field(db.ReferenceField("License"))
 
  tags = field(db.TagListField())
@@ -72,16 +72,16 @@ class DcatBackend(BaseBackend):
  fmt = self.get_format()
  self.job.data = {"format": fmt}
 
- serialized_graphs = []
+ pages = []
 
  for page_number, page in self.walk_graph(self.source.url, fmt):
  self.process_one_datasets_page(page_number, page)
- serialized_graphs.append(page.serialize(format=fmt, indent=None))
+ pages.append((page_number, page))
 
  # We do a second pass to have all datasets in memory and attach datasets
  # to dataservices. It could be better to be one pass of graph walking and
  # then one pass of attaching datasets to dataservices.
- for page_number, page in self.walk_graph(self.source.url, fmt):
+ for page_number, page in pages:
  self.process_one_dataservices_page(page_number, page)
 
  if not self.dryrun and self.has_reached_max_items():
@@ -100,6 +100,8 @@ class DcatBackend(BaseBackend):
 
  bucket = current_app.config.get("HARVEST_GRAPHS_S3_BUCKET")
 
+ serialized_graphs = [p.serialize(format=fmt, indent=None) for _, p in pages]
+
  if (
  bucket is not None
  and sum([len(g.encode("utf-8")) for g in serialized_graphs])