udata-9.1.2.dev30754-py2.py3-none-any.whl → udata-9.1.3-py2.py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.


tasks/helpers.py CHANGED
@@ -1,10 +1,11 @@
  from os.path import abspath, dirname, join
+ from typing import Callable
 
  #: Project absolute root path
  ROOT = abspath(join(dirname(__file__), ".."))
 
 
- def color(code):
+ def color(code: str) -> Callable:
  """A simple ANSI color wrapper factory"""
  return lambda t: "\033[{0}{1}\033[0;m".format(code, t)
 
@@ -17,25 +18,25 @@ purple = color("1;35m")
  white = color("1;39m")
 
 
- def header(text, *args, **kwargs):
+ def header(text: str, *args, **kwargs) -> None:
  """Display an header"""
  text = text.format(*args, **kwargs)
  print(" ".join((blue(">>"), cyan(text))))
 
 
- def info(text, *args, **kwargs):
+ def info(text: str, *args, **kwargs) -> None:
  """Display informations"""
  text = text.format(*args, **kwargs)
  print(" ".join((purple(">>>"), text)))
 
 
- def success(text, *args, **kwargs):
+ def success(text: str, *args, **kwargs) -> None:
  """Display a success message"""
  text = text.format(*args, **kwargs)
  print(" ".join((green("✔"), white(text))))
 
 
- def error(text, *args, **kwargs):
+ def error(text: str, *args, **kwargs) -> None:
  """Display an error message"""
  text = text.format(*args, **kwargs)
  print(red("✘ {0}".format(text)))
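For context, the annotated helpers above are used by the project's task scripts roughly like this (an illustrative sketch, not part of the diff):

    # Illustrative usage of the helpers defined in tasks/helpers.py.
    from tasks.helpers import error, header, info, success

    header("Building {0}", "udata")   # ">> Building udata", colored
    info("Compiling translations")    # ">>> Compiling translations"
    success("Done in {0}s", 3)        # green check mark + "Done in 3s"
    error("Build failed")             # red cross + "Build failed"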
udata/__init__.py CHANGED
@@ -4,5 +4,5 @@
  udata
  """
 
- __version__ = "9.1.2.dev"
+ __version__ = "9.1.3"
  __description__ = "Open data portal"
udata/api_fields.py CHANGED
@@ -335,7 +335,7 @@ def wrap_primary_key(
  field_name: str,
  foreign_field: mongoengine.fields.ReferenceField | mongoengine.fields.GenericReferenceField,
  value: str,
- document_type: type = None,
+ document_type=None,
  ):
  """
  We need to wrap the `String` inside an `ObjectId` most of the time. If the foreign ID is a `String` we need to get
udata/assets.py CHANGED
@@ -2,7 +2,7 @@ from flask import current_app, url_for
  from flask_cdn import url_for as cdn_url_for
 
 
- def cdn_for(endpoint, **kwargs):
+ def cdn_for(endpoint: str, **kwargs) -> str:
  """
  Get a CDN URL for a static assets.
 
udata/commands/fixtures.py CHANGED
@@ -1,11 +1,22 @@
+ """Commands to download fixtures from the udata-fixtures repository, import them locally.
+
+ When "downloading" (generating) the fixtures, save the json as is.
+ When "importing" the fixtures, massage them so then can be loaded properly.
+ """
+
  import json
  import logging
+ import pathlib
 
  import click
  import requests
  from flask import current_app
 
  from udata.commands import cli
+ from udata.core.contact_point.factories import ContactPointFactory
+ from udata.core.contact_point.models import ContactPoint
+ from udata.core.dataservices.factories import DataserviceFactory
+ from udata.core.dataservices.models import Dataservice
  from udata.core.dataset.factories import (
  CommunityResourceFactory,
  DatasetFactory,
@@ -21,21 +32,79 @@ log = logging.getLogger(__name__)
 
 
  DATASET_URL = "/api/1/datasets"
+ DATASERVICES_URL = "/api/1/dataservices"
  ORG_URL = "/api/1/organizations"
  REUSE_URL = "/api/1/reuses"
  COMMUNITY_RES_URL = "/api/1/datasets/community_resources"
  DISCUSSION_URL = "/api/1/discussions"
 
 
- DEFAULT_FIXTURE_FILE = (
- "https://raw.githubusercontent.com/opendatateam/udata-fixtures/main/results.json" # noqa
- )
+ DEFAULT_FIXTURE_FILE_TAG: str = "v2.0.0"
+ DEFAULT_FIXTURE_FILE: str = f"https://raw.githubusercontent.com/opendatateam/udata-fixtures/{DEFAULT_FIXTURE_FILE_TAG}/results.json" # noqa
+
+ DEFAULT_FIXTURES_RESULTS_FILENAME: str = "results.json"
+
+ UNWANTED_KEYS: dict[str, list[str]] = {
+ "dataset": [
+ "uri",
+ "page",
+ "last_update",
+ "last_modified",
+ "license",
+ "badges",
+ "spatial",
+ "quality",
+ ],
+ "resource": ["latest", "preview_url", "last_modified"],
+ "organization": ["members", "page", "uri", "logo_thumbnail"],
+ "reuse": ["datasets", "image_thumbnail", "page", "uri", "organization", "owner"],
+ "community": [
+ "dataset",
+ "organization",
+ "owner",
+ "latest",
+ "last_modified",
+ "preview_url",
+ ],
+ "discussion": ["subject", "user", "url", "class"],
+ "message": ["posted_by"],
+ "dataservice": [
+ "datasets",
+ "license",
+ "organization",
+ "owner",
+ "self_api_url",
+ "self_web_url",
+ ],
+ }
+
+
+ def remove_unwanted_keys(obj: dict, filter_type: str) -> dict:
+ """Remove UNWANTED_KEYS from a dict."""
+ for unwanted_key in UNWANTED_KEYS[filter_type]:
+ if unwanted_key in obj:
+ del obj[unwanted_key]
+ fix_dates(obj)
+ return obj
+
+
+ def fix_dates(obj: dict) -> dict:
+ """Fix dates from the fixtures so they can be safely reloaded later on."""
+ if "internal" not in obj:
+ return obj
+ obj["created_at_internal"] = obj["internal"]["created_at_internal"]
+ obj["last_modified_internal"] = obj["internal"]["last_modified_internal"]
+ del obj["internal"]
+ del obj["created_at"]
+ return obj
 
 
  @cli.command()
  @click.argument("data-source")
- def generate_fixtures_file(data_source):
- """Build sample fixture file based on datasets slugs list (users, datasets, reuses)."""
+ @click.argument("results-filename", default=DEFAULT_FIXTURES_RESULTS_FILENAME)
+ def generate_fixtures_file(data_source: str, results_filename: str) -> None:
+ """Build sample fixture file based on datasets slugs list (users, datasets, reuses, dataservices)."""
+ results_file = pathlib.Path(results_filename)
  datasets_slugs = current_app.config["FIXTURE_DATASET_SLUGS"]
  json_result = []
 
@@ -44,31 +113,13 @@ def generate_fixtures_file(data_source):
  json_fixture = {}
 
  json_dataset = requests.get(f"{data_source}{DATASET_URL}/{slug}/").json()
- del json_dataset["uri"]
- del json_dataset["page"]
- del json_dataset["last_update"]
- del json_dataset["last_modified"]
- del json_dataset["license"]
- del json_dataset["badges"]
- del json_dataset["spatial"]
- del json_dataset["quality"]
- json_dataset["created_at_internal"] = json_dataset.pop("created_at")
  json_resources = json_dataset.pop("resources")
- for res in json_resources:
- del res["latest"]
- del res["preview_url"]
- del res["last_modified"]
- res["created_at_internal"] = res.pop("created_at")
  if json_dataset["organization"] is None:
  json_owner = json_dataset.pop("owner")
  json_dataset["owner"] = json_owner["id"]
  else:
  json_org = json_dataset.pop("organization")
  json_org = requests.get(f"{data_source}{ORG_URL}/{json_org['id']}/").json()
- del json_org["members"]
- del json_org["page"]
- del json_org["uri"]
- del json_org["logo_thumbnail"]
  json_fixture["organization"] = json_org
  json_fixture["resources"] = json_resources
  json_fixture["dataset"] = json_dataset
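The inline `del` statements removed here are replaced by the `remove_unwanted_keys` and `fix_dates` helpers introduced above, which now run at import time. A quick sketch of what `fix_dates` does to one object (values are invented for illustration):

    # Illustrative input/output for fix_dates(); the values are invented.
    obj = {
        "title": "some-dataset",
        "created_at": "2024-07-01T00:00:00",
        "internal": {
            "created_at_internal": "2024-07-01T00:00:00",
            "last_modified_internal": "2024-07-02T00:00:00",
        },
    }
    fix_dates(obj)
    # obj == {
    #     "title": "some-dataset",
    #     "created_at_internal": "2024-07-01T00:00:00",
    #     "last_modified_internal": "2024-07-02T00:00:00",
    # }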
@@ -76,50 +127,34 @@ def generate_fixtures_file(data_source):
  json_reuses = requests.get(
  f"{data_source}{REUSE_URL}/?dataset={json_dataset['id']}"
  ).json()["data"]
- for reuse in json_reuses:
- del reuse["datasets"]
- del reuse["image_thumbnail"]
- del reuse["page"]
- del reuse["uri"]
- del reuse["organization"]
- del reuse["owner"]
  json_fixture["reuses"] = json_reuses
 
  json_community = requests.get(
  f"{data_source}{COMMUNITY_RES_URL}/?dataset={json_dataset['id']}"
  ).json()["data"]
- for com in json_community:
- del com["dataset"]
- del com["organization"]
- del com["owner"]
- del com["latest"]
- del com["last_modified"]
- del com["preview_url"]
- com["created_at_internal"] = com.pop("created_at")
  json_fixture["community_resources"] = json_community
 
  json_discussion = requests.get(
  f"{data_source}{DISCUSSION_URL}/?for={json_dataset['id']}"
  ).json()["data"]
- for discussion in json_discussion:
- del discussion["subject"]
- del discussion["user"]
- del discussion["url"]
- del discussion["class"]
- for message in discussion["discussion"]:
- del message["posted_by"]
  json_fixture["discussions"] = json_discussion
 
+ json_dataservices = requests.get(
+ f"{data_source}{DATASERVICES_URL}/?dataset={json_dataset['id']}"
+ ).json()["data"]
+ json_fixture["dataservices"] = json_dataservices
+
  json_result.append(json_fixture)
 
- with open("results.json", "w") as f:
- json.dump(json_result, f)
+ with results_file.open("w") as f:
+ json.dump(json_result, f, indent=2)
+ print(f"Fixtures saved to file {results_filename}")
 
 
  @cli.command()
  @click.argument("source", default=DEFAULT_FIXTURE_FILE)
- def generate_fixtures(source):
- """Build sample fixture data (users, datasets, reuses) from local or remote file."""
+ def import_fixtures(source):
+ """Build sample fixture data (users, datasets, reuses, dataservices) from local or remote file."""
  if source.startswith("http"):
  json_fixtures = requests.get(source).json()
  else:
@@ -129,24 +164,32 @@ def generate_fixtures(source):
  with click.progressbar(json_fixtures) as bar:
  for fixture in bar:
  user = UserFactory()
+ dataset = fixture["dataset"]
+ dataset = remove_unwanted_keys(dataset, "dataset")
  if not fixture["organization"]:
- dataset = DatasetFactory(**fixture["dataset"], owner=user)
+ dataset = DatasetFactory(**dataset, owner=user)
  else:
  org = Organization.objects(id=fixture["organization"]["id"]).first()
  if not org:
- org = OrganizationFactory(
- **fixture["organization"], members=[Member(user=user)]
- )
- dataset = DatasetFactory(**fixture["dataset"], organization=org)
+ organization = fixture["organization"]
+ organization = remove_unwanted_keys(organization, "organization")
+ org = OrganizationFactory(**organization, members=[Member(user=user)])
+ dataset = DatasetFactory(**dataset, organization=org)
  for resource in fixture["resources"]:
+ resource = remove_unwanted_keys(resource, "resource")
  res = ResourceFactory(**resource)
  dataset.add_resource(res)
  for reuse in fixture["reuses"]:
+ reuse = remove_unwanted_keys(reuse, "reuse")
  ReuseFactory(**reuse, datasets=[dataset], owner=user)
  for community in fixture["community_resources"]:
+ community = remove_unwanted_keys(community, "community")
  CommunityResourceFactory(**community, dataset=dataset, owner=user)
  for discussion in fixture["discussions"]:
+ discussion = remove_unwanted_keys(discussion, "discussion")
  messages = discussion.pop("discussion")
+ for message in messages:
+ message = remove_unwanted_keys(message, "message")
  DiscussionFactory(
  **discussion,
  subject=dataset,
@@ -155,3 +198,17 @@ def generate_fixtures(source):
  MessageDiscussionFactory(**message, posted_by=user) for message in messages
  ],
  )
+ for dataservice in fixture["dataservices"]:
+ dataservice = remove_unwanted_keys(dataservice, "dataservice")
+ if not dataservice["contact_point"]:
+ DataserviceFactory(**dataservice, datasets=[dataset])
+ else:
+ contact_point = ContactPoint.objects(
+ id=dataservice["contact_point"]["id"]
+ ).first()
+ if not contact_point:
+ contact_point = ContactPointFactory(**dataservice["contact_point"])
+ dataservice.pop("contact_point")
+ DataserviceFactory(
+ **dataservice, datasets=[dataset], contact_point=contact_point
+ )
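Taken together, `generate_fixtures_file` writes and `import_fixtures` reads a JSON list with one entry per configured dataset slug. A rough sketch of one entry's shape, inferred from the code above (values elided):

    # Approximate shape of one entry in results.json.
    fixture_entry = {
        "dataset": {},              # dataset payload; unwanted keys are stripped at import time
        "organization": {},         # the dataset's organization, when it has one
        "resources": [],            # resources popped out of the dataset payload
        "reuses": [],
        "community_resources": [],
        "discussions": [],
        "dataservices": [],         # new in this release
    }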
udata/commands/init.py CHANGED
@@ -11,7 +11,7 @@ from udata.i18n import gettext as _
  from udata.search.commands import index
 
  from .db import migrate
- from .fixtures import generate_fixtures
+ from .fixtures import import_fixtures
 
  log = logging.getLogger(__name__)
 
@@ -44,6 +44,6 @@ def init(ctx):
 
  text = _("Do you want to create some sample data?")
  if click.confirm(text, default=True):
- ctx.invoke(generate_fixtures)
+ ctx.invoke(import_fixtures)
 
  success(_("Your udata instance is ready!"))
udata/commands/tests/test_fixtures.py ADDED
@@ -0,0 +1,74 @@
+ import json
+ from tempfile import NamedTemporaryFile
+
+ import pytest
+ import requests
+ import werkzeug.test
+ from pytest_mock import MockerFixture
+ from werkzeug.wrappers.response import Response
+
+ import udata.commands.fixtures
+ from udata import models
+ from udata.core.dataservices.factories import DataserviceFactory
+ from udata.core.dataset.factories import (
+ CommunityResourceFactory,
+ DatasetFactory,
+ ResourceFactory,
+ )
+ from udata.core.discussions.factories import DiscussionFactory, MessageDiscussionFactory
+ from udata.core.organization.factories import OrganizationFactory
+ from udata.core.organization.models import Member, Organization
+ from udata.core.reuse.factories import ReuseFactory
+ from udata.core.user.factories import UserFactory
+
+
+ @pytest.mark.usefixtures("clean_db")
+ class FixturesTest:
+ @pytest.mark.frontend
+ @pytest.mark.options(FIXTURE_DATASET_SLUGS=["some-test-dataset-slug"])
+ def test_generate_fixtures_file_then_import(self, app, cli, api, monkeypatch):
+ """Test generating fixtures from the current env, then importing them back."""
+ assert models.Dataset.objects.count() == 0 # Start with a clean slate.
+ user = UserFactory()
+ org = OrganizationFactory(**{}, members=[Member(user=user)])
+ # Set the same slug we're 'exporting' from the FIXTURE_DATASET_SLUG config, see the
+ # @pytest.mark.options above.
+ dataset = DatasetFactory(**{}, slug="some-test-dataset-slug", organization=org)
+ res = ResourceFactory(**{})
+ dataset.add_resource(res)
+ ReuseFactory(**{}, datasets=[dataset], owner=user)
+ CommunityResourceFactory(**{}, dataset=dataset, owner=user)
+ DiscussionFactory(
+ **{},
+ subject=dataset,
+ user=user,
+ discussion=[MessageDiscussionFactory(**{}, posted_by=user)],
+ )
+ DataserviceFactory(**{}, datasets=[dataset])
+
+ with NamedTemporaryFile(mode="w+", delete=True) as fixtures_fd:
+ # Get the fixtures from the local instance.
+ monkeypatch.setattr(requests, "get", lambda url: api.get(url))
+ monkeypatch.setattr(Response, "json", Response.get_json)
+ result = cli("generate-fixtures-file", "", fixtures_fd.name)
+ fixtures_fd.flush()
+ assert "Fixtures saved to file " in result.output
+
+ # Then load them in the database to make sure they're correct.
+ result = cli("import-fixtures", fixtures_fd.name)
+ assert models.Organization.objects(slug=org.slug).count() > 0
+ assert models.Dataset.objects.count() > 0
+ assert models.Discussion.objects.count() > 0
+ assert models.CommunityResource.objects.count() > 0
+ assert models.User.objects.count() > 0
+ assert models.Dataservice.objects.count() > 0
+
+ def test_import_fixtures_from_default_file(self, cli):
+ """Test importing fixtures from udata.commands.fixture.DEFAULT_FIXTURE_FILE."""
+ cli("import-fixtures")
+ assert models.Organization.objects.count() > 0
+ assert models.Dataset.objects.count() > 0
+ assert models.Reuse.objects.count() > 0
+ assert models.User.objects.count() > 0
+ if udata.commands.fixtures.DEFAULT_FIXTURE_FILE_TAG > "v1.0.0":
+ assert models.Dataservice.objects.count() > 0
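The key trick in the new test is routing `requests.get` to the Flask test client so the "remote" fixture source is the app under test. The same pattern can be reused elsewhere, roughly like this (a sketch assuming the same pytest fixtures and imports as above):

    # Redirect requests.get() to the local Flask test client during a test.
    import requests
    from werkzeug.wrappers.response import Response

    def test_something_calling_the_api(api, monkeypatch):
        monkeypatch.setattr(requests, "get", lambda url: api.get(url))
        monkeypatch.setattr(Response, "json", Response.get_json)
        # ...code under test can now call requests.get("/api/1/...") locally...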
udata/core/dataset/actions.py CHANGED
@@ -4,12 +4,12 @@ from datetime import datetime
  from flask import current_app, render_template
 
  from udata import i18n
- from udata.models import Discussion, Message
+ from udata.models import Dataset, Discussion, Message
 
  log = logging.getLogger(__name__)
 
 
- def archive(dataset, comment=False):
+ def archive(dataset: Dataset, comment=False) -> None:
  """Archive a dataset"""
  if dataset.archived:
  log.warning("Dataset %s already archived, bumping date", dataset)
udata/core/reports/api.py CHANGED
@@ -4,6 +4,7 @@ from flask_login import current_user
 
  from udata.api import API, api, fields
  from udata.api_fields import patch
+ from udata.auth import admin_permission
 
  from .constants import reports_reasons_translations
  from .models import Report
@@ -16,6 +17,7 @@ class ReportsAPI(API):
  @api.doc("list_reports")
  @api.expect(Report.__index_parser__)
  @api.marshal_with(Report.__page_fields__)
+ @api.secure(admin_permission)
  def get(self):
  query = Report.objects
 
@@ -37,6 +39,15 @@ class ReportsAPI(API):
  return report, 201
 
 
+ @ns.route("/<report:report>/", endpoint="report")
+ class ReportAPI(API):
+ @api.doc("get_report")
+ @api.marshal_with(Report.__read_fields__)
+ @api.secure(admin_permission)
+ def get(self, report):
+ return report
+
+
  @ns.route("/reasons/", endpoint="reports_reasons")
  class ReportsReasonsAPI(API):
  @api.doc("list_reports_reasons")
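With both report endpoints now admin-only, a client needs admin credentials. A hedged sketch of how they might be called; the base URL, API key, and the exact /api/1/reports/ paths are assumptions, not taken from the diff:

    import requests

    BASE_URL = "https://udata.example.org/api/1"   # placeholder instance
    HEADERS = {"X-API-KEY": "an-admin-api-key"}    # placeholder admin API key

    # List reports; non-admin callers are now rejected (401/403).
    page = requests.get(f"{BASE_URL}/reports/", headers=HEADERS).json()

    # Fetch a single report through the new detail endpoint.
    report_id = page["data"][0]["id"]
    report = requests.get(f"{BASE_URL}/reports/{report_id}/", headers=HEADERS).json()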
udata/core/reports/models.py CHANGED
@@ -3,10 +3,11 @@
  from bson import DBRef
  from mongoengine import DO_NOTHING, NULLIFY, signals
 
- from udata.api_fields import field, generate_fields
+ from udata.api_fields import field, function_field, generate_fields
  from udata.core.user.api_fields import user_ref_fields
  from udata.core.user.models import User
  from udata.mongo import db
+ from udata.uris import endpoint_for
 
  from .constants import REPORT_REASONS_CHOICES, REPORTABLE_MODELS
 
@@ -45,6 +46,10 @@ class Report(db.Document):
  readonly=True,
  )
 
+ @function_field(description="Link to the API endpoint for this report")
+ def self_api_url(self):
+ return endpoint_for("api.report", report=self, _external=True)
+
  @classmethod
  def mark_as_deleted_soft_delete(cls, sender, document, **kwargs):
  """
udata/core/reuse/metrics.py CHANGED
@@ -3,5 +3,5 @@ from udata.models import Reuse
 
  @Reuse.on_create.connect
  @Reuse.on_update.connect
- def update_reuses_dataset_metric(reuse, **kwargs):
+ def update_reuses_dataset_metric(reuse: Reuse, **kwargs) -> None:
  reuse.count_datasets()
udata/core/reuse/permissions.py CHANGED
@@ -3,10 +3,11 @@ from udata.core.organization.permissions import (
  OrganizationAdminNeed,
  OrganizationEditorNeed,
  )
+ from udata.core.reuse.models import Reuse
 
 
  class ReuseEditPermission(Permission):
- def __init__(self, reuse):
+ def __init__(self, reuse: Reuse) -> None:
  needs = []
 
  if reuse.organization:
udata/core/reuse/search.py CHANGED
@@ -38,7 +38,7 @@ class ReuseSearch(ModelSearchAdapter):
  }
 
  @classmethod
- def is_indexable(cls, reuse):
+ def is_indexable(cls, reuse: Reuse) -> bool:
  return reuse.deleted is None and len(reuse.datasets) > 0 and not reuse.private
 
  @classmethod
@@ -55,7 +55,7 @@ class ReuseSearch(ModelSearchAdapter):
  return reuses.order_by(sort).skip(offset).limit(args["page_size"]), reuses.count()
 
  @classmethod
- def serialize(cls, reuse):
+ def serialize(cls, reuse: Reuse) -> dict:
  organization = None
  owner = None
  if reuse.organization:
udata/core/reuse/tasks.py CHANGED
@@ -10,7 +10,7 @@ log = get_logger(__name__)
 
 
  @job("purge-reuses")
- def purge_reuses(self):
+ def purge_reuses(self) -> None:
  for reuse in Reuse.objects(deleted__ne=None):
  log.info(f"Purging reuse {reuse}")
  # Remove followers
@@ -32,7 +32,7 @@ def purge_reuses(self):
 
 
  @task
- def notify_new_reuse(reuse_id):
+ def notify_new_reuse(reuse_id: int) -> None:
  reuse = Reuse.objects.get(pk=reuse_id)
  for dataset in reuse.datasets:
  if dataset.organization:
udata/frontend/__init__.py CHANGED
@@ -23,7 +23,7 @@ _template_hooks = {}
 
 
  @hook.app_template_global()
- def package_version(name):
+ def package_version(name: str) -> str:
  return pkg_resources.get_distribution(name).version
 
 
udata/models/__init__.py CHANGED
@@ -22,6 +22,7 @@ from udata.core.jobs.models import * # noqa
  from udata.core.tags.models import * # noqa
  from udata.core.spam.models import * # noqa
  from udata.core.reports.models import * # noqa
+ from udata.core.dataservices.models import * # noqa
 
  from udata.features.transfer.models import * # noqa
  from udata.features.territories.models import * # noqa
udata/routing.py CHANGED
@@ -153,6 +153,10 @@ class ContactPointConverter(ModelConverter):
  model = models.ContactPoint
 
 
+ class ReportConverter(ModelConverter):
+ model = models.Report
+
+
  class TerritoryConverter(PathConverter):
  DEFAULT_PREFIX = "fr" # TODO: make it a setting parameter
 
@@ -231,3 +235,4 @@ def init_app(app):
  app.url_map.converters["post"] = PostConverter
  app.url_map.converters["territory"] = TerritoryConverter
  app.url_map.converters["contact_point"] = ContactPointConverter
+ app.url_map.converters["report"] = ReportConverter
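Once the converter is registered, report URLs resolve through the usual Flask routing machinery; for instance (a sketch, inside a request or app context):

    from flask import url_for

    # `report` is a Report instance; the converter turns it into its id in the URL.
    url_for("api.report", report=report)                  # e.g. "/api/1/reports/{id}/"
    url_for("api.report", report=report, _external=True)  # absolute URL, as used by self_api_url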
udata/sentry.py CHANGED
@@ -8,7 +8,9 @@ from werkzeug.exceptions import HTTPException
  from udata import entrypoints
  from udata.core.storages.api import UploadProgress
 
+ from .app import UDataApp
  from .auth import PermissionDenied
+ from .frontend import package_version
 
  log = logging.getLogger(__name__)
 
@@ -24,7 +26,7 @@ ERROR_PARSE_DSN_MSG = "Unable to parse Sentry DSN"
  IGNORED_EXCEPTIONS = HTTPException, PermissionDenied, UploadProgress
 
 
- def public_dsn(dsn):
+ def public_dsn(dsn: str) -> str | None:
  """Check if DSN is public or raise a warning and turn it into a public one"""
  m = RE_DSN.match(dsn)
  if not m:
@@ -41,7 +43,7 @@ def public_dsn(dsn):
  return public
 
 
- def init_app(app):
+ def init_app(app: UDataApp):
  if app.config["SENTRY_DSN"]:
  try:
  import sentry_sdk
@@ -62,6 +64,13 @@ def init_app(app):
  dsn=app.config["SENTRY_PUBLIC_DSN"],
  integrations=[FlaskIntegration(), CeleryIntegration()],
  ignore_errors=list(exceptions),
+ release=f"udata@{package_version('udata')}",
+ environment=app.config.get("SITE_ID", None),
+ # Set traces_sample_rate to 1.0 to capture 100%
+ # of transactions for performance monitoring.
+ # Sentry recommends adjusting this value in production.
+ traces_sample_rate=app.config.get("SENTRY_SAMPLE_RATE", None),
+ profiles_sample_rate=app.config.get("SENTRY_SAMPLE_RATE", None),
  )
 
  # Set log level
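The new settings read above can be tuned per deployment; an illustrative udata.cfg excerpt (values are examples, not recommendations):

    # Example udata.cfg excerpt for the new Sentry knobs.
    SENTRY_DSN = "https://public_key@sentry.example.org/1"
    SITE_ID = "demo.data.gouv.fr"     # sent to Sentry as the environment
    SENTRY_SAMPLE_RATE = 0.25         # used for both traces_sample_rate and profiles_sample_rate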
udata/settings.py CHANGED
@@ -113,6 +113,7 @@ class Defaults(object):
  SENTRY_USER_ATTRS = ["slug", "email", "fullname"]
  SENTRY_LOGGING = "WARNING"
  SENTRY_IGNORE_EXCEPTIONS = []
+ SENTRY_SAMPLE_RATE: float = 1.0
 
  # Flask WTF settings
  CSRF_SESSION_KEY = "Default uData csrf key"
udata/tags.py CHANGED
@@ -6,11 +6,11 @@ MIN_TAG_LENGTH = LocalProxy(lambda: current_app.config["TAG_MIN_LENGTH"])
  MAX_TAG_LENGTH = LocalProxy(lambda: current_app.config["TAG_MAX_LENGTH"])
 
 
- def slug(value):
+ def slug(value: str) -> str:
  return slugify(value.lower())
 
 
- def normalize(value):
+ def normalize(value: str) -> str:
  value = slug(value)
  if len(value) < MIN_TAG_LENGTH:
  value = ""
@@ -19,5 +19,5 @@ def normalize(value):
  return value
 
 
- def tags_list(value):
+ def tags_list(value: str) -> list:
  return list(set(slug(tag) for tag in value.split(",") if tag.strip()))
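Illustrative behaviour of these helpers inside an application context, assuming the default TAG_MIN_LENGTH/TAG_MAX_LENGTH settings:

    # Illustrative calls (an app context is needed because of the LocalProxy settings).
    slug("Open Data")                 # -> "open-data"
    tags_list("Open Data, climat, ")  # -> ["open-data", "climat"] (order may vary: built from a set)
    normalize("a")                    # -> "" when "a" is shorter than TAG_MIN_LENGTH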
udata/tests/api/test_reports_api.py CHANGED
@@ -1,5 +1,4 @@
  from flask import url_for
- from mongoengine.base.datastructures import LazyReference
 
  from udata.core.dataset.factories import DatasetFactory
  from udata.core.dataset.models import Dataset
@@ -9,8 +8,8 @@ from udata.core.reports.constants import (
  reports_reasons_translations,
  )
  from udata.core.reports.models import Report
- from udata.core.user.factories import UserFactory
- from udata.i18n import gettext as _
+ from udata.core.reuse.factories import ReuseFactory
+ from udata.core.user.factories import AdminFactory, UserFactory
 
  from . import APITestCase
 
@@ -100,6 +99,11 @@ class ReportsAPITest(APITestCase):
  reports[1].reload()
  self.assertIsNotNone(reports[1].subject_deleted_at)
 
+ # Should be logged as admin
+ response = self.get(url_for("api.reports"))
+ self.assert403(response)
+
+ self.login(AdminFactory())
  response = self.get(url_for("api.reports"))
  self.assert200(response)
 
@@ -119,3 +123,49 @@ class ReportsAPITest(APITestCase):
  self.assertEqual(REASON_SPAM, reports[1]["reason"])
  self.assertEqual(str(user.id), reports[1]["by"]["id"])
  self.assertIsNotNone(reports[1]["subject_deleted_at"])
+
+ def test_reports_api_list(self):
+ user = UserFactory()
+
+ spam_dataset = DatasetFactory.create(owner=user)
+ spam_reuse = ReuseFactory.create(owner=user)
+
+ Report(subject=spam_dataset, reason="spam").save()
+ Report(subject=spam_reuse, reason="spam").save()
+
+ # Should be logged as admin
+ response = self.get(url_for("api.reports"))
+ self.assert401(response)
+
+ self.login(AdminFactory())
+ response = self.get(url_for("api.reports"))
+ self.assert200(response)
+
+ payload = response.json
+ self.assertEqual(payload["total"], 2)
+ # Returned by order of creation by default
+ self.assertEqual(payload["data"][0]["subject"]["id"], str(spam_dataset.id))
+ self.assertEqual(
+ payload["data"][0]["self_api_url"],
+ url_for("api.report", report=payload["data"][0]["id"], _external=True),
+ )
+
+ self.assertEqual(payload["data"][1]["subject"]["id"], str(spam_reuse.id))
+
+ def test_reports_api_get(self):
+ user = UserFactory()
+
+ spam_dataset = DatasetFactory.create(owner=user)
+
+ report = Report(subject=spam_dataset, reason="spam").save()
+
+ # Should be logged as admin
+ response = self.get(url_for("api.report", report=report))
+ self.assert401(response)
+
+ self.login(AdminFactory())
+ response = self.get(url_for("api.report", report=report))
+ self.assert200(response)
+
+ payload = response.json
+ self.assertEqual(payload["subject"]["id"], str(spam_dataset.id))
udata/utils.py CHANGED
@@ -3,6 +3,7 @@ import math
  import re
  from datetime import date, datetime
  from math import ceil
+ from typing import Any
  from uuid import UUID, uuid4
  from xml.sax.saxutils import escape
 
@@ -120,7 +121,7 @@ def daterange_start(value):
  return result.replace(day=1, month=1)
 
 
- def daterange_end(value):
+ def daterange_end(value: date | datetime | str | None) -> date | None:
  """Parse a date range end boundary"""
  if not value:
  return None
@@ -143,7 +144,7 @@ def daterange_end(value):
  return result.replace(month=12, day=31)
 
 
- def to_naive_datetime(given_date):
+ def to_naive_datetime(given_date: Any) -> datetime:
  if isinstance(given_date, str):
  given_date = parse_dt(given_date)
  if isinstance(given_date, date) and not isinstance(given_date, datetime):
@@ -153,7 +154,7 @@ def to_naive_datetime(given_date):
  return given_date
 
 
- def to_iso(dt):
+ def to_iso(dt: date | datetime) -> str | None:
  """
  Format a date or datetime into an ISO-8601 string
 
@@ -165,7 +166,7 @@ def to_iso(dt):
  return to_iso_date(dt)
 
 
- def to_iso_date(dt):
+ def to_iso_date(dt: date | datetime) -> str | None:
  """
  Format a date or datetime into an ISO-8601 date string.
 
@@ -175,7 +176,7 @@ def to_iso_date(dt):
  return "{dt.year:04d}-{dt.month:02d}-{dt.day:02d}".format(dt=dt)
 
 
- def to_iso_datetime(dt):
+ def to_iso_datetime(dt: date | datetime) -> str | None:
  """
  Format a date or datetime into an ISO-8601 datetime string.
 
@@ -193,7 +194,7 @@ def to_iso_datetime(dt):
  return "T".join((date_str, time_str))
 
 
- def to_bool(value):
+ def to_bool(value: bool | str | int) -> bool:
  """
  Transform a value into a boolean with the following rules:
 
@@ -212,24 +213,24 @@ def to_bool(value):
  return False
 
 
- def clean_string(value):
+ def clean_string(value: str):
  """
  Clean an user input string (Prevent it from containing XSS)
  """
  return escape(value)
 
 
- def not_none_dict(d):
+ def not_none_dict(d: dict) -> dict:
  """Filter out None values from a dict"""
  return {k: v for k, v in d.items() if v is not None}
 
 
- def hash_url(url):
+ def hash_url(url: str) -> str | None:
  """Hash an URL to make it indexable"""
  return hashlib.sha1(url.encode("utf-8")).hexdigest() if url else None
 
 
- def recursive_get(obj, key):
+ def recursive_get(obj: Any, key: Any):
  """
  Get an attribute or a key recursively.
 
@@ -249,14 +250,14 @@ def recursive_get(obj, key):
  return recursive_get(value, parts) if parts else value
 
 
- def unique_string(length=UUID_LENGTH):
+ def unique_string(length: int = UUID_LENGTH) -> str:
  """Generate a unique string"""
  # We need a string at least as long as length
  string = str(uuid4()) * int(math.ceil(length / float(UUID_LENGTH)))
  return string[:length] if length else string
 
 
- def is_uuid(uuid_string, version=4):
+ def is_uuid(uuid_string: str, version: int = 4) -> bool:
  try:
  # If uuid_string is a valid hex code but not a valid uuid,
  # UUID() will still make a valide uuid out of it.
@@ -290,7 +291,7 @@ class UDataProvider(BaseProvider):
  Might be conributed to upstream Faker project
  """
 
- def unique_string(self, length=UUID_LENGTH):
+ def unique_string(self, length: int = UUID_LENGTH) -> str:
  """Generate a unique string"""
  return unique_string(length)
 
@@ -302,7 +303,7 @@ class UnicodeLoremProvider(LoremProvider):
  word_list = [w + "é" for w in LoremProvider.word_list]
 
 
- def safe_unicode(string):
+ def safe_unicode(string: bytes) -> str | None:
  """Safely transform any object into utf8 decoded str"""
  if string is None:
  return None
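A few illustrative calls for the newly annotated helpers (a sketch; the expected values assume the usual parsing rules of these functions, which are only partly visible in the hunks above):

    from datetime import datetime

    from udata.utils import daterange_end, to_bool, to_iso_date

    to_bool("true")                            # -> True
    to_bool(0)                                 # -> False
    daterange_end("2024")                      # -> date(2024, 12, 31), the end of the year
    to_iso_date(datetime(2024, 8, 1, 12, 30))  # -> "2024-08-01"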
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: udata
- Version: 9.1.2.dev30754
+ Version: 9.1.3
  Summary: Open data portal
  Home-page: https://github.com/opendatateam/udata
  Author: Opendata Team
@@ -39,7 +39,7 @@ Requires-Dist: celery ==5.3.1
  Requires-Dist: celerybeat-mongo ==0.2.0
  Requires-Dist: certifi ==2024.7.4
  Requires-Dist: cffi ==1.16.0
- Requires-Dist: chardet ==3.0.4
+ Requires-Dist: charset-normalizer ==3.3.2
  Requires-Dist: click ==8.1.2
  Requires-Dist: click-didyoumean ==0.3.1
  Requires-Dist: click-plugins ==1.1.1
@@ -96,9 +96,10 @@ Requires-Dist: rdflib ==6.0.0
  Requires-Dist: redis ==4.5.2
  Requires-Dist: referencing ==0.35.1
  Requires-Dist: regex ==2024.5.15
- Requires-Dist: requests ==2.24.0
+ Requires-Dist: requests ==2.32.3
  Requires-Dist: rpds-py ==0.19.0
  Requires-Dist: s3transfer ==0.6.2
+ Requires-Dist: sentry-sdk[flask] ==2.9.0
  Requires-Dist: six ==1.16.0
  Requires-Dist: speaklater ==1.3
  Requires-Dist: stringdist ==1.0.9
@@ -108,7 +109,7 @@ Requires-Dist: tzdata ==2024.1
  Requires-Dist: unidecode ==0.4.21
  Requires-Dist: uritools ==4.0.3
  Requires-Dist: urlextract ==0.14.0
- Requires-Dist: urllib3 ==1.25.11
+ Requires-Dist: urllib3 ==1.26.19
  Requires-Dist: vine ==5.1.0
  Requires-Dist: voluptuous ==0.11.7
  Requires-Dist: wcwidth ==0.2.13
@@ -137,7 +138,16 @@ It is collectively taken care of by members of the
 
  # Changelog
 
- ## Current (in progress)
+ ## 9.1.3 (2024-08-01)
+
+ - Adds latest `sentry-sdk[flask]` as an install dependency, and update Sentry logic to be able to send environment, app version and profiling/performance info [#3086](https://github.com/opendatateam/udata/pull/3086)
+ - Add report get endpoint and make report get and list admin only [#3115](https://github.com/opendatateam/udata/pull/3115)
+ - Fix the version of udata-fixtures used by `udata import-fixtures` [#3114](https://github.com/opendatateam/udata/pull/3114)
+ - Update to the version v2.0.0 of udata-fixtures (with the dataservices)
+ - Add type hints [#3111](https://github.com/opendatateam/udata/pull/3111)
+ - Make sure requests v2.32.3 is used everywhere consistently [#3116](https://github.com/opendatateam/udata/pull/3116)
+
+ ## 9.1.2 (2024-07-29)
 
  - Add a `archived` field for reuses [#3088](https://github.com/opendatateam/udata/pull/3088)
  - Add linter and formatter with `pyproject.toml` config, add lint and formatting step in CI, add pre-commit hook to lint and format, update docs and lint and format the code [#3085](https://github.com/opendatateam/udata/pull/3085)
@@ -149,6 +159,8 @@ It is collectively taken care of by members of the
  - Add a warning on harvest source deletion [#3098](https://github.com/opendatateam/udata/pull/3098)
  - Fix license in dataservices API (now returns ID instead of title) [#3097](https://github.com/opendatateam/udata/pull/3097)
  - Fix missing title on new dataset preview [#3100](https://github.com/opendatateam/udata/pull/3100)
+ - Fix the fixtures tests which was not running, and then was failing [#3105](https://github.com/opendatateam/udata/pull/3105)
+ - Refactoring of the fixtures, the `generate-fixtures` command has been renamed to `import-fixtures` [#3106](https://github.com/opendatateam/udata/pull/3106)
 
  ## 9.1.1 (2024-07-16)
 
@@ -1,9 +1,9 @@
  tasks/__init__.py,sha256=oZ9yTY3eyOXRbN18_83l1b4BpqjIBVqw-FqD1LFWGxo,8122
- tasks/helpers.py,sha256=0a9iXzVe2GC2f6ouFoXDfMGN2s1eu4urTUuM9CCTZP8,994
- udata/__init__.py,sha256=y6WWIlMmNt4ADXtwHIWm-Dzb-txi6Y8hxw0fqe5TJwU,101
- udata/api_fields.py,sha256=y9ZWNbIbG4HYIsfSypzE_QNGiHuTgnIBzONR5N45tm4,15437
+ tasks/helpers.py,sha256=70fS9tI_m0DTWmKx9Zl5-LG-nxdz_ZaPyvvsFkN2r48,1091
+ udata/__init__.py,sha256=kyLhL9038A_1or1SR2iGdVbubW4x0ZVhRxnaGE3Kq_U,97
+ udata/api_fields.py,sha256=P51s1QnBLQvtvfK3PI2hOhWaobP6Ok_MqAz5PhJayzk,15429
  udata/app.py,sha256=lMxCLveZyMmaimkryDjD-VpHBC5OMEgd95VszM84y-8,7284
- udata/assets.py,sha256=jGxFWVu6JvDLK1SXBZC-rY-DGkGxguF3LFikOklyzdE,645
+ udata/assets.py,sha256=H5Hrc2vnKM0IFLyWfLXmJ2Kj35w1i8W1D8Cgy8_cUj4,657
  udata/cors.py,sha256=QyhlcnkLo9SmJhgK3FRjLcD5YC-03bU514UVMAHfbSc,2995
  udata/entrypoints.py,sha256=mbAAUVT8ZenzSYdang2PbAwZcK1pENtA3axBmPRiWCw,2717
  udata/errors.py,sha256=E8W7b4PH7c5B85g_nsUMt8fHqMVpDFOZFkO6wMPl6bA,117
@@ -11,16 +11,16 @@ udata/factories.py,sha256=MoklZnU8iwNL25dm3JsoXhoQs1PQWSVYL1WvcUBtJqM,492
  udata/i18n.py,sha256=Q7UELAhKOW7DmUX8BjEMnqqhQxcR6d3ioSmj90A62cg,8992
  udata/mail.py,sha256=MShopArrtXyn8SnXtNbWUR0C4mTj1gI3a3Lf_1We-j4,2167
  udata/rdf.py,sha256=GDqnBuJd2KryJDpJjPuH0WpkZIPoOEa2uOACr5mNCik,13835
- udata/routing.py,sha256=fwtnA7mbV535Jpblm2urhUs2UBgsevYGNE711V8T5oI,7120
- udata/sentry.py,sha256=sLkVw-BF_ad4hOveDpnk6oFjLijGTAFS2STMbcg1w84,2678
- udata/settings.py,sha256=EUPPsPi3pKxnXSLxmKWhKtdBmtWfJl7torWpSWU9kbA,17817
+ udata/routing.py,sha256=x9WcpYikR45j3C_0Bi2Zoa2treSCW_oDYWMHwbdLAmE,7242
+ udata/sentry.py,sha256=ekcxqUSqxfM98TtvCsPaOoX5i2l6PEcYt7kb4l3od-Q,3223
+ udata/settings.py,sha256=ZcLUXKv-nRPjDSmb-vS1gVAT2xqChjdi2GMx1d5zpcw,17853
  udata/sitemap.py,sha256=oRRWoPI7ZsFFnUAOqGT1YuXFFKHBe8EcRnUCNHD7xjM,979
- udata/tags.py,sha256=a4o4LtddJR_nxmz9cFxkdtXPs8-_Uiz2T79aWE0Jhy8,594
+ udata/tags.py,sha256=ydq4uokd6bzdeGVSpEXASVtGvDfO2LfQs9mptvvKJCM,631
  udata/tasks.py,sha256=hLdmHV7ozeq803BRjU6X3DT3oVsZrV1beQILpbjsfQI,4978
  udata/terms.md,sha256=nFx978tUQ3vTEv6POykXaZvcQ5e_gcvmO4ZgcfbSWXo,187
  udata/tracking.py,sha256=WOcqA1RlHN8EPFuEc2kNau54mec4-pvi-wUFrMXevzg,345
  udata/uris.py,sha256=Nxy-kvj46FhRDNW7sebFI-X7nFJEP5ViwgGQvAHmBW0,3561
- udata/utils.py,sha256=PWpwUYwkbOL2_RsiYwEQf0Um3WTgBkJgYvn4Z92Or9Q,8340
+ udata/utils.py,sha256=AbPqkgtdjuQScOCoTcZl3eDiEmU0p-agr1lqvEJwaS0,8661
  udata/worker.py,sha256=K-Wafye5-uXP4kQlffRKws2J9YbJ6m6n2QjcVsY8Nsg,118
  udata/wsgi.py,sha256=MY8en9K9eDluvJYUxTdzqSDoYaDgCVZ69ZcUvxAvgqA,77
  udata/admin/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -42,17 +42,17 @@ udata/commands/__init__.py,sha256=Won_rW_hIU9TA3o4oNe6kI46l1fnDBM_oW0Hc1XS9F8,77
  udata/commands/cache.py,sha256=bLdrf_fCWFYX9ULlL2ADsZRwijkI4pArsJxfx24OivM,341
  udata/commands/db.py,sha256=64x0614tCUl2f-c6b9K_TcRMyydZCnmCWrz5WI6tUkg,14701
  udata/commands/dcat.py,sha256=L1BTywlHFBOkddZnKfWr8P4QjRMZksYCwhUNfJv5tkc,3409
- udata/commands/fixtures.py,sha256=qEBYuHQ-1nhhFhxsxHKAIEwWgJ0EO0zewOKijJwrR5Q,6122
+ udata/commands/fixtures.py,sha256=exRyCLhPZLtI6SQf7oXp_tIcz84MJ1soi0jdwODQXMc,8369
  udata/commands/images.py,sha256=0rVojLik5DYgJ6W4uNEfMP2g2QUU2V761tj3z6lo8no,2050
  udata/commands/info.py,sha256=A5WMo3_N_rlt3cySVJrZqKWrbIowX97ZLKMIFQE5178,1545
- udata/commands/init.py,sha256=rJcfAhGghl9FL1m8TO54w__Q1pjRwexlSBm04_xIvYM,1524
+ udata/commands/init.py,sha256=8CpH8MklzPkpxczs43lFM5ZNrHCJRbUtzHapgYNHs7M,1520
  udata/commands/purge.py,sha256=78kwiQV0Y63ulbAEkZCLNq1V1wCIekIEMdmW2HRREhU,1202
  udata/commands/serve.py,sha256=0-Uy_fRCYABsmJ8MFxpLZJyAl61TGCk7KsT8WiiexBc,2512
  udata/commands/static.py,sha256=OUYPAR1giaPk52DK-v-nQYUSx-YQ4cF7YXLt1t5wtBU,2199
  udata/commands/test.py,sha256=0snHTDolowQK-DmAKnhF_mBuDOBMApAbEc35GEiwH0M,893
  udata/commands/worker.py,sha256=bjXQGCwkbZxkcxLMPA2Lr0nkNjXLpGNDMkkQXjwBLPI,3976
  udata/commands/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- udata/commands/tests/fixtures.py,sha256=87Oycmn68CfOzmDKMmnvzmYfD_iF_Q2yAgSjf30bfPk,1728
+ udata/commands/tests/test_fixtures.py,sha256=7gZSyiqrn6G84tZ1koPkgnYwnrskmZaqjRTXEPmnA9s,3326
  udata/core/__init__.py,sha256=O7C9WWCXiLWnWPnPbFRszWhOmvRQiI4gD-5qkWvPGRo,385
  udata/core/owned.py,sha256=xcIg9fNndowuLs7BEkuxAsEdf6JApomU_Gl6L-SgTJI,4515
  udata/core/activity/__init__.py,sha256=OaiFyq7HB4xL4SuMPD1N8IFNpntwx9ZayVzelciOieI,298
@@ -87,7 +87,7 @@ udata/core/dataservices/permissions.py,sha256=98zM_R4v2ZtRubflB7ajaVQz-DVc-pZBMg
  udata/core/dataservices/rdf.py,sha256=l1vItR7jhaTxs5f6kU6MAR32uvRR233GDbp1hIUbP3k,4673
  udata/core/dataservices/tasks.py,sha256=3NEnsLssZANE97pc3hGnjGtXD2Z5VFguSyfBKE2UJ0g,970
  udata/core/dataset/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- udata/core/dataset/actions.py,sha256=6earjeEZmckcao0kzA7i27KyqyWUSmyBiCbttwS-GYw,1196
+ udata/core/dataset/actions.py,sha256=mX6xox0PiMrbcAPZ3VZsI26rfM-ciYfEXxN6sqqImKA,1222
  udata/core/dataset/activities.py,sha256=v8k1jwhdx62Z2ARZq8Q-x86OWSsBK99hRloPl74OCgA,1502
  udata/core/dataset/api.py,sha256=zmchWUnGPV-7iQ4QXta1nCvhtYAAijdRz9FuZst1BM0,28781
  udata/core/dataset/api_fields.py,sha256=ZF24FhKYe5jlV8jXG6YR0Hko9WOuV0446FAlLkEgAWE,17295
@@ -163,9 +163,9 @@ udata/core/post/permissions.py,sha256=uofU0TehhOGYyUoRXf3wuy816_D3xwMmaJbDvV336s
  udata/core/post/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  udata/core/post/tests/test_api.py,sha256=y4fXgjC0y9tqR4iOWHin9rADSPdo1BJjXJGilnqL23c,3946
  udata/core/reports/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- udata/core/reports/api.py,sha256=wrHJempfYupq0etLWM-RPq6-f3LqXWw3hulnoPHb5v4,1376
+ udata/core/reports/api.py,sha256=RzmsU3IWD0u8oVKSEDvxE2yO2vRXxVx_gJBwFN0DyJY,1680
  udata/core/reports/constants.py,sha256=LRZSX3unyqZeB4yQjK3ws_hGbJcXYk4bu1Rhnhi5DEs,1235
- udata/core/reports/models.py,sha256=qgWXjiwMJ5w5uDdjM9K-f-h4mb-dJ8Che83MmIJ7pNg,2474
+ udata/core/reports/models.py,sha256=AsW5p2ZIdR4c6vNzglEN7MX03It-t9u7ktOsVZqvzSs,2702
  udata/core/reuse/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  udata/core/reuse/activities.py,sha256=mAdHhqqpUF5zSh4e5AEo0J7alc3RflTbudDaKOzyTQw,1406
  udata/core/reuse/api.py,sha256=KjxcP1RdARFgRRJotp4pAwClmdLdH4JhhyjYmRzJ_y4,10232
@@ -175,12 +175,12 @@ udata/core/reuse/constants.py,sha256=JgDBrjOKSt9q0auv9rjzbGsch83H-Oi8YXAKeI5hO4o
  udata/core/reuse/csv.py,sha256=4TcQbk4kWN_HbnpEUfgfW9_x7ZqNDk5s4YuJ839p3EE,896
  udata/core/reuse/factories.py,sha256=GrQqYTIvwQrwkvJrbTr38-2faFW_PC99gn3yOVpgFec,850
  udata/core/reuse/forms.py,sha256=Fv2XfHt-zP7WpC8Z8qTlXFRlx8HHEd8nU28lUEA5MXk,1836
- udata/core/reuse/metrics.py,sha256=uh0CxErJ8OxrQvajCSfKvQRbhB9rXKzTZ1AhyEMQvP0,161
+ udata/core/reuse/metrics.py,sha256=sVh7BlW3OKRvFDHFyD4pPUV91jOOhj8qeWbBkLPn5Gg,176
  udata/core/reuse/models.py,sha256=3KSmNSOPuFRSJclBCxoXo9hg7DXIZTqZLRy5DepHA9Y,5861
- udata/core/reuse/permissions.py,sha256=V6C-JwVQcFA9AiRJtPt9yHxBv2Y5SpFxcYW_vRK0tHU,564
- udata/core/reuse/search.py,sha256=n17ihaxqRop-EnFPJkHflOySz6SDNCscgXOKdW3s9-s,3026
+ udata/core/reuse/permissions.py,sha256=j-ancS7gvLl5vJu0TNYqpYD-2So-UzoDE4IHLxRoMGg,621
+ udata/core/reuse/search.py,sha256=NkCRE8mGBdmlau0S9SsNPG0_zvB7MMNws64HGsq4yWM,3056
  udata/core/reuse/signals.py,sha256=nDrEUpYKN0AdYiEbrR0z3nzXzjaRcD8SAMutwIDsQPM,155
- udata/core/reuse/tasks.py,sha256=LJ_CQi_IhEkq_C3nQDXCRZkI72p5wOGmvImwyZvfbHY,1508
+ udata/core/reuse/tasks.py,sha256=oyi6SGNfCKRLLuS-tLUCs_jcfa43jOn4Wuzq65Vnj8A,1529
  udata/core/site/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  udata/core/site/api.py,sha256=GH8NPEoiScbaUBoSlF-SuZea60c8jfCcQ3782lJOIVI,6213
  udata/core/site/factories.py,sha256=O0nLFmjrFyemzcs-YwNukq5nqd383KBGrgL5bOQsGbA,378
@@ -272,7 +272,7 @@ udata/forms/__init__.py,sha256=OXNShgt05tADLYQ-cXPdB16qQtj2i48GxSsCVdR6cfI,843
  udata/forms/fields.py,sha256=9s8ywCD2LCtHnD5Z2fx9cxH6XI2nbHEj73kz17qZGHk,28576
  udata/forms/validators.py,sha256=CRgmB6oow5O8LDR45LajlJJ9HX3RBCI08fapoWMH1vo,2727
  udata/forms/widgets.py,sha256=XMVxBlQMGfb0nQvqwLMsAVcEdsBdaZGQZ82F81FdmlM,1332
- udata/frontend/__init__.py,sha256=nzXjOFmzbpseU476gYt0jkHCjkpvmHi42qESi1dJEr8,3555
+ udata/frontend/__init__.py,sha256=Aa5BX19jTAcjGiGKszPabbOju5thvA7XFz4aYvxwXQw,3567
  udata/frontend/csv.py,sha256=FAlIAfcuigo01D_hSRTnvZ7B-HMHXqn5TXeCm6sO230,8520
  udata/frontend/markdown.py,sha256=ppaiy_Mv6iNRjNqPjVquc9Wx7breY1mcsN41b4fM18w,4353
  udata/harvest/__init__.py,sha256=C4y5w4vGb_F9Opy62lzV3eHo4DkNyRgPCq-wsarPXiQ,28
@@ -348,7 +348,7 @@ udata/migrations/2024-01-29-fix-reuse-and-dataset-with-private-None.py,sha256=9L
  udata/migrations/2024-03-22-migrate-activity-kwargs-to-extras.py,sha256=ucoXSqZlGToMCNiba0pRw_WoYb-lSBKvnbBLe-YVVrs,402
  udata/migrations/2024-06-11-fix-reuse-datasets-references.py,sha256=4xL5_YFfUsbWFuSqXVzhkbzns20m5AclFfu0cdXhyk4,787
  udata/migrations/__init__.py,sha256=RBCBDaTlLjuMs_Qzwji6Z6T4r7FCGXhESKoxQbT5qAA,11221
- udata/models/__init__.py,sha256=4a93jdfy9mkZlbZQLbwaUo53bc_pGGkleczbafmOpwk,1360
+ udata/models/__init__.py,sha256=txbZwa-lRG3mq99eQ9E5YcFWiNUdjDVSyJJvlqUMFfs,1413
  udata/mongo/__init__.py,sha256=y4Rv-kq3o_kcEulcNpePLzocXPBNpx3Jd82G-VZPaMc,1421
  udata/mongo/badges_field.py,sha256=UmSaQkiOFtIb116GAT2B0OE6ypOrq8Jx7GdULEr05LU,985
  udata/mongo/datetime_fields.py,sha256=ZIyyl7ltMMY9yQkEK3YsCGO3ZzSF0ixWPj12RE6Rmas,1894
@@ -607,7 +607,7 @@ udata/tests/api/test_fields.py,sha256=OW85Z5MES5HeWOpapeem8OvR1cIcrqW-xMWpdZO4LZ
  udata/tests/api/test_follow_api.py,sha256=fccgVNfcqET221PPS3p7qzb9hpvbBBUGhV-l4UeOpyk,3352
  udata/tests/api/test_me_api.py,sha256=ZJKGH9fFv-4cSGcYAGd6IJA_PwPjVGIqWNy_DhFA8ms,13827
  udata/tests/api/test_organizations_api.py,sha256=5hBs7-gsMfy4IFsrLYyUMGyxhtCs71K4QvPFAXyDsz4,35049
- udata/tests/api/test_reports_api.py,sha256=4BHpCxDOkmbxe2xd2qCd0xXUjdAqgom7PP9117V8Bds,4515
+ udata/tests/api/test_reports_api.py,sha256=fCSz9NwMXBs6cxdXBVVI6y564AtovmZYw3xkgxQ9KE8,6217
  udata/tests/api/test_reuses_api.py,sha256=v9RJcJ_fdR8dgnq6H7j58_7n83BhoFJJ9YOd7CJwsyw,16769
  udata/tests/api/test_swagger.py,sha256=eE6La9qdTYTIUFevRVPJgtj17Jq_8uOlsDwzCNR0LL8,760
  udata/tests/api/test_tags_api.py,sha256=MgSmKZeQ8L-fO-LwOGlDm_YN7lmEyvTpkDgaPiENHw8,2429
@@ -636,7 +636,6 @@ udata/tests/dataset/test_dataset_tasks.py,sha256=rSafDjCiOyEb2_tVUDN4wqGylF6Yf9V
  udata/tests/dataset/test_resource_preview.py,sha256=fp9mSL7unhyM66GR0gwhgX3OGQ4TJt7G9xU-CjsL3HI,3908
  udata/tests/features/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  udata/tests/features/territories/__init__.py,sha256=gMD73RL-ymcWvGPDPM0aPxz7WAfd1VEDL8YHRI7HT0Q,956
- udata/tests/features/territories/commands.py,sha256=LMestg7oCC52GSHmrpD4l10MRgquLnS_TsjPzy72enQ,240
  udata/tests/features/territories/test_territories_api.py,sha256=vhZ1HK-booNbRGpUww4pN9XPp39rgaaI_4sbtJgHApk,7576
  udata/tests/forms/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  udata/tests/forms/test_basic_fields.py,sha256=RPVdW3fQF6hhxpqF6FPr6GEpDoFO7-hAptDNXvajA_Q,2096
@@ -698,9 +697,9 @@ udata/translations/pt/LC_MESSAGES/udata.mo,sha256=WpPzAqVd2Onv_kz45ULUySKPLrpjcc
  udata/translations/pt/LC_MESSAGES/udata.po,sha256=18Op9RUITewoDRewlOdYzzq6gjsf1lsvepACV1d7zxs,44976
  udata/translations/sr/LC_MESSAGES/udata.mo,sha256=NIYRNhVoETZUvIvWm3cCW7DtMBAnS2vXzZjMF5ZzD_c,28500
  udata/translations/sr/LC_MESSAGES/udata.po,sha256=rQB-4V4WJ7bURj6g2j653vItr5TMHadcLQxec7_fDmg,51545
- udata-9.1.2.dev30754.dist-info/LICENSE,sha256=V8j_M8nAz8PvAOZQocyRDX7keai8UJ9skgmnwqETmdY,34520
- udata-9.1.2.dev30754.dist-info/METADATA,sha256=D06WZQsv3VzIZVH3HWE59CpCc_YKL-_bcq6nJ1OeISw,127267
- udata-9.1.2.dev30754.dist-info/WHEEL,sha256=DZajD4pwLWue70CAfc7YaxT1wLUciNBvN_TTcvXpltE,110
- udata-9.1.2.dev30754.dist-info/entry_points.txt,sha256=3SKiqVy4HUqxf6iWspgMqH8d88Htk6KoLbG1BU-UddQ,451
- udata-9.1.2.dev30754.dist-info/top_level.txt,sha256=39OCg-VWFWOq4gCKnjKNu-s3OwFlZIu_dVH8Gl6ndHw,12
- udata-9.1.2.dev30754.dist-info/RECORD,,
+ udata-9.1.3.dist-info/LICENSE,sha256=V8j_M8nAz8PvAOZQocyRDX7keai8UJ9skgmnwqETmdY,34520
+ udata-9.1.3.dist-info/METADATA,sha256=wwlRJ68jI7NpUJ5YtwXgXrHXj_B8DfG1_kJbWxymrs4,128344
+ udata-9.1.3.dist-info/WHEEL,sha256=DZajD4pwLWue70CAfc7YaxT1wLUciNBvN_TTcvXpltE,110
+ udata-9.1.3.dist-info/entry_points.txt,sha256=3SKiqVy4HUqxf6iWspgMqH8d88Htk6KoLbG1BU-UddQ,451
+ udata-9.1.3.dist-info/top_level.txt,sha256=39OCg-VWFWOq4gCKnjKNu-s3OwFlZIu_dVH8Gl6ndHw,12
+ udata-9.1.3.dist-info/RECORD,,
udata/commands/tests/fixtures.py DELETED
@@ -1,44 +0,0 @@
- import json
- from tempfile import NamedTemporaryFile
-
- from udata import models
- from udata.commands.fixtures import generate_fixtures
- from udata.tests import DBTestMixin, TestCase
-
-
- class FixturesTest(DBTestMixin, TestCase):
- def test_generate_fixtures(self):
- with NamedTemporaryFile(delete=True) as fixtures_fd:
- json_fixtures = [
- {
- "resources": [
- {
- "description": "test description",
- "filetype": "remote",
- "title": "test",
- "url": "https://dev.local",
- }
- ],
- "dataset": {
- "description": "### Le Test",
- "frequency": "punctual",
- "tags": ["action-publique"],
- "title": "test",
- },
- "organization": {"description": "test description", "name": "Test"},
- "reuses": [
- {
- "description": "test description",
- "title": "test",
- "url": "https://dev.local",
- }
- ],
- }
- ]
- with open(fixtures_fd, "w") as f:
- json.dump(json_fixtures, f)
- generate_fixtures(fixtures_fd)
- self.assertEqual(models.Organization.objects.count(), 1)
- self.assertEqual(models.Dataset.objects.count(), 1)
- self.assertEqual(models.Reuse.objects.count(), 1)
- self.assertEqual(models.User.objects.count(), 1)
udata/tests/features/territories/commands.py DELETED
@@ -1,9 +0,0 @@
- from udata.tests import TestCase
-
-
- class CommandsTest(TestCase):
- def test_import_commands(self):
- try:
- from udata.features.territories import commands # noqa
- except ImportError as e:
- self.fail(e)