udata 13.0.1.dev12__py3-none-any.whl → 14.0.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.



Files changed (77)
  1. udata/api/__init__.py +2 -8
  2. udata/app.py +12 -30
  3. udata/auth/forms.py +6 -4
  4. udata/commands/__init__.py +2 -14
  5. udata/commands/db.py +13 -25
  6. udata/commands/info.py +0 -16
  7. udata/core/avatars/api.py +43 -0
  8. udata/core/avatars/test_avatar_api.py +30 -0
  9. udata/core/dataservices/models.py +14 -2
  10. udata/core/dataset/tasks.py +36 -8
  11. udata/core/metrics/__init__.py +0 -6
  12. udata/core/site/models.py +2 -6
  13. udata/core/spatial/commands.py +2 -4
  14. udata/core/spatial/models.py +0 -10
  15. udata/core/spatial/tests/test_api.py +1 -36
  16. udata/core/user/models.py +10 -1
  17. udata/cors.py +2 -5
  18. udata/db/migrations.py +279 -0
  19. udata/frontend/__init__.py +3 -122
  20. udata/harvest/actions.py +3 -8
  21. udata/harvest/api.py +5 -14
  22. udata/harvest/backends/__init__.py +21 -9
  23. udata/harvest/backends/base.py +2 -2
  24. udata/harvest/backends/ckan/harvesters.py +2 -0
  25. udata/harvest/backends/dcat.py +3 -0
  26. udata/harvest/backends/maaf.py +1 -0
  27. udata/harvest/commands.py +6 -4
  28. udata/harvest/forms.py +9 -6
  29. udata/harvest/tasks.py +3 -5
  30. udata/harvest/tests/ckan/test_ckan_backend.py +2 -2
  31. udata/harvest/tests/ckan/test_ckan_backend_errors.py +1 -1
  32. udata/harvest/tests/ckan/test_ckan_backend_filters.py +1 -1
  33. udata/harvest/tests/ckan/test_dkan_backend.py +1 -1
  34. udata/harvest/tests/dcat/udata.xml +6 -6
  35. udata/harvest/tests/factories.py +1 -1
  36. udata/harvest/tests/test_actions.py +5 -3
  37. udata/harvest/tests/test_api.py +2 -1
  38. udata/harvest/tests/test_base_backend.py +2 -0
  39. udata/harvest/tests/test_dcat_backend.py +3 -3
  40. udata/i18n.py +14 -273
  41. udata/migrations/2025-11-13-delete-user-email-index.py +25 -0
  42. udata/models/__init__.py +0 -8
  43. udata/routing.py +0 -8
  44. udata/sentry.py +4 -10
  45. udata/settings.py +16 -17
  46. udata/tasks.py +3 -3
  47. udata/tests/__init__.py +1 -10
  48. udata/tests/api/test_dataservices_api.py +29 -1
  49. udata/tests/api/test_datasets_api.py +1 -2
  50. udata/tests/api/test_user_api.py +12 -0
  51. udata/tests/apiv2/test_topics.py +1 -1
  52. udata/tests/dataset/test_resource_preview.py +0 -1
  53. udata/tests/helpers.py +12 -0
  54. udata/tests/test_cors.py +1 -1
  55. udata/tests/test_migrations.py +181 -481
  56. udata/utils.py +5 -0
  57. {udata-13.0.1.dev12.dist-info → udata-14.0.0.dist-info}/METADATA +1 -2
  58. {udata-13.0.1.dev12.dist-info → udata-14.0.0.dist-info}/RECORD +62 -73
  59. {udata-13.0.1.dev12.dist-info → udata-14.0.0.dist-info}/entry_points.txt +3 -5
  60. udata/core/followers/views.py +0 -15
  61. udata/entrypoints.py +0 -93
  62. udata/features/identicon/__init__.py +0 -0
  63. udata/features/identicon/api.py +0 -13
  64. udata/features/identicon/backends.py +0 -131
  65. udata/features/identicon/tests/__init__.py +0 -0
  66. udata/features/identicon/tests/test_backends.py +0 -18
  67. udata/features/territories/__init__.py +0 -49
  68. udata/features/territories/api.py +0 -25
  69. udata/features/territories/models.py +0 -51
  70. udata/migrations/__init__.py +0 -367
  71. udata/tests/cli/test_db_cli.py +0 -68
  72. udata/tests/features/territories/__init__.py +0 -20
  73. udata/tests/features/territories/test_territories_api.py +0 -185
  74. udata/tests/frontend/test_hooks.py +0 -149
  75. {udata-13.0.1.dev12.dist-info → udata-14.0.0.dist-info}/WHEEL +0 -0
  76. {udata-13.0.1.dev12.dist-info → udata-14.0.0.dist-info}/licenses/LICENSE +0 -0
  77. {udata-13.0.1.dev12.dist-info → udata-14.0.0.dist-info}/top_level.txt +0 -0
udata/api/__init__.py CHANGED
@@ -1,8 +1,6 @@
- import inspect
  import logging
  import urllib.parse
  from functools import wraps
- from importlib import import_module

  import mongoengine
  from flask import (
@@ -19,7 +17,7 @@ from flask_restx import Api, Resource
  from flask_restx.reqparse import RequestParser
  from flask_storage import UnauthorizedFileType

- from udata import entrypoints, tracking
+ from udata import tracking
  from udata.app import csrf
  from udata.auth import Permission, PermissionDenied, RoleNeed, current_user, login_user
  from udata.i18n import get_locale
@@ -358,13 +356,9 @@ def init_app(app):
      import udata.core.contact_point.api # noqa
      import udata.features.transfer.api # noqa
      import udata.features.notifications.api # noqa
-     import udata.features.identicon.api # noqa
-     import udata.features.territories.api # noqa
+     import udata.core.avatars.api # noqa
      import udata.harvest.api # noqa

-     for module in entrypoints.get_enabled("udata.apis", app).values():
-         module if inspect.ismodule(module) else import_module(module)
-
      # api.init_app(app)
      app.register_blueprint(apiv1_blueprint)
      app.register_blueprint(apiv2_blueprint)

udata/app.py CHANGED
@@ -1,8 +1,8 @@
  import datetime
- import importlib
  import logging
  import os
  import types
+ from importlib.metadata import entry_points
  from os.path import abspath, dirname, exists, isfile, join

  import bson
@@ -24,7 +24,7 @@ from speaklater import is_lazy_string
  from werkzeug.exceptions import NotFound
  from werkzeug.middleware.proxy_fix import ProxyFix

- from udata import cors, entrypoints
+ from udata import cors

  APP_NAME = __name__.split(".")[0]
  ROOT_DIR = abspath(join(dirname(__file__)))
@@ -148,8 +148,6 @@ def init_logging(app):
      debug = app.debug or app.config.get("TESTING")
      log_level = logging.DEBUG if debug else logging.WARNING
      app.logger.setLevel(log_level)
-     for name in entrypoints.get_roots():  # Entrypoints loggers
-         logging.getLogger(name).setLevel(log_level)
      for logger in VERBOSE_LOGGERS:
          logging.getLogger(logger).setLevel(logging.WARNING)
      return app
@@ -168,20 +166,6 @@ def create_app(config="udata.settings.Defaults", override=None, init_logging=ini
      if override:
          app.config.from_object(override)

-     # Loads defaults from plugins
-     for pkg in entrypoints.get_roots(app):
-         if pkg == "udata":
-             continue  # Defaults are already loaded
-         module = "{}.settings".format(pkg)
-         try:
-             settings = importlib.import_module(module)
-         except ImportError:
-             continue
-         for key, default in settings.__dict__.items():
-             if key.startswith("__"):
-                 continue
-             app.config.setdefault(key, default)
-
      app.json_encoder = UDataJsonEncoder

      # `ujson` doesn't support `cls` parameter https://github.com/ultrajson/ultrajson/issues/124
@@ -200,12 +184,21 @@
  def standalone(app):
      """Factory for an all in one application"""
      from udata import api, core, frontend
+     from udata.features import notifications

      core.init_app(app)
      frontend.init_app(app)
      api.init_app(app)
+     notifications.init_app(app)

-     register_features(app)
+     eps = entry_points(group="udata.plugins")
+     for ep in eps:
+         plugin_module = ep.load()
+
+         if hasattr(plugin_module, "init_app"):
+             plugin_module.init_app(app)
+         else:
+             log.error(f"Plugin {ep.name} ({ep.value}) doesn't expose an `init_app()` function.")

      return app

@@ -215,7 +208,6 @@ def register_extensions(app):
          auth,
          i18n,
          mail,
-         models,
          mongo,
          notifications, # noqa
          routing,
@@ -229,7 +221,6 @@
      tasks.init_app(app)
      i18n.init_app(app)
      mongo.init_app(app)
-     models.init_app(app)
      routing.init_app(app)
      auth.init_app(app)
      cache.init_app(app)
@@ -278,12 +269,3 @@ def page_not_found(e: NotFound):
          return render_template("404.html", homepage_url=homepage_url()), 404

      return jsonify({"error": e.description, "status": 404}), 404
-
-
- def register_features(app):
-     from udata.features import notifications
-
-     notifications.init_app(app)
-
-     for ep in entrypoints.get_enabled("udata.plugins", app).values():
-         ep.init_app(app)
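
With this refactor, `standalone()` discovers plugins through standard `importlib.metadata` entry points in the `udata.plugins` group and calls their `init_app(app)` hook directly. A minimal sketch of what a plugin could look like under the new contract (the package and setting names below are hypothetical, not part of this diff):

    # udata_myplugin/__init__.py (hypothetical third-party plugin)
    #
    # Declared in the plugin's pyproject.toml:
    #   [project.entry-points."udata.plugins"]
    #   myplugin = "udata_myplugin"

    def init_app(app):
        # Called once by udata.app.standalone() after core, frontend and API setup.
        app.config.setdefault("MYPLUGIN_GREETING", "hello")

Plugins that do not expose an `init_app()` function are no longer silently ignored; they trigger the `log.error()` call shown above.
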
udata/auth/forms.py CHANGED
@@ -54,13 +54,15 @@ class ExtendedRegisterForm(WithCaptcha, RegisterForm):
      )

      def validate(self, **kwargs):
-         # no register allowed when read only mode is on
-         if not super().validate(**kwargs) or current_app.config.get("READ_ONLY_MODE"):
+         if current_app.config.get("READ_ONLY_MODE"):
              return False

          if not self.validate_captcha():
              return False

+         if not super().validate(**kwargs):
+             return False
+
          return True


@@ -91,10 +93,10 @@ class ExtendedResetPasswordForm(ResetPasswordForm):

  class ExtendedForgotPasswordForm(WithCaptcha, ForgotPasswordForm):
      def validate(self, **kwargs):
-         if not super().validate(**kwargs):
+         if not self.validate_captcha():
              return False

-         if not self.validate_captcha():
+         if not super().validate(**kwargs):
              return False

          return True

udata/commands/__init__.py CHANGED
@@ -4,12 +4,10 @@ import sys
  from glob import iglob

  import click
- import pkg_resources
  from flask.cli import FlaskGroup, ScriptInfo, shell_command

- from udata import entrypoints
  from udata.app import VERBOSE_LOGGERS, create_app, standalone
- from udata.utils import safe_unicode
+ from udata.utils import get_udata_version, safe_unicode

  log = logging.getLogger(__name__)

@@ -149,11 +147,6 @@ def init_logging(app):
      logger.handlers = []
      logger.addHandler(handler)

-     for name in entrypoints.get_roots():  # Entrypoints loggers
-         logger = logging.getLogger(name)
-         logger.setLevel(log_level)
-         logger.handlers = []
-
      app.logger.setLevel(log_level)
      app.logger.handlers = []
      app.logger.addHandler(handler)
@@ -208,7 +201,6 @@ class UdataGroup(FlaskGroup):
          Load udata commands from:
          - `udata.commands.*` module
          - known internal modules with commands
-         - plugins exporting a `udata.commands` entrypoint
          """
          if self._udata_commands_loaded:
              return
@@ -229,10 +221,6 @@
              except Exception as e:
                  error("Unable to import {0}".format(module), e)

-         # Load commands from entry points for enabled plugins
-         app = ctx.ensure_object(ScriptInfo).load_app()
-         entrypoints.get_enabled("udata.commands", app)
-
          # Ensure loading happens once
          self._udata_commands_loaded = False

@@ -253,7 +241,7 @@
  def print_version(ctx, param, value):
      if not value or ctx.resilient_parsing:
          return
-     click.echo(pkg_resources.get_distribution("udata").version)
+     click.echo(get_udata_version())
      ctx.exit()

udata/commands/db.py CHANGED
@@ -11,9 +11,9 @@ import click
  import mongoengine
  from bson import DBRef

- from udata import migrations
  from udata.commands import cli, cyan, echo, green, magenta, red, white, yellow
  from udata.core.dataset.models import Dataset, Resource
+ from udata.db import migrations
  from udata.mongo.document import get_all_models

  # Date format used to for display
@@ -31,8 +31,7 @@ def grp():
  def log_status(migration, status):
      """Properly display a migration status line"""
      name = os.path.splitext(migration.filename)[0]
-     display = ":".join((migration.plugin, name)) + " "
-     log.info("%s [%s]", "{:.<70}".format(display), status)
+     echo("{:.<70} [{}]".format(name + " ", status))


  def status_label(record):
@@ -78,11 +77,6 @@ def migrate(record, dry_run=False):
          log_status(migration, status)
          try:
              output = migration.execute(recordonly=record, dryrun=dry_run)
-         except migrations.RollbackError as re:
-             format_output(re.migrate_exc.output, False)
-             log_status(migration, red("Rollback"))
-             format_output(re.output, not re.exc)
-             success = False
          except migrations.MigrationError as me:
              format_output(me.output, False, traceback=me.traceback)
              success = False
@@ -92,35 +86,29 @@


  @grp.command()
- @click.argument("plugin_or_specs")
- @click.argument("filename", default=None, required=False, metavar="[FILENAME]")
- def unrecord(plugin_or_specs, filename):
+ @click.argument("filename")
+ def unrecord(filename):
      """
      Remove a database migration record.

-     \b
-     A record can be expressed with the following syntaxes:
-     - plugin filename
-     - plugin filename.js
-     - plugin:filename
-     - plugin:fliename.js
+     FILENAME is the migration filename (e.g., 2024-01-01-my-migration.py)
      """
-     migration = migrations.get(plugin_or_specs, filename)
-     removed = migration.unrecord()
+     removed = migrations.unrecord(filename)
      if removed:
-         log.info("Removed migration %s", migration.label)
+         echo("Removed migration {}".format(filename))
      else:
-         log.error("Migration not found %s", migration.label)
+         echo(red("Migration not found {}".format(filename)))


  @grp.command()
- @click.argument("plugin_or_specs")
- @click.argument("filename", default=None, required=False, metavar="[FILENAME]")
- def info(plugin_or_specs, filename):
+ @click.argument("filename")
+ def info(filename):
      """
      Display detailed info about a migration
+
+     FILENAME is the migration filename (e.g., 2024-01-01-my-migration.py)
      """
-     migration = migrations.get(plugin_or_specs, filename)
+     migration = migrations.get(filename)
      log_status(migration, status_label(migration.record))
      try:
          echo(migration.module.__doc__)
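
In practice the migration commands now take a bare migration filename instead of a `plugin[:filename]` spec, so invocations presumably look like `udata db info 2025-11-13-delete-user-email-index.py` or `udata db unrecord 2025-11-13-delete-user-email-index.py` (filename taken from the new migration listed above; the exact CLI entry point depends on your installation).
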
udata/commands/info.py CHANGED
@@ -3,9 +3,7 @@ import logging
  from click import echo
  from flask import current_app

- from udata import entrypoints
  from udata.commands import KO, OK, cli, green, red, white
- from udata.features.identicon.backends import get_config as avatar_config

  log = logging.getLogger(__name__)

@@ -35,17 +33,3 @@
          if key.startswith("__") or not key.isupper():
              continue
          echo("{0}: {1}".format(white(key), current_app.config[key]))
-
-
- @grp.command()
- def plugins():
-     """Display some details about the local plugins"""
-     plugins = current_app.config["PLUGINS"]
-     for name, description in entrypoints.ENTRYPOINTS.items():
-         echo("{0} ({1})".format(white(description), name))
-         if name == "udata.avatars":
-             actives = [avatar_config("provider")]
-         else:
-             actives = plugins
-         for ep in sorted(entrypoints.iter_all(name), key=by_name):
-             echo("> {0}: {1}".format(ep.name, is_active(ep, actives)))

udata/core/avatars/api.py ADDED
@@ -0,0 +1,43 @@
+ import hashlib
+ import io
+
+ import pydenticon
+ from flask import current_app, send_file
+
+ from udata.api import API, api
+ from udata.app import cache
+
+ ns = api.namespace("avatars", "Avatars")
+
+
+ @cache.memoize()
+ def generate_pydenticon(identifier, size):
+     """
+     Use pydenticon to generate an identicon image.
+     All parameters are extracted from configuration.
+     """
+     blocks_size = current_app.config["AVATAR_INTERNAL_SIZE"]
+     foreground = current_app.config["AVATAR_INTERNAL_FOREGROUND"]
+     background = current_app.config["AVATAR_INTERNAL_BACKGROUND"]
+     generator = pydenticon.Generator(
+         blocks_size, blocks_size, digest=hashlib.sha1, foreground=foreground, background=background
+     )
+
+     # Pydenticon adds padding to the size and as a consequence
+     # we need to compute the size without the padding
+     padding = int(round(current_app.config["AVATAR_INTERNAL_PADDING"] * size / 100.0))
+     size = size - 2 * padding
+     padding = (padding,) * 4
+     return generator.generate(identifier, size, size, padding=padding, output_format="png")
+
+
+ @ns.route("/<identifier>/<int:size>/", endpoint="avatar")
+ class IdenticonAPI(API):
+     @api.doc("avatars")
+     def get(self, identifier, size):
+         """Get a deterministic avatar given an identifier at a given size"""
+         identicon = generate_pydenticon(identifier, size)
+         response = send_file(io.BytesIO(identicon), mimetype="image/png")
+         etag = hashlib.sha1(identicon).hexdigest()
+         response.set_etag(etag)
+         return response
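
This endpoint replaces the removed `udata.features.identicon` API. Inside an application context it is reversed through the `api.avatar` endpoint name used by the new tests; a small illustrative snippet (the identifier value is arbitrary, and the API v1 URL prefix is an assumption not shown in this diff):

    from flask import url_for

    # Typically resolves to something like "/api/1/avatars/alice/32/"
    url = url_for("api.avatar", identifier="alice", size=32)

The response is a streamed PNG with an ETag derived from the image bytes, so the same identifier keeps returning a byte-identical avatar (see the tests below).
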
udata/core/avatars/test_avatar_api.py ADDED
@@ -0,0 +1,30 @@
+ from flask import url_for
+
+ from udata.tests.api import PytestOnlyAPITestCase
+ from udata.tests.helpers import assert200
+ from udata.utils import faker
+
+
+ def assert_stream_equal(response1, response2):
+     __tracebackhide__ = True
+     stream1 = list(response1.iter_encoded())
+     stream2 = list(response2.iter_encoded())
+     assert stream1 == stream2
+
+
+ class InternalBackendTest(PytestOnlyAPITestCase):
+     def test_base_rendering(self):
+         response = self.get(url_for("api.avatar", identifier=faker.word(), size=32))
+
+         assert200(response)
+         assert response.mimetype == "image/png"
+         assert response.is_streamed
+         etag, weak = response.get_etag()
+         assert etag is not None
+
+     def test_render_twice_the_same(self):
+         identifier = faker.word()
+         stream_a = self.get(url_for("api.avatar", identifier=identifier, size=32))
+         stream_b = self.get(url_for("api.avatar", identifier=identifier, size=32))
+
+         assert_stream_equal(stream_a, stream_b)
udata/core/dataservices/models.py CHANGED
@@ -130,7 +130,7 @@ def filter_by_topic(base_query, filter_value):
      try:
          topic = Topic.objects.get(id=filter_value)
      except Topic.DoesNotExist:
-         pass
+         return base_query
      else:
          return base_query.filter(
              id__in=[
@@ -140,11 +140,23 @@
          )


+ def filter_by_reuse(base_query, filter_value):
+     from udata.core.reuse.models import Reuse
+
+     try:
+         reuse = Reuse.objects.get(id=filter_value)
+     except Reuse.DoesNotExist:
+         return base_query
+     else:
+         return base_query.filter(id__in=[dataservice.id for dataservice in reuse.dataservices])
+
+
  @generate_fields(
      searchable=True,
      nested_filters={"organization_badge": "organization.badges"},
      standalone_filters=[
-         {"key": "topic", "constraints": "objectid", "query": filter_by_topic, "type": str}
+         {"key": "topic", "constraints": ["objectid"], "query": filter_by_topic, "type": str},
+         {"key": "reuse", "constraints": ["objectid"], "query": filter_by_reuse, "type": str},
      ],
      additional_sorts=[
          {"key": "followers", "value": "metrics.followers"},
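
The new `reuse` standalone filter mirrors the existing `topic` one, and an unknown id now returns the base query untouched instead of falling through. A short sketch of how the helper composes (the model name `Dataservice` and the id value are assumptions for illustration):

    from udata.core.dataservices.models import Dataservice, filter_by_reuse

    # Narrow a queryset to the dataservices referenced by a given reuse;
    # a missing reuse leaves the queryset unchanged.
    query = filter_by_reuse(Dataservice.objects, "5f1c0d11a7b5c8000c1b2a3d")
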
udata/core/dataset/tasks.py CHANGED
@@ -1,6 +1,6 @@
  import collections
  import os
- from datetime import datetime
+ from datetime import date, datetime
  from tempfile import NamedTemporaryFile

  from celery.utils.log import get_task_logger
@@ -17,6 +17,7 @@ from udata.core.organization.constants import CERTIFIED, PUBLIC_SERVICE
  from udata.core.organization.models import Organization
  from udata.harvest.models import HarvestJob
  from udata.models import Activity, Discussion, Follow, TopicElement, Transfer, db
+ from udata.storage.s3 import store_bytes
  from udata.tasks import job

  from .models import Checksum, CommunityResource, Dataset, Resource
@@ -90,12 +91,14 @@ def get_queryset(model_cls):
      return model_cls.objects.filter(**params).no_cache()


+ def get_resource_for_csv_export_model(model, dataset):
+     for resource in dataset.resources:
+         if resource.extras.get("csv-export:model", "") == model:
+             return resource
+
+
  def get_or_create_resource(r_info, model, dataset):
-     resource = None
-     for r in dataset.resources:
-         if r.extras.get("csv-export:model", "") == model:
-             resource = r
-             break
+     resource = get_resource_for_csv_export_model(model, dataset)
      if resource:
          for k, v in r_info.items():
              setattr(resource, k, v)
@@ -126,11 +129,16 @@ def store_resource(csvfile, model, dataset):
      return get_or_create_resource(r_info, model, dataset)


- def export_csv_for_model(model, dataset):
+ def export_csv_for_model(model, dataset, replace: bool = False):
      model_cls = getattr(udata_models, model.capitalize(), None)
      if not model_cls:
          log.error("Unknow model %s" % model)
          return
+
+     fs_filename_to_remove = None
+     if existing_resource := get_resource_for_csv_export_model(model, dataset):
+         fs_filename_to_remove = existing_resource.fs_filename
+
      queryset = get_queryset(model_cls)
      adapter = csv.get_adapter(model_cls)
      if not adapter:
@@ -156,6 +164,10 @@ def export_csv_for_model(model, dataset):
          else:
              dataset.last_modified_internal = datetime.utcnow()
          dataset.save()
+         # remove previous catalog if exists and replace is True
+         if replace and fs_filename_to_remove:
+             storages.resources.delete(fs_filename_to_remove)
+         return resource
      finally:
          csvfile.close()
          os.unlink(csvfile.name)
@@ -184,7 +196,23 @@ def export_csv(self, model=None):

      models = (model,) if model else ALLOWED_MODELS
      for model in models:
-         export_csv_for_model(model, dataset)
+         resource = export_csv_for_model(model, dataset, replace=True)
+
+         # If we are the first day of the month, archive today catalogs
+         if (
+             current_app.config["EXPORT_CSV_ARCHIVE_S3_BUCKET"]
+             and resource
+             and date.today().day == 1
+         ):
+             log.info(
+                 f"Archiving {model} csv catalog on {current_app.config['EXPORT_CSV_ARCHIVE_S3_BUCKET']} bucket"
+             )
+             with storages.resources.open(resource.fs_filename, "rb") as f:
+                 store_bytes(
+                     bucket=current_app.config["EXPORT_CSV_ARCHIVE_S3_BUCKET"],
+                     filename=f"{current_app.config['EXPORT_CSV_ARCHIVE_S3_FILENAME_PREFIX']}{resource.title}",
+                     bytes=f.read(),
+                 )


  @job("bind-tabular-dataservice")
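
The archiving branch relies on two settings referenced in the hunk above, presumably defined in `udata/settings.py` (changed in this release but not shown here). A hedged sketch of a configuration enabling it, with placeholder values:

    # Custom settings override (values are placeholders)
    EXPORT_CSV_ARCHIVE_S3_BUCKET = "udata-csv-archives"     # falsy value disables archiving
    EXPORT_CSV_ARCHIVE_S3_FILENAME_PREFIX = "catalog/"      # prepended to resource.title in the S3 key

With these set, the export task copies each freshly generated catalog to the bucket via `store_bytes()` on the first day of the month.
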
udata/core/metrics/__init__.py CHANGED
@@ -1,6 +1,3 @@
- from udata import entrypoints
-
-
  def init_app(app):
      # Load all core metrics
      import udata.core.user.metrics # noqa
@@ -9,6 +6,3 @@ def init_app(app):
      import udata.core.dataset.metrics # noqa
      import udata.core.reuse.metrics # noqa
      import udata.core.followers.metrics # noqa
-
-     # Load metrics from plugins
-     entrypoints.get_enabled("udata.metrics", app)

udata/core/site/models.py CHANGED
@@ -8,6 +8,7 @@ from udata.core.metrics.helpers import get_metrics_for_model, get_stock_metrics
  from udata.core.organization.models import Organization
  from udata.core.reuse.models import Reuse
  from udata.models import WithMetrics, db
+ from udata.utils import get_udata_version

  __all__ = ("Site", "SiteSettings")

@@ -66,12 +67,7 @@ class Site(WithMetrics, db.Document):

      @field(description="The current version of udata")
      def version(self):
-         try:
-             from importlib.metadata import version
-
-             return version("udata")
-         except Exception:
-             return None
+         return get_udata_version()

      def count_users(self):
          from udata.models import User
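
Both the `Site.version` field above and the CLI `print_version` callback now delegate to `get_udata_version()` from `udata.utils` (its diff is not shown here). A plausible implementation based on `importlib.metadata`, given only as an assumption:

    from importlib.metadata import PackageNotFoundError, version

    def get_udata_version():
        # Hypothetical sketch: return the installed udata version, or None when
        # the distribution metadata is unavailable (e.g. a raw source checkout).
        try:
            return version("udata")
        except PackageNotFoundError:
            return None
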
udata/core/spatial/commands.py CHANGED
@@ -21,10 +21,8 @@ from udata.core.spatial.models import GeoLevel, GeoZone, SpatialCoverage
  log = logging.getLogger(__name__)


- DEFAULT_GEOZONES_FILE = (
-     "https://www.data.gouv.fr/fr/datasets/r/a1bb263a-6cc7-4871-ab4f-2470235a67bf"
- )
- DEFAULT_LEVELS_FILE = "https://www.data.gouv.fr/fr/datasets/r/e0206442-78b3-4a00-b71c-c065d20561c8"
+ DEFAULT_GEOZONES_FILE = "https://www.data.gouv.fr/datasets/r/a1bb263a-6cc7-4871-ab4f-2470235a67bf"
+ DEFAULT_LEVELS_FILE = "https://www.data.gouv.fr/datasets/r/e0206442-78b3-4a00-b71c-c065d20561c8"


  @cli.group("spatial")

udata/core/spatial/models.py CHANGED
@@ -1,5 +1,4 @@
  import geojson
- from flask import current_app
  from werkzeug.local import LocalProxy
  from werkzeug.utils import cached_property

@@ -85,10 +84,6 @@ class GeoZone(WithMetrics, db.Document):
              return name
          return self.level_name # Fallback that should never happen.

-     @property
-     def handled_level(self):
-         return self.level in current_app.config.get("HANDLED_LEVELS")
-
      @property
      def url(self):
          return None
@@ -158,11 +153,6 @@ class SpatialCoverage(db.EmbeddedDocument):
              continue
          return _(top.name)

-     @property
-     def handled_zones(self):
-         """Return only zones with a dedicated page."""
-         return [zone for zone in self.zones if zone.handled_level]
-
      def clean(self):
          if self.zones and self.geom:
              raise db.ValidationError(
udata/core/spatial/tests/test_api.py CHANGED
@@ -1,4 +1,3 @@
- import pytest
  from flask import url_for

  from udata.core.dataset.factories import DatasetFactory
@@ -13,9 +12,7 @@ from udata.core.spatial.models import spatial_granularities
  from udata.core.spatial.tasks import compute_geozones_metrics
  from udata.tests.api import APITestCase
  from udata.tests.api.test_datasets_api import SAMPLE_GEOM
- from udata.tests.features.territories import (
-     create_geozones_fixtures,
- )
+ from udata.tests.helpers import create_geozones_fixtures
  from udata.utils import faker


@@ -258,38 +255,6 @@ class SpatialApiTest(APITestCase):
          self.assertEqual(response.json["features"][1]["properties"]["datasets"], 3)


- @pytest.mark.options(
-     ACTIVATE_TERRITORIES=True,
-     HANDLED_LEVELS=("fr:commune", "fr:departement", "fr:region", "country"),
- )
- class SpatialTerritoriesApiTest(APITestCase):
-     def test_zone_datasets_with_dynamic_and_setting(self):
-         paca, bdr, arles = create_geozones_fixtures()
-         organization = OrganizationFactory()
-         for _ in range(3):
-             DatasetFactory(
-                 organization=organization, spatial=SpatialCoverageFactory(zones=[paca.id])
-             )
-
-         response = self.get(url_for("api.zone_datasets", id=paca.id, dynamic=1))
-         self.assert200(response)
-         # No dynamic datasets given that they are added by udata-front extension.
-         self.assertEqual(len(response.json), 3)
-
-     def test_zone_datasets_with_dynamic_and_setting_and_size(self):
-         paca, bdr, arles = create_geozones_fixtures()
-         organization = OrganizationFactory()
-         for _ in range(3):
-             DatasetFactory(
-                 organization=organization, spatial=SpatialCoverageFactory(zones=[paca.id])
-             )
-
-         response = self.get(url_for("api.zone_datasets", id=paca.id, dynamic=1, size=2))
-         self.assert200(response)
-         # No dynamic datasets given that they are added by udata-front extension.
-         self.assertEqual(len(response.json), 2)
-
-
  class DatasetsSpatialAPITest(APITestCase):
      def test_create_spatial_zones(self):
          paca, _, _ = create_geozones_fixtures()
udata/core/user/models.py CHANGED
@@ -102,7 +102,16 @@ class User(WithMetrics, UserMixin, Linkable, db.Document):
      on_delete = Signal()

      meta = {
-         "indexes": ["$slug", "-created_at", "slug", "apikey"],
+         "indexes": [
+             {
+                 "fields": ["$last_name", "$first_name", "$email"],
+                 "default_language": "french",
+                 "weights": {"last_name": 10, "email": 10, "first_name": 5},
+             },
+             "-created_at",
+             "slug",
+             "apikey",
+         ],
          "ordering": ["-created_at"],
          "auto_create_index_on_save": True,
      }
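
The single `$slug` text index is replaced by a weighted text index over `last_name`, `first_name` and `email`. With MongoEngine such an index is typically queried through `search_text`; a minimal sketch (the query string is illustrative):

    from udata.core.user.models import User

    # Matches on last_name/email (weight 10) rank above first_name matches (weight 5)
    users = User.objects.search_text("dupont").order_by("$text_score")
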