udata 13.0.1.dev10__py3-none-any.whl → 14.0.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of udata might be problematic.

Files changed (93)
  1. udata/api/__init__.py +2 -8
  2. udata/app.py +12 -30
  3. udata/auth/forms.py +6 -4
  4. udata/commands/__init__.py +2 -14
  5. udata/commands/db.py +13 -25
  6. udata/commands/info.py +0 -18
  7. udata/core/avatars/api.py +43 -0
  8. udata/core/avatars/test_avatar_api.py +30 -0
  9. udata/core/dataservices/models.py +14 -2
  10. udata/core/dataset/tasks.py +36 -8
  11. udata/core/metrics/__init__.py +0 -6
  12. udata/core/site/models.py +2 -6
  13. udata/core/spatial/commands.py +2 -4
  14. udata/core/spatial/models.py +0 -10
  15. udata/core/spatial/tests/test_api.py +1 -36
  16. udata/core/user/models.py +10 -1
  17. udata/cors.py +2 -5
  18. udata/db/migrations.py +279 -0
  19. udata/frontend/__init__.py +3 -122
  20. udata/harvest/actions.py +3 -8
  21. udata/harvest/api.py +5 -14
  22. udata/harvest/backends/__init__.py +21 -9
  23. udata/harvest/backends/base.py +2 -2
  24. udata/harvest/backends/ckan/harvesters.py +2 -0
  25. udata/harvest/backends/dcat.py +3 -0
  26. udata/harvest/backends/maaf.py +1 -0
  27. udata/harvest/commands.py +6 -4
  28. udata/harvest/forms.py +9 -6
  29. udata/harvest/tasks.py +3 -5
  30. udata/harvest/tests/ckan/test_ckan_backend.py +2 -2
  31. udata/harvest/tests/ckan/test_ckan_backend_errors.py +1 -1
  32. udata/harvest/tests/ckan/test_ckan_backend_filters.py +1 -1
  33. udata/harvest/tests/ckan/test_dkan_backend.py +1 -1
  34. udata/harvest/tests/dcat/udata.xml +6 -6
  35. udata/harvest/tests/factories.py +1 -1
  36. udata/harvest/tests/test_actions.py +5 -3
  37. udata/harvest/tests/test_api.py +2 -1
  38. udata/harvest/tests/test_base_backend.py +2 -0
  39. udata/harvest/tests/test_dcat_backend.py +3 -3
  40. udata/i18n.py +14 -273
  41. udata/migrations/2025-11-13-delete-user-email-index.py +25 -0
  42. udata/models/__init__.py +0 -8
  43. udata/routing.py +0 -8
  44. udata/sentry.py +4 -10
  45. udata/settings.py +16 -17
  46. udata/tasks.py +3 -3
  47. udata/tests/__init__.py +1 -10
  48. udata/tests/api/test_dataservices_api.py +29 -1
  49. udata/tests/api/test_datasets_api.py +1 -2
  50. udata/tests/api/test_security_api.py +2 -1
  51. udata/tests/api/test_user_api.py +12 -0
  52. udata/tests/apiv2/test_topics.py +1 -1
  53. udata/tests/dataset/test_resource_preview.py +0 -1
  54. udata/tests/helpers.py +12 -0
  55. udata/tests/test_cors.py +1 -1
  56. udata/tests/test_mail.py +2 -2
  57. udata/tests/test_migrations.py +181 -481
  58. udata/translations/ar/LC_MESSAGES/udata.mo +0 -0
  59. udata/translations/ar/LC_MESSAGES/udata.po +267 -279
  60. udata/translations/de/LC_MESSAGES/udata.mo +0 -0
  61. udata/translations/de/LC_MESSAGES/udata.po +269 -281
  62. udata/translations/es/LC_MESSAGES/udata.mo +0 -0
  63. udata/translations/es/LC_MESSAGES/udata.po +267 -279
  64. udata/translations/fr/LC_MESSAGES/udata.mo +0 -0
  65. udata/translations/fr/LC_MESSAGES/udata.po +278 -290
  66. udata/translations/it/LC_MESSAGES/udata.mo +0 -0
  67. udata/translations/it/LC_MESSAGES/udata.po +269 -281
  68. udata/translations/pt/LC_MESSAGES/udata.mo +0 -0
  69. udata/translations/pt/LC_MESSAGES/udata.po +269 -281
  70. udata/translations/sr/LC_MESSAGES/udata.mo +0 -0
  71. udata/translations/sr/LC_MESSAGES/udata.po +270 -282
  72. udata/utils.py +5 -0
  73. {udata-13.0.1.dev10.dist-info → udata-14.0.0.dist-info}/METADATA +1 -3
  74. {udata-13.0.1.dev10.dist-info → udata-14.0.0.dist-info}/RECORD +78 -89
  75. {udata-13.0.1.dev10.dist-info → udata-14.0.0.dist-info}/entry_points.txt +3 -5
  76. udata/core/followers/views.py +0 -15
  77. udata/entrypoints.py +0 -94
  78. udata/features/identicon/__init__.py +0 -0
  79. udata/features/identicon/api.py +0 -13
  80. udata/features/identicon/backends.py +0 -131
  81. udata/features/identicon/tests/__init__.py +0 -0
  82. udata/features/identicon/tests/test_backends.py +0 -18
  83. udata/features/territories/__init__.py +0 -49
  84. udata/features/territories/api.py +0 -25
  85. udata/features/territories/models.py +0 -51
  86. udata/migrations/__init__.py +0 -367
  87. udata/tests/cli/test_db_cli.py +0 -68
  88. udata/tests/features/territories/__init__.py +0 -20
  89. udata/tests/features/territories/test_territories_api.py +0 -185
  90. udata/tests/frontend/test_hooks.py +0 -149
  91. {udata-13.0.1.dev10.dist-info → udata-14.0.0.dist-info}/WHEEL +0 -0
  92. {udata-13.0.1.dev10.dist-info → udata-14.0.0.dist-info}/licenses/LICENSE +0 -0
  93. {udata-13.0.1.dev10.dist-info → udata-14.0.0.dist-info}/top_level.txt +0 -0
@@ -1,131 +0,0 @@ udata/features/identicon/backends.py
- import hashlib
- import io
-
- import pydenticon
- from flask import current_app, redirect, send_file
-
- from udata import entrypoints
- from udata.app import cache
-
- ADORABLE_AVATARS_URL = "https://api.adorable.io/avatars/{size}/{identifier}.png"  # noqa
- ROBOHASH_URL = "https://robohash.org/{identifier}.png?size={size}x{size}&set={skin}&bgset={bg}"  # noqa
-
-
- # Default values overriden by theme default and local config
- DEFAULTS = {
-     "AVATAR_PROVIDER": "internal",
-     # Internal provider
-     "AVATAR_INTERNAL_SIZE": 7,
-     "AVATAR_INTERNAL_FOREGROUND": [
-         "rgb(45,79,255)",
-         "rgb(254,180,44)",
-         "rgb(226,121,234)",
-         "rgb(30,179,253)",
-         "rgb(232,77,65)",
-         "rgb(49,203,115)",
-         "rgb(141,69,170)",
-     ],
-     "AVATAR_INTERNAL_BACKGROUND": "rgb(224,224,224)",
-     "AVATAR_INTERNAL_PADDING": 10,
-     # robohash prodiver
-     "AVATAR_ROBOHASH_SKIN": "set1",
-     "AVATAR_ROBOHASH_BACKGROUND": "bg1",
- }
-
-
- def get_config(key):
-     """
-     Get an identicon configuration parameter.
-
-     Precedance order is:
-     - application config (`udata.cfg`)
-     - default
-     """
-     key = "AVATAR_{0}".format(key.upper())
-     local_config = current_app.config.get(key)
-     return local_config or DEFAULTS[key]
-
-
- def get_internal_config(key):
-     return get_config("internal_{0}".format(key))
-
-
- def get_provider():
-     """Get the current provider from config"""
-     name = get_config("provider")
-     available = entrypoints.get_all("udata.avatars")
-     if name not in available:
-         raise ValueError("Unknown avatar provider: {0}".format(name))
-     return available[name]
-
-
- def get_identicon(identifier, size):
-     """
-     Get an identicon for a given identifier at a given size.
-
-     Automatically select the provider from `AVATAR_PROVIDER`
-
-     :returns: a HTTP response, either an image or a redirect
-     """
-     return get_provider()(identifier, size)
-
-
- @cache.memoize()
- def generate_pydenticon(identifier, size):
-     """
-     Use pydenticon to generate an identicon image.
-     All parameters are extracted from configuration.
-     """
-     blocks_size = get_internal_config("size")
-     foreground = get_internal_config("foreground")
-     background = get_internal_config("background")
-     generator = pydenticon.Generator(
-         blocks_size, blocks_size, digest=hashlib.sha1, foreground=foreground, background=background
-     )
-
-     # Pydenticon adds padding to the size and as a consequence
-     # we need to compute the size without the padding
-     padding = int(round(get_internal_config("padding") * size / 100.0))
-     size = size - 2 * padding
-     padding = (padding,) * 4
-     return generator.generate(identifier, size, size, padding=padding, output_format="png")
-
-
- def internal(identifier, size):
-     """
-     Internal provider
-
-     Use pydenticon to generate an identicon.
-     """
-     identicon = generate_pydenticon(identifier, size)
-     response = send_file(io.BytesIO(identicon), mimetype="image/png")
-     etag = hashlib.sha1(identicon).hexdigest()
-     response.set_etag(etag)
-     return response
-
-
- def adorable(identifier, size):
-     """
-     Adorable Avatars provider
-
-     Simply redirect to the external API.
-
-     See: http://avatars.adorable.io/
-     """
-     url = ADORABLE_AVATARS_URL.format(identifier=identifier, size=size)
-     return redirect(url)
-
-
- def robohash(identifier, size):
-     """
-     Robohash provider
-
-     Redirect to the Robohash API
-     with parameters extracted from configuration.
-
-     See: https://robohash.org/
-     """
-     skin = get_config("robohash_skin")
-     background = get_config("robohash_background")
-     url = ROBOHASH_URL.format(identifier=identifier, size=size, skin=skin, bg=background)
-     return redirect(url)
File without changes
@@ -1,18 +0,0 @@ udata/features/identicon/tests/test_backends.py
- from udata.features.identicon.backends import internal
- from udata.tests.api import PytestOnlyAPITestCase
- from udata.tests.helpers import assert200
- from udata.utils import faker
-
-
- class InternalBackendTest(PytestOnlyAPITestCase):
-     def test_base_rendering(self):
-         response = internal(faker.word(), 32)
-         assert200(response)
-         assert response.mimetype == "image/png"
-         assert response.is_streamed
-         etag, weak = response.get_etag()
-         assert etag is not None
-
-     def test_render_twice_the_same(self):
-         identifier = faker.word()
-         self.assertStreamEqual(internal(identifier, 32), internal(identifier, 32))
@@ -1,49 +0,0 @@ udata/features/territories/__init__.py
- from flask import current_app
-
- from udata.models import GeoZone, db
-
-
- def check_for_territories(query):
-     """
-     Return a geozone queryset of territories given the `query`.
-
-     Results are sorted by population and area (biggest first).
-     """
-     if not query or not current_app.config.get("ACTIVATE_TERRITORIES"):
-         return []
-
-     dbqs = db.Q()
-     query = query.lower()
-     is_digit = query.isdigit()
-     query_length = len(query)
-     for level in current_app.config.get("HANDLED_LEVELS"):
-         if level == "country":
-             continue  # Level not fully handled yet.
-         q = db.Q(level=level)
-         if query_length == 2 and level == "fr:departement" and (is_digit or query in ("2a", "2b")):
-             # Counties + Corsica.
-             q &= db.Q(code=query)
-         elif query_length == 3 and level == "fr:departement" and is_digit:
-             # French DROM-COM.
-             q &= db.Q(code=query)
-         elif (
-             query_length == 5
-             and level == "fr:commune"
-             and (is_digit or query.startswith("2a") or query.startswith("2b"))
-         ):
-             # INSEE code then postal codes with Corsica exceptions.
-             q &= db.Q(code=query)
-         elif query_length >= 4:
-             # Check names starting with query or exact match.
-             q &= db.Q(name__istartswith=query) | db.Q(name__iexact=query)
-         else:
-             continue
-
-         # Meta Q object, ready to be passed to a queryset.
-         dbqs |= q
-
-     if dbqs.empty:
-         return []
-
-     # Sort matching results by population and area.
-     return GeoZone.objects(dbqs).order_by("-population", "-area")
@@ -1,25 +0,0 @@ udata/features/territories/api.py
- from udata.api import API, api
- from udata.features.territories import check_for_territories
-
- suggest_parser = api.parser()
- suggest_parser.add_argument(
-     "q", type=str, help="The string to autocomplete/suggest", location="args", required=True
- )
- suggest_parser.add_argument(
-     "size", type=int, help="The maximum result size", location="args", required=False
- )
-
-
- @api.route("/territory/suggest/", endpoint="suggest_territory")
- class SuggestTerritoriesAPI(API):
-     @api.doc("suggest_territory")
-     @api.expect(suggest_parser)
-     def get(self):
-         args = suggest_parser.parse_args()
-         territories = check_for_territories(args["q"])
-         if args["size"]:
-             territories = territories[: args["size"]]
-         return [
-             {"id": territory.id, "title": territory.name, "page": territory.external_url}
-             for territory in territories
-         ]
@@ -1,51 +0,0 @@ udata/features/territories/models.py
- from udata.models import License, Organization
-
- __all__ = ("TerritoryDataset", "ResourceBasedTerritoryDataset", "TERRITORY_DATASETS")
-
-
- TERRITORY_DATASETS = {"commune": {}, "departement": {}, "region": {}, "country": {}}
-
-
- class TerritoryDataset(object):
-     order = 0
-     id = ""
-     title = ""
-     organization_id = ""
-     url_template = ""
-     description = ""
-     license_id = "fr-lo"
-
-     def __init__(self, territory):
-         self.territory = territory
-
-     @property
-     def url(self):
-         return self.url_template.format(code=self.territory.code)
-
-     @property
-     def slug(self):
-         return "{territory_id}:{id}".format(territory_id=self.territory.id, id=self.id)
-
-     @property
-     def organization(self):
-         return Organization.objects.get(id=self.organization_id)
-
-     @property
-     def license(self):
-         return License.objects(id=self.license_id).first()
-
-
- class ResourceBasedTerritoryDataset(TerritoryDataset):
-     dataset_id = ""
-     resource_id = ""
-     territory_attr = ""
-     csv_column = ""
-
-     def url_for(self, external=False):
-         return None
-
-     url = property(url_for)
-
-     @property
-     def external_url(self):
-         return self.url_for(external=True)
@@ -1,367 +0,0 @@ udata/migrations/__init__.py
- """
- Data migrations logic
- """
-
- import importlib.util
- import inspect
- import logging
- import os
- import queue
- import traceback
- from datetime import datetime
- from logging.handlers import QueueHandler
-
- from flask import current_app
- from mongoengine.connection import get_db
- from pkg_resources import (
-     resource_filename,
-     resource_isdir,
-     resource_listdir,
-     resource_string,
- )
- from pymongo import ReturnDocument
-
- from udata import entrypoints
-
- log = logging.getLogger(__name__)
-
-
- class MigrationError(Exception):
-     """
-     Raised on migration execution error.
-
-     :param msg str: A human readable message (a reason)
-     :param output str: An optionnal array of logging output
-     :param exc Exception: An optionnal underlying exception
-     """
-
-     def __init__(self, msg, output=None, exc=None, traceback=None):
-         super().__init__(msg)
-         self.msg = msg
-         self.output = output
-         self.exc = exc
-         self.traceback = traceback
-
-
- class RollbackError(MigrationError):
-     """
-     Raised on rollback.
-     Hold the initial migration error and rollback exception (if any)
-     """
-
-     def __init__(self, msg, output=None, exc=None, migrate_exc=None):
-         super().__init__(msg)
-         self.msg = msg
-         self.output = output
-         self.exc = exc
-         self.migrate_exc = migrate_exc
-
-
- class MigrationFormatter(logging.Formatter):
-     pass
-
-
- class Record(dict):
-     """
-     A simple wrapper to migrations document
-     """
-
-     __getattr__ = dict.get
-
-     def load(self):
-         specs = {"plugin": self["plugin"], "filename": self["filename"]}
-         self.clear()
-         data = get_db().migrations.find_one(specs)
-         self.update(data or specs)
-
-     def exists(self):
-         return bool(self._id)
-
-     def __bool__(self):
-         return self.exists()
-
-     @property
-     def collection(self):
-         return get_db().migrations
-
-     @property
-     def status(self):
-         """
-         Status is the status of the last operation.
-
-         Will be `None` if the record doesn't exists.
-         Possible values are:
-         - success
-         - rollback
-         - rollback-error
-         - error
-         - recorded
-         """
-         if not self.exists():
-             return
-         op = self.ops[-1]
-         if op["success"]:
-             if op["type"] == "migrate":
-                 return "success"
-             elif op["type"] == "rollback":
-                 return "rollback"
-             elif op["type"] == "record":
-                 return "recorded"
-             else:
-                 return "unknown"
-         else:
-             return "rollback-error" if op["type"] == "rollback" else "error"
-
-     @property
-     def last_date(self):
-         if not self.exists():
-             return
-         op = self.ops[-1]
-         return op["date"]
-
-     @property
-     def ok(self):
-         """
-         Is true if the migration is considered as successfully applied
-         """
-         if not self.exists():
-             return False
-         op = self.ops[-1]
-         return op["success"] and op["type"] in ("migrate", "record")
-
-     def add(self, _type, migration, output, state, success):
-         script = inspect.getsource(migration)
-         return Record(
-             self.collection.find_one_and_update(
-                 {"plugin": self.plugin, "filename": self.filename},
-                 {
-                     "$push": {
-                         "ops": {
-                             "date": datetime.utcnow(),
-                             "type": _type,
-                             "script": script,
-                             "output": output,
-                             "state": state,
-                             "success": success,
-                         }
-                     }
-                 },
-                 upsert=True,
-                 return_document=ReturnDocument.AFTER,
-             )
-         )
-
-     def delete(self):
-         return self.collection.delete_one({"_id": self._id})
-
-
- class Migration:
-     def __init__(self, plugin_or_specs, filename, module_name=None):
-         if filename is None and ":" in plugin_or_specs:
-             plugin, filename = plugin_or_specs.split(":")
-         else:
-             plugin = plugin_or_specs
-         if not filename.endswith(".py"):
-             filename += ".py"
-
-         self.plugin = plugin
-         self.filename = filename
-         self.module_name = module_name
-         self._record = None
-         self._module = None
-
-     @property
-     def collection(self):
-         return get_db().migrations
-
-     @property
-     def db_query(self):
-         return {"plugin": self.plugin, "filename": self.filename}
-
-     @property
-     def label(self):
-         return ":".join((self.plugin, self.filename))
-
-     @property
-     def record(self):
-         if self._record is None:
-             specs = {"plugin": self.plugin, "filename": self.filename}
-             data = get_db().migrations.find_one(specs)
-             self._record = Record(data or specs)
-         return self._record
-
-     @property
-     def module(self):
-         if self._module is None:
-             self._module = load_migration(self.plugin, self.filename, module_name=self.module_name)
-         return self._module
-
-     def __eq__(self, value):
-         return (
-             isinstance(value, Migration)
-             and getattr(value, "plugin") == self.plugin
-             and getattr(value, "filename") == self.filename
-         )
-
-     def execute(self, recordonly=False, dryrun=False):
-         """
-         Execute a migration
-
-         If recordonly is True, the migration is only recorded
-         If dryrun is True, the migration is neither executed nor recorded
-         """
-         q = queue.Queue(-1)  # no limit on size
-         handler = QueueHandler(q)
-         handler.setFormatter(MigrationFormatter())
-         logger = getattr(self.module, "log", logging.getLogger(self.module.__name__))
-         logger.propagate = False
-         for h in logger.handlers:
-             logger.removeHandler(h)
-         logger.addHandler(handler)
-
-         if not hasattr(self.module, "migrate"):
-             error = SyntaxError("A migration should at least have a migrate(db) function")
-             raise MigrationError("Error while executing migration", exc=error)
-
-         out = [["info", "Recorded only"]] if recordonly else []
-         state = {}
-
-         if not recordonly and not dryrun:
-             db = get_db()
-             db._state = state
-             try:
-                 self.module.migrate(db)
-                 out = _extract_output(q)
-             except Exception as e:
-                 out = _extract_output(q)
-                 tb = traceback.format_exc()
-                 self.add_record("migrate", out, db._state, False, traceback=tb)
-                 fe = MigrationError(
-                     "Error while executing migration", output=out, exc=e, traceback=tb
-                 )
-                 if hasattr(self.module, "rollback"):
-                     try:
-                         self.module.rollback(db)
-                         out = _extract_output(q)
-                         self.add_record("rollback", out, db._state, True)
-                         msg = "Error while executing migration, rollback has been applied"
-                         fe = RollbackError(msg, output=out, migrate_exc=fe)
-                     except Exception as re:
-                         out = _extract_output(q)
-                         self.add_record("rollback", out, db._state, False)
-                         msg = "Error while executing migration rollback"
-                         fe = RollbackError(msg, output=out, exc=re, migrate_exc=fe)
-                 raise fe
-
-         if not dryrun:
-             self.add_record("migrate", out, state, True)
-
-         return out
-
-     def unrecord(self):
-         """Delete a migration record"""
-         if not self.record.exists():
-             return False
-         return bool(self.collection.delete_one(self.db_query).deleted_count)
-
-     def add_record(self, type, output, state, success, traceback=None):
-         script = inspect.getsource(self.module)
-         return Record(
-             self.collection.find_one_and_update(
-                 self.db_query,
-                 {
-                     "$push": {
-                         "ops": {
-                             "date": datetime.utcnow(),
-                             "type": type,
-                             "script": script,
-                             "output": output,
-                             "state": state,
-                             "success": success,
-                             "traceback": traceback,
-                         }
-                     }
-                 },
-                 upsert=True,
-                 return_document=ReturnDocument.AFTER,
-             )
-         )
-
-
- def get(plugin, filename):
-     """Get a migration"""
-     return Migration(plugin, filename)
-
-
- def list_available():
-     """
-     List available migrations for udata and enabled plugins
-
-     Each row is a tuple with following signature:
-
-         (plugin, package, filename)
-     """
-     migrations = []
-
-     migrations.extend(_iter("udata", "udata"))
-
-     plugins = entrypoints.get_enabled("udata.models", current_app)
-     for plugin, module in plugins.items():
-         migrations.extend(_iter(plugin, module))
-     return sorted(migrations, key=lambda m: m.filename)
-
-
- def _iter(plugin, module):
-     """
-     Iterate over migrations for a given plugin module
-
-     Yield tuples in the form (plugin_name, module_name, filename)
-     """
-     module_name = module if isinstance(module, str) else module.__name__
-     if not resource_isdir(module_name, "migrations"):
-         return
-     for filename in resource_listdir(module_name, "migrations"):
-         if filename.endswith(".py") and not filename.startswith("__"):
-             yield Migration(plugin, filename, module_name)
-
-
- def _module_name(plugin):
-     """Get the module name for a given plugin"""
-     if plugin == "udata":
-         return "udata"
-     module = entrypoints.get_plugin_module("udata.models", current_app, plugin)
-     if module is None:
-         raise MigrationError("Plugin {} not found".format(plugin))
-     return module.__name__
-
-
- def load_migration(plugin, filename, module_name=None):
-     """
-     Load a migration from its python file
-
-     :returns: the loaded module
-     """
-     module_name = module_name or _module_name(plugin)
-     basename = os.path.splitext(os.path.basename(filename))[0]
-     name = ".".join((module_name, "migrations", basename))
-     filename = os.path.join("migrations", filename)
-     try:
-         script = resource_string(module_name, filename)
-     except Exception:
-         msg = "Unable to load file {} from module {}".format(filename, module_name)
-         raise MigrationError(msg)
-     spec = importlib.util.spec_from_loader(name, loader=None)
-     module = importlib.util.module_from_spec(spec)
-     exec(script, module.__dict__)
-     module.__file__ = resource_filename(module_name, filename)
-     return module
-
-
- def _extract_output(q):
-     """Extract log output from a QueueHandler queue"""
-     out = []
-     while not q.empty():
-         record = q.get()
-         # Use list instead of tuple to have the same data before and after mongo persist
-         out.append([record.levelname.lower(), record.getMessage()])
-     return out