udata 9.1.2.dev30754__py2.py3-none-any.whl → 9.1.4.dev30983__py2.py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of udata might be problematic.
Files changed (90)
  1. tasks/helpers.py +6 -5
  2. udata/__init__.py +1 -1
  3. udata/api/__init__.py +2 -3
  4. udata/api/commands.py +0 -1
  5. udata/api/fields.py +22 -1
  6. udata/api_fields.py +22 -12
  7. udata/app.py +1 -1
  8. udata/assets.py +1 -1
  9. udata/auth/__init__.py +8 -12
  10. udata/commands/db.py +3 -3
  11. udata/commands/dcat.py +1 -1
  12. udata/commands/fixtures.py +110 -54
  13. udata/commands/init.py +2 -2
  14. udata/commands/tests/test_fixtures.py +71 -0
  15. udata/core/activity/tasks.py +1 -1
  16. udata/core/badges/models.py +0 -2
  17. udata/core/contact_point/api.py +1 -3
  18. udata/core/dataservices/tasks.py +1 -1
  19. udata/core/dataset/actions.py +2 -2
  20. udata/core/dataset/forms.py +0 -2
  21. udata/core/dataset/models.py +12 -10
  22. udata/core/dataset/rdf.py +1 -1
  23. udata/core/discussions/api.py +1 -1
  24. udata/core/discussions/models.py +2 -2
  25. udata/core/discussions/tasks.py +1 -1
  26. udata/core/organization/api.py +3 -4
  27. udata/core/organization/api_fields.py +2 -1
  28. udata/core/organization/apiv2.py +1 -1
  29. udata/core/reports/api.py +11 -0
  30. udata/core/reports/models.py +6 -1
  31. udata/core/reuse/metrics.py +1 -1
  32. udata/core/reuse/permissions.py +2 -1
  33. udata/core/reuse/search.py +2 -2
  34. udata/core/reuse/tasks.py +2 -2
  35. udata/core/spatial/commands.py +3 -3
  36. udata/core/spatial/factories.py +1 -1
  37. udata/core/spatial/forms.py +1 -1
  38. udata/core/spatial/models.py +2 -2
  39. udata/core/spatial/tests/test_models.py +1 -1
  40. udata/core/spatial/translations.py +3 -1
  41. udata/core/user/api.py +4 -4
  42. udata/core/user/metrics.py +1 -1
  43. udata/frontend/__init__.py +1 -1
  44. udata/harvest/actions.py +1 -1
  45. udata/harvest/backends/__init__.py +1 -1
  46. udata/harvest/tasks.py +0 -1
  47. udata/harvest/tests/factories.py +0 -2
  48. udata/harvest/tests/test_base_backend.py +0 -1
  49. udata/harvest/tests/test_dcat_backend.py +16 -17
  50. udata/migrations/2020-07-24-remove-s-from-scope-oauth.py +1 -1
  51. udata/migrations/2021-07-05-remove-unused-badges.py +0 -1
  52. udata/migrations/2023-02-08-rename-internal-dates.py +0 -2
  53. udata/migrations/2024-06-11-fix-reuse-datasets-references.py +0 -1
  54. udata/models/__init__.py +1 -0
  55. udata/routing.py +5 -0
  56. udata/search/commands.py +1 -1
  57. udata/search/query.py +1 -1
  58. udata/sentry.py +11 -2
  59. udata/settings.py +1 -0
  60. udata/tags.py +3 -3
  61. udata/tests/api/test_base_api.py +1 -1
  62. udata/tests/api/test_contact_points.py +4 -4
  63. udata/tests/api/test_datasets_api.py +10 -10
  64. udata/tests/api/test_organizations_api.py +39 -39
  65. udata/tests/api/test_reports_api.py +53 -3
  66. udata/tests/api/test_tags_api.py +2 -2
  67. udata/tests/api/test_transfer_api.py +1 -1
  68. udata/tests/apiv2/test_datasets.py +4 -4
  69. udata/tests/dataset/test_dataset_model.py +3 -3
  70. udata/tests/frontend/__init__.py +0 -2
  71. udata/tests/frontend/test_auth.py +0 -1
  72. udata/tests/organization/test_csv_adapter.py +0 -2
  73. udata/tests/organization/test_notifications.py +3 -3
  74. udata/tests/reuse/test_reuse_model.py +0 -1
  75. udata/tests/site/test_site_rdf.py +1 -3
  76. udata/tests/test_cors.py +0 -3
  77. udata/tests/test_owned.py +4 -4
  78. udata/tests/test_routing.py +1 -1
  79. udata/tests/test_tags.py +1 -1
  80. udata/tests/test_transfer.py +1 -2
  81. udata/tests/workers/test_jobs_commands.py +1 -1
  82. udata/utils.py +15 -14
  83. {udata-9.1.2.dev30754.dist-info → udata-9.1.4.dev30983.dist-info}/METADATA +20 -4
  84. {udata-9.1.2.dev30754.dist-info → udata-9.1.4.dev30983.dist-info}/RECORD +88 -89
  85. udata/commands/tests/fixtures.py +0 -44
  86. udata/tests/features/territories/commands.py +0 -9
  87. {udata-9.1.2.dev30754.dist-info → udata-9.1.4.dev30983.dist-info}/LICENSE +0 -0
  88. {udata-9.1.2.dev30754.dist-info → udata-9.1.4.dev30983.dist-info}/WHEEL +0 -0
  89. {udata-9.1.2.dev30754.dist-info → udata-9.1.4.dev30983.dist-info}/entry_points.txt +0 -0
  90. {udata-9.1.2.dev30754.dist-info → udata-9.1.4.dev30983.dist-info}/top_level.txt +0 -0
tasks/helpers.py CHANGED
@@ -1,10 +1,11 @@
 from os.path import abspath, dirname, join
+from typing import Callable

 #: Project absolute root path
 ROOT = abspath(join(dirname(__file__), ".."))


-def color(code):
+def color(code: str) -> Callable:
     """A simple ANSI color wrapper factory"""
     return lambda t: "\033[{0}{1}\033[0;m".format(code, t)

@@ -17,25 +18,25 @@ purple = color("1;35m")
 white = color("1;39m")


-def header(text, *args, **kwargs):
+def header(text: str, *args, **kwargs) -> None:
     """Display an header"""
     text = text.format(*args, **kwargs)
     print(" ".join((blue(">>"), cyan(text))))


-def info(text, *args, **kwargs):
+def info(text: str, *args, **kwargs) -> None:
     """Display informations"""
     text = text.format(*args, **kwargs)
     print(" ".join((purple(">>>"), text)))


-def success(text, *args, **kwargs):
+def success(text: str, *args, **kwargs) -> None:
     """Display a success message"""
     text = text.format(*args, **kwargs)
     print(" ".join((green("✔"), white(text))))


-def error(text, *args, **kwargs):
+def error(text: str, *args, **kwargs) -> None:
     """Display an error message"""
     text = text.format(*args, **kwargs)
     print(red("✘ {0}".format(text)))
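For readers unfamiliar with the pattern, `color()` above is a closure factory: each call bakes an ANSI escape code into a small formatting function. A minimal standalone sketch of the same pattern (names mirror the diff; the `__main__` usage is illustrative only, not part of udata):

    from typing import Callable

    def color(code: str) -> Callable:
        """A simple ANSI color wrapper factory."""
        return lambda t: "\033[{0}{1}\033[0;m".format(code, t)

    green = color("1;32m")
    red = color("1;31m")

    if __name__ == "__main__":
        print(green("ok"))      # bold green on ANSI-capable terminals
        print(red("failed"))    # bold red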
udata/__init__.py CHANGED
@@ -4,5 +4,5 @@
 udata
 """

-__version__ = "9.1.2.dev"
+__version__ = "9.1.4.dev"
 __description__ = "Open data portal"
udata/api/__init__.py CHANGED
@@ -1,5 +1,4 @@
 import inspect
-import itertools
 import logging
 import urllib.parse
 from functools import wraps
@@ -18,7 +17,7 @@ from flask import (
 from flask_restx import Api, Resource
 from flask_storage import UnauthorizedFileType

-from udata import cors, entrypoints, tracking
+from udata import entrypoints, tracking
 from udata.app import csrf
 from udata.auth import Permission, PermissionDenied, RoleNeed, current_user, login_user
 from udata.i18n import get_locale
@@ -325,7 +324,7 @@ def init_app(app):
     import udata.harvest.api  # noqa

     for module in entrypoints.get_enabled("udata.apis", app).values():
-        api_module = module if inspect.ismodule(module) else import_module(module)
+        module if inspect.ismodule(module) else import_module(module)

     # api.init_app(app)
     app.register_blueprint(apiv1_blueprint)
udata/api/commands.py CHANGED
@@ -4,7 +4,6 @@ import os
 import click
 from flask import current_app, json
 from flask_restx import schemas
-from werkzeug.security import gen_salt

 from udata.api import api
 from udata.api.oauth2 import OAuth2Client
udata/api/fields.py CHANGED
@@ -4,7 +4,28 @@ import logging
 import pytz
 from dateutil.parser import parse
 from flask import request, url_for
-from flask_restx.fields import *  # noqa
+
+# Explicitly import all of flask_restx fields so they're available throughout the codebase as api.fields
+from flask_restx.fields import Arbitrary as Arbitrary
+from flask_restx.fields import Boolean as Boolean
+from flask_restx.fields import ClassName as ClassName
+from flask_restx.fields import Date as Date
+from flask_restx.fields import DateTime as DateTime
+from flask_restx.fields import Fixed as Fixed
+from flask_restx.fields import Float as Float
+from flask_restx.fields import FormattedString as FormattedString
+from flask_restx.fields import Integer as Integer
+from flask_restx.fields import List as List
+from flask_restx.fields import MarshallingError as MarshallingError
+from flask_restx.fields import MinMaxMixin as MinMaxMixin
+from flask_restx.fields import Nested as Nested
+from flask_restx.fields import NumberMixin as NumberMixin
+from flask_restx.fields import Polymorph as Polymorph
+from flask_restx.fields import Raw as Raw
+from flask_restx.fields import String as String
+from flask_restx.fields import StringMixin as StringMixin
+from flask_restx.fields import Url as Url
+from flask_restx.fields import Wildcard as Wildcard

 from udata.uris import endpoint_for
 from udata.utils import multi_to_dict
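The block above replaces a star import with per-name imports aliased to themselves. `from module import name as name` is the conventional way to mark a name as an intentional re-export: it is the idiom used by type stubs, honoured by type checkers when implicit re-export is disabled, and it keeps linters from flagging the imports as unused. A minimal, generic sketch of the idiom (hypothetical module, unrelated to the udata codebase):

    # mypkg/fields.py -- hypothetical facade module re-exporting selected names
    from datetime import date as date          # the alias marks an explicit re-export
    from datetime import datetime as datetime  # consumers do: from mypkg.fields import datetime

    __all__ = ["date", "datetime"]  # optional, makes the public surface explicit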
udata/api_fields.py CHANGED
@@ -67,12 +67,18 @@ def convert_db_to_field(key, field, info={}):
         field_read, field_write = convert_db_to_field(
             f"{key}.inner", field.field, info.get("inner_field_info", {})
         )
-        constructor_read = lambda **kwargs: restx_fields.List(field_read, **kwargs)
-        constructor_write = lambda **kwargs: restx_fields.List(field_write, **kwargs)
+
+        def constructor_read(**kwargs):
+            return restx_fields.List(field_read, **kwargs)
+
+        def constructor_write(**kwargs):
+            return restx_fields.List(field_write, **kwargs)
     elif isinstance(
         field, (mongo_fields.GenericReferenceField, mongoengine.fields.GenericLazyReferenceField)
     ):
-        constructor = lambda **kwargs: restx_fields.Nested(lazy_reference, **kwargs)
+
+        def constructor(**kwargs):
+            return restx_fields.Nested(lazy_reference, **kwargs)
     elif isinstance(field, mongo_fields.ReferenceField):
         # For reference we accept while writing a String representing the ID of the referenced model.
         # For reading, if the user supplied a `nested_fields` (RestX model), we use it to convert
@@ -83,21 +89,25 @@ def convert_db_to_field(key, field, info={}):
             # If there is no `nested_fields` convert the object to the string representation.
             constructor_read = restx_fields.String
         else:
-            constructor_read = lambda **kwargs: restx_fields.Nested(nested_fields, **kwargs)
+
+            def constructor_read(**kwargs):
+                return restx_fields.Nested(nested_fields, **kwargs)

         write_params["description"] = "ID of the reference"
         constructor_write = restx_fields.String
     elif isinstance(field, mongo_fields.EmbeddedDocumentField):
         nested_fields = info.get("nested_fields")
         if nested_fields is not None:
-            constructor = lambda **kwargs: restx_fields.Nested(nested_fields, **kwargs)
+
+            def constructor(**kwargs):
+                return restx_fields.Nested(nested_fields, **kwargs)
         elif hasattr(field.document_type_obj, "__read_fields__"):
-            constructor_read = lambda **kwargs: restx_fields.Nested(
-                field.document_type_obj.__read_fields__, **kwargs
-            )
-            constructor_write = lambda **kwargs: restx_fields.Nested(
-                field.document_type_obj.__write_fields__, **kwargs
-            )
+
+            def constructor_read(**kwargs):
+                return restx_fields.Nested(field.document_type_obj.__read_fields__, **kwargs)
+
+            def constructor_write(**kwargs):
+                return restx_fields.Nested(field.document_type_obj.__write_fields__, **kwargs)
         else:
             raise ValueError(
                 f"EmbeddedDocumentField `{key}` requires a `nested_fields` param to serialize/deserialize or a `@generate_fields()` definition."
@@ -335,7 +345,7 @@ def wrap_primary_key(
     field_name: str,
     foreign_field: mongoengine.fields.ReferenceField | mongoengine.fields.GenericReferenceField,
     value: str,
-    document_type: type = None,
+    document_type=None,
 ):
     """
     We need to wrap the `String` inside an `ObjectId` most of the time. If the foreign ID is a `String` we need to get
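The lambda-to-`def` rewrites above are behaviour-preserving: a named nested function closes over the same variables as the lambda did, while gaining a real `__name__` for tracebacks and avoiding the lint rule against assigning lambdas to names. A generic sketch of the equivalence (illustrative names only, not udata code):

    def make_constructor(wrapped):
        # Before: constructor = lambda **kwargs: Wrapper(wrapped, **kwargs)
        # After: a named closure with identical call semantics.
        def constructor(**kwargs):
            return {"wrapped": wrapped, **kwargs}  # stand-in for Wrapper(wrapped, **kwargs)

        return constructor

    build = make_constructor("inner")
    assert build(required=True) == {"wrapped": "inner", "required": True}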
udata/app.py CHANGED
@@ -206,7 +206,7 @@ def register_extensions(app):
         mail,
         models,
         mongo,
-        notifications,
+        notifications,  # noqa
         routing,
         search,
         sentry,
udata/assets.py CHANGED
@@ -2,7 +2,7 @@ from flask import current_app, url_for
 from flask_cdn import url_for as cdn_url_for


-def cdn_for(endpoint, **kwargs):
+def cdn_for(endpoint: str, **kwargs) -> str:
     """
     Get a CDN URL for a static assets.

udata/auth/__init__.py CHANGED
@@ -2,18 +2,14 @@ import logging

 from flask import current_app, render_template
 from flask_principal import Permission as BasePermission
-from flask_principal import (
-    PermissionDenied,  # noqa: facade pattern
-    RoleNeed,
-    UserNeed,  # noqa: facade pattern
-    identity_loaded,  # noqa: facade pattern
-)
-from flask_security import (  # noqa
-    Security,  # noqa
-    current_user,
-    login_required,
-    login_user,
-)
+from flask_principal import PermissionDenied as PermissionDenied
+from flask_principal import RoleNeed as RoleNeed
+from flask_principal import UserNeed as UserNeed
+from flask_principal import identity_loaded as identity_loaded
+from flask_security import Security as Security
+from flask_security import current_user as current_user
+from flask_security import login_required as login_required
+from flask_security import login_user as login_user
 from werkzeug.utils import import_string

 log = logging.getLogger(__name__)
udata/commands/db.py CHANGED
@@ -159,10 +159,10 @@ def check_references(models_to_check):
     references = []
     for model in set(_models):
         if model.__name__ == "Activity":
-            print(f"Skipping Activity model, scheduled for deprecation")
+            print("Skipping Activity model, scheduled for deprecation")
             continue
         if model.__name__ == "GeoLevel":
-            print(f"Skipping GeoLevel model, scheduled for deprecation")
+            print("Skipping GeoLevel model, scheduled for deprecation")
             continue

         if models_to_check and model.__name__ not in models_to_check:
@@ -367,7 +367,7 @@ def check_references(models_to_check):
                             )
                         else:
                             print_and_save(f'Unknown ref type {reference["type"]}')
-                    except mongoengine.errors.FieldDoesNotExist as e:
+                    except mongoengine.errors.FieldDoesNotExist:
                         print_and_save(
                             f"[ERROR for {model.__name__} {obj.id}] {traceback.format_exc()}"
                         )
udata/commands/dcat.py CHANGED
@@ -33,7 +33,7 @@ def parse_url(url, csw, iso, quiet=False, rid=""):
     """Parse the datasets in a DCAT format located at URL (debug)"""
     if quiet:
         verbose_loggers = ["rdflib", "udata.core.dataset"]
-        [logging.getLogger(l).setLevel(logging.ERROR) for l in verbose_loggers]
+        [logging.getLogger(logger).setLevel(logging.ERROR) for logger in verbose_loggers]

     class MockSource:
         url = ""
udata/commands/fixtures.py CHANGED
@@ -1,11 +1,21 @@
+"""Commands to download fixtures from the udata-fixtures repository, import them locally.
+
+When "downloading" (generating) the fixtures, save the json as is.
+When "importing" the fixtures, massage them so then can be loaded properly.
+"""
+
 import json
 import logging
+import pathlib

 import click
 import requests
 from flask import current_app

 from udata.commands import cli
+from udata.core.contact_point.factories import ContactPointFactory
+from udata.core.contact_point.models import ContactPoint
+from udata.core.dataservices.factories import DataserviceFactory
 from udata.core.dataset.factories import (
     CommunityResourceFactory,
     DatasetFactory,
@@ -21,21 +31,79 @@ log = logging.getLogger(__name__)


 DATASET_URL = "/api/1/datasets"
+DATASERVICES_URL = "/api/1/dataservices"
 ORG_URL = "/api/1/organizations"
 REUSE_URL = "/api/1/reuses"
 COMMUNITY_RES_URL = "/api/1/datasets/community_resources"
 DISCUSSION_URL = "/api/1/discussions"


-DEFAULT_FIXTURE_FILE = (
-    "https://raw.githubusercontent.com/opendatateam/udata-fixtures/main/results.json"  # noqa
-)
+DEFAULT_FIXTURE_FILE_TAG: str = "v2.0.0"
+DEFAULT_FIXTURE_FILE: str = f"https://raw.githubusercontent.com/opendatateam/udata-fixtures/{DEFAULT_FIXTURE_FILE_TAG}/results.json"  # noqa
+
+DEFAULT_FIXTURES_RESULTS_FILENAME: str = "results.json"
+
+UNWANTED_KEYS: dict[str, list[str]] = {
+    "dataset": [
+        "uri",
+        "page",
+        "last_update",
+        "last_modified",
+        "license",
+        "badges",
+        "spatial",
+        "quality",
+    ],
+    "resource": ["latest", "preview_url", "last_modified"],
+    "organization": ["members", "page", "uri", "logo_thumbnail"],
+    "reuse": ["datasets", "image_thumbnail", "page", "uri", "organization", "owner"],
+    "community": [
+        "dataset",
+        "organization",
+        "owner",
+        "latest",
+        "last_modified",
+        "preview_url",
+    ],
+    "discussion": ["subject", "user", "url", "class"],
+    "message": ["posted_by"],
+    "dataservice": [
+        "datasets",
+        "license",
+        "organization",
+        "owner",
+        "self_api_url",
+        "self_web_url",
+    ],
+}
+
+
+def remove_unwanted_keys(obj: dict, filter_type: str) -> dict:
+    """Remove UNWANTED_KEYS from a dict."""
+    for unwanted_key in UNWANTED_KEYS[filter_type]:
+        if unwanted_key in obj:
+            del obj[unwanted_key]
+    fix_dates(obj)
+    return obj
+
+
+def fix_dates(obj: dict) -> dict:
+    """Fix dates from the fixtures so they can be safely reloaded later on."""
+    if "internal" not in obj:
+        return obj
+    obj["created_at_internal"] = obj["internal"]["created_at_internal"]
+    obj["last_modified_internal"] = obj["internal"]["last_modified_internal"]
+    del obj["internal"]
+    del obj["created_at"]
+    return obj


 @cli.command()
 @click.argument("data-source")
-def generate_fixtures_file(data_source):
-    """Build sample fixture file based on datasets slugs list (users, datasets, reuses)."""
+@click.argument("results-filename", default=DEFAULT_FIXTURES_RESULTS_FILENAME)
+def generate_fixtures_file(data_source: str, results_filename: str) -> None:
+    """Build sample fixture file based on datasets slugs list (users, datasets, reuses, dataservices)."""
+    results_file = pathlib.Path(results_filename)
     datasets_slugs = current_app.config["FIXTURE_DATASET_SLUGS"]
     json_result = []

@@ -44,31 +112,13 @@ def generate_fixtures_file(data_source):
         json_fixture = {}

         json_dataset = requests.get(f"{data_source}{DATASET_URL}/{slug}/").json()
-        del json_dataset["uri"]
-        del json_dataset["page"]
-        del json_dataset["last_update"]
-        del json_dataset["last_modified"]
-        del json_dataset["license"]
-        del json_dataset["badges"]
-        del json_dataset["spatial"]
-        del json_dataset["quality"]
-        json_dataset["created_at_internal"] = json_dataset.pop("created_at")
         json_resources = json_dataset.pop("resources")
-        for res in json_resources:
-            del res["latest"]
-            del res["preview_url"]
-            del res["last_modified"]
-            res["created_at_internal"] = res.pop("created_at")
         if json_dataset["organization"] is None:
             json_owner = json_dataset.pop("owner")
             json_dataset["owner"] = json_owner["id"]
         else:
             json_org = json_dataset.pop("organization")
             json_org = requests.get(f"{data_source}{ORG_URL}/{json_org['id']}/").json()
-            del json_org["members"]
-            del json_org["page"]
-            del json_org["uri"]
-            del json_org["logo_thumbnail"]
             json_fixture["organization"] = json_org
         json_fixture["resources"] = json_resources
         json_fixture["dataset"] = json_dataset
@@ -76,50 +126,34 @@ def generate_fixtures_file(data_source):
         json_reuses = requests.get(
             f"{data_source}{REUSE_URL}/?dataset={json_dataset['id']}"
         ).json()["data"]
-        for reuse in json_reuses:
-            del reuse["datasets"]
-            del reuse["image_thumbnail"]
-            del reuse["page"]
-            del reuse["uri"]
-            del reuse["organization"]
-            del reuse["owner"]
         json_fixture["reuses"] = json_reuses

         json_community = requests.get(
             f"{data_source}{COMMUNITY_RES_URL}/?dataset={json_dataset['id']}"
         ).json()["data"]
-        for com in json_community:
-            del com["dataset"]
-            del com["organization"]
-            del com["owner"]
-            del com["latest"]
-            del com["last_modified"]
-            del com["preview_url"]
-            com["created_at_internal"] = com.pop("created_at")
         json_fixture["community_resources"] = json_community

         json_discussion = requests.get(
             f"{data_source}{DISCUSSION_URL}/?for={json_dataset['id']}"
         ).json()["data"]
-        for discussion in json_discussion:
-            del discussion["subject"]
-            del discussion["user"]
-            del discussion["url"]
-            del discussion["class"]
-            for message in discussion["discussion"]:
-                del message["posted_by"]
         json_fixture["discussions"] = json_discussion

+        json_dataservices = requests.get(
+            f"{data_source}{DATASERVICES_URL}/?dataset={json_dataset['id']}"
+        ).json()["data"]
+        json_fixture["dataservices"] = json_dataservices
+
         json_result.append(json_fixture)

-    with open("results.json", "w") as f:
-        json.dump(json_result, f)
+    with results_file.open("w") as f:
+        json.dump(json_result, f, indent=2)
+    print(f"Fixtures saved to file {results_filename}")


 @cli.command()
 @click.argument("source", default=DEFAULT_FIXTURE_FILE)
-def generate_fixtures(source):
-    """Build sample fixture data (users, datasets, reuses) from local or remote file."""
+def import_fixtures(source):
+    """Build sample fixture data (users, datasets, reuses, dataservices) from local or remote file."""
     if source.startswith("http"):
         json_fixtures = requests.get(source).json()
     else:
@@ -129,24 +163,32 @@ def generate_fixtures(source):
     with click.progressbar(json_fixtures) as bar:
         for fixture in bar:
             user = UserFactory()
+            dataset = fixture["dataset"]
+            dataset = remove_unwanted_keys(dataset, "dataset")
             if not fixture["organization"]:
-                dataset = DatasetFactory(**fixture["dataset"], owner=user)
+                dataset = DatasetFactory(**dataset, owner=user)
             else:
                 org = Organization.objects(id=fixture["organization"]["id"]).first()
                 if not org:
-                    org = OrganizationFactory(
-                        **fixture["organization"], members=[Member(user=user)]
-                    )
-                dataset = DatasetFactory(**fixture["dataset"], organization=org)
+                    organization = fixture["organization"]
+                    organization = remove_unwanted_keys(organization, "organization")
+                    org = OrganizationFactory(**organization, members=[Member(user=user)])
+                dataset = DatasetFactory(**dataset, organization=org)
             for resource in fixture["resources"]:
+                resource = remove_unwanted_keys(resource, "resource")
                 res = ResourceFactory(**resource)
                 dataset.add_resource(res)
             for reuse in fixture["reuses"]:
+                reuse = remove_unwanted_keys(reuse, "reuse")
                 ReuseFactory(**reuse, datasets=[dataset], owner=user)
             for community in fixture["community_resources"]:
+                community = remove_unwanted_keys(community, "community")
                 CommunityResourceFactory(**community, dataset=dataset, owner=user)
            for discussion in fixture["discussions"]:
+                discussion = remove_unwanted_keys(discussion, "discussion")
                 messages = discussion.pop("discussion")
+                for message in messages:
+                    message = remove_unwanted_keys(message, "message")
                 DiscussionFactory(
                     **discussion,
                     subject=dataset,
@@ -155,3 +197,17 @@ def generate_fixtures(source):
                         MessageDiscussionFactory(**message, posted_by=user) for message in messages
                     ],
                 )
+            for dataservice in fixture["dataservices"]:
+                dataservice = remove_unwanted_keys(dataservice, "dataservice")
+                if not dataservice["contact_point"]:
+                    DataserviceFactory(**dataservice, datasets=[dataset])
+                else:
+                    contact_point = ContactPoint.objects(
+                        id=dataservice["contact_point"]["id"]
+                    ).first()
+                    if not contact_point:
+                        contact_point = ContactPointFactory(**dataservice["contact_point"])
+                    dataservice.pop("contact_point")
+                    DataserviceFactory(
+                        **dataservice, datasets=[dataset], contact_point=contact_point
+                    )
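To make the new massaging step concrete, the sketch below shows what `remove_unwanted_keys()` (and `fix_dates()`, which it calls) from the diff above does to one fixture entry. The sample resource dict is invented for illustration:

    from udata.commands.fixtures import remove_unwanted_keys

    resource = {
        "title": "some-file.csv",
        "latest": "https://example.test/r/latest",  # listed in UNWANTED_KEYS["resource"]
        "preview_url": None,                        # listed in UNWANTED_KEYS["resource"]
        "last_modified": "2024-01-02T00:00:00",     # listed in UNWANTED_KEYS["resource"]
        "created_at": "2024-01-01T00:00:00",
        "internal": {
            "created_at_internal": "2024-01-01T00:00:00",
            "last_modified_internal": "2024-01-02T00:00:00",
        },
    }

    cleaned = remove_unwanted_keys(resource, "resource")
    # The unwanted keys are gone and the "internal" dates are hoisted, so the dict
    # can be passed straight to ResourceFactory(**cleaned).
    assert "latest" not in cleaned and "internal" not in cleaned
    assert cleaned["created_at_internal"] == "2024-01-01T00:00:00"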
udata/commands/init.py CHANGED
@@ -11,7 +11,7 @@ from udata.i18n import gettext as _
 from udata.search.commands import index

 from .db import migrate
-from .fixtures import generate_fixtures
+from .fixtures import import_fixtures

 log = logging.getLogger(__name__)

@@ -44,6 +44,6 @@ def init(ctx):

     text = _("Do you want to create some sample data?")
     if click.confirm(text, default=True):
-        ctx.invoke(generate_fixtures)
+        ctx.invoke(import_fixtures)

     success(_("Your udata instance is ready!"))
udata/commands/tests/test_fixtures.py ADDED
@@ -0,0 +1,71 @@
+from tempfile import NamedTemporaryFile
+
+import pytest
+import requests
+from werkzeug.wrappers.response import Response
+
+import udata.commands.fixtures
+from udata import models
+from udata.core.dataservices.factories import DataserviceFactory
+from udata.core.dataset.factories import (
+    CommunityResourceFactory,
+    DatasetFactory,
+    ResourceFactory,
+)
+from udata.core.discussions.factories import DiscussionFactory, MessageDiscussionFactory
+from udata.core.organization.factories import OrganizationFactory
+from udata.core.organization.models import Member
+from udata.core.reuse.factories import ReuseFactory
+from udata.core.user.factories import UserFactory
+
+
+@pytest.mark.usefixtures("clean_db")
+class FixturesTest:
+    @pytest.mark.frontend
+    @pytest.mark.options(FIXTURE_DATASET_SLUGS=["some-test-dataset-slug"])
+    def test_generate_fixtures_file_then_import(self, app, cli, api, monkeypatch):
+        """Test generating fixtures from the current env, then importing them back."""
+        assert models.Dataset.objects.count() == 0  # Start with a clean slate.
+        user = UserFactory()
+        org = OrganizationFactory(**{}, members=[Member(user=user)])
+        # Set the same slug we're 'exporting' from the FIXTURE_DATASET_SLUG config, see the
+        # @pytest.mark.options above.
+        dataset = DatasetFactory(**{}, slug="some-test-dataset-slug", organization=org)
+        res = ResourceFactory(**{})
+        dataset.add_resource(res)
+        ReuseFactory(**{}, datasets=[dataset], owner=user)
+        CommunityResourceFactory(**{}, dataset=dataset, owner=user)
+        DiscussionFactory(
+            **{},
+            subject=dataset,
+            user=user,
+            discussion=[MessageDiscussionFactory(**{}, posted_by=user)],
+        )
+        DataserviceFactory(**{}, datasets=[dataset])
+
+        with NamedTemporaryFile(mode="w+", delete=True) as fixtures_fd:
+            # Get the fixtures from the local instance.
+            monkeypatch.setattr(requests, "get", lambda url: api.get(url))
+            monkeypatch.setattr(Response, "json", Response.get_json)
+            result = cli("generate-fixtures-file", "", fixtures_fd.name)
+            fixtures_fd.flush()
+            assert "Fixtures saved to file " in result.output
+
+            # Then load them in the database to make sure they're correct.
+            result = cli("import-fixtures", fixtures_fd.name)
+            assert models.Organization.objects(slug=org.slug).count() > 0
+            assert models.Dataset.objects.count() > 0
+            assert models.Discussion.objects.count() > 0
+            assert models.CommunityResource.objects.count() > 0
+            assert models.User.objects.count() > 0
+            assert models.Dataservice.objects.count() > 0
+
+    def test_import_fixtures_from_default_file(self, cli):
+        """Test importing fixtures from udata.commands.fixture.DEFAULT_FIXTURE_FILE."""
+        cli("import-fixtures")
+        assert models.Organization.objects.count() > 0
+        assert models.Dataset.objects.count() > 0
+        assert models.Reuse.objects.count() > 0
+        assert models.User.objects.count() > 0
+        if udata.commands.fixtures.DEFAULT_FIXTURE_FILE_TAG > "v1.0.0":
+            assert models.Dataservice.objects.count() > 0
udata/core/activity/tasks.py CHANGED
@@ -1,7 +1,7 @@
 import logging

 from udata.models import Organization, User, db
-from udata.tasks import celery, task
+from udata.tasks import task

 from .signals import new_activity

udata/core/badges/models.py CHANGED
@@ -3,11 +3,9 @@ from datetime import datetime
 from mongoengine.signals import post_save


-from udata.api_fields import field
 from udata.auth import current_user
 from udata.mongo import db

-from .fields import badge_fields
 from .signals import on_badge_added, on_badge_removed

 log = logging.getLogger(__name__)
udata/core/contact_point/api.py CHANGED
@@ -1,10 +1,8 @@
 from udata.api import API, api
 from udata.api.parsers import ModelApiParser
-from udata.auth import admin_permission

-from .api_fields import contact_point_fields, contact_point_page_fields
+from .api_fields import contact_point_fields
 from .forms import ContactPointForm
-from .models import ContactPoint


 class ContactPointApiParser(ModelApiParser):
udata/core/dataservices/tasks.py CHANGED
@@ -3,7 +3,7 @@ from celery.utils.log import get_task_logger
 from udata.core.dataservices.models import Dataservice

 # from udata.harvest.models import HarvestJob
-from udata.models import Activity, Discussion, Follow, Transfer
+from udata.models import Discussion, Follow, Transfer
 from udata.tasks import job

 log = get_task_logger(__name__)