elody 0.0.63.tar.gz → 0.0.163.tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (55)
  1. {elody-0.0.63 → elody-0.0.163}/PKG-INFO +8 -3
  2. {elody-0.0.63 → elody-0.0.163}/pyproject.toml +7 -2
  3. {elody-0.0.63 → elody-0.0.163}/src/elody/client.py +70 -23
  4. {elody-0.0.63 → elody-0.0.163}/src/elody/csv.py +128 -33
  5. elody-0.0.163/src/elody/error_codes.py +112 -0
  6. {elody-0.0.63 → elody-0.0.163}/src/elody/exceptions.py +14 -0
  7. elody-0.0.163/src/elody/job.py +95 -0
  8. {elody-0.0.63 → elody-0.0.163}/src/elody/loader.py +33 -5
  9. elody-0.0.163/src/elody/migration/base_object_migrator.py +18 -0
  10. elody-0.0.163/src/elody/object_configurations/base_object_configuration.py +174 -0
  11. elody-0.0.163/src/elody/object_configurations/elody_configuration.py +144 -0
  12. elody-0.0.163/src/elody/object_configurations/job_configuration.py +65 -0
  13. elody-0.0.163/src/elody/policies/authentication/__init__.py +0 -0
  14. {elody-0.0.63 → elody-0.0.163}/src/elody/policies/authentication/base_user_tenant_validation_policy.py +48 -15
  15. elody-0.0.163/src/elody/policies/authorization/__init__.py +0 -0
  16. {elody-0.0.63 → elody-0.0.163}/src/elody/policies/authorization/filter_generic_objects_policy.py +68 -22
  17. elody-0.0.163/src/elody/policies/authorization/filter_generic_objects_policy_v2.py +166 -0
  18. {elody-0.0.63 → elody-0.0.163}/src/elody/policies/authorization/generic_object_detail_policy.py +10 -27
  19. elody-0.0.163/src/elody/policies/authorization/generic_object_mediafiles_policy.py +82 -0
  20. elody-0.0.163/src/elody/policies/authorization/generic_object_metadata_policy.py +81 -0
  21. {elody-0.0.63 → elody-0.0.163}/src/elody/policies/authorization/generic_object_relations_policy.py +12 -29
  22. elody-0.0.163/src/elody/policies/authorization/generic_object_request_policy.py +137 -0
  23. elody-0.0.163/src/elody/policies/authorization/generic_object_request_policy_v2.py +133 -0
  24. elody-0.0.63/src/elody/policies/authorization/generic_object_metadata_policy.py → elody-0.0.163/src/elody/policies/authorization/mediafile_derivatives_policy.py +19 -27
  25. elody-0.0.163/src/elody/policies/authorization/mediafile_download_policy.py +71 -0
  26. {elody-0.0.63 → elody-0.0.163}/src/elody/policies/authorization/multi_tenant_policy.py +14 -6
  27. {elody-0.0.63 → elody-0.0.163}/src/elody/policies/authorization/tenant_request_policy.py +3 -1
  28. elody-0.0.163/src/elody/policies/helpers.py +37 -0
  29. elody-0.0.163/src/elody/policies/permission_handler.py +268 -0
  30. elody-0.0.163/src/elody/policies/tenant_id_resolver.py +375 -0
  31. {elody-0.0.63 → elody-0.0.163}/src/elody/schemas.py +0 -3
  32. elody-0.0.163/src/elody/util.py +271 -0
  33. {elody-0.0.63 → elody-0.0.163}/src/elody.egg-info/PKG-INFO +8 -3
  34. {elody-0.0.63 → elody-0.0.163}/src/elody.egg-info/SOURCES.txt +21 -1
  35. {elody-0.0.63 → elody-0.0.163}/src/elody.egg-info/requires.txt +6 -1
  36. {elody-0.0.63 → elody-0.0.163}/src/elody.egg-info/top_level.txt +1 -0
  37. elody-0.0.163/src/tests/__init_.py +0 -0
  38. elody-0.0.163/src/tests/data.py +74 -0
  39. elody-0.0.163/src/tests/unit/__init__.py +0 -0
  40. elody-0.0.163/src/tests/unit/test_csv.py +410 -0
  41. elody-0.0.163/src/tests/unit/test_utils.py +293 -0
  42. elody-0.0.63/src/elody/policies/authorization/generic_object_request_policy.py +0 -136
  43. elody-0.0.63/src/elody/policies/permission_handler.py +0 -250
  44. elody-0.0.63/src/elody/util.py +0 -117
  45. {elody-0.0.63 → elody-0.0.163}/LICENSE +0 -0
  46. {elody-0.0.63 → elody-0.0.163}/README.md +0 -0
  47. {elody-0.0.63 → elody-0.0.163}/setup.cfg +0 -0
  48. {elody-0.0.63 → elody-0.0.163}/src/__init__.py +0 -0
  49. {elody-0.0.63 → elody-0.0.163}/src/elody/__init__.py +0 -0
  50. {elody-0.0.63/src/elody/policies → elody-0.0.163/src/elody/migration}/__init__.py +0 -0
  51. {elody-0.0.63/src/elody/policies/authentication → elody-0.0.163/src/elody/object_configurations}/__init__.py +0 -0
  52. {elody-0.0.63/src/elody/policies/authorization → elody-0.0.163/src/elody/policies}/__init__.py +0 -0
  53. {elody-0.0.63 → elody-0.0.163}/src/elody/policies/authentication/multi_tenant_policy.py +0 -0
  54. {elody-0.0.63 → elody-0.0.163}/src/elody/validator.py +0 -0
  55. {elody-0.0.63 → elody-0.0.163}/src/elody.egg-info/dependency_links.txt +0 -0

{elody-0.0.63 → elody-0.0.163}/PKG-INFO
@@ -1,6 +1,6 @@
-Metadata-Version: 2.1
+Metadata-Version: 2.2
 Name: elody
-Version: 0.0.63
+Version: 0.0.163
 Summary: elody SDK for Python
 Author-email: Inuits <developers@inuits.eu>
 License: GNU GENERAL PUBLIC LICENSE
@@ -360,8 +360,13 @@ Requires-Dist: idna>=3.4
 Requires-Dist: requests>=2.31.0
 Requires-Dist: urllib3>=1.26.16
 Provides-Extra: loader
+Requires-Dist: APScheduler>=3.10.4; extra == "loader"
 Requires-Dist: cloudevents>=1.9.0; extra == "loader"
-Requires-Dist: inuits-policy-based-auth>=9.6.0; extra == "loader"
+Requires-Dist: inuits-policy-based-auth>=10.0.1; extra == "loader"
+Requires-Dist: jsonschema>=4.23.0; extra == "loader"
+Requires-Dist: pytz>=2024.1; extra == "loader"
+Requires-Dist: six>=1.16.0; extra == "loader"
+Requires-Dist: tzlocal>=5.2; extra == "loader"
 Provides-Extra: util
 Requires-Dist: cloudevents>=1.9.0; extra == "util"

{elody-0.0.63 → elody-0.0.163}/pyproject.toml
@@ -6,7 +6,7 @@ build-backend = "setuptools.build_meta"
 
 [project]
 name = "elody"
-version = "0.0.63"
+version = "0.0.163"
 description = "elody SDK for Python"
 readme = "README.md"
 authors = [{ name = "Inuits", email = "developers@inuits.eu" }]
@@ -31,8 +31,13 @@ dependencies = [
 
 [project.optional-dependencies]
 loader = [
+    "APScheduler>=3.10.4",
     "cloudevents>=1.9.0",
-    "inuits-policy-based-auth>=9.6.0",
+    "inuits-policy-based-auth>=10.0.1",
+    "jsonschema>=4.23.0",
+    "pytz>=2024.1",
+    "six>=1.16.0",
+    "tzlocal>=5.2",
 ]
 util = [
     "cloudevents>=1.9.0",

{elody-0.0.63 → elody-0.0.163}/src/elody/client.py
@@ -5,7 +5,9 @@ from .exceptions import NonUniqueException, NotFoundException
 
 
 class Client:
-    def __init__(self, elody_collection_url=None, static_jwt=None, extra_headers=None):
+    def __init__(
+        self, elody_collection_url=None, static_jwt=None, extra_headers=None, proxy=None
+    ):
         self.elody_collection_url = elody_collection_url or os.environ.get(
             "ELODY_COLLECTION_URL", None
         )
@@ -13,19 +15,30 @@ class Client:
         self.headers = {"Authorization": f"Bearer {self.static_jwt}"}
         if extra_headers:
             self.headers = {**self.headers, **extra_headers}
+        self.proxies = None
+        if proxy:
+            self.proxies = {
+                "https": proxy,
+                "http": proxy,
+            }
 
     def __create_mediafile(self, entity_id, mediafile):
         url = f"{self.elody_collection_url}/entities/{entity_id}/mediafiles"
         headers = {**self.headers, **{"Accept": "text/uri-list"}}
-        response = requests.post(url, json=mediafile, headers=headers)
+        response = requests.post(
+            url, json=mediafile, headers=headers, proxies=self.proxies
+        )
         return self.__handle_response(response, "Failed to create mediafile", "text")
 
-    def create_mediafile_with_filename(self, filename):
-        data = {"filename": filename}
+    def create_mediafile_with_filename(self, filename, institution_id=None):
+        data = {"filename": filename, "type": "mediafile"}
+        if institution_id:
+            data.update({"metadata": [{"key": "institution", "value": institution_id}]})
         req = requests.post(
             f"{self.elody_collection_url}/mediafiles",
             json=data,
             headers=self.headers,
+            proxies=self.proxies,
         )
         if req.status_code != 201:
             raise Exception(req.text.strip())
@@ -36,16 +49,24 @@ class Client:
             f"{self.elody_collection_url}/tickets",
             json={"filename": mediafile_name},
             headers=self.headers,
+            proxies=self.proxies,
         )
         if req.status_code != 201:
             raise Exception(req.text.strip())
         return req.text.strip().replace('"', "")
 
     def __get_upload_location(
-        self, entity_id, filename, is_public=True, identifiers=None
+        self,
+        entity_id,
+        filename,
+        is_public=True,
+        identifiers=None,
+        mediafile_object=None,
     ):
         if not identifiers:
             identifiers = list()
+        if not mediafile_object:
+            mediafile_object = dict()
         metadata = []
         if is_public:
             metadata = [
@@ -55,9 +76,12 @@ class Client:
                 }
             ]
         mediafile = {
-            "filename": filename,
-            "metadata": metadata,
-            "identifiers": identifiers,
+            **{
+                "filename": filename,
+                "metadata": metadata,
+                "identifiers": identifiers,
+            },
+            **mediafile_object,
         }
         return self.__create_mediafile(entity_id, mediafile)
 
@@ -78,38 +102,48 @@ class Client:
 
     def add_entity_mediafiles(self, identifier, payload):
         url = f"{self.elody_collection_url}/entities/{identifier}/mediafiles"
-        response = requests.post(url, json=payload, headers=self.headers)
+        response = requests.post(
+            url, json=payload, headers=self.headers, proxies=self.proxies
+        )
         return self.__handle_response(response, "Failed to add mediafiles")
 
-    def add_object(self, collection, payload):
+    def add_object(self, collection, payload, params=None):
         url = f"{self.elody_collection_url}/{collection}"
-        response = requests.post(url, json=payload, headers=self.headers)
+        response = requests.post(
+            url, json=payload, headers=self.headers, params=params, proxies=self.proxies
+        )
         return self.__handle_response(response, "Failed to add object")
 
     def add_object_metadata(self, collection, identifier, payload):
         if collection == "entities":
             url = f"{self.elody_collection_url}/{collection}/{identifier}/metadata"
             payload = payload if isinstance(payload, list) else [payload]
-            response = requests.patch(url, json=payload, headers=self.headers)
+            response = requests.patch(
+                url, json=payload, headers=self.headers, proxies=self.proxies
+            )
             if response.status_code == 400 and response.json()["message"].endswith(
                 "has no metadata"
             ):
-                response = requests.post(url, json=payload, headers=self.headers)
+                response = requests.post(
+                    url, json=payload, headers=self.headers, proxies=self.proxies
+                )
             return self.__handle_response(response, "Failed to add metadata")
         else:
            url = f"{self.elody_collection_url}/{collection}/{identifier}"
            payload = {"metadata": payload if isinstance(payload, list) else [payload]}
-           response = requests.patch(url, json=payload, headers=self.headers)
+           response = requests.patch(
+               url, json=payload, headers=self.headers, proxies=self.proxies
+           )
            return self.__handle_response(response, "Failed to add metadata")
 
     def delete_object(self, collection, identifier):
         url = f"{self.elody_collection_url}/{collection}/{identifier}"
-        response = requests.delete(url, headers=self.headers)
+        response = requests.delete(url, headers=self.headers, proxies=self.proxies)
         return self.__handle_response(response, "Failed to delete object", "text")
 
     def get_all_objects(self, collection):
         url = f"{self.elody_collection_url}/{collection}"
-        response = requests.get(url, headers=self.headers)
+        response = requests.get(url, headers=self.headers, proxies=self.proxies)
         return self.__handle_response(response, "Failed to get objects")
 
     def get_mediafiles_and_check_existence(self, mediafile_ids):
@@ -121,17 +155,26 @@ class Client:
 
     def get_object(self, collection, identifier):
         url = f"{self.elody_collection_url}/{collection}/{identifier}"
-        response = requests.get(url, headers=self.headers)
+        response = requests.get(url, headers=self.headers, proxies=self.proxies)
         return self.__handle_response(response, "Failed to get object")
 
-    def update_object(self, collection, identifier, payload):
+    def update_object(self, collection, identifier, payload, overwrite=True):
         url = f"{self.elody_collection_url}/{collection}/{identifier}"
-        response = requests.put(url, json=payload, headers=self.headers)
+        if overwrite:
+            response = requests.put(
+                url, json=payload, headers=self.headers, proxies=self.proxies
+            )
+        else:
+            response = requests.patch(
+                url, json=payload, headers=self.headers, proxies=self.proxies
+            )
         return self.__handle_response(response, "Failed to update object")
 
     def update_object_relations(self, collection, identifier, payload):
         url = f"{self.elody_collection_url}/{collection}/{identifier}/relations"
-        response = requests.patch(url, json=payload, headers=self.headers)
+        response = requests.patch(
+            url, json=payload, headers=self.headers, proxies=self.proxies
+        )
        return self.__handle_response(response, "Failed to update object relations")
 
     def upload_file_from_url(
@@ -141,19 +184,23 @@ class Client:
         file_url,
         identifiers=None,
         upload_location_replace_map=None,
+        mediafile_object=None,
     ):
         if not identifiers:
             identifiers = list()
         if not upload_location_replace_map:
             upload_location_replace_map = dict()
         upload_location = self.__get_upload_location(
-            entity_id, filename, True, identifiers
+            entity_id, filename, True, identifiers, mediafile_object
         )
         for current_location, new_location in upload_location_replace_map.items():
             upload_location = upload_location.replace(current_location, new_location)
         print(upload_location)
-        mediafile = requests.get(file_url).content
+        mediafile = requests.get(file_url, proxies=self.proxies).content
         response = requests.post(
-            upload_location, files={"file": mediafile}, headers=self.headers
+            upload_location,
+            files={"file": mediafile},
+            headers=self.headers,
+            proxies=self.proxies,
        )
        return self.__handle_response(response, "Failed to upload mediafile")
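
For orientation, a minimal sketch of how the reworked Client API above might be called; the endpoint, token, proxy address, identifiers and payloads are placeholder values, not anything shipped with the package:

    from elody.client import Client

    # New in 0.0.163: an optional proxy that is forwarded to every outgoing request.
    client = Client(
        elody_collection_url="https://collection.example.com",  # placeholder endpoint
        static_jwt="<static-jwt>",                               # placeholder token
        proxy="http://proxy.example.com:8080",                   # placeholder proxy
    )

    # add_object now forwards optional query parameters.
    client.add_object("entities", {"type": "asset"}, params={"example_param": "value"})

    # update_object keeps PUT semantics by default; overwrite=False switches to PATCH.
    client.update_object("entities", "entity-id-123", {"metadata": []}, overwrite=False)

    # create_mediafile_with_filename now sets type "mediafile" and can attach an
    # institution metadata entry.
    client.create_mediafile_with_filename("scan-0001.tiff", institution_id="institution-1")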

{elody-0.0.63 → elody-0.0.163}/src/elody/csv.py
@@ -6,13 +6,14 @@ from elody.exceptions import (
     ColumnNotFoundException,
     IncorrectTypeException,
     InvalidObjectException,
+    InvalidValueException,
 )
 from elody.validator import validate_json
 from elody.schemas import entity_schema, mediafile_schema
 
 
 class CSVParser:
-    top_level_fields = ["type", "filename"]
+    top_level_fields = ["type", "filename", "file_identifier"]
     identifier_fields = ["identifiers", "identifier", "object_id", "entity_id"]
     schema_mapping = {
         "entity": entity_schema,
@@ -21,9 +22,12 @@ class CSVParser:
         "mediafiles": mediafile_schema,
     }
 
-    def __init__(self, csvstring):
-        self.csvstring = csvstring
-        self.reader = csv.DictReader(self.__csv_string_to_file_object())
+    def __init__(self, csvstring=None, csvfile=None):
+        if csvstring:
+            self.csvstring = csvstring
+            self.reader = self.__get_reader_from_csv(self.__csv_string_to_file_object())
+        elif csvfile:
+            self.reader = self.__get_reader_from_csv(csvfile)
 
     def _get_metadata_object(self, key, value, lang="en"):
         return {
@@ -46,6 +50,11 @@ class CSVParser:
     def __csv_string_to_file_object(self):
         return StringIO(self.csvstring)
 
+    def __get_reader_from_csv(self, csv_file):
+        csv_dialect = csv.Sniffer().sniff(csv_file.read())
+        csv_file.seek(0)
+        return csv.DictReader(csv_file, dialect=csv_dialect)
+
 
 class CSVSingleObject(CSVParser):
     def __init__(self, csvstring, object_type="entity"):
@@ -117,18 +126,33 @@ class CSVMultiObject(CSVParser):
         index_mapping=None,
         object_field_mapping=None,
         required_metadata_values=None,
+        metadata_field_mapping=None,
+        include_indexed_field=False,
+        top_level_fields_mapping=None,
+        external_file_sources=None,
     ):
         super().__init__(csvstring)
-        self.index_mapping = dict()
-        if index_mapping:
-            self.index_mapping = index_mapping
-        self.object_field_mapping = dict()
-        if object_field_mapping:
-            self.object_field_mapping = object_field_mapping
-        self.required_metadata_values = required_metadata_values
+        self.index_mapping = index_mapping if index_mapping else dict()
+        self.object_field_mapping = (
+            object_field_mapping if object_field_mapping else dict()
+        )
+        self.required_metadata_values = (
+            required_metadata_values if required_metadata_values else dict()
+        )
+        self.metadata_field_mapping = (
+            metadata_field_mapping if metadata_field_mapping else dict()
+        )
         self.objects = dict()
         self.errors = dict()
+        self.include_indexed_field = include_indexed_field
+        self.top_level_fields_mapping = (
+            top_level_fields_mapping if top_level_fields_mapping else dict()
+        )
+        self.external_file_sources = (
+            external_file_sources if external_file_sources else []
+        )
         self.__fill_objects_from_csv()
+        self.__rename_top_level_fields()
 
     def get_entities(self):
         return self.objects.get("entities", list())
@@ -136,9 +160,20 @@ class CSVMultiObject(CSVParser):
     def get_errors(self):
         return self.errors
 
+    def get_top_level_fields_mapping(self, type):
+        return self.top_level_fields_mapping.get(type, {})
+
     def get_mediafiles(self):
         return self.objects.get("mediafiles", list())
 
+    def __determine_language(self, row):
+        if "language" in row:
+            return row.get("language")
+        elif "lang" in row:
+            return row.get("lang")
+        else:
+            return "en"
+
     def __field_allowed(self, target_object_type, key, value):
         for object_type, fields in self.object_field_mapping.items():
             for _ in [x for x in fields if x == key]:
@@ -151,13 +186,23 @@
 
     def __fill_objects_from_csv(self):
         indexed_dict = dict()
+        external_mediafiles_ids = []
         for row in self.reader:
-            if not all(x in row.keys() for x in self.index_mapping.values()):
-                raise ColumnNotFoundException(
-                    f"Not all identifying columns are present in CSV"
-                )
+            mandatory_columns = [
+                v for k, v in self.index_mapping.items() if not k.startswith("?")
+            ]
+            missing_columns = [x for x in mandatory_columns if x not in row.keys()]
+            if missing_columns:
+                raise ColumnNotFoundException(f"{', '.join(missing_columns)}")
+            lang = self.__determine_language(row)
             previous_id = None
             for type, identifying_column in self.index_mapping.items():
+                is_type_optional = False
+                if type.startswith("?"):
+                    is_type_optional = True
+                    type = type.lstrip("?")
+                if not row.get(identifying_column) and is_type_optional:
+                    continue
                 id = row[identifying_column]
                 if type not in indexed_dict:
                     indexed_dict[type] = dict()
@@ -167,7 +212,23 @@
                 if previous_id:
                     indexed_dict[type][id]["matching_id"] = previous_id
                 previous_id = id
+                file_source = None
                 for key, value in row.items():
+                    if not value:
+                        continue
+                    if key == "file_source":
+                        file_source = value
+                    if (
+                        key == "file_identifier"
+                        and file_source in self.external_file_sources
+                    ):
+                        matching_id = indexed_dict[type][id]["matching_id"]
+                        if not any(matching_id in id for id in external_mediafiles_ids):
+                            external_mediafiles_ids.append({matching_id: file_source})
+                        if "entities" not in indexed_dict:
+                            indexed_dict["entities"] = dict()
+                        if id in indexed_dict["entities"]:
+                            indexed_dict["entities"][id]["file_identifier"] = value
                     if self._is_relation_field(key) and self.__field_allowed(
                         type, key, value
                     ):
@@ -187,34 +248,53 @@
                         indexed_dict[type][id][key] = value
                     elif (
                         key not in self.index_mapping.values()
-                        and self.__field_allowed(type, key, value)
-                    ):
-                        indexed_dict[type][id].setdefault("metadata", list())
-                        indexed_dict[type][id]["metadata"].append(
-                            self._get_metadata_object(key, value)
-                        )
+                        or self.include_indexed_field
+                    ) and self.__field_allowed(type, key, value):
+                        # Map the metadata field to a unified key if applicable
+                        metadata_info = self.metadata_field_mapping.get(key, {})
+                        if metadata_info.get("target") == type or not metadata_info:
+                            case_insensitive = metadata_info.get("case_insensitive", False)
+                            metadata_key = metadata_info.get("map_to", key)
+                            indexed_dict[type][id].setdefault("metadata", list())
+                            options = metadata_info.get("value_options")
+                            if case_insensitive:
+                                value = value.lower()
+                            if options and value not in options:
+                                raise InvalidValueException(
+                                    f'The value "{value}" is invalid, these are the valid values: {options}'
+                                )
+                            indexed_dict[type][id]["metadata"].append(
+                                self._get_metadata_object(metadata_key, value, lang)
+                            )
         self.__validate_indexed_dict(indexed_dict)
         self.__add_required_fields(indexed_dict)
         for object_type, objects in indexed_dict.items():
             self.objects[object_type] = list(objects.values())
+        if external_mediafiles_ids:
+            for mediafile in self.objects["mediafiles"]:
+                matching_id = mediafile["matching_id"]
+                for entry in external_mediafiles_ids:
+                    if matching_id in entry:
+                        file_source = entry[matching_id]
+                        dynamic_key = f"is_{file_source}_mediafile"
+                        mediafile[dynamic_key] = True
+                        break
 
     def __add_required_fields(self, indexed_dict):
         if not self.required_metadata_values:
             return
         for object_type, objects in indexed_dict.items():
-            for required_key, required_value in self.required_metadata_values.get(
-                object_type, dict()
-            ).items():
+            required_fields = self.required_metadata_values.get(object_type, {})
+            for required_key, required_value in required_fields.items():
                 for object in objects.values():
-                    for metadata in object.get("metadata", list()):
-                        if metadata.get("key") == required_key:
-                            break
-                    else:
-                        if "metadata" not in object:
-                            object["metadata"] = list()
-                        object["metadata"].append(
-                            self._get_metadata_object(required_key, required_value)
-                        )
+                    if "metadata" not in object:
+                        object["metadata"] = []
+                    if not any(metadata.get("key") == required_key for metadata in object["metadata"]):
+                        if required_value is not None:
+                            metadata_object = self._get_metadata_object(required_key, required_value)
+                        else:
+                            raise ColumnNotFoundException(required_key)
+                        object["metadata"].append(metadata_object)
 
     def __validate_indexed_dict(self, indexed_dict):
         for object_type, objects in indexed_dict.items():
@@ -231,3 +311,18 @@
                     )
             for error_id in error_ids:
                 del objects[error_id]
+
+    def __rename_top_level_fields(self):
+        def rename_fields(items, mapping):
+            for item in items:
+                for old_key, new_key in mapping.items():
+                    if old_key in item:
+                        item[new_key] = item.pop(old_key)
+
+        mediafiles = self.get_mediafiles()
+        entities = self.get_entities()
+        mediafiles_mapping = self.get_top_level_fields_mapping("mediafiles")
+        entities_mapping = self.get_top_level_fields_mapping("entities")
+
+        rename_fields(mediafiles, mediafiles_mapping)
+        rename_fields(entities, entities_mapping)
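
A rough sketch of how the extended CSVMultiObject options shown above could be combined; the CSV columns, mapping targets and values below are invented for illustration only:

    from elody.csv import CSVMultiObject

    csv_data = "entity_id,filename,title,language\r\n001,photo.jpg,Sunset,en\r\n"

    parsed = CSVMultiObject(
        csv_data,
        # A "?" prefix marks an object type whose identifying column may be empty.
        index_mapping={"entities": "entity_id", "?mediafiles": "filename"},
        # Map a CSV column onto another metadata key, optionally case-insensitively.
        metadata_field_mapping={
            "title": {"target": "entities", "map_to": "name", "case_insensitive": True}
        },
        # Metadata that must be present; a value of None makes the column mandatory.
        required_metadata_values={"entities": {"publication_status": "draft"}},
        # Rename top-level fields per object type once parsing is done.
        top_level_fields_mapping={"mediafiles": {"file_identifier": "original_filename"}},
    )
    entities = parsed.get_entities()
    mediafiles = parsed.get_mediafiles()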

elody-0.0.163/src/elody/error_codes.py
@@ -0,0 +1,112 @@
+from enum import Enum
+
+
+class ErrorCode(Enum):
+    READ = "R"
+    WRITE = "W"
+
+    # General error codes
+    UNKNOWN_ERROR = ("0002", [])
+    COLLECTION_NOT_FOUND = ("0003", ["collection"])
+    ITEM_NOT_FOUND = ("0004", ["id"])
+    HISTORY_ITEM_NOT_FOUND = ("0005", [])
+    CANNOT_SPECIFY_BOTH = ("0006", [])
+    ALREADY_PARENT = ("0007", ["id"])
+    METADATA_KEY_UNDEFINED = ("0008", ["key", "document"])
+    ENTITY_HAS_NO_TENANT = ("0009", ["value", "user"])
+    MEDIAFILE_NOT_FOUND = ("0010", [])
+    ITEM_NOT_FOUND_IN_COLLECTION = ("0011", ["id", "collection"])
+
+    # Auth error codes
+    INVALID_CREDENTIALS = ("1001", [])
+    ACCOUNT_LOCKED = ("1002", [])
+    INSUFFICIENT_PERMISSIONS = ("1003", ["restricted_keys"])
+    NO_PERMISSIONS = ("1004", [])
+    UNDEFINED_COLLECTION_RESOLVER = ("1005", [])
+    TENANT_NOT_FOUND = ("1006", [])
+    XTENANT_HAS_NO_TENANT_DEFENING_ENTITY = ("1007", ["x_tenant_id"])
+    INSUFFICIENT_PERMISSIONS_WITHOUT_VARS = ("1008", [])
+    NO_GLOBAL_ROLES = ("1009", [])
+    NO_PERMISSION_TO_TENANT = ("1010", ["tenant_id"])
+    XTENANT_NOT_FOUND = ("1011", ["x_tenant_id"])
+    NO_DOWNLOAD_PERMISSION = ("1012", [])
+
+    # Database error codes
+    DATABASE_NOT_INITIALIZED = ("2000", [])
+    DATABASE_CONNECTION_FAILED = ("2001", [])
+    QUERY_EXECUTION_FAILED = ("2002", [])
+    DUPLICATE_ENTRY = ("2003", [])
+    DUPLICATE_IDENTIFIERS = ("2004", ["duplicate_keys"])
+
+    # Network error codes
+    NETWORK_UNAVAILABLE = ("3001", [])
+    TIMEOUT = ("3002", [])
+    SERVER_NOT_FOUND = ("3003", [])
+
+    # File handling error codes
+    FILE_NOT_FOUND = ("4001", [])
+    FILE_ACCESS_DENIED = ("4002", [])
+    FILE_CORRUPTED = ("4003", [])
+    NO_FILENAME_SPECIFIED = ("4004", [])
+    NO_TICKET_ID_SPECIFIED = ("4005", [])
+    TICKET_NOT_FOUND = ("4006", [])
+    TICKET_EXPIRED = ("4007", [])
+    PROVIDE_MEDIAFILE_ID_OR_TICKET_ID = ("4008", [])
+    DUPLICATE_FILE = ("4009", [])
+    NO_BUCKET_SPECIFIED = ("4010", [])
+
+    # Validation error codes
+    INVALID_INPUT = ("5001", [])
+    REQUIRED_FIELD_MISSING = ("5002", [])
+    INVALID_FORMAT = ("5003", [])
+    INVALID_TYPE = ("5004", [])
+    COLUMN_NOT_FOUND = ("5005", ["missing_columns"])
+    ONLY_TYPE_CSV_ALLOWED = ("5006", [])
+    NO_METADATA_AVAILABLE = ("5007", [])
+    INVALID_DATETIME = ("5008", ["value"])
+    UNSUPPORTED_TYPE = ("5009", ["type"])
+    CONTENT_NOT_FOUND = ("5010", [])
+    VALIDATION_ERROR = ("5011", [])
+    TENANT_HAS_MISSING_DATA = ("5012", [])
+    INVALID_FORMAT_FOR_TYPE = ("5013", ["type"])
+    NO_METADATA_AVAILABLE_FOR_ITEM = ("5014", ["id"])
+    INVALID_ACCEPT_HEADER = ("5015", [])
+
+    # Filter error codes
+    NO_MATCHER_FOR_FILTER_REQUEST = ("6001", [])
+    UNDEFINED_FILTER_FOR_INPUT_TYPE = ("6002", ["input_type"])
+    UNSUPPORTED_OPERATOR = ("6003", ["operator"])
+
+    # Migration error codes
+    UNABLE_TO_UPDATE_SCHEMA_VERSION = ("7001", ["migrated_item"])
+    LAZY_MIGRATION_SCHEMA_TYPE_MISMATCH = ("7002", [])
+
+    # External Services
+    SERVICE_UNAVAILABLE = ("8001", [])
+
+    # Arches error codes
+    ARCHES_ERROR = ("11000", ["error"])
+    ARCHES_CONNECTION_UNAVAILABLE = ("11001", [])
+    ARCHES_RECORD_NOT_FOUND = ("11002", ["error_message", "arches_id"])
+    ARCHES_RECORD_MISSING_DATA = ("11003", ["arches_id"])
+    ARCHES_RECORD_MISSING_DATA_DC_PUBLISHER = ("11004", ["arches_id"])
+    ARCHES_UNABLE_TO_CREATE_RELATION = ("11005", ["type", "value"])
+
+    # Digipolis error codes
+    NO_PERMISSION_TO_CREATE_INSTIUTION = ("12000", ["institution"])
+    INSTITUTION_HAS_MISSING_DATA = ("12001", ["institution"])
+    INSTITUTION_NOT_FOUND = ("12002", [])
+
+
+def get_error_code(error_code, prefix):
+    if prefix not in [ErrorCode.READ.value, ErrorCode.WRITE.value]:
+        raise ValueError("Prefix must be 'R' for read or 'W' for write.")
+    return f"{prefix}{error_code.value[0]}"
+
+
+def get_read():
+    return ErrorCode.READ.value
+
+
+def get_write():
+    return ErrorCode.WRITE.value
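
The helpers at the end of the new error_codes module build a prefixed code string from an ErrorCode member; a short usage sketch based only on what the file above defines:

    from elody.error_codes import ErrorCode, get_error_code, get_read, get_write

    # Read failure for a missing item -> "R0004"
    print(get_error_code(ErrorCode.ITEM_NOT_FOUND, get_read()))

    # Write failure caused by duplicate identifiers -> "W2004"
    print(get_error_code(ErrorCode.DUPLICATE_IDENTIFIERS, get_write()))

    # Any prefix other than "R" or "W" is rejected with a ValueError.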

{elody-0.0.63 → elody-0.0.163}/src/elody/exceptions.py
@@ -1,3 +1,9 @@
+class DuplicateExternalLinkException(Exception):
+    def __init__(self, message, external_id=None):
+        super().__init__(message)
+        self.external_id = external_id
+
+
 class DuplicateFileException(Exception):
     def __init__(self, message, filename=None, md5sum=None):
         super().__init__(message)
@@ -26,6 +32,10 @@ class InvalidObjectException(Exception):
     pass
 
 
+class InvalidValueException(Exception):
+    pass
+
+
 class NoMediafilesException(Exception):
     pass
 
@@ -38,5 +48,9 @@ class NotFoundException(Exception):
     pass
 
 
+class NoTenantException(Exception):
+    pass
+
+
 class UnsupportedVersionException(Exception):
     pass
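
The added exception types (InvalidValueException, NoTenantException, and DuplicateExternalLinkException, which also carries the offending external_id) are plain Exception subclasses, so callers can catch them like any other elody exception; a trivial illustrative sketch:

    from elody.exceptions import DuplicateExternalLinkException

    try:
        # placeholder message and external_id, for illustration only
        raise DuplicateExternalLinkException("link already imported", external_id="ext-123")
    except DuplicateExternalLinkException as error:
        print(error, error.external_id)  # the stored external_id is available on the instance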