elody 0.0.62__py3-none-any.whl → 0.0.162__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- elody/client.py +70 -23
- elody/csv.py +118 -21
- elody/error_codes.py +112 -0
- elody/exceptions.py +14 -0
- elody/job.py +95 -0
- elody/loader.py +33 -5
- elody/migration/__init__.py +0 -0
- elody/migration/base_object_migrator.py +18 -0
- elody/object_configurations/__init__.py +0 -0
- elody/object_configurations/base_object_configuration.py +174 -0
- elody/object_configurations/elody_configuration.py +144 -0
- elody/object_configurations/job_configuration.py +65 -0
- elody/policies/authentication/base_user_tenant_validation_policy.py +48 -15
- elody/policies/authorization/filter_generic_objects_policy.py +68 -22
- elody/policies/authorization/filter_generic_objects_policy_v2.py +166 -0
- elody/policies/authorization/generic_object_detail_policy.py +10 -27
- elody/policies/authorization/generic_object_mediafiles_policy.py +82 -0
- elody/policies/authorization/generic_object_metadata_policy.py +8 -27
- elody/policies/authorization/generic_object_relations_policy.py +12 -29
- elody/policies/authorization/generic_object_request_policy.py +56 -55
- elody/policies/authorization/generic_object_request_policy_v2.py +133 -0
- elody/policies/authorization/mediafile_derivatives_policy.py +92 -0
- elody/policies/authorization/mediafile_download_policy.py +71 -0
- elody/policies/authorization/multi_tenant_policy.py +14 -6
- elody/policies/authorization/tenant_request_policy.py +3 -1
- elody/policies/helpers.py +37 -0
- elody/policies/permission_handler.py +217 -211
- elody/policies/tenant_id_resolver.py +375 -0
- elody/schemas.py +0 -3
- elody/util.py +165 -11
- {elody-0.0.62.dist-info → elody-0.0.162.dist-info}/METADATA +16 -11
- elody-0.0.162.dist-info/RECORD +47 -0
- {elody-0.0.62.dist-info → elody-0.0.162.dist-info}/WHEEL +1 -1
- {elody-0.0.62.dist-info → elody-0.0.162.dist-info}/top_level.txt +1 -0
- tests/__init_.py +0 -0
- tests/data.py +74 -0
- tests/unit/__init__.py +0 -0
- tests/unit/test_csv.py +410 -0
- tests/unit/test_utils.py +293 -0
- elody-0.0.62.dist-info/RECORD +0 -27
- {elody-0.0.62.dist-info → elody-0.0.162.dist-info}/LICENSE +0 -0
elody/client.py
CHANGED
@@ -5,7 +5,9 @@ from .exceptions import NonUniqueException, NotFoundException
 
 
 class Client:
-    def __init__(self, elody_collection_url=None, static_jwt=None, extra_headers=None):
+    def __init__(
+        self, elody_collection_url=None, static_jwt=None, extra_headers=None, proxy=None
+    ):
         self.elody_collection_url = elody_collection_url or os.environ.get(
             "ELODY_COLLECTION_URL", None
         )
@@ -13,19 +15,30 @@
         self.headers = {"Authorization": f"Bearer {self.static_jwt}"}
         if extra_headers:
             self.headers = {**self.headers, **extra_headers}
+        self.proxies = None
+        if proxy:
+            self.proxies = {
+                "https": proxy,
+                "http": proxy,
+            }
 
     def __create_mediafile(self, entity_id, mediafile):
         url = f"{self.elody_collection_url}/entities/{entity_id}/mediafiles"
         headers = {**self.headers, **{"Accept": "text/uri-list"}}
-        response = requests.post(url, json=mediafile, headers=headers)
+        response = requests.post(
+            url, json=mediafile, headers=headers, proxies=self.proxies
+        )
         return self.__handle_response(response, "Failed to create mediafile", "text")
 
-    def create_mediafile_with_filename(self, filename):
-        data = {"filename": filename}
+    def create_mediafile_with_filename(self, filename, institution_id=None):
+        data = {"filename": filename, "type": "mediafile"}
+        if institution_id:
+            data.update({"metadata": [{"key": "institution", "value": institution_id}]})
         req = requests.post(
             f"{self.elody_collection_url}/mediafiles",
             json=data,
             headers=self.headers,
+            proxies=self.proxies,
         )
         if req.status_code != 201:
             raise Exception(req.text.strip())
@@ -36,16 +49,24 @@
             f"{self.elody_collection_url}/tickets",
             json={"filename": mediafile_name},
             headers=self.headers,
+            proxies=self.proxies,
         )
         if req.status_code != 201:
             raise Exception(req.text.strip())
         return req.text.strip().replace('"', "")
 
     def __get_upload_location(
-        self, entity_id, filename, is_public=True, identifiers=None
+        self,
+        entity_id,
+        filename,
+        is_public=True,
+        identifiers=None,
+        mediafile_object=None,
     ):
         if not identifiers:
             identifiers = list()
+        if not mediafile_object:
+            mediafile_object = dict()
         metadata = []
         if is_public:
             metadata = [
@@ -55,9 +76,12 @@
                 }
             ]
         mediafile = {
-            "filename": filename,
-            "metadata": metadata,
-            "identifiers": identifiers,
+            **{
+                "filename": filename,
+                "metadata": metadata,
+                "identifiers": identifiers,
+            },
+            **mediafile_object,
         }
         return self.__create_mediafile(entity_id, mediafile)
 
@@ -78,38 +102,48 @@
 
     def add_entity_mediafiles(self, identifier, payload):
         url = f"{self.elody_collection_url}/entities/{identifier}/mediafiles"
-        response = requests.post(url, json=payload, headers=self.headers)
+        response = requests.post(
+            url, json=payload, headers=self.headers, proxies=self.proxies
+        )
         return self.__handle_response(response, "Failed to add mediafiles")
 
-    def add_object(self, collection, payload):
+    def add_object(self, collection, payload, params=None):
         url = f"{self.elody_collection_url}/{collection}"
-        response = requests.post(url, json=payload, headers=self.headers)
+        response = requests.post(
+            url, json=payload, headers=self.headers, params=params, proxies=self.proxies
+        )
         return self.__handle_response(response, "Failed to add object")
 
     def add_object_metadata(self, collection, identifier, payload):
         if collection == "entities":
             url = f"{self.elody_collection_url}/{collection}/{identifier}/metadata"
             payload = payload if isinstance(payload, list) else [payload]
-            response = requests.patch(url, json=payload, headers=self.headers)
+            response = requests.patch(
+                url, json=payload, headers=self.headers, proxies=self.proxies
+            )
             if response.status_code == 400 and response.json()["message"].endswith(
                 "has no metadata"
             ):
-                response = requests.post(url, json=payload, headers=self.headers)
+                response = requests.post(
+                    url, json=payload, headers=self.headers, proxies=self.proxies
+                )
             return self.__handle_response(response, "Failed to add metadata")
         else:
             url = f"{self.elody_collection_url}/{collection}/{identifier}"
             payload = {"metadata": payload if isinstance(payload, list) else [payload]}
-            response = requests.patch(url, json=payload, headers=self.headers)
+            response = requests.patch(
+                url, json=payload, headers=self.headers, proxies=self.proxies
+            )
             return self.__handle_response(response, "Failed to add metadata")
 
     def delete_object(self, collection, identifier):
         url = f"{self.elody_collection_url}/{collection}/{identifier}"
-        response = requests.delete(url, headers=self.headers)
+        response = requests.delete(url, headers=self.headers, proxies=self.proxies)
         return self.__handle_response(response, "Failed to delete object", "text")
 
     def get_all_objects(self, collection):
         url = f"{self.elody_collection_url}/{collection}"
-        response = requests.get(url, headers=self.headers)
+        response = requests.get(url, headers=self.headers, proxies=self.proxies)
        return self.__handle_response(response, "Failed to get objects")
 
     def get_mediafiles_and_check_existence(self, mediafile_ids):
@@ -121,17 +155,26 @@
 
     def get_object(self, collection, identifier):
         url = f"{self.elody_collection_url}/{collection}/{identifier}"
-        response = requests.get(url, headers=self.headers)
+        response = requests.get(url, headers=self.headers, proxies=self.proxies)
         return self.__handle_response(response, "Failed to get object")
 
-    def update_object(self, collection, identifier, payload):
+    def update_object(self, collection, identifier, payload, overwrite=True):
         url = f"{self.elody_collection_url}/{collection}/{identifier}"
-        response = requests.put(url, json=payload, headers=self.headers)
+        if overwrite:
+            response = requests.put(
+                url, json=payload, headers=self.headers, proxies=self.proxies
+            )
+        else:
+            response = requests.patch(
+                url, json=payload, headers=self.headers, proxies=self.proxies
+            )
         return self.__handle_response(response, "Failed to update object")
 
     def update_object_relations(self, collection, identifier, payload):
         url = f"{self.elody_collection_url}/{collection}/{identifier}/relations"
-        response = requests.patch(url, json=payload, headers=self.headers)
+        response = requests.patch(
+            url, json=payload, headers=self.headers, proxies=self.proxies
+        )
         return self.__handle_response(response, "Failed to update object relations")
 
     def upload_file_from_url(
@@ -141,19 +184,23 @@
         file_url,
         identifiers=None,
         upload_location_replace_map=None,
+        mediafile_object=None,
     ):
         if not identifiers:
             identifiers = list()
         if not upload_location_replace_map:
             upload_location_replace_map = dict()
         upload_location = self.__get_upload_location(
-            entity_id, filename, True, identifiers
+            entity_id, filename, True, identifiers, mediafile_object
         )
         for current_location, new_location in upload_location_replace_map.items():
             upload_location = upload_location.replace(current_location, new_location)
         print(upload_location)
-        mediafile = requests.get(file_url).content
+        mediafile = requests.get(file_url, proxies=self.proxies).content
         response = requests.post(
-            upload_location, files={"file": mediafile}, headers=self.headers
+            upload_location,
+            files={"file": mediafile},
+            headers=self.headers,
+            proxies=self.proxies,
        )
        return self.__handle_response(response, "Failed to upload mediafile")
elody/csv.py
CHANGED
@@ -6,13 +6,14 @@ from elody.exceptions import (
     ColumnNotFoundException,
     IncorrectTypeException,
     InvalidObjectException,
+    InvalidValueException,
 )
 from elody.validator import validate_json
 from elody.schemas import entity_schema, mediafile_schema
 
 
 class CSVParser:
-    top_level_fields = ["type", "filename"]
+    top_level_fields = ["type", "filename", "file_identifier"]
     identifier_fields = ["identifiers", "identifier", "object_id", "entity_id"]
     schema_mapping = {
         "entity": entity_schema,
@@ -21,9 +22,12 @@
         "mediafiles": mediafile_schema,
     }
 
-    def __init__(self, csvstring):
-        self.csvstring = csvstring
-        self.reader = csv.DictReader(self.__csv_string_to_file_object())
+    def __init__(self, csvstring=None, csvfile=None):
+        if csvstring:
+            self.csvstring = csvstring
+            self.reader = self.__get_reader_from_csv(self.__csv_string_to_file_object())
+        elif csvfile:
+            self.reader = self.__get_reader_from_csv(csvfile)
 
     def _get_metadata_object(self, key, value, lang="en"):
         return {
@@ -46,6 +50,11 @@
     def __csv_string_to_file_object(self):
         return StringIO(self.csvstring)
 
+    def __get_reader_from_csv(self, csv_file):
+        csv_dialect = csv.Sniffer().sniff(csv_file.read())
+        csv_file.seek(0)
+        return csv.DictReader(csv_file, dialect=csv_dialect)
+
 
 class CSVSingleObject(CSVParser):
     def __init__(self, csvstring, object_type="entity"):
@@ -117,18 +126,33 @@
         index_mapping=None,
         object_field_mapping=None,
         required_metadata_values=None,
+        metadata_field_mapping=None,
+        include_indexed_field=False,
+        top_level_fields_mapping=None,
+        external_file_sources=None,
     ):
         super().__init__(csvstring)
-        self.index_mapping = dict()
-
-
-
-
-
-
+        self.index_mapping = index_mapping if index_mapping else dict()
+        self.object_field_mapping = (
+            object_field_mapping if object_field_mapping else dict()
+        )
+        self.required_metadata_values = (
+            required_metadata_values if required_metadata_values else dict()
+        )
+        self.metadata_field_mapping = (
+            metadata_field_mapping if metadata_field_mapping else dict()
+        )
         self.objects = dict()
         self.errors = dict()
+        self.include_indexed_field = include_indexed_field
+        self.top_level_fields_mapping = (
+            top_level_fields_mapping if top_level_fields_mapping else dict()
+        )
+        self.external_file_sources = (
+            external_file_sources if external_file_sources else []
+        )
         self.__fill_objects_from_csv()
+        self.__rename_top_level_fields()
 
     def get_entities(self):
         return self.objects.get("entities", list())
@@ -136,9 +160,20 @@
     def get_errors(self):
         return self.errors
 
+    def get_top_level_fields_mapping(self, type):
+        return self.top_level_fields_mapping.get(type, {})
+
     def get_mediafiles(self):
         return self.objects.get("mediafiles", list())
 
+    def __determine_language(self, row):
+        if "language" in row:
+            return row.get("language")
+        elif "lang" in row:
+            return row.get("lang")
+        else:
+            return "en"
+
     def __field_allowed(self, target_object_type, key, value):
         for object_type, fields in self.object_field_mapping.items():
             for _ in [x for x in fields if x == key]:
@@ -151,13 +186,23 @@
 
     def __fill_objects_from_csv(self):
         indexed_dict = dict()
+        external_mediafiles_ids = []
         for row in self.reader:
-            if "language" in row:
-                lang = row.get("language")
-            else:
-                lang = "en"
+            mandatory_columns = [
+                v for k, v in self.index_mapping.items() if not k.startswith("?")
+            ]
+            missing_columns = [x for x in mandatory_columns if x not in row.keys()]
+            if missing_columns:
+                raise ColumnNotFoundException(f"{', '.join(missing_columns)}")
+            lang = self.__determine_language(row)
             previous_id = None
             for type, identifying_column in self.index_mapping.items():
+                is_type_optional = False
+                if type.startswith("?"):
+                    is_type_optional = True
+                    type = type.lstrip("?")
+                if not row.get(identifying_column) and is_type_optional:
+                    continue
                 id = row[identifying_column]
                 if type not in indexed_dict:
                     indexed_dict[type] = dict()
@@ -167,7 +212,23 @@
                 if previous_id:
                     indexed_dict[type][id]["matching_id"] = previous_id
                 previous_id = id
+                file_source = None
                 for key, value in row.items():
+                    if not value:
+                        continue
+                    if key == "file_source":
+                        file_source = value
+                    if (
+                        key == "file_identifier"
+                        and file_source in self.external_file_sources
+                    ):
+                        matching_id = indexed_dict[type][id]["matching_id"]
+                        if not any(matching_id in id for id in external_mediafiles_ids):
+                            external_mediafiles_ids.append({matching_id: file_source})
+                        if "entities" not in indexed_dict:
+                            indexed_dict["entities"] = dict()
+                        if id in indexed_dict["entities"]:
+                            indexed_dict["entities"][id]["file_identifier"] = value
                     if self._is_relation_field(key) and self.__field_allowed(
                         type, key, value
                     ):
@@ -187,16 +248,37 @@
                         indexed_dict[type][id][key] = value
                     elif (
                         key not in self.index_mapping.values()
-                        and self.__field_allowed(type, key, value)
-                    ):
-                        indexed_dict[type][id].setdefault("metadata", list())
-                        indexed_dict[type][id]["metadata"].append(
-                            self._get_metadata_object(key, value, lang)
-                        )
+                        or self.include_indexed_field
+                    ) and self.__field_allowed(type, key, value):
+                        # Map the metadata field to a unified key if applicable
+                        metadata_info = self.metadata_field_mapping.get(key, {})
+                        if metadata_info.get("target") == type or not metadata_info:
+                            case_insensitive = metadata_info.get("case_insensitive", False)
+                            metadata_key = metadata_info.get("map_to", key)
+                            indexed_dict[type][id].setdefault("metadata", list())
+                            options = metadata_info.get("value_options")
+                            if case_insensitive:
+                                value = value.lower()
+                            if options and value not in options:
+                                raise InvalidValueException(
+                                    f'The value "{value}" is invalid, these are the valid values: {options}'
+                                )
+                            indexed_dict[type][id]["metadata"].append(
+                                self._get_metadata_object(metadata_key, value, lang)
+                            )
         self.__validate_indexed_dict(indexed_dict)
         self.__add_required_fields(indexed_dict)
         for object_type, objects in indexed_dict.items():
             self.objects[object_type] = list(objects.values())
+        if external_mediafiles_ids:
+            for mediafile in self.objects["mediafiles"]:
+                matching_id = mediafile["matching_id"]
+                for entry in external_mediafiles_ids:
+                    if matching_id in entry:
+                        file_source = entry[matching_id]
+                        dynamic_key = f"is_{file_source}_mediafile"
+                        mediafile[dynamic_key] = True
+                        break
 
     def __add_required_fields(self, indexed_dict):
         if not self.required_metadata_values:
@@ -231,3 +313,18 @@
             )
         for error_id in error_ids:
             del objects[error_id]
+
+    def __rename_top_level_fields(self):
+        def rename_fields(items, mapping):
+            for item in items:
+                for old_key, new_key in mapping.items():
+                    if old_key in item:
+                        item[new_key] = item.pop(old_key)
+
+        mediafiles = self.get_mediafiles()
+        entities = self.get_entities()
+        mediafiles_mapping = self.get_top_level_fields_mapping("mediafiles")
+        entities_mapping = self.get_top_level_fields_mapping("entities")
+
+        rename_fields(mediafiles, mediafiles_mapping)
+        rename_fields(entities, entities_mapping)
elody/error_codes.py
ADDED
@@ -0,0 +1,112 @@
+from enum import Enum
+
+
+class ErrorCode(Enum):
+    READ = "R"
+    WRITE = "W"
+
+    # General error codes
+    UNKNOWN_ERROR = ("0002", [])
+    COLLECTION_NOT_FOUND = ("0003", ["collection"])
+    ITEM_NOT_FOUND = ("0004", ["id"])
+    HISTORY_ITEM_NOT_FOUND = ("0005", [])
+    CANNOT_SPECIFY_BOTH = ("0006", [])
+    ALREADY_PARENT = ("0007", ["id"])
+    METADATA_KEY_UNDEFINED = ("0008", ["key", "document"])
+    ENTITY_HAS_NO_TENANT = ("0009", ["value", "user"])
+    MEDIAFILE_NOT_FOUND = ("0010", [])
+    ITEM_NOT_FOUND_IN_COLLECTION = ("0011", ["id", "collection"])
+
+    # Auth error codes
+    INVALID_CREDENTIALS = ("1001", [])
+    ACCOUNT_LOCKED = ("1002", [])
+    INSUFFICIENT_PERMISSIONS = ("1003", ["restricted_keys"])
+    NO_PERMISSIONS = ("1004", [])
+    UNDEFINED_COLLECTION_RESOLVER = ("1005", [])
+    TENANT_NOT_FOUND = ("1006", [])
+    XTENANT_HAS_NO_TENANT_DEFENING_ENTITY = ("1007", ["x_tenant_id"])
+    INSUFFICIENT_PERMISSIONS_WITHOUT_VARS = ("1008", [])
+    NO_GLOBAL_ROLES = ("1009", [])
+    NO_PERMISSION_TO_TENANT = ("1010", ["tenant_id"])
+    XTENANT_NOT_FOUND = ("1011", ["x_tenant_id"])
+    NO_DOWNLOAD_PERMISSION = ("1012", [])
+
+    # Database error codes
+    DATABASE_NOT_INITIALIZED = ("2000", [])
+    DATABASE_CONNECTION_FAILED = ("2001", [])
+    QUERY_EXECUTION_FAILED = ("2002", [])
+    DUPLICATE_ENTRY = ("2003", [])
+    DUPLICATE_IDENTIFIERS = ("2004", ["duplicate_keys"])
+
+    # Network error codes
+    NETWORK_UNAVAILABLE = ("3001", [])
+    TIMEOUT = ("3002", [])
+    SERVER_NOT_FOUND = ("3003", [])
+
+    # File handling error codes
+    FILE_NOT_FOUND = ("4001", [])
+    FILE_ACCESS_DENIED = ("4002", [])
+    FILE_CORRUPTED = ("4003", [])
+    NO_FILENAME_SPECIFIED = ("4004", [])
+    NO_TICKET_ID_SPECIFIED = ("4005", [])
+    TICKET_NOT_FOUND = ("4006", [])
+    TICKET_EXPIRED = ("4007", [])
+    PROVIDE_MEDIAFILE_ID_OR_TICKET_ID = ("4008", [])
+    DUPLICATE_FILE = ("4009", [])
+    NO_BUCKET_SPECIFIED = ("4010", [])
+
+    # Validation error codes
+    INVALID_INPUT = ("5001", [])
+    REQUIRED_FIELD_MISSING = ("5002", [])
+    INVALID_FORMAT = ("5003", [])
+    INVALID_TYPE = ("5004", [])
+    COLUMN_NOT_FOUND = ("5005", ["missing_columns"])
+    ONLY_TYPE_CSV_ALLOWED = ("5006", [])
+    NO_METADATA_AVAILABLE = ("5007", [])
+    INVALID_DATETIME = ("5008", ["value"])
+    UNSUPPORTED_TYPE = ("5009", ["type"])
+    CONTENT_NOT_FOUND = ("5010", [])
+    VALIDATION_ERROR = ("5011", [])
+    TENANT_HAS_MISSING_DATA = ("5012", [])
+    INVALID_FORMAT_FOR_TYPE = ("5013", ["type"])
+    NO_METADATA_AVAILABLE_FOR_ITEM = ("5014", ["id"])
+    INVALID_ACCEPT_HEADER = ("5015", [])
+
+    # Filter error codes
+    NO_MATCHER_FOR_FILTER_REQUEST = ("6001", [])
+    UNDEFINED_FILTER_FOR_INPUT_TYPE = ("6002", ["input_type"])
+    UNSUPPORTED_OPERATOR = ("6003", ["operator"])
+
+    # Migration error codes
+    UNABLE_TO_UPDATE_SCHEMA_VERSION = ("7001", ["migrated_item"])
+    LAZY_MIGRATION_SCHEMA_TYPE_MISMATCH = ("7002", [])
+
+    # External Services
+    SERVICE_UNAVAILABLE = ("8001", [])
+
+    # Arches error codes
+    ARCHES_ERROR = ("11000", ["error"])
+    ARCHES_CONNECTION_UNAVAILABLE = ("11001", [])
+    ARCHES_RECORD_NOT_FOUND = ("11002", ["error_message", "arches_id"])
+    ARCHES_RECORD_MISSING_DATA = ("11003", ["arches_id"])
+    ARCHES_RECORD_MISSING_DATA_DC_PUBLISHER = ("11004", ["arches_id"])
+    ARCHES_UNABLE_TO_CREATE_RELATION = ("11005", ["type", "value"])
+
+    # Digipolis error codes
+    NO_PERMISSION_TO_CREATE_INSTIUTION = ("12000", ["institution"])
+    INSTITUTION_HAS_MISSING_DATA = ("12001", ["institution"])
+    INSTITUTION_NOT_FOUND = ("12002", [])
+
+
+def get_error_code(error_code, prefix):
+    if prefix not in [ErrorCode.READ.value, ErrorCode.WRITE.value]:
+        raise ValueError("Prefix must be 'R' for read or 'W' for write.")
+    return f"{prefix}{error_code.value[0]}"
+
+
+def get_read():
+    return ErrorCode.READ.value
+
+
+def get_write():
+    return ErrorCode.WRITE.value
elody/exceptions.py
CHANGED
@@ -1,3 +1,9 @@
+class DuplicateExternalLinkException(Exception):
+    def __init__(self, message, external_id=None):
+        super().__init__(message)
+        self.external_id = external_id
+
+
 class DuplicateFileException(Exception):
     def __init__(self, message, filename=None, md5sum=None):
         super().__init__(message)
@@ -26,6 +32,10 @@ class InvalidObjectException(Exception):
     pass
 
 
+class InvalidValueException(Exception):
+    pass
+
+
 class NoMediafilesException(Exception):
     pass
 
@@ -38,5 +48,9 @@ class NotFoundException(Exception):
     pass
 
 
+class NoTenantException(Exception):
+    pass
+
+
 class UnsupportedVersionException(Exception):
     pass
elody/job.py
ADDED
@@ -0,0 +1,95 @@
+from elody.object_configurations.job_configuration import JobConfiguration
+
+
+_config = JobConfiguration()
+_create = _config.crud()["creator"]
+_post_crud_hook = _config.crud()["post_crud_hook"]
+
+
+def start_job(
+    name,
+    job_type,
+    *,
+    get_rabbit,
+    get_user_context=None,
+    user_email=None,
+    parent_id=None,
+    id_of_document_job_was_initiated_for=None,
+    type_of_document_job_was_initiated_for=None,
+) -> str:
+    relations = []
+    if parent_id:
+        relations.append({"key": parent_id, "type": "hasParentJob"})
+    if id_of_document_job_was_initiated_for and type_of_document_job_was_initiated_for:
+        relations.append(
+            {"key": id_of_document_job_was_initiated_for, "type": "isJobOf"}
+        )
+
+    job = _create(
+        {
+            "metadata": [
+                {"key": "name", "value": name},
+                {"key": "status", "value": "running"},
+                {"key": "type", "value": job_type},
+            ],
+            "relations": relations,
+            "type": "job",
+        },
+        get_user_context=get_user_context
+        or (lambda: type("UserContext", (object,), {"email": user_email})()),
+    )
+    del job["computed_values"]["created_at"]
+
+    _post_crud_hook(
+        crud="create", document=job, parent_id=parent_id, get_rabbit=get_rabbit
+    )
+    __patch_document_job_was_initiated_for(
+        id_of_document_job_was_initiated_for,
+        type_of_document_job_was_initiated_for,
+        get_rabbit,
+    )
+    return job["_id"]
+
+
+def finish_job(
+    id,
+    id_of_document_job_was_initiated_for=None,
+    type_of_document_job_was_initiated_for=None,
+    *,
+    get_rabbit,
+):
+    document = {
+        "id": id,
+        "patch": {
+            "metadata": [{"key": "status", "value": "finished"}],
+            "relations": ([] if id_of_document_job_was_initiated_for else []),
+        },
+    }
+    _post_crud_hook(crud="update", document=document, get_rabbit=get_rabbit)
+    __patch_document_job_was_initiated_for(
+        id_of_document_job_was_initiated_for,
+        type_of_document_job_was_initiated_for,
+        get_rabbit,
+    )
+
+
+def fail_job(id, exception_message, *, get_rabbit):
+    document = {
+        "id": id,
+        "patch": {
+            "metadata": [
+                {"key": "info", "value": exception_message},
+                {"key": "status", "value": "failed"},
+            ]
+        },
+    }
+    _post_crud_hook(crud="update", document=document, get_rabbit=get_rabbit)
+
+
+def __patch_document_job_was_initiated_for(id, type, get_rabbit):
+    if id and type:
+        document = {
+            "document_info_job_was_initiated_for": {"id": id, "type": type},
+            "patch": {"relations": [{"key": id, "type": "hasJob"}]},
+        }
+        _post_crud_hook(crud="update", document=document, get_rabbit=get_rabbit)