das_cli-1.0.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- das/__init__.py +0 -0
- das/ai/plugins/dasai.py +50 -0
- das/ai/plugins/entries/entries_plugin.py +16 -0
- das/app.py +37 -0
- das/authentication/auth.py +43 -0
- das/authentication/secure_input.py +67 -0
- das/cli.py +1070 -0
- das/common/api.py +100 -0
- das/common/config.py +185 -0
- das/common/entry_fields_constants.py +3 -0
- das/common/enums.py +46 -0
- das/common/file_utils.py +203 -0
- das/managers/__init__.py +0 -0
- das/managers/download_manager.py +93 -0
- das/managers/entries_manager.py +433 -0
- das/managers/search_manager.py +64 -0
- das/services/attributes.py +81 -0
- das/services/cache.py +70 -0
- das/services/downloads.py +84 -0
- das/services/entries.py +132 -0
- das/services/entry_fields.py +33 -0
- das/services/hangfire.py +26 -0
- das/services/search.py +33 -0
- das_cli-1.0.0.dist-info/METADATA +408 -0
- das_cli-1.0.0.dist-info/RECORD +29 -0
- das_cli-1.0.0.dist-info/WHEEL +5 -0
- das_cli-1.0.0.dist-info/entry_points.txt +2 -0
- das_cli-1.0.0.dist-info/licenses/LICENSE +22 -0
- das_cli-1.0.0.dist-info/top_level.txt +1 -0
das/managers/entries_manager.py
ADDED

@@ -0,0 +1,433 @@
import json
from das.common.config import load_api_url
from das.managers.search_manager import SearchManager
from das.services.attributes import AttributesService
from das.services.entry_fields import EntryFieldsService
from das.services.entries import EntriesService
from das.common.entry_fields_constants import DIGITAL_OBJECT_INPUT, SELECT_COMBO_INPUT
from das.services.search import SearchService


class EntryManager:
    def __init__(self):
        base_url = load_api_url()

        if (base_url is None or base_url == ""):
            raise ValueError(f"Base URL is required - {self.__class__.__name__} - You must be authenticated.")

        self.entry_service = EntriesService(base_url)
        self.entry_fields_service = EntryFieldsService(base_url)
        self.search_service = SearchService(base_url)
        self.attribute_service = AttributesService(base_url)

    def get_entry(self, entry_id: str):
        """Get entry details by ID."""
        if not entry_id:
            raise ValueError("Entry ID is required")

        return self.get(id=entry_id)

    def get(self, id: str = None, code: str = None):
        """Get an entry by id or code."""
        try:
            entry_response = self.entry_service.get(id=id, code=code)

            if not entry_response or not isinstance(entry_response, dict):
                raise ValueError(f"Invalid entry response format: {type(entry_response)}")

            if "attributeId" not in entry_response:
                raise ValueError(f"Missing attributeId in entry response: {entry_response}")

            fields = self.entry_fields_service.get_entry_fields(attribute_id=entry_response.get("attributeId"))

            if not fields or not isinstance(fields, dict):
                raise ValueError(f"Invalid fields response: {fields}")

            entry = {}
            entry_raw = entry_response.get('entry', {})

            if "result" not in fields or "items" not in fields.get("result", {}):
                raise ValueError("Missing fields data in response")

            if (entry_raw.get("id") is not None):
                entry["ID"] = entry_raw.get("id")

            for field in fields.get("result").get("items"):
                field_name = field.get("displayName")
                field_value = self.__get_field_value(entry_raw, field)

                if field_value is None:
                    field_value = ""

                entry[field_name] = field_value

            if (entry_raw.get("creationtime") is not None):
                entry["Created At"] = entry_raw.get("creationtime")

            if (entry_raw.get("lastmodificationtime") is not None):
                entry["Updated At"] = entry_raw.get("lastmodificationtime")

            return entry
        except Exception as e:
            raise ValueError(f"Error processing entry data: {e}")

    def create(self, attribute: str, entry: dict = None, entries: list = None) -> list:
        """
        Create one or more new entries.

        Args:
            attribute (str): The attribute name
            entry (dict, optional): Single entry data
            entries (list, optional): List of entry data for creating multiple entries

        Returns:
            list: List of created entry IDs with status information

        Raises:
            ValueError: If required parameters are missing or invalid
        """
        if not attribute:
            raise ValueError("Attribute name is required")

        if entries is not None:
            # Multiple entries creation
            if not isinstance(entries, list):
                raise ValueError("Entries must be a list")

            if not entries:
                raise ValueError("Entries list is empty")

            results = []
            for entry_data in entries:
                if not isinstance(entry_data, dict):
                    raise ValueError(f"Invalid entry data: {entry_data}")

                try:
                    result = self._create_single_entry(attribute, entry_data)
                    results.append({"id": result, "status": "success"})
                except Exception as e:
                    results.append({"error": str(e), "status": "error"})

            return results

        elif entry is not None:
            # Single entry creation
            result = self._create_single_entry(attribute, entry)
            return [{"id": result, "status": "success"}]

        else:
            raise ValueError("Either 'entry' or 'entries' must be provided")

    def _create_single_entry(self, attribute: str, entry: dict) -> str:
        """Internal method to create a single entry."""
        if not attribute:
            raise ValueError("Attribute name is required")

        attribute_id = self.attribute_service.get_id(name=attribute)

        if not entry or not isinstance(entry, dict):
            raise ValueError("Entry data must be a non-empty dictionary")

        entry_fields_response = self.entry_fields_service.get_entry_fields(attribute_id=attribute_id)

        fields = entry_fields_response.get("result", {}).get("items", [])

        if not fields or not isinstance(fields, list):
            raise ValueError(f"Invalid fields response: {fields}")

        if not all(isinstance(item, dict) for item in fields):
            raise ValueError(f"Invalid fields data format: {fields}")

        new_entry = {}

        for field in fields:
            field_name = field.get("displayName")
            column_name = field.get("column").lower()

            if field_name in entry:
                new_entry[column_name] = self.__get_value(field, entry[field_name])
            else:
                new_entry[column_name] = None

        return self.entry_service.create(attribute_id=attribute_id, entry=new_entry)

    def update(self, attribute: str, code: str = None, entry: dict = None, entries: list = None) -> list:
        """
        Update one or more existing entries.

        If 'code' and 'entry' are provided, updates a single entry.
        If 'entries' is provided, updates multiple entries based on the code in each entry.

        Args:
            attribute (str): The attribute name
            code (str, optional): The entry code for single entry update
            entry (dict, optional): The entry data for single entry update
            entries (list, optional): List of entry data for multiple updates

        Returns:
            list: List of updated entry IDs

        Raises:
            ValueError: If required parameters are missing or invalid
        """
        if entries is not None:
            # Multiple entries update
            if not isinstance(entries, list):
                raise ValueError("Entries must be a list")

            if not entries:
                raise ValueError("Entries list is empty")

            results = []
            for entry_data in entries:
                if not isinstance(entry_data, dict):
                    raise ValueError(f"Invalid entry data: {entry_data}")

                # Each entry must have a Code field
                entry_code = next((entry_data.get(key) for key in entry_data if key.lower() == 'code'), None)
                if not entry_code:
                    raise ValueError(f"Entry code is missing in entry data: {entry_data}")

                try:
                    result = self._update_single_entry(attribute, entry_code, entry_data)
                    results.append({"code": entry_code, "id": result, "status": "success"})
                except Exception as e:
                    results.append({"code": entry_code, "error": str(e), "status": "error"})

            return results

        elif code and entry:
            # Single entry update
            result = self._update_single_entry(attribute, code, entry)
            return [{"code": code, "id": result, "status": "success"}]

        else:
            raise ValueError("Either 'code' and 'entry' or 'entries' must be provided")

    def _update_single_entry(self, attribute: str, code: str, entry: dict) -> str:
        """Internal method to update a single entry."""
        if not code:
            raise ValueError("Entry code is required")

        if not entry or not isinstance(entry, dict):
            raise ValueError("Entry data must be a non-empty dictionary")

        existing_entry_response = self.entry_service.get_entry(code=code)

        if not existing_entry_response or not isinstance(existing_entry_response, dict):
            raise ValueError(f"Invalid existing entry response: {existing_entry_response}")

        attribute_id = existing_entry_response.get("attributeId")

        if not attribute_id:
            raise ValueError("Attribute ID is missing in the existing entry")

        entry_fields_response = self.entry_fields_service.get_entry_fields(attribute_id=attribute_id)

        fields = entry_fields_response.get("result", {}).get("items", [])

        if not fields or not isinstance(fields, list):
            raise ValueError(f"Invalid fields response: {fields}")

        if not all(isinstance(item, dict) for item in fields):
            raise ValueError(f"Invalid fields data format: {fields}")

        updated_entry = existing_entry_response.get('entry', {})

        for field in fields:
            field_name = field.get("displayName")
            column_name = field.get("column").lower()

            if field_name in entry:
                updated_entry[column_name] = self.__get_value(field, entry[field_name])

        return self.entry_service.update(attribute_id=attribute_id, entry=updated_entry)

    def __get_value(self, field, source: str):
        """Helper method to get field value based on its type."""
        if field.get('inputType') == SELECT_COMBO_INPUT:
            select_value = self.__get_select_combobox_field_value(field, source)
            return select_value
        else:
            return source

    def __get_select_combobox_field_value(self, field, source: str) -> str:
        """Helper method to get select combobox field value."""
        attribute_id = -1

        if field.get('column').isdigit():
            attribute_id = int(field.get('column'))
        elif field.get('column')[0].isalpha() and field.get('column')[1:].isdigit():
            attribute_id = int(field.get('column')[1:])

        if attribute_id == -1:
            # Fall back to the field's customdata property to resolve the attribute id
            if field.get('customdata', None) is not None:
                try:
                    customdata = json.loads(field.get('customdata'))
                    if (customdata is not None and isinstance(customdata, dict) and "datasource" in customdata):
                        datasource = customdata.get("datasource")
                        if (datasource is not None and isinstance(datasource, dict) and "attributeid" in datasource):
                            attribute_id = datasource.get("attributeid")
                except json.JSONDecodeError:
                    raise ValueError(f"Invalid customdata JSON: {field.get('customdata')}")

        search_params = {
            "attributeId": attribute_id,
            "queryString": f"displayname({source});",
            "maxResultCount": 1,
            "skipCount": 0
        }

        search_response = self.search_service.search_entries(**search_params)

        if search_response.get('totalCount', 0) == 0:
            search_params['queryString'] = f"code({source});"
            search_response = self.search_service.search_entries(**search_params)

        if search_response and 'items' in search_response and len(search_response['items']) > 0:
            result = {}
            result['id'] = search_response['items'][0].get('entry', {}).get('id')
            result['name'] = search_response['items'][0].get('entry', {}).get('displayname')
            result['code'] = search_response['items'][0].get('entry', {}).get('code')
            result['alias'] = search_response['items'][0].get('entry', {}).get('alias', None)
            result['attributeid'] = attribute_id
            # Filter out None values from the result dictionary
            result = {k: v for k, v in result.items() if v is not None}
            return json.dumps([result])
        else:
            return source

    def __get_field_value(self, entry_raw, field):
        """Helper method to safely get field value from entry_raw."""
        if field.get('inputType') == DIGITAL_OBJECT_INPUT:
            digital_object = self.__get_digital_object_field_value(entry_raw, field)
            return digital_object
        elif field.get('inputType') == SELECT_COMBO_INPUT:
            select_value = self.__get_select_field_value(entry_raw, field)
            return select_value
        else:
            return entry_raw.get(field.get('column').lower(), "")

    def __get_select_field_value(self, entry_raw, field):
        """Helper method to get select field value."""
        results = []
        select_value = entry_raw.get(field.get('column').lower(), None)

        # Check whether the stored value is a valid JSON string
        if select_value is not None:
            try:
                data = json.loads(select_value)
                if isinstance(data, list):
                    for obj in data:
                        item = {}
                        item["Id"] = obj.get("id")
                        item["Name"] = obj.get("name")
                        results.append(item)
                    return results
                else:
                    return data

            except json.JSONDecodeError:
                return select_value

    def __get_digital_object_field_value(self, entry_raw, field):
        """Helper method to get digital object field value."""
        digital_objects = entry_raw.get(field.get('column').lower(), None)

        results = []

        # Check whether the stored value is a valid JSON string
        if digital_objects is not None:
            try:
                data = json.loads(digital_objects)

                if isinstance(data, list):
                    for obj in data:
                        digital_object = {
                            "Id": obj.get("id"),
                            "Name": obj.get("name"),
                            "Links": obj.get("needle"),
                            "Type": obj.get("typename")
                        }
                        results.append(digital_object)
                    return results
            except json.JSONDecodeError:
                return digital_objects

    def __create_from_json_file(self, attribute: str, file_path: str):
        """Create a set of new entries from a JSON file."""
        with open(file_path, 'r', encoding='utf-8') as file:
            try:
                entries = json.load(file)
            except json.JSONDecodeError as e:
                raise ValueError(f"Error parsing JSON file: {e}")

        if not isinstance(entries, list):
            raise ValueError("JSON file must contain a list of entries")

        for entry in entries:
            if not isinstance(entry, dict):
                raise ValueError("Each entry must be a dictionary")
            self.create(attribute=attribute, entry=entry)

    def __create_from_csv_file(self, attribute: str, file_path: str):
        """Create a set of new entries from a CSV file."""
        import csv

        with open(file_path, 'r', encoding='utf-8') as file:
            reader = csv.DictReader(file)
            entries = [row for row in reader]

        for entry in entries:
            self.create(attribute=attribute, entry=entry)

    def __create_from_excel_file(self, attribute: str, file_path: str):
        """Create a set of new entries from an Excel file."""
        import pandas as pd

        try:
            df = pd.read_excel(file_path)
        except Exception as e:
            raise ValueError(f"Error reading Excel file: {e}")

        entries = df.to_dict(orient='records')

        for entry in entries:
            self.create(attribute=attribute, entry=entry)

    def create_from_file(self, attribute: str, file_path: str):
        """Create a set of new entries from a file (.json, .csv, or .xlsx)."""
        if not attribute:
            raise ValueError("Attribute name is required")

        if not file_path:
            raise ValueError("File path is required")

        # Determine the file type by its extension
        if file_path.endswith('.json'):
            self.__create_from_json_file(attribute, file_path)
        elif file_path.endswith('.csv'):
            self.__create_from_csv_file(attribute, file_path)
        elif file_path.endswith('.xlsx'):
            self.__create_from_excel_file(attribute, file_path)
        else:
            raise ValueError("Unsupported file type. Supported types are: .json, .csv, .xlsx")


if __name__ == "__main__":
    manager = EntryManager()
    entry = manager.get(id="8d2841c9-e307-4971-bd2b-70da0d7a7534")
    print(entry)
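
For orientation, a minimal usage sketch of the EntryManager API shown above (not part of the package). It assumes the CLI has already stored an API URL and token through its authentication flow; the attribute name "Samples" and the field names in the dictionaries are hypothetical placeholders.

from das.managers.entries_manager import EntryManager

manager = EntryManager()

# Create two entries in one call; each element of the result reports its own status.
results = manager.create(
    attribute="Samples",
    entries=[
        {"Name": "Sample 001", "Sampling Location": "North Sea"},
        {"Name": "Sample 002", "Sampling Location": "Baltic Sea"},
    ],
)

# Update a single entry by its code, then read it back as a display-name keyed dict.
manager.update(attribute="Samples", code="S-001", entry={"Name": "Sample 001 (rev)"})
print(manager.get(code="S-001"))

# Bulk-load entries from a file; the extension selects the JSON, CSV, or Excel parser.
manager.create_from_file(attribute="Samples", file_path="samples.xlsx")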
das/managers/search_manager.py
ADDED

@@ -0,0 +1,64 @@
from das.common.config import load_api_url
from das.services.attributes import AttributesService
from das.services.entry_fields import EntryFieldsService
from das.services.search import SearchService


class SearchManager:
    def __init__(self):
        base_url = load_api_url()
        if (base_url is None or base_url == ""):
            raise ValueError(f"Base URL is required - {self.__class__.__name__} - You must be authenticated.")
        self.search_service = SearchService(base_url)
        self.attributes_service = AttributesService(base_url)
        self.entry_fields = EntryFieldsService(base_url)

    def __convert_filter(self, entry_fields: list, filter: str) -> str:
        """Converts a user-friendly filter to an API-compatible filter."""
        # The filter will be in the format: Name(*64*);Created At(>2023-01-01);Sampling Location(*North Sea*)
        # Replace the display names with the column names from entry_fields only for the values outside the parentheses.
        for field in entry_fields:
            display_name = field.get('displayName')
            column_name = field.get('column')
            if display_name and column_name:
                filter = filter.lower().replace(f"{display_name.lower()}(", f"{column_name.lower()}(")
        return filter

    def __convert_sorting(self, entry_fields: list, sort_by: str, sort_order: str) -> str:
        """Converts user-friendly sorting to API-compatible sorting."""
        field = next((ef for ef in entry_fields if ef.get('displayName').lower() == sort_by.lower()), None)
        if field is None:
            raise ValueError(f"Sorting field '{sort_by}' not found in entry fields.")
        return f"{field.get('column')} {sort_order}"

    def search_entries(self, attribute: str, query: str, max_results: int = 10, page: int = 1, sort_by: str = "Name", sort_order: str = "asc"):
        """Search entries based on provided criteria."""
        try:
            # Validate attribute
            attr_response = self.attributes_service.get_attribute(name=attribute)
            if not attr_response or not isinstance(attr_response, dict):
                raise ValueError(f"Invalid attribute response format: {type(attr_response)}")
            items = attr_response.get("result", {}).get("items", [])
            if len(items) == 0:
                raise ValueError(f"Attribute '{attribute}' not found.")
            attribute_id = items[0].get("id")
            if attribute_id is None:
                raise ValueError(f"Attribute ID not found for attribute '{attribute}'.")

            entry_fields = self.entry_fields.get_entry_fields(attribute_id=attribute_id).get('result', {}).get('items', [])

            if len(entry_fields) == 0:
                raise ValueError(f"No entry fields found for attribute '{attribute}'.")

            # Perform search
            search_params = {
                "attributeId": attribute_id,
                "queryString": self.__convert_filter(entry_fields, query),
                "maxResultCount": max_results,
                "skipCount": (max_results * (page - 1)),
                "sorting": self.__convert_sorting(entry_fields, sort_by, sort_order)
            }
            results = self.search_service.search_entries(**search_params)
            return results
        except Exception as e:
            raise ValueError(f"Search failed: {str(e)}")
das/services/attributes.py
ADDED

@@ -0,0 +1,81 @@
from das.common.api import get_data
from das.common.config import load_token


class AttributesService():

    def __init__(self, base_url):
        self.base_url = f"{base_url}/api/services/app/Attribute"

    def get_attribute(self, id: int = None, name: str = None, alias: str = None, table_name: str = None):
        token = load_token()

        if (id is None and name is None and alias is None and table_name is None):
            raise ValueError("At least one parameter must be provided")

        if (token is None or token == ""):
            raise ValueError("Authorization token is required")

        headers = {
            "Authorization": f"Bearer {token}"
        }

        if (id is not None):
            url = f"{self.base_url}/GetAll?id={id}"
        elif (name is not None):
            url = f"{self.base_url}/GetAll?name={name}"
        elif (alias is not None):
            url = f"{self.base_url}/GetAll?alias={alias}"
        elif (table_name is not None):
            url = f"{self.base_url}/GetAll?tableName={table_name}"

        response = get_data(url, headers=headers)

        if response.get('success') == True:
            return response
        else:
            raise ValueError(response.get('error'))

    def get_name(self, id: int) -> str:
        token = load_token()

        if (id is None):
            raise ValueError("ID parameter must be provided")

        if (token is None or token == ""):
            raise ValueError("Authorization token is required")

        headers = {
            "Authorization": f"Bearer {token}"
        }

        url = f"{self.base_url}/GetAll?id={id}"

        response = get_data(url, headers=headers)

        if response.get('success') == True:
            return response.get('result').get('items')[0].get('name')
        else:
            raise ValueError(response.get('error'))

    def get_id(self, name: str) -> int:
        token = load_token()

        if (name is None):
            raise ValueError("Name parameter must be provided")

        if (token is None or token == ""):
            raise ValueError("Authorization token is required")

        headers = {
            "Authorization": f"Bearer {token}"
        }

        url = f"{self.base_url}/GetAll?name={name}"

        response = get_data(url, headers=headers)

        if response.get('success') == True:
            return response.get('result').get('items')[0].get('id')
        else:
            raise ValueError(response.get('error'))
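
A short, hypothetical example of using AttributesService directly, assuming a token has already been stored by the CLI's auth flow; "Samples" is a placeholder attribute name.

from das.common.config import load_api_url
from das.services.attributes import AttributesService

attributes = AttributesService(load_api_url())
attribute_id = attributes.get_id(name="Samples")   # resolve display name -> id
print(attributes.get_name(id=attribute_id))        # resolve id -> name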
das/services/cache.py
ADDED
@@ -0,0 +1,70 @@
from das.common.api import get_data, post_data
from das.common.config import load_token

class CacheService():
    def __init__(self, base_url):
        self.base_url = f"{base_url}/api/services/app/Caching"

    def clear_all(self):
        """Clear all cached data."""
        token = load_token()

        if (token is None or token == ""):
            raise ValueError("Authorization token is required")

        headers = {
            "Authorization": f"Bearer {token}"
        }

        url = f"{self.base_url}/ClearAllCaches"

        response = post_data(url, headers=headers)

        if response.get('success') == True:
            return response
        else:
            raise ValueError(response.get('error'))

    def get_all(self):
        """Get all cache entries."""
        token = load_token()

        if (token is None or token == ""):
            raise ValueError("Authorization token is required")

        headers = {
            "Authorization": f"Bearer {token}"
        }

        url = f"{self.base_url}/GetAllCaches"

        response = get_data(url, headers=headers)

        if response.get('success') == True:
            return response.get('result')
        else:
            raise ValueError(response.get('error'))

    def clear_cache(self, name: str):
        """Clear a specific cache by name."""
        token = load_token()

        if (token is None or token == ""):
            raise ValueError("Authorization token is required")

        headers = {
            "Authorization": f"Bearer {token}"
        }

        data = {
            "id": name
        }

        url = f"{self.base_url}/ClearCache"

        response = post_data(url, headers=headers, data=data)

        if response.get('success') == True:
            return response
        else:
            raise ValueError(response.get('error'))
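
And a hypothetical sketch for CacheService: the cache name "EntityCache" is a placeholder; real names would come from get_all().

from das.common.config import load_api_url
from das.services.cache import CacheService

cache = CacheService(load_api_url())
print(cache.get_all())                  # list the server-side caches
cache.clear_cache(name="EntityCache")   # clear one cache by name
# cache.clear_all()                     # or clear everything at once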