pyxecm 3.1.0__tar.gz → 3.1.1__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of pyxecm might be problematic.
- {pyxecm-3.1.0 → pyxecm-3.1.1}/.gitignore +1 -0
- {pyxecm-3.1.0 → pyxecm-3.1.1}/PKG-INFO +1 -1
- {pyxecm-3.1.0 → pyxecm-3.1.1}/pyproject.toml +1 -1
- {pyxecm-3.1.0 → pyxecm-3.1.1}/src/pyxecm/otca.py +8 -3
- {pyxecm-3.1.0 → pyxecm-3.1.1}/src/pyxecm/otcs.py +166 -86
- {pyxecm-3.1.0 → pyxecm-3.1.1}/src/pyxecm_api/common/functions.py +0 -97
- {pyxecm-3.1.0 → pyxecm-3.1.1}/src/pyxecm_customizer/browser_automation.py +65 -33
- {pyxecm-3.1.0 → pyxecm-3.1.1}/src/pyxecm_customizer/guidewire.py +8 -8
- {pyxecm-3.1.0 → pyxecm-3.1.1}/src/pyxecm_customizer/knowledge_graph.py +14 -17
- {pyxecm-3.1.0 → pyxecm-3.1.1}/src/pyxecm_customizer/payload.py +33 -23
- {pyxecm-3.1.0 → pyxecm-3.1.1}/README.md +0 -0
- {pyxecm-3.1.0 → pyxecm-3.1.1}/src/pyxecm/__init__.py +0 -0
- {pyxecm-3.1.0 → pyxecm-3.1.1}/src/pyxecm/avts.py +0 -0
- {pyxecm-3.1.0 → pyxecm-3.1.1}/src/pyxecm/coreshare.py +0 -0
- {pyxecm-3.1.0 → pyxecm-3.1.1}/src/pyxecm/helper/__init__.py +0 -0
- {pyxecm-3.1.0 → pyxecm-3.1.1}/src/pyxecm/helper/assoc.py +0 -0
- {pyxecm-3.1.0 → pyxecm-3.1.1}/src/pyxecm/helper/data.py +0 -0
- {pyxecm-3.1.0 → pyxecm-3.1.1}/src/pyxecm/helper/logadapter.py +0 -0
- {pyxecm-3.1.0 → pyxecm-3.1.1}/src/pyxecm/helper/otel_config.py +0 -0
- {pyxecm-3.1.0 → pyxecm-3.1.1}/src/pyxecm/helper/web.py +0 -0
- {pyxecm-3.1.0 → pyxecm-3.1.1}/src/pyxecm/helper/xml.py +0 -0
- {pyxecm-3.1.0 → pyxecm-3.1.1}/src/pyxecm/otac.py +0 -0
- {pyxecm-3.1.0 → pyxecm-3.1.1}/src/pyxecm/otawp.py +0 -0
- {pyxecm-3.1.0 → pyxecm-3.1.1}/src/pyxecm/otds.py +0 -0
- {pyxecm-3.1.0 → pyxecm-3.1.1}/src/pyxecm/otiv.py +0 -0
- {pyxecm-3.1.0 → pyxecm-3.1.1}/src/pyxecm/otkd.py +0 -0
- {pyxecm-3.1.0 → pyxecm-3.1.1}/src/pyxecm/otmm.py +0 -0
- {pyxecm-3.1.0 → pyxecm-3.1.1}/src/pyxecm/otpd.py +0 -0
- {pyxecm-3.1.0 → pyxecm-3.1.1}/src/pyxecm/py.typed +0 -0
- {pyxecm-3.1.0 → pyxecm-3.1.1}/src/pyxecm_api/__init__.py +0 -0
- {pyxecm-3.1.0 → pyxecm-3.1.1}/src/pyxecm_api/__main__.py +0 -0
- {pyxecm-3.1.0 → pyxecm-3.1.1}/src/pyxecm_api/app.py +0 -0
- {pyxecm-3.1.0 → pyxecm-3.1.1}/src/pyxecm_api/auth/__init__.py +0 -0
- {pyxecm-3.1.0 → pyxecm-3.1.1}/src/pyxecm_api/auth/functions.py +0 -0
- {pyxecm-3.1.0 → pyxecm-3.1.1}/src/pyxecm_api/auth/models.py +0 -0
- {pyxecm-3.1.0 → pyxecm-3.1.1}/src/pyxecm_api/auth/router.py +0 -0
- {pyxecm-3.1.0 → pyxecm-3.1.1}/src/pyxecm_api/common/__init__.py +0 -0
- {pyxecm-3.1.0 → pyxecm-3.1.1}/src/pyxecm_api/common/metrics.py +0 -0
- {pyxecm-3.1.0 → pyxecm-3.1.1}/src/pyxecm_api/common/models.py +0 -0
- {pyxecm-3.1.0 → pyxecm-3.1.1}/src/pyxecm_api/common/router.py +0 -0
- {pyxecm-3.1.0 → pyxecm-3.1.1}/src/pyxecm_api/settings.py +0 -0
- {pyxecm-3.1.0 → pyxecm-3.1.1}/src/pyxecm_api/terminal/__init__.py +0 -0
- {pyxecm-3.1.0 → pyxecm-3.1.1}/src/pyxecm_api/terminal/router.py +0 -0
- {pyxecm-3.1.0 → pyxecm-3.1.1}/src/pyxecm_api/v1_csai/__init__.py +0 -0
- {pyxecm-3.1.0 → pyxecm-3.1.1}/src/pyxecm_api/v1_csai/models.py +0 -0
- {pyxecm-3.1.0 → pyxecm-3.1.1}/src/pyxecm_api/v1_csai/router.py +0 -0
- {pyxecm-3.1.0 → pyxecm-3.1.1}/src/pyxecm_api/v1_csai/statics/bindings/utils.js +0 -0
- {pyxecm-3.1.0 → pyxecm-3.1.1}/src/pyxecm_api/v1_csai/statics/tom-select/tom-select.complete.min.js +0 -0
- {pyxecm-3.1.0 → pyxecm-3.1.1}/src/pyxecm_api/v1_csai/statics/tom-select/tom-select.css +0 -0
- {pyxecm-3.1.0 → pyxecm-3.1.1}/src/pyxecm_api/v1_csai/statics/vis-9.1.2/vis-network.css +0 -0
- {pyxecm-3.1.0 → pyxecm-3.1.1}/src/pyxecm_api/v1_csai/statics/vis-9.1.2/vis-network.min.js +0 -0
- {pyxecm-3.1.0 → pyxecm-3.1.1}/src/pyxecm_api/v1_maintenance/__init__.py +0 -0
- {pyxecm-3.1.0 → pyxecm-3.1.1}/src/pyxecm_api/v1_maintenance/functions.py +0 -0
- {pyxecm-3.1.0 → pyxecm-3.1.1}/src/pyxecm_api/v1_maintenance/models.py +0 -0
- {pyxecm-3.1.0 → pyxecm-3.1.1}/src/pyxecm_api/v1_maintenance/router.py +0 -0
- {pyxecm-3.1.0 → pyxecm-3.1.1}/src/pyxecm_api/v1_otcs/__init__.py +0 -0
- {pyxecm-3.1.0 → pyxecm-3.1.1}/src/pyxecm_api/v1_otcs/functions.py +0 -0
- {pyxecm-3.1.0 → pyxecm-3.1.1}/src/pyxecm_api/v1_otcs/router.py +0 -0
- {pyxecm-3.1.0 → pyxecm-3.1.1}/src/pyxecm_api/v1_payload/__init__.py +0 -0
- {pyxecm-3.1.0 → pyxecm-3.1.1}/src/pyxecm_api/v1_payload/functions.py +0 -0
- {pyxecm-3.1.0 → pyxecm-3.1.1}/src/pyxecm_api/v1_payload/models.py +0 -0
- {pyxecm-3.1.0 → pyxecm-3.1.1}/src/pyxecm_api/v1_payload/router.py +0 -0
- {pyxecm-3.1.0 → pyxecm-3.1.1}/src/pyxecm_customizer/__init__.py +0 -0
- {pyxecm-3.1.0 → pyxecm-3.1.1}/src/pyxecm_customizer/__main__.py +0 -0
- {pyxecm-3.1.0 → pyxecm-3.1.1}/src/pyxecm_customizer/customizer.py +0 -0
- {pyxecm-3.1.0 → pyxecm-3.1.1}/src/pyxecm_customizer/exceptions.py +0 -0
- {pyxecm-3.1.0 → pyxecm-3.1.1}/src/pyxecm_customizer/k8s.py +0 -0
- {pyxecm-3.1.0 → pyxecm-3.1.1}/src/pyxecm_customizer/log.py +0 -0
- {pyxecm-3.1.0 → pyxecm-3.1.1}/src/pyxecm_customizer/m365.py +0 -0
- {pyxecm-3.1.0 → pyxecm-3.1.1}/src/pyxecm_customizer/payload_list.py +0 -0
- {pyxecm-3.1.0 → pyxecm-3.1.1}/src/pyxecm_customizer/salesforce.py +0 -0
- {pyxecm-3.1.0 → pyxecm-3.1.1}/src/pyxecm_customizer/sap.py +0 -0
- {pyxecm-3.1.0 → pyxecm-3.1.1}/src/pyxecm_customizer/servicenow.py +0 -0
- {pyxecm-3.1.0 → pyxecm-3.1.1}/src/pyxecm_customizer/settings.py +0 -0
- {pyxecm-3.1.0 → pyxecm-3.1.1}/src/pyxecm_customizer/successfactors.py +0 -0
- {pyxecm-3.1.0 → pyxecm-3.1.1}/src/pyxecm_customizer/translate.py +0 -0
- {pyxecm-3.1.0 → pyxecm-3.1.1}/src/pyxecm_maintenance_page/__init__.py +0 -0
- {pyxecm-3.1.0 → pyxecm-3.1.1}/src/pyxecm_maintenance_page/__main__.py +0 -0
- {pyxecm-3.1.0 → pyxecm-3.1.1}/src/pyxecm_maintenance_page/app.py +0 -0
- {pyxecm-3.1.0 → pyxecm-3.1.1}/src/pyxecm_maintenance_page/settings.py +0 -0
- {pyxecm-3.1.0 → pyxecm-3.1.1}/src/pyxecm_maintenance_page/static/favicon.avif +0 -0
- {pyxecm-3.1.0 → pyxecm-3.1.1}/src/pyxecm_maintenance_page/templates/maintenance.html +0 -0
PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: pyxecm
-Version: 3.1.0
+Version: 3.1.1
 Summary: A Python library to interact with Opentext Content Management Rest API
 Project-URL: Homepage, https://github.com/opentext/pyxecm
 Author-email: Kai Gatzweiler <kgatzweiler@opentext.com>, "Dr. Marc Diefenbruch" <mdiefenb@opentext.com>
src/pyxecm/otca.py
@@ -2240,9 +2240,14 @@ class OTCA:
                     }
                 },
             },
-            "responseTemplate": {
-
-
+            "responseTemplate": {
+                'scratchpad': {
+                    'item': {
+                        'input': {'where': 'response.context_update.where'}
+                    }
+                },
+                "agents": ["retrieverAgent"],
+            }

        Returns:
            dict: Tool details or None in case of an error.
src/pyxecm/otcs.py
@@ -500,6 +500,7 @@ class OTCS:
         self._use_numeric_category_identifier = use_numeric_category_identifier
         self._executor = ThreadPoolExecutor(max_workers=thread_number)
         self._workspace_type_lookup = {}
+        self._workspace_type_names = []

     # end method definition

@@ -1018,7 +1019,12 @@ class OTCS:
        if success_message:
            self.logger.info(success_message)
        if parse_request_response and not stream:
-
+            # There are cases where OTCS returns response.ok (200) but
+            # because of restart or scaling of pods the response text is not
+            # valid JSON. So parse_request_response() may raise an ConnectionError exception that
+            # is handled in the exception block below (with waiting for readiness and retry logic)
+            parsed_response = self.parse_request_response(response_object=response)
+            return parsed_response
        else:
            return response
        # Check if Session has expired - then re-authenticate and try once more
@@ -1123,17 +1129,20 @@ class OTCS:
            else:
                return None
            # end except Timeout
-        except requests.exceptions.ConnectionError:
+        except requests.exceptions.ConnectionError as connection_error:
            if retries <= max_retries:
                self.logger.warning(
-                    "
+                    "Cannot connect to OTCS at -> %s; error -> %s! Retrying in %d seconds... %d/%d",
                    url,
+                    str(connection_error),
                    REQUEST_RETRY_DELAY,
                    retries,
                    max_retries,
                )
                retries += 1

+                # The connection error could have been caused by a restart of the OTCS pod or services.
+                # So we better check if OTCS is ready to receive requests again before retrying:
                while not self.is_ready():
                    self.logger.warning(
                        "Content Server is not ready to receive requests. Waiting for state change in %d seconds...",
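
The rewritten ConnectionError handler waits for Content Server readiness before retrying, and the previous hunk makes JSON decode failures feed the same path. A minimal standalone sketch of that pattern, assuming a plain requests client; the probe, delay, and retry values are illustrative and not taken from pyxecm:

```python
import json
import time

import requests

REQUEST_RETRY_DELAY = 30  # seconds between retries (illustrative value)
REQUEST_MAX_RETRIES = 5   # illustrative value


def is_ready(base_url: str) -> bool:
    """Hypothetical readiness probe standing in for OTCS.is_ready()."""
    try:
        return requests.get(base_url, timeout=5).ok
    except requests.exceptions.RequestException:
        return False


def fetch_json(url: str, base_url: str) -> dict | None:
    """Sketch of the retry pattern: connection errors and bad JSON both wait for readiness."""
    retries = 0
    while retries <= REQUEST_MAX_RETRIES:
        try:
            response = requests.get(url, timeout=30)
            # Even a 200 response can carry a non-JSON body while pods restart or scale;
            # turning the decode failure into a ConnectionError re-enters this retry loop:
            try:
                return json.loads(response.text)
            except json.JSONDecodeError as exc:
                raise requests.exceptions.ConnectionError(str(exc)) from exc
        except requests.exceptions.ConnectionError:
            retries += 1
            # Wait until the server is ready again before retrying:
            while not is_ready(base_url):
                time.sleep(REQUEST_RETRY_DELAY)
    return None
```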
@@ -1143,8 +1152,9 @@ class OTCS:

            else:
                self.logger.error(
-                    "%s; connection error",
+                    "%s; connection error -> %s",
                    failure_message,
+                    str(connection_error),
                )
                if retry_forever:
                    # If it fails after REQUEST_MAX_RETRIES retries
@@ -1183,13 +1193,17 @@ class OTCS:
                The response object delivered by the request call.
            additional_error_message (str):
                Custom error message to include in logs.
-            show_error (bool):
-                If True, logs an error. If False, logs a warning.
+            show_error (bool, optional):
+                If True, logs an error / raises an exception. If False, logs a warning.

        Returns:
            dict | None:
                Parsed response as a dictionary, or None in case of an error.

+        Raises:
+            requests.exceptions.ConnectionError:
+                If the response cannot be decoded as JSON.
+
        """

        if not response_object:
@@ -1214,12 +1228,13 @@ class OTCS:
                exception,
            )
            if show_error:
-
-
-
+                # Raise ConnectionError instead of returning None
+                raise requests.exceptions.ConnectionError(message) from exception
+            self.logger.warning(message)
            return None
-
-
+        # end try-except block
+
+        return dict_object

    # end method definition

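
Because parse_request_response() can now raise instead of returning None when show_error is True, callers that only checked for None need to be prepared for an exception. A hedged standalone sketch of the new semantics (not the pyxecm implementation itself):

```python
import json

import requests


def parse_or_none(body: str, show_error: bool = True) -> dict | None:
    """Standalone sketch of the new show_error semantics of parse_request_response()."""
    try:
        return json.loads(body)
    except json.JSONDecodeError as exc:
        message = f"Cannot decode response to JSON; error -> {exc}"
        if show_error:
            # New behavior: raise a ConnectionError instead of returning None, so that
            # the retry logic in do_request() can catch it and wait for readiness.
            raise requests.exceptions.ConnectionError(message) from exc
        print("warning:", message)
        return None


broken_body = "<html>Service restarting</html>"  # a 200 response can still carry non-JSON content

try:
    parse_or_none(broken_body, show_error=True)
except requests.exceptions.ConnectionError as err:
    print("raised as expected:", err)

print(parse_or_none(broken_body, show_error=False))  # warns and returns None
```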
@@ -1710,9 +1725,9 @@ class OTCS:
        # than creating a list with all values at once.
        # This is especially important for large result sets.
        yield from (
-            item[data_name][property_name]
+            item[data_name][property_name] if property_name else item[data_name]
            for item in response["results"]
-            if isinstance(item.get(data_name), dict) and property_name in item[data_name]
+            if isinstance(item.get(data_name), dict) and (not property_name or property_name in item[data_name])
        )

    # end method definition
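
The generator change makes property_name optional: with a name it yields single values, without one it yields the whole data dictionary. A small self-contained sketch of the same pattern with made-up sample data:

```python
def iter_result_values(response: dict, data_name: str = "properties", property_name: str | None = None):
    """Yield one value (or the whole sub-dict) per result item, mirroring the changed generator."""
    yield from (
        item[data_name][property_name] if property_name else item[data_name]
        for item in response["results"]
        if isinstance(item.get(data_name), dict) and (not property_name or property_name in item[data_name])
    )


sample = {
    "results": [
        {"properties": {"id": 1, "name": "Document A"}},
        {"properties": {"id": 2, "name": "Document B"}},
    ]
}

print(list(iter_result_values(sample, property_name="name")))  # ['Document A', 'Document B']
print(list(iter_result_values(sample)))                        # full 'properties' dicts
```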
@@ -5054,9 +5069,9 @@ class OTCS:
            show_hidden (bool, optional):
                Whether to list hidden items. Defaults to False.
            limit (int, optional):
-                The maximum number of results to return. Defaults to 100.
+                The maximum number of results to return (page size). Defaults to 100.
            page (int, optional):
-                The page of results to retrieve. Defaults to 1 (first page).
+                The page of results to retrieve (page number). Defaults to 1 (first page).
            fields (str | list, optional):
                Which fields to retrieve.
                This can have a significant impact on performance.
@@ -5408,11 +5423,10 @@ class OTCS:
                continue
            category_key = next(iter(category_schema))

-
-
-
-
-            if not attribute_schema:
+            # There can be multiple attributes with the same name in a category
+            # if the category has sets:
+            attribute_schemas = [cat_elem for cat_elem in category_schema.values() if cat_elem.get("name") == attribute]
+            if not attribute_schemas:
                self.logger.debug(
                    "Node -> '%s' (%s) does not have attribute -> '%s'. Skipping...",
                    node_name,
@@ -5420,73 +5434,81 @@
                    attribute,
                )
                continue
-            attribute_key = attribute_schema["key"]
-            # Split the attribute key once (1) at the first underscore from the right.
-            # rsplit delivers a list and [-1] delivers the last list item:
-            attribute_id = attribute_key.rsplit("_", 1)[-1]

-
-
-
-
-
-
-                ),
-                None,
-            )
-            if not set_schema:
-                self.logger.debug(
-                    "Node -> '%s' (%s) does not have attribute set -> '%s'. Skipping...",
-                    node_name,
-                    node_id,
-                    attribute_set,
-                )
-                continue
-                set_key = set_schema["key"]
-            else:
-                set_schema = None
-                set_key = None
+            # Traverse the attribute schemas with the matching attribute name:
+            for attribute_schema in attribute_schemas:
+                attribute_key = attribute_schema["key"]
+                # Split the attribute key once (1) at the first underscore from the right.
+                # rsplit delivers a list and [-1] delivers the last list item:
+                attribute_id = attribute_key.rsplit("_", 1)[-1]

-
-
-
-
-
-
-
+                if attribute_set:  # is the attribute_set parameter provided?
+                    set_schema = next(
+                        (
+                            cat_elem
+                            for cat_elem in category_schema.values()
+                            if cat_elem.get("name") == attribute_set and cat_elem.get("persona") == "set"
+                        ),
+                        None,
+                    )
+                    if not set_schema:
+                        self.logger.debug(
+                            "Node -> '%s' (%s) does not have attribute set -> '%s'. Skipping...",
+                            node_name,
+                            node_id,
+                            attribute_set,
+                        )
+                        continue
+                    set_key = set_schema["key"]
+                else:  # no attribute set value provided via the attribute_set parameter:
+                    if "_x_" in attribute_key:
+                        # The lookup does not include a set name but this attribute key
+                        # belongs to a set attribute - so we can skip it:
+                        continue
+                    set_schema = None
+                    set_key = None
+
+                prefix = set_key + "_" if set_key else category_key + "_"
+
+                data = node["data"]["categories"]
+                for cat_data in data:
+                    if set_key:
+                        for i in range(1, int(set_schema["multi_value_length_max"])):
+                            key = prefix + str(i) + "_" + attribute_id
+                            attribute_value = cat_data.get(key)
+                            if not attribute_value:
+                                break
+                            # Is it a multi-value attribute (i.e. a list of values)?
+                            if isinstance(attribute_value, list):
+                                if value in attribute_value:
+                                    # Create a "results" dict that is compatible with normal REST calls
+                                    # to not break get_result_value() method that may be called on the result:
+                                    results["results"].append(node)
+                            elif value == attribute_value:
+                                # Create a results dict that is compatible with normal REST calls
+                                # to not break get_result_value() method that may be called on the result:
+                                results["results"].append(node)
+                        # end if set_key
+                    else:
+                        key = prefix + attribute_id
                        attribute_value = cat_data.get(key)
                        if not attribute_value:
-
+                            continue
                        # Is it a multi-value attribute (i.e. a list of values)?
                        if isinstance(attribute_value, list):
                            if value in attribute_value:
                                # Create a "results" dict that is compatible with normal REST calls
                                # to not break get_result_value() method that may be called on the result:
                                results["results"].append(node)
+                        # If not a multi-value attribute, check for equality:
                        elif value == attribute_value:
                            # Create a results dict that is compatible with normal REST calls
                            # to not break get_result_value() method that may be called on the result:
                            results["results"].append(node)
-
-
-
-
-            if not attribute_value:
-                continue
-            # Is it a multi-value attribute (i.e. a list of values)?
-            if isinstance(attribute_value, list):
-                if value in attribute_value:
-                    # Create a "results" dict that is compatible with normal REST calls
-                    # to not break get_result_value() method that may be called on the result:
-                    results["results"].append(node)
-            # If not a multi-value attribute, check for equality:
-            elif value == attribute_value:
-                # Create a results dict that is compatible with normal REST calls
-                # to not break get_result_value() method that may be called on the result:
-                results["results"].append(node)
-            # end if set_key else
-        # end for cat_data, cat_schema in zip(data, schema)
-    # end for node in nodes
+                    # end if set_key ... else
+                # end for cat_data in data:
+            # end for attribute_schema in attribute_schemas:
+        # end for node in self.get_subnodes_iterator()

        self.logger.debug(
            "Couldn't find a node with the value -> '%s' in the attribute -> '%s' of category -> '%s' in parent with node ID -> %d.",
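
The rewritten lookup walks every attribute schema with a matching name and, for set attributes, probes the row-numbered keys under which OTCS flattens set data. A simplified standalone sketch of that key scheme; the IDs and exact key layout are illustrative, derived only from the string construction in the diff, not from OTCS documentation:

```python
# Simplified, made-up category data as it might appear in node["data"]["categories"]:
cat_data = {
    "12345_2": "Contract",   # plain attribute: <category_key>_<attribute_id>
    "12345_3_1_7": "EUR",    # set attribute, row 1: <set_key>_<row>_<attribute_id>
    "12345_3_2_7": "USD",    # set attribute, row 2
}


def find_value_in_set(cat_data: dict, set_key: str, attribute_id: str, value: str, max_rows: int = 10) -> bool:
    """Check every row of a set attribute for the given value (mirrors the new inner loop)."""
    for row in range(1, max_rows):
        key = f"{set_key}_{row}_{attribute_id}"
        attribute_value = cat_data.get(key)
        if not attribute_value:
            break  # no more rows
        if value == attribute_value or (isinstance(attribute_value, list) and value in attribute_value):
            return True
    return False


print(find_value_in_set(cat_data, set_key="12345_3", attribute_id="7", value="USD"))  # True
print(find_value_in_set(cat_data, set_key="12345_3", attribute_id="7", value="GBP"))  # False
```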
@@ -7516,7 +7538,7 @@ class OTCS:
        chunk_size: int = 8192,
        overwrite: bool = True,
    ) -> bool:
-        """Download a document from OTCS to local file system.
+        """Download a document (version) from OTCS to local file system.

        Args:
            node_id (int):
@@ -7541,8 +7563,7 @@ class OTCS:
        """

        if not version_number:
-            # we retrieve the latest version - using V1 REST API. V2 has issues
-            # request_url = self.config()["nodesUrlv2"] + "/" + str(node_id) + "/content"
+            # we retrieve the latest version - using V1 REST API. V2 has issues with downloading files:
            request_url = self.config()["nodesUrl"] + "/" + str(node_id) + "/content"
            self.logger.debug(
                "Download document with node ID -> %d (latest version); calling -> %s",
@@ -7550,10 +7571,7 @@ class OTCS:
                request_url,
            )
        else:
-            # we retrieve the given version - using V1 REST API. V2 has issues
-            # request_url = (
-            #     self.config()["nodesUrlv2"] + "/" + str(node_id) + "/versions/" + str(version_number) + "/content"
-            # )
+            # we retrieve the given version - using V1 REST API. V2 has issues with downloading files:
            request_url = (
                self.config()["nodesUrl"] + "/" + str(node_id) + "/versions/" + str(version_number) + "/content"
            )
@@ -7585,6 +7603,14 @@ class OTCS:
        content_encoding = response.headers.get("Content-Encoding", "").lower()
        is_compressed = content_encoding in ("gzip", "deflate", "br")

+        self.logger.debug(
+            "Downloading document with node ID -> %d to file -> '%s'; total size -> %s bytes; content encoding -> '%s'",
+            node_id,
+            file_path,
+            total_size,
+            content_encoding,
+        )
+
        if os.path.exists(file_path) and not overwrite:
            self.logger.warning(
                "File -> '%s' already exists and overwrite is set to False, not downloading document.",
@@ -7617,6 +7643,9 @@ class OTCS:
            )
            return False

+        # if we have a total size and the content is not compressed
+        # we can do a sanity check if the downloaded size matches
+        # the expected size:
        if total_size and not is_compressed and bytes_downloaded != total_size:
            self.logger.error(
                "Downloaded size (%d bytes) does not match expected size (%d bytes) for file -> '%s'",
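
The added comment explains why the size check is skipped for compressed transfers: Content-Length describes the bytes on the wire, while the file on disk holds the decoded stream. A hedged standalone sketch of the same download-and-verify pattern using plain requests; the URL and file path are placeholders and this is not the pyxecm implementation:

```python
import requests


def download_file(url: str, file_path: str, chunk_size: int = 8192) -> bool:
    """Stream a download and sanity-check its size, mirroring the logic in the diff."""
    with requests.get(url, stream=True, timeout=60) as response:
        response.raise_for_status()
        total_size = int(response.headers.get("Content-Length", 0))
        content_encoding = response.headers.get("Content-Encoding", "").lower()
        is_compressed = content_encoding in ("gzip", "deflate", "br")

        bytes_downloaded = 0
        with open(file_path, "wb") as out_file:
            for chunk in response.iter_content(chunk_size=chunk_size):
                out_file.write(chunk)
                bytes_downloaded += len(chunk)

    # Only compare sizes when the transfer was not compressed: with gzip/deflate/br the
    # Content-Length header describes the compressed stream, not the decoded bytes on disk.
    if total_size and not is_compressed and bytes_downloaded != total_size:
        print(f"Downloaded size ({bytes_downloaded}) does not match expected size ({total_size})")
        return False
    return True
```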
@@ -9458,7 +9487,7 @@ class OTCS:
        expand_workspace_info: bool = True,
        expand_templates: bool = True,
    ) -> dict | None:
-        """Get all workspace types configured in
+        """Get all workspace types configured in OTCS.

        This REST API is very limited. It does not return all workspace type properties
        you can see in OTCS business admin page.
@@ -9639,11 +9668,11 @@ class OTCS:

    @tracer.start_as_current_span(attributes=OTEL_TRACING_ATTRIBUTES, name="get_workspace_type_name")
    def get_workspace_type_name(self, type_id: int) -> str | None:
-        """Get the name of a workspace type based on the provided type ID.
+        """Get the name of a workspace type based on the provided workspace type ID.

-        The name is taken from a OTCS
+        The name is taken from a OTCS object variable self._workspace_type_lookup if recorded there.
        If not yet derived it is determined via the REST API and then stored
-        in
+        in self._workspace_type_lookup (as a lookup cache).

        Args:
            type_id (int):
@@ -9654,6 +9683,10 @@ class OTCS:
                The name of the workspace type. Or None if the type ID
                was ot found.

+        Side effects:
+            Caches the workspace type name in self._workspace_type_lookup
+            for future calls.
+
        """

        workspace_type = self._workspace_type_lookup.get(type_id)
@@ -9663,6 +9696,7 @@ class OTCS:
            workspace_type = self.get_workspace_type(type_id=type_id)
            type_name = workspace_type.get("workspace_type")
            if type_name:
+                # Update the lookup cache:
                self._workspace_type_lookup[type_id] = {"location": None, "name": type_name}
        return type_name

@@ -9670,6 +9704,43 @@ class OTCS:

    # end method definition

+    @tracer.start_as_current_span(attributes=OTEL_TRACING_ATTRIBUTES, name="get_workspace_type_by_name")
+    def get_workspace_type_names(self, lower_case: bool = False, renew: bool = False) -> list[str] | None:
+        """Get a list of all workspace type names.
+
+        Args:
+            lower_case (bool):
+                Whether to return the names in lower case.
+            renew (bool):
+                Whether to renew the cached workspace type names.
+
+        Returns:
+            list[str] | None:
+                List of workspace type names or None if the request fails.
+
+        Side effects:
+            Caches the workspace type names in self._workspace_type_names
+            for future calls.
+
+        """
+
+        if self._workspace_type_names and not renew:
+            return self._workspace_type_names
+
+        workspace_types = self.get_workspace_types_iterator()
+        workspace_type_names = [
+            self.get_result_value(response=workspace_type, key="wksp_type_name") for workspace_type in workspace_types
+        ]
+        if lower_case:
+            workspace_type_names = [name.lower() for name in workspace_type_names]
+
+        # Update the cache:
+        self._workspace_type_names = workspace_type_names
+
+        return workspace_type_names
+
+    # end method definition
+
    @tracer.start_as_current_span(attributes=OTEL_TRACING_ATTRIBUTES, name="get_workspace_templates")
    def get_workspace_templates(
        self, type_id: int | None = None, type_name: str | None = None
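
A short usage sketch for the new get_workspace_type_names() method; the method name and parameters are taken from the diff, while the import path matches the diff's own imports and the constructor arguments are assumptions for illustration:

```python
from pyxecm.otcs import OTCS

# Constructor arguments below are assumptions for illustration; adapt to your environment:
otcs = OTCS(
    protocol="https",
    hostname="otcs.example.com",
    port=443,
    username="admin",
    password="********",
)

# First call populates the cache (self._workspace_type_names) via the REST API:
names = otcs.get_workspace_type_names(lower_case=True)

# Later calls are served from the cache; renew=True forces a refresh from OTCS:
names = otcs.get_workspace_type_names(lower_case=True, renew=True)
```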
@@ -13998,6 +14069,7 @@ class OTCS:
        apply_action: str = "add_upgrade",
        add_version: bool = False,
        clear_existing_categories: bool = False,
+        attribute_values: dict | None = None,
    ) -> bool:
        """Assign a category to a Content Server node.

@@ -14005,6 +14077,7 @@ class OTCS:
        (if node_id is a container / folder / workspace).
        If the category is already assigned to the node this method will
        throw an error.
+        Optionally set category attributes values.

        Args:
            node_id (int):
@@ -14025,6 +14098,9 @@ class OTCS:
                True, if a document version should be added for the category change (default = False).
            clear_existing_categories (bool, optional):
                Defines, whether or not existing (other) categories should be removed (default = False).
+            attribute_values (dict, optional):
+                Dictionary containing "attribute_id":"value" pairs, to be populated during the category assignment.
+                (In case of the category attributes being set as "Required" in xECM, providing corresponding values for those attributes will resolve inability to assign the category).

        Returns:
            bool:
@@ -14050,6 +14126,9 @@ class OTCS:
            "category_id": category_id,
        }

+        if attribute_values is not None:
+            category_post_data.update(attribute_values)
+
        self.logger.debug(
            "Assign category with ID -> %d to item with ID -> %d; calling -> %s",
            category_id,
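
The new attribute_values parameter is merged directly into the POST body, which lets callers satisfy attributes marked as "Required" at assignment time. A small sketch of what that merge produces; the IDs and the key format are made up for illustration and are not prescribed by the diff:

```python
# IDs and the "<category_id>_<attribute_id>" key format are illustrative assumptions:
category_id = 654321
attribute_values = {
    "654321_2": "Contract",
    "654321_3": "2024-12-31",
}

category_post_data = {
    "category_id": category_id,
}
# This mirrors the added lines above: the caller-supplied pairs are merged into the POST body:
if attribute_values is not None:
    category_post_data.update(attribute_values)

print(category_post_data)
# {'category_id': 654321, '654321_2': 'Contract', '654321_3': '2024-12-31'}
```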
@@ -17286,6 +17365,7 @@ class OTCS:

        """

+        # If no sub-process ID is given, use the process ID:
        if subprocess_id is None:
            subprocess_id = process_id

@@ -17781,8 +17861,8 @@ class OTCS:
        for subnode in subnodes:
            subnode_id = self.get_result_value(response=subnode, key="id")
            subnode_name = self.get_result_value(response=subnode, key="name")
-
-            self.logger.info("Traversing %s node -> '%s' (%s)",
+            subnode_type_name = self.get_result_value(response=subnode, key="type_name")
+            self.logger.info("Traversing %s node -> '%s' (%s)", subnode_type_name, subnode_name, subnode_id)
            # Recursive call for current subnode:
            result = self.traverse_node(
                node=subnode,
src/pyxecm_api/common/functions.py
@@ -2,15 +2,12 @@

 import logging
 import os
-import time
-from datetime import UTC, datetime
 from typing import Annotated

 from fastapi import Depends
 from pyxecm.otca import OTCA
 from pyxecm.otcs import OTCS
 from pyxecm_customizer import K8s, PayloadList, Settings
-from pyxecm_customizer.knowledge_graph import KnowledgeGraph

 from pyxecm_api.auth.functions import get_otcsticket
 from pyxecm_api.settings import CustomizerAPISettings, api_settings
@@ -22,100 +19,6 @@ LOGS_LOCK = {}
 # Initialize the globel Payloadlist object
 PAYLOAD_LIST = PayloadList(logger=logger)

-# This object is initialized in the build_graph() function below.
-KNOWLEDGEGRAPH_OBJECT: KnowledgeGraph = None
-
-# The following ontology is fed into the knowledge graph tool description.
-# This is currently hard-coded. Ideally this should be derived from OTCM
-# or provided via a payload file:
-
-KNOWLEDGEGRAPH_ONTOLOGY = {
-    ("Vendor", "Material", "child"): ["offers", "supplies", "provides"],
-    ("Vendor", "Purchase Order", "child"): ["supplies", "provides"],
-    ("Vendor", "Purchase Contract", "child"): ["signs", "owns"],
-    ("Material", "Vendor", "parent"): ["is supplied by"],
-    ("Purchase Order", "Material", "child"): ["includes", "is part of"],
-    ("Customer", "Sales Order", "child"): ["has ordered"],
-    ("Customer", "Sales Contract", "child"): ["signs", "owns"],
-    ("Sales Order", "Customer", "parent"): ["belongs to", "is initiated by"],
-    ("Sales Order", "Material", "child"): ["includes", "consists of"],
-    ("Sales Order", "Delivery", "child"): ["triggers", "is followed by"],
-    ("Sales Order", "Production Order", "child"): ["triggers", "is followed by"],
-    ("Sales Contract", "Material", "child"): ["includes", "consists of"],
-    ("Production Order", "Material", "child"): ["includes", "consists of"],
-    ("Production Order", "Delivery", "child"): ["triggers", "is followed by"],
-    ("Production Order", "Goods Movement", "child"): ["triggers", "is followed by"],
-    ("Delivery", "Goods Movement", "child"): ["triggers", "is followed by"],
-    ("Delivery", "Material", "child"): ["triggers", "is followed by"],
-}
-
-
-### Functions
-
-
-def get_ontology() -> dict:
-    """Get the ontology for the knowledge graph.
-
-    Returns:
-        dict: The ontology as a dictionary.
-
-    """
-
-    return KNOWLEDGEGRAPH_ONTOLOGY
-
-
-def get_knowledgegraph_object() -> KnowledgeGraph:
-    """Get the Knowledge Graph object."""
-
-    global KNOWLEDGEGRAPH_OBJECT  # noqa: PLW0603
-
-    if KNOWLEDGEGRAPH_OBJECT is None:
-        KNOWLEDGEGRAPH_OBJECT = KnowledgeGraph(otcs_object=get_otcs_object(), ontology=KNOWLEDGEGRAPH_ONTOLOGY)
-
-    return KNOWLEDGEGRAPH_OBJECT
-
-
-def build_graph() -> None:
-    """Build the knowledge Graph. And keep it updated every hour."""
-
-    def build() -> None:
-        """Build the knowledge graph once."""
-
-        logger.info("Starting knowledge graph build...")
-        start_time = datetime.now(UTC)
-        result = get_knowledgegraph_object().build_graph(
-            workspace_type_exclusions=None,
-            workspace_type_inclusions=[
-                "Vendor",
-                "Purchase Contract",
-                "Purchase Order",
-                "Material",
-                "Customer",
-                "Sales Order",
-                "Sales Contract",
-                "Delivery",
-                "Goods Movement",
-            ],
-            workers=20,  # for multi-threaded traversal
-            filter_at_traversal=True,  # also filter for workspace types if following relationships
-            relationship_types=["child"],  # only go from parent to child
-            strategy="BFS",  # Breadth-First-Search
-            metadata=True,  # don't include workspace metadata
-        )
-        end_time = datetime.now(UTC)
-        logger.info(
-            "Knowledge graph completed in %s. Processed %d workspace nodes and traversed %d workspace relationships.",
-            str(end_time - start_time),
-            result["processed"],
-            result["traversed"],
-        )
-
-    # Endless loop to build knowledge graph and update it every hour:
-    while True:
-        build()
-        logger.info("Waiting for 1 hour before rebuilding the knowledge graph...")
-        time.sleep(3600)
-

 def get_k8s_object() -> K8s:
     """Get an instance of a K8s object.