pyxecm 3.0.1__py3-none-any.whl → 3.1.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of pyxecm might be problematic.

Files changed (52)
  1. pyxecm/avts.py +4 -4
  2. pyxecm/coreshare.py +14 -15
  3. pyxecm/helper/data.py +2 -1
  4. pyxecm/helper/web.py +11 -11
  5. pyxecm/helper/xml.py +41 -10
  6. pyxecm/otac.py +1 -1
  7. pyxecm/otawp.py +19 -19
  8. pyxecm/otca.py +878 -70
  9. pyxecm/otcs.py +1716 -349
  10. pyxecm/otds.py +332 -153
  11. pyxecm/otkd.py +4 -4
  12. pyxecm/otmm.py +1 -1
  13. pyxecm/otpd.py +246 -30
  14. {pyxecm-3.0.1.dist-info → pyxecm-3.1.1.dist-info}/METADATA +2 -1
  15. pyxecm-3.1.1.dist-info/RECORD +82 -0
  16. pyxecm_api/app.py +45 -35
  17. pyxecm_api/auth/functions.py +2 -2
  18. pyxecm_api/auth/router.py +2 -3
  19. pyxecm_api/common/functions.py +67 -12
  20. pyxecm_api/settings.py +0 -8
  21. pyxecm_api/terminal/router.py +1 -1
  22. pyxecm_api/v1_csai/router.py +33 -18
  23. pyxecm_customizer/browser_automation.py +161 -79
  24. pyxecm_customizer/customizer.py +43 -25
  25. pyxecm_customizer/guidewire.py +422 -8
  26. pyxecm_customizer/k8s.py +23 -27
  27. pyxecm_customizer/knowledge_graph.py +498 -20
  28. pyxecm_customizer/m365.py +45 -44
  29. pyxecm_customizer/payload.py +1723 -1188
  30. pyxecm_customizer/payload_list.py +3 -0
  31. pyxecm_customizer/salesforce.py +122 -79
  32. pyxecm_customizer/servicenow.py +27 -7
  33. pyxecm_customizer/settings.py +3 -1
  34. pyxecm_customizer/successfactors.py +2 -2
  35. pyxecm_customizer/translate.py +1 -1
  36. pyxecm-3.0.1.dist-info/RECORD +0 -96
  37. pyxecm_api/agents/__init__.py +0 -7
  38. pyxecm_api/agents/app.py +0 -13
  39. pyxecm_api/agents/functions.py +0 -119
  40. pyxecm_api/agents/models.py +0 -10
  41. pyxecm_api/agents/otcm_knowledgegraph/__init__.py +0 -1
  42. pyxecm_api/agents/otcm_knowledgegraph/functions.py +0 -85
  43. pyxecm_api/agents/otcm_knowledgegraph/models.py +0 -61
  44. pyxecm_api/agents/otcm_knowledgegraph/router.py +0 -74
  45. pyxecm_api/agents/otcm_user_agent/__init__.py +0 -1
  46. pyxecm_api/agents/otcm_user_agent/models.py +0 -20
  47. pyxecm_api/agents/otcm_user_agent/router.py +0 -65
  48. pyxecm_api/agents/otcm_workspace_agent/__init__.py +0 -1
  49. pyxecm_api/agents/otcm_workspace_agent/models.py +0 -40
  50. pyxecm_api/agents/otcm_workspace_agent/router.py +0 -200
  51. {pyxecm-3.0.1.dist-info → pyxecm-3.1.1.dist-info}/WHEEL +0 -0
  52. {pyxecm-3.0.1.dist-info → pyxecm-3.1.1.dist-info}/entry_points.txt +0 -0
@@ -200,7 +200,7 @@ class Customizer:
         # of unsetting 2 checkboxes on that config page - we reset these checkboxes
         # with the settings file "O365Settings.xml"):
         file_path = os.path.join(tempfile.gettempdir(), "ot.xecm.teams.zip")
-        response = self.otcs_frontend_object.download_config_file(
+        _ = self.otcs_frontend_object.download_config_file(
             otcs_url_suffix="/cs/cs?func=officegroups.DownloadTeamsPackage",
             file_path=file_path,
         )
@@ -257,7 +257,7 @@ class Customizer:
         # by its wrong name in the customizer automation. This can
         # happen if the app is installed manually or the environment
         # variable is set to a wrong name.
-        app_catalog_name = m365_object.get_result_value(response, "displayName")
+        app_catalog_name = m365_object.get_result_value(response=response, key="displayName")
         if app_catalog_name != self.settings.m365.teams_app_name:
             self.logger.warning(
                 "The Extended ECM app name -> '%s' in the M365 Teams catalog does not match the defined app name -> '%s'!",
@@ -624,14 +624,14 @@ class Customizer:
         )
         if not otcs_frontend_scale:
             self.logger.error(
-                "Cannot find Kubernetes Stateful Set -> '%s' for OTCS Frontends!",
+                "Cannot find Kubernetes stateful set -> '%s' for OTCS Frontends!",
                 self.settings.k8s.sts_otcs_frontend,
             )
             sys.exit()

         self.settings.k8s.sts_otcs_frontend_replicas = otcs_frontend_scale.spec.replicas
         self.logger.info(
-            "Stateful Set -> '%s' has -> %s replicas",
+            "Stateful set -> '%s' has -> %s replicas",
             self.settings.k8s.sts_otcs_frontend,
             self.settings.k8s.sts_otcs_frontend_replicas,
         )
@@ -642,16 +642,17 @@ class Customizer:
         )
         if not otcs_backend_scale:
             self.logger.error(
-                "Cannot find Kubernetes Stateful Set -> '%s' for OTCS Backends!",
+                "Cannot find Kubernetes stateful set -> '%s' for OTCS Backends!",
                 self.settings.k8s.sts_otcs_admin,
             )
             sys.exit()

         self.settings.k8s.sts_otcs_admin_replicas = otcs_backend_scale.spec.replicas
         self.logger.info(
-            "Stateful Set -> '%s' has -> %s replicas",
+            "Stateful set -> '%s' has -> %s replica%s",
             self.settings.k8s.sts_otcs_admin,
             self.settings.k8s.sts_otcs_admin_replicas,
+            "s" if self.settings.k8s.sts_otcs_admin_replicas > 1 else "",
         )

         return k8s_object
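Reviewer note: the replica%s hunk above pluralizes the log message at render time while keeping lazy %-style formatting. A standalone sketch of the pattern (logger name and values are illustrative):

    import logging

    logging.basicConfig(level=logging.INFO)
    logger = logging.getLogger("customizer")

    replicas = 3  # e.g. read from a stateful set's spec.replicas
    # The extra argument appends "s" only for counts above one, mirroring
    # the condition introduced in the diff:
    logger.info(
        "Stateful set -> '%s' has -> %s replica%s",
        "otcs-admin",
        replicas,
        "s" if replicas > 1 else "",
    )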
@@ -782,7 +783,7 @@ class Customizer:
         if not response:
             self.logger.error("Failed to enable OTAC certificate for OTCS!")
         else:
-            self.logger.info("Successfully enabled OTAC certificate for OTCS!")
+            self.logger.info("Successfully enabled OTAC certificate for OTCS.")

         # is there a known server configured for Archive Center (to sync content with)
         if otac_object and self.settings.otac.known_server != "":
@@ -918,7 +919,9 @@ class Customizer:
         otcs_resource_id = otcs_resource["resourceID"]
         otcs_object.set_resource_id(resource_id=otcs_resource_id)
         self.logger.info(
-            "OTCS has resource ID -> '%s' for resource name -> '%s'", otcs_resource_id, self.settings.otcs.resource_name
+            "OTCS has resource -> '%s' (%s) in OTDS.",
+            self.settings.otcs.resource_name,
+            otcs_resource_id,
         )

         if "OTCS_RESSOURCE_ID" not in self.settings.placeholder_values:
@@ -1257,7 +1260,7 @@ class Customizer:
         )
         while otcs_partition is None:
             self.logger.warning(
-                "OTDS user partition for Content Server with name -> '%s' does not exist yet. Waiting...",
+                "OTDS user partition -> '%s' for Content Server does not exist yet. Waiting...",
                 self.settings.otcs.partition,
             )

@@ -1350,24 +1353,27 @@ class Customizer:
         )
         if not otcs_da_scale:
             self.logger.warning(
-                "Cannot find Kubernetes Stateful Set -> '%s' for OTCS DA!",
+                "Cannot find Kubernetes stateful set -> '%s' for OTCS DA!",
                 self.settings.k8s.sts_otcs_da,
             )
             self.settings.k8s.sts_otcs_da_replicas = 0
         else:
             self.settings.k8s.sts_otcs_da_replicas = otcs_da_scale.spec.replicas

+        if not self.settings.k8s.sts_otcs_da_replicas:
+            self.settings.k8s.sts_otcs_da_replicas = 0
+
         # Restart all da:
         for x in range(self.settings.k8s.sts_otcs_da_replicas):
             pod_name = self.settings.k8s.sts_otcs_da + "-" + str(x)

-            self.logger.info("Deactivate Liveness probe for pod -> '%s'", pod_name)
+            self.logger.info("Deactivate liveness probe for pod -> '%s'...", pod_name)
             self.k8s_object.exec_pod_command(
                 pod_name,
                 ["/bin/sh", "-c", "touch /tmp/keepalive"],
                 container="otcs-da-container",
             )
-            self.logger.info("Restarting OTCS in pod -> '%s'", pod_name)
+            self.logger.info("Restarting OTCS in pod -> '%s'...", pod_name)
             self.k8s_object.exec_pod_command(
                 pod_name,
                 ["/bin/sh", "-c", "/opt/opentext/cs/stop_csserver"],
@@ -1385,24 +1391,27 @@ class Customizer:
         )
         if not otcs_frontend_scale:
             self.logger.error(
-                "Cannot find Kubernetes Stateful Set -> '%s' for OTCS Frontends!",
+                "Cannot find Kubernetes stateful set -> '%s' for OTCS frontends!",
                 self.settings.k8s.sts_otcs_frontend,
             )
             self.settings.k8s.sts_otcs_frontend_replicas = 0
         else:
             self.settings.k8s.sts_otcs_frontend_replicas = otcs_frontend_scale.spec.replicas

+        if not self.settings.k8s.sts_otcs_frontend_replicas:
+            self.settings.k8s.sts_otcs_frontend_replicas = 0
+
         # Restart all frontends:
         for x in range(self.settings.k8s.sts_otcs_frontend_replicas):
             pod_name = self.settings.k8s.sts_otcs_frontend + "-" + str(x)

-            self.logger.info("Deactivate Liveness probe for pod -> '%s'", pod_name)
+            self.logger.info("Deactivate liveness probe for pod -> '%s'...", pod_name)
             self.k8s_object.exec_pod_command(
                 pod_name,
                 ["/bin/sh", "-c", "touch /tmp/keepalive"],
                 container="otcs-frontend-container",
             )
-            self.logger.info("Restarting OTCS in pod -> '%s'", pod_name)
+            self.logger.info("Restarting OTCS in pod -> '%s'...", pod_name)
             self.k8s_object.exec_pod_command(
                 pod_name,
                 ["/bin/sh", "-c", "/opt/opentext/cs/stop_csserver"],
@@ -1418,13 +1427,13 @@ class Customizer:
         for x in range(self.settings.k8s.sts_otcs_admin_replicas):
             pod_name = self.settings.k8s.sts_otcs_admin + "-" + str(x)

-            self.logger.info("Deactivate Liveness probe for pod -> '%s'", pod_name)
+            self.logger.info("Deactivate liveness probe for pod -> '%s'...", pod_name)
             self.k8s_object.exec_pod_command(
                 pod_name,
                 ["/bin/sh", "-c", "touch /tmp/keepalive"],
                 container="otcs-admin-container",
             )
-            self.logger.info("Restarting OTCS in pod -> '%s'", pod_name)
+            self.logger.info("Restarting OTCS in pod -> '%s'...", pod_name)
             self.k8s_object.exec_pod_command(
                 pod_name,
                 ["/bin/sh", "-c", "/opt/opentext/cs/stop_csserver"],
@@ -1438,7 +1447,7 @@ class Customizer:

         # Reauthenticate at frontend:
         self.logger.info(
-            "Re-Authenticating to OTCS frontend after restart of frontend pods...",
+            "Re-authenticating to OTCS frontend after restart of frontend pods...",
         )
         otcs_cookie = frontend.authenticate(revalidate=True)
         while otcs_cookie is None:
@@ -1449,7 +1458,7 @@ class Customizer:

         # Reauthenticate at backend:
         self.logger.info(
-            "Re-Authenticating to OTCS backend after restart of backend pods...",
+            "Re-authenticating to OTCS backend after restart of backend pods...",
         )
         otcs_cookie = backend.authenticate(revalidate=True)
         while otcs_cookie is None:
@@ -1458,11 +1467,11 @@ class Customizer:
             otcs_cookie = backend.authenticate(revalidate=True)
         self.logger.info("OTCS backend is ready again.")

-        # Reactivate Liveness probes in all pods:
+        # Reactivate Kubernetes liveness probes in all pods:
         for x in range(self.settings.k8s.sts_otcs_frontend_replicas):
             pod_name = self.settings.k8s.sts_otcs_frontend + "-" + str(x)

-            self.logger.info("Reactivate Liveness probe for pod -> '%s'", pod_name)
+            self.logger.info("Reactivate liveness probe for pod -> '%s'...", pod_name)
             self.k8s_object.exec_pod_command(
                 pod_name,
                 ["/bin/sh", "-c", "rm /tmp/keepalive"],
@@ -1472,7 +1481,7 @@ class Customizer:
         for x in range(self.settings.k8s.sts_otcs_admin_replicas):
             pod_name = self.settings.k8s.sts_otcs_admin + "-" + str(x)

-            self.logger.info("Reactivate Liveness probe for pod -> '%s'", pod_name)
+            self.logger.info("Reactivate liveness probe for pod -> '%s'...", pod_name)
             self.k8s_object.exec_pod_command(
                 pod_name,
                 ["/bin/sh", "-c", "rm /tmp/keepalive"],
@@ -1522,7 +1531,7 @@ class Customizer:
    # end method definition

    def restart_otawp_pod(self) -> None:
-        """Delete the AppWorks Platform Pod to make Kubernetes restart it."""
+        """Delete the AppWorks Platform pod to make Kubernetes restart it."""

        self.k8s_object.delete_pod(self.settings.k8s.sts_otawp + "-0")

@@ -1899,6 +1908,15 @@ class Customizer:

         # Upload payload file for later review to Enterprise Workspace
         if self.settings.otcs.upload_config_files:
+            # Wait until OTCS is ready to accept uploads. Parallel running
+            # payload processing might be in the process of restarting OTCS:
+            while not self.otcs_backend_object.is_ready():
+                self.logger.info(
+                    "OTCS is not ready. Cannot upload payload file -> '%s' to OTCS. Waiting 30 seconds and retry...",
+                    os.path.basename(cust_payload),
+                )
+                time.sleep(30)
+
             self.log_header("Upload Payload file to OpenText Content Management")
             response = self.otcs_backend_object.get_node_from_nickname(
                 nickname=self.settings.cust_target_folder_nickname,
@@ -1908,7 +1926,7 @@ class Customizer:
1908
1926
  key="id",
1909
1927
  )
1910
1928
  if not target_folder_id:
1911
- target_folder_id = 2000 # use Enterprise Workspace as fallback
1929
+ target_folder_id = 2004 # use Enterprise Workspace as fallback
1912
1930
  # Write YAML file with upadated payload (including IDs, etc.).
1913
1931
  # We need to write to a temporary location as initial location is read-only:
1914
1932
  payload_file = os.path.basename(cust_payload)
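Reviewer note: the new readiness wait in the upload hunk polls is_ready() every 30 seconds with no upper bound. A generalized sketch of the same polling loop; the timeout parameter is an added safety net and not pyxecm behavior:

    import time
    import logging

    logger = logging.getLogger("customizer")

    def wait_until_ready(is_ready, interval: float = 30.0, timeout: float = 1800.0) -> None:
        # Poll until the readiness callable returns True, or give up after
        # `timeout` seconds (the code in the diff waits indefinitely):
        deadline = time.monotonic() + timeout
        while not is_ready():
            if time.monotonic() >= deadline:
                raise TimeoutError("OTCS did not become ready in time")
            logger.info("OTCS is not ready. Waiting %s seconds and retry...", interval)
            time.sleep(interval)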
@@ -2017,7 +2035,7 @@ class Customizer:
                 key="id",
             )
             if not target_folder_id:
-                target_folder_id = 2000  # use Enterprise Workspace as fallback
+                target_folder_id = 2004  # use Enterprise Workspace as fallback
             # Check if the log file has been uploaded before.
             # This can happen if we re-run the python container:
             # In this case we add a version to the existing document: