pyxecm 2.0.2__py3-none-any.whl → 2.0.4__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.

@@ -67,9 +67,9 @@ from dateutil.parser import parse
67
67
  from lark import exceptions as lark_exceptions # used by hcl2
68
68
 
69
69
  # OpenText specific modules:
70
- from pyxecm import AVTS, OTAC, OTAWP, OTCS, OTDS, OTIV, OTMM, OTPD, CoreShare
70
+ from pyxecm import AVTS, OTAC, OTAWP, OTCA, OTCS, OTDS, OTIV, OTKD, OTMM, OTPD, CoreShare
71
71
  from pyxecm.customizer.browser_automation import BrowserAutomation
72
- from pyxecm.customizer.exceptions import StopOnError
72
+ from pyxecm.customizer.exceptions import PayloadImportError, StopOnError
73
73
  from pyxecm.customizer.k8s import K8s
74
74
  from pyxecm.customizer.m365 import M365
75
75
  from pyxecm.customizer.pht import PHT
@@ -160,23 +160,18 @@ def load_payload(
160
160
  except (
161
161
  lark_exceptions.UnexpectedToken,
162
162
  lark_exceptions.UnexpectedCharacters,
163
- ):
164
- logger.error(
165
- "Syntax error while reading Terraform payload file -> '%s'!",
166
- payload_source,
167
- )
168
- payload = {}
163
+ ) as exc:
164
+ exception = f"Syntax error while reading Terraform payload file -> '{payload_source}'! --> {traceback.format_exception_only(exc)}"
165
+ raise PayloadImportError(exception) from exc
166
+
169
167
  except (
170
168
  FileNotFoundError,
171
169
  ImportError,
172
170
  ValueError,
173
171
  SyntaxError,
174
- ):
175
- logger.error(
176
- "Error while reading Terraform payload file -> '%s'",
177
- payload_source,
178
- )
179
- payload = {}
172
+ ) as exc:
173
+ exception = f"Error while reading Terraform payload file -> '{payload_source}'! --> {traceback.format_exception_only(exc)}"
174
+ raise PayloadImportError(exception) from exc
180
175
 
181
176
  elif payload_source.endswith(".yml.gz.b64"):
182
177
  logger.info("Open payload from base64-gz-YAML file -> '%s'", payload_source)
@@ -239,9 +234,11 @@ class Payload:
239
234
  _successfactors: SuccessFactors | None
240
235
  _salesforce: Salesforce | None
241
236
  _servicenow: ServiceNow | None
242
- _browser_automation: BrowserAutomation | None
243
237
  _custom_settings_dir = ""
244
238
  _otawp: OTAWP | None
239
+ _otca: OTCA | None
240
+ _otkd: OTKD | None
241
+ _avts: AVTS | None
245
242
 
246
243
  # _payload_source (string): This is either path + filename of the yaml payload
247
244
  # or an path + filename of the Terraform HCL payload
@@ -411,6 +408,7 @@ class Payload:
411
408
  - title (str, optional, default = "")
412
409
  - email (str, optional, default = "")
413
410
  - base_group (str, optional, default = "DefaultGroup")
411
+ - user_type (str, optional, default = "User") - possible values are "User" and "ServiceUser"
414
412
  - company (str, optional, default = "Innovate") - currently used for Salesforce users only
415
413
  - privileges (list, optional, default = ["Login", "Public Access"])
416
414
  - groups (list, optional)
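
Based on the option list in the docstring hunk above, a user entry might look like the following Python dict once the payload is loaded. All concrete values are illustrative; note that the lookup hunks later in this diff read the key as user.get("type", "User"), so whether the loader maps "user_type" onto "type" is not visible here.

    # Hypothetical user entry illustrating the new "user_type" option:
    service_account = {
        "name": "svc-integration",          # login name; key used by the lookup code below
        "user_type": "ServiceUser",         # new option; "User" is the default
        "base_group": "DefaultGroup",       # docstring default
        "privileges": ["Login", "Public Access"],
        "groups": ["Administration"],       # optional group memberships
    }
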
@@ -567,7 +565,12 @@ class Payload:
567
565
  * role (name)
568
566
  * users (list, optional, default = [])
569
567
  * groups (list, optional, default = [])
570
- - relationships (list, optional, default = []) - list of strings with logical workspace IDs
568
+ - relationships (list, optional, default = []) - list of related workspaces.
569
+ The elements of the list can be:
570
+ * string or integer with logical workspace ID
571
+ * string with nickname of the related workspace
572
+ * dictionaries with keys "type" and "name" of the related workspace
573
+ * list of strings with the top-down path in the Enterprise volume
571
574
  """
572
575
  _workspaces = []
573
576
 
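
The new docstring above allows four different element shapes in a workspace's relationships list. A hypothetical payload fragment, expressed as the Python list the loader would produce (all concrete values are made up for illustration):

    relationships = [
        42,                                          # logical workspace ID defined in this payload (int or str)
        "ws_customer_acme",                          # nickname of an existing related workspace
        {"type": "Customer", "name": "ACME Corp"},   # workspace type and name
        ["Sales", "Accounts", "ACME Corp"],          # top-down path in the Enterprise volume
    ]
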
@@ -636,6 +639,8 @@ class Payload:
636
639
  - name (str, mandatory)
637
640
  - nodeid (int, mandatory if no volume is specified) - this is the technical OTCS ID - typically only known for some preinstalled items
638
641
  - volume (int, mandatory if no nodeid is specified)
642
+ - path (list, optional) - can be combined with volume - to specify a top-down path in the volume to the item to be renamed
643
+ - nickname (str, optional) - the nickname of the node to rename - alternative to to volume/path or nodeid
639
644
  """
640
645
  _renamings = []
641
646
 
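
The renamings section now supports three ways of addressing the node to rename, matching the processing logic further down in this diff. A sketch with illustrative values only:

    renamings = [
        {"name": "New Name A", "nodeid": 123456},                          # by technical OTCS node ID
        {"name": "New Name B", "volume": 141, "path": ["Folder", "Sub"]},  # by volume plus top-down path (141 assumed to be the Enterprise volume type)
        {"name": "New Name C", "nickname": "nn_my_item"},                  # by nickname
    ]
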
@@ -747,7 +752,7 @@ class Payload:
747
752
  - base_url (str, mandatory)
748
753
  - user_name (str, optional)
749
754
  - password (str, optional)
750
- - wait_time (float, optional, default = 15.0) - wait time in seconds
755
+ - wait_time (float, optional, default = 30.0) - wait time in seconds
751
756
  - wait_until (str, optional) - the page load / navigation `wait until` strategy. Possible values: `load`, `networkidle`, `domcontentloaded`
752
757
  - debug (bool, optional, default = False) - if True take screenshots and save to container
753
758
  - automations (list, mandatory)
@@ -1058,7 +1063,7 @@ class Payload:
1058
1063
  - sort (list, optional, default = []) - list of fields to sort the data frame by
1059
1064
  - operations (list, optional, default = ["create"])
1060
1065
  - update_operations (list, optional, default = ["name", "description", "categories", "nickname", "version"]) - possible values:
1061
- "name", "description", "categories", "nickname", "version"
1066
+ "name", "description", "categories", "nickname", "version", "purge"
1062
1067
  - name (str, mandatory) - can include placeholder surrounded by {...}
1063
1068
  - name_alt (str, optional, default = None) - can include placeholder surrounded by {...}
1064
1069
  - name_regex (str, optional, default = r"") - regex replacement for document names. The pattern and replacement are separated by pipe character |
@@ -1137,7 +1142,7 @@ class Payload:
1137
1142
  - unique (list, optional, default = []) - list of column names which values should be unique -> deduplication
1138
1143
  - sort (list, optional, default = []) - list of fields to sort the data frame by
1139
1144
  - operations (list, optional, default = ["create"])
1140
- - update_operations (list, optional, default = ["name", "description", "categories", "nickname", "version"]) - possible values:
1145
+ - update_operations (list, optional, default = ["name", "description", "categories", "nickname"]) - possible values:
1141
1146
  "name", "description", "categories", "nickname", "url"
1142
1147
  - name (str, mandatory) - can include placeholder surrounded by {...}
1143
1148
  - name_alt (str, optional, default = None) - can include placeholder surrounded by {...}
@@ -1189,6 +1194,8 @@ class Payload:
1189
1194
 
1190
1195
  _bulk_classifications = []
1191
1196
 
1197
+ _nifi_flows = []
1198
+
1192
1199
  _placeholder_values = {}
1193
1200
 
1194
1201
  # Link to the method in customizer.py to restart the Content Server pods.
@@ -1224,7 +1231,6 @@ class Payload:
1224
1231
  otpd_object: OTPD | None,
1225
1232
  m365_object: M365 | None,
1226
1233
  core_share_object: CoreShare | None,
1227
- browser_automation_object: BrowserAutomation | None,
1228
1234
  placeholder_values: dict,
1229
1235
  log_header_callback: Callable,
1230
1236
  browser_headless: bool = True,
@@ -1232,6 +1238,8 @@ class Payload:
1232
1238
  aviator_enabled: bool = False,
1233
1239
  upload_status_files: bool = True,
1234
1240
  otawp_object: OTAWP | None = None,
1241
+ otca_object: OTCA | None = None,
1242
+ otkd_object: OTKD | None = None,
1235
1243
  avts_object: AVTS | None = None,
1236
1244
  logger: logging.Logger = default_logger,
1237
1245
  ) -> None:
@@ -1266,8 +1274,6 @@ class Payload:
1266
1274
  The M365 object to talk to Microsoft Graph API.
1267
1275
  core_share_object (CoreShare | None):
1268
1276
  The Core Share object.
1269
- browser_automation_object (BrowserAutomation):
1270
- The BrowserAutomation object to automate things which don't have a REST API.
1271
1277
  placeholder_values (dict):
1272
1278
  A dictionary of placeholder values to be replaced in admin settings.
1273
1279
  log_header_callback:
@@ -1284,6 +1290,10 @@ class Payload:
1284
1290
  of the admin user in Content Server.
1285
1291
  otawp_object (OTAWP):
1286
1292
  An optional AppWorks Platform object.
1293
+ otca_object (OTCA):
1294
+ An optional Content Aviator object.
1295
+ otkd_object (OTKD):
1296
+ An optional Knowledge Discovery object.
1287
1297
  avts_object (AVTS):
1288
1298
  An optional Aviator Search object.
1289
1299
  logger (logging.Logger, optional):
@@ -1316,8 +1326,9 @@ class Payload:
1316
1326
  self._otcs_source = None
1317
1327
  self._pht = None # the OpenText prodcut hierarchy
1318
1328
  self._nhc = None # National Hurricane Center
1319
- self._avts = avts_object
1320
- self._browser_automation = browser_automation_object
1329
+ self._otca = otca_object # Content Aviator
1330
+ self._otkd = otkd_object # Knowledge Discovery
1331
+ self._avts = avts_object # Aviator Search
1321
1332
  self._browser_headless = browser_headless
1322
1333
  self._custom_settings_dir = custom_settings_dir
1323
1334
  self._placeholder_values = placeholder_values
@@ -1504,6 +1515,7 @@ class Payload:
1504
1515
  self._avts_repositories = self.get_payload_section("avtsRepositories")
1505
1516
  self._avts_questions = self.get_payload_section("avtsQuestions")
1506
1517
  self._embeddings = self.get_payload_section("embeddings")
1518
+ self._nifi_flows = self.get_payload_section("nifi")
1507
1519
 
1508
1520
  return self._payload
1509
1521
 
@@ -2153,7 +2165,7 @@ class Payload:
2153
2165
  case "customer":
2154
2166
  customer = self._otawp.get_customer_by_name(name=entity.get("name"))
2155
2167
  if customer:
2156
- customer_id = self._otawp.get_entity_value(entity=case_type, key="id")
2168
+ customer_id = self._otawp.get_entity_value(entity=customer, key="id")
2157
2169
  self.logger.info(
2158
2170
  "Customer -> '%s' (%s) does already exist. Skipping...", entity.get("name"), str(customer_id)
2159
2171
  )
@@ -2763,7 +2775,7 @@ class Payload:
2763
2775
  return group["id"]
2764
2776
  else:
2765
2777
  self.logger.debug(
2766
- "Did not find an existing group with name -> '%s'",
2778
+ "Cannot find an existing group -> '%s'",
2767
2779
  group_name,
2768
2780
  )
2769
2781
  return 0
@@ -2814,7 +2826,7 @@ class Payload:
2814
2826
  return group["m365_id"]
2815
2827
  else:
2816
2828
  self.logger.debug(
2817
- "Did not find an existing M365 group with name -> '%s'",
2829
+ "Cannot find an existing M365 group -> '%s'",
2818
2830
  group_name,
2819
2831
  )
2820
2832
  return None
@@ -2868,7 +2880,7 @@ class Payload:
2868
2880
  return group["core_share_id"]
2869
2881
  else:
2870
2882
  self.logger.debug(
2871
- "Did not find an existing Core Share group with name -> '%s'",
2883
+ "Cannot find an existing Core Share group -> '%s'",
2872
2884
  group["name"],
2873
2885
  )
2874
2886
  return None
@@ -2903,7 +2915,10 @@ class Payload:
2903
2915
  self.logger.error("User needs a login name to lookup the ID!")
2904
2916
  return 0
2905
2917
 
2906
- response = self._otcs.get_user(name=user_name)
2918
+ user_type = 17 if user.get("type", "User") == "ServiceUser" else 0
2919
+
2920
+ response = self._otcs.get_user(name=user_name, user_type=user_type)
2921
+
2907
2922
  # We use the lookup method here as get_user() could deliver more
2908
2923
  # then 1 result element (in edge cases):
2909
2924
  user_id = self._otcs.lookup_result_value(
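
The hunk above distinguishes service users during the OTCS lookup by mapping the payload's user type onto the numeric type passed to get_user(). A standalone restatement of that mapping; the numeric values 17 and 0 are taken directly from the hunk:

    def resolve_otcs_user_type(user: dict) -> int:
        """Return the numeric OTCS user type for a payload user entry.

        17 marks a service user, 0 a regular user (values as in the diff above).
        """
        return 17 if user.get("type", "User") == "ServiceUser" else 0

    # Usage:
    assert resolve_otcs_user_type({"name": "svc-integration", "type": "ServiceUser"}) == 17
    assert resolve_otcs_user_type({"name": "jdoe"}) == 0
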
@@ -2920,7 +2935,7 @@ class Payload:
2920
2935
  return user["id"]
2921
2936
  else:
2922
2937
  self.logger.debug(
2923
- "Did not find an existing user with name -> '%s'!",
2938
+ "Cannot find an existing user -> '%s'!",
2924
2939
  user_name,
2925
2940
  )
2926
2941
  return 0
@@ -2969,7 +2984,7 @@ class Payload:
2969
2984
  return user["m365_id"]
2970
2985
  else:
2971
2986
  self.logger.debug(
2972
- "Did not find an existing M365 user with name -> '%s'",
2987
+ "Did not find an existing M365 user -> '%s'",
2973
2988
  user_name,
2974
2989
  )
2975
2990
  return None
@@ -3029,11 +3044,17 @@ class Payload:
3029
3044
  user["core_share_id"] = core_share_user_id
3030
3045
  return user["core_share_id"]
3031
3046
  else:
3032
- self.logger.debug(
3033
- "Did not find an existing Core Share user with name -> '%s %s'",
3034
- user["firstname"],
3035
- user["lastname"],
3036
- )
3047
+ if "email" in user:
3048
+ self.logger.debug(
3049
+ "Did not find an existing Core Share user with email -> '%s'",
3050
+ user["email"],
3051
+ )
3052
+ else:
3053
+ self.logger.debug(
3054
+ "Cannot find an existing Core Share user -> '%s %s'",
3055
+ user.get("firstname"),
3056
+ user.get("lastname"),
3057
+ )
3037
3058
  return None
3038
3059
 
3039
3060
  # end method definition
@@ -3650,6 +3671,9 @@ class Payload:
3650
3671
  if not self._business_object_types:
3651
3672
  self._log_header_callback(text="Process Business Object Types")
3652
3673
  self.process_business_object_types()
3674
+ case "nifi":
3675
+ self._log_header_callback("Process Knowledge Discovery Nifi Flows")
3676
+ self.process_nifi_flows()
3653
3677
  case _:
3654
3678
  self.logger.error(
3655
3679
  "Illegal payload section name -> '%s' in payloadSections!",
@@ -5397,7 +5421,7 @@ class Payload:
5397
5421
  self.logger.error("Group -> '%s' does not have an ID.", group["name"])
5398
5422
  success = False
5399
5423
  continue
5400
- parent_group_names = group["parent_groups"]
5424
+ parent_group_names = group.get("parent_groups", [])
5401
5425
  for parent_group_name in parent_group_names:
5402
5426
  # First, try to find parent group in payload by parent group name:
5403
5427
  parent_group = next(
@@ -5460,8 +5484,9 @@ class Payload:
5460
5484
  member_id=group["id"],
5461
5485
  group_id=parent_group_id,
5462
5486
  )
5487
+ # end for parent_group_name in parent_group_names:
5463
5488
 
5464
- # Assign application roles to the new user:
5489
+ # Assign application roles to the new group:
5465
5490
  application_roles = group.get("application_roles", [])
5466
5491
  for role in application_roles:
5467
5492
  group_partition = self._otcs.config()["partition"]
@@ -5999,7 +6024,7 @@ class Payload:
5999
6024
  )
6000
6025
  continue
6001
6026
  self.logger.info(
6002
- "Did not find an existing user with name '%s' - creating a new user...",
6027
+ "Cannot find an existing user -> '%s' - creating a new user...",
6003
6028
  user_name,
6004
6029
  )
6005
6030
 
@@ -6355,7 +6380,7 @@ class Payload:
6355
6380
  user_name,
6356
6381
  rfc_name,
6357
6382
  rfc_description,
6358
- rfc_params,
6383
+ str(rfc_params),
6359
6384
  )
6360
6385
 
6361
6386
  result = self._sap.call(
@@ -6837,7 +6862,7 @@ class Payload:
6837
6862
  )
6838
6863
  if not group:
6839
6864
  self.logger.error(
6840
- "Cannot find group with name -> '%s'. Cannot establish membership in Salesforce. Skipping to next group...",
6865
+ "Cannot find group -> '%s'. Cannot establish membership in Salesforce. Skipping to next group...",
6841
6866
  user_group,
6842
6867
  )
6843
6868
  success = False
@@ -7305,7 +7330,7 @@ class Payload:
7305
7330
  )
7306
7331
  if not group:
7307
7332
  self.logger.error(
7308
- "Cannot find group with name -> '%s'. Cannot establish membership in Core Share. Skipping to next group...",
7333
+ "Cannot find group -> '%s'. Cannot establish membership in Core Share. Skipping to next group...",
7309
7334
  user_group,
7310
7335
  )
7311
7336
  success = False
@@ -7649,6 +7674,7 @@ class Payload:
7649
7674
  group_names,
7650
7675
  user_department,
7651
7676
  )
7677
+
7652
7678
  # Go through all group names:
7653
7679
  for group_name in group_names:
7654
7680
  # Find the group payload item to the parent group name:
@@ -7748,7 +7774,7 @@ class Payload:
7748
7774
  )
7749
7775
  success = False
7750
7776
 
7751
- # As M365 groups are flat (not nested) we also add the
7777
+ # As M365 groups are flat (not nested), we also add the
7752
7778
  # user as member to the parent groups of the current group
7753
7779
  # if the parent group is enabled for M365:
7754
7780
  parent_group_names = group.get("parent_groups")
@@ -7859,13 +7885,17 @@ class Payload:
7859
7885
  response = self._m365.follow_sharepoint_site(site_id=group_site_id, user_id=m365_user_id)
7860
7886
  if not response:
7861
7887
  self.logger.warning(
7862
- "User -> '%s' cannot follow SharePoint site -> '%s'. ",
7888
+ "User -> '%s' cannot follow SharePoint site -> '%s'.",
7863
7889
  user["email"],
7864
7890
  group_site_name,
7865
7891
  )
7866
7892
  success = False
7867
- # end for group name
7893
+ # end if group_site_id:
7894
+ # end if group_id:
7895
+ # end if group_name == user_department and user["m365_skus"]:
7896
+ # end for group name in group_names:
7868
7897
  # end for user
7898
+
7869
7899
  self.write_status_file(
7870
7900
  success=success,
7871
7901
  payload_section_name=section_name,
@@ -7880,8 +7910,8 @@ class Payload:
7880
7910
  """Process groups in payload and create matching Teams in Microsoft 365.
7881
7911
 
7882
7912
  We need to do this after the creation of the M365 users as we require
7883
- Group Owners to create teams. These are NOT the teams for Extended ECM
7884
- workspaces! Those are created by Scheduled Bots (Jobs) from Extended ECM!
7913
+ Group Owners to create teams. These are NOT the teams for OTCS
7914
+ workspaces! Those are created by Scheduled Bots (Jobs) from OTCS!
7885
7915
 
7886
7916
  Args:
7887
7917
  section_name (str, optional):
@@ -8285,7 +8315,7 @@ class Payload:
8285
8315
 
8286
8316
  workspace_name = workspace["name"]
8287
8317
  self.logger.info(
8288
- "Check if stale Microsoft 365 Teams with name -> '%s' exist...",
8318
+ "Check if stale Microsoft 365 Teams -> '%s' exist...",
8289
8319
  workspace_name,
8290
8320
  )
8291
8321
  self._m365.delete_teams(name=workspace_name)
@@ -8381,12 +8411,22 @@ class Payload:
8381
8411
  def process_sites_m365(self, section_name: str = "sitesM365") -> bool:
8382
8412
  """Process M365 groups in payload and configure SharePoint sites in Microsoft 365.
8383
8413
 
8384
- These are NOT the SharePoint sites for Extended ECM workspaces which are created
8385
- by Scheduled Bots (Jobs) from Extended ECM via the creation of MS teams
8414
+ These are NOT the SharePoint sites for Business Workspaces which are created
8415
+ by Scheduled Bots (Jobs) from OTCS via the creation of MS teams
8386
8416
  (each MS Team has a SharePoint site behind it)!
8387
8417
 
8388
- The are the SharePoint sites for the departmental groups such as "Sales",
8418
+ These are the SharePoint sites for the departmental groups such as "Sales",
8389
8419
  "Procurement", "Enterprise Asset Management", ...
8420
+ Only departmental group that have a top-level folder with the exact same
8421
+ name as the Department are configured.
8422
+
8423
+ For each departmental group:
8424
+ 1. Determine a departmental folder in the Enterprise Workspace
8425
+ 2. Determine the M365 Group
8426
+ 3. Determine the SharePoint Site (based on the M365 group ID)
8427
+ 4. Determine the Page in the SharePoint site
8428
+ 5. Determine or create the SharePoint webpart for the OTCS browser
8429
+ 6. Create URL object pointing to SharePoint site inside top level department folder
8390
8430
 
8391
8431
  Args:
8392
8432
  section_name (str, optional):
@@ -8744,7 +8784,7 @@ class Payload:
8744
8784
  # end else
8745
8785
 
8746
8786
  #
8747
- # 6. Create URL object pointing to SharePoint site inside top level ddepartment folder
8787
+ # 6. Create URL object pointing to SharePoint site inside top level department folder
8748
8788
  #
8749
8789
 
8750
8790
  item_name = (
@@ -8882,6 +8922,19 @@ class Payload:
8882
8922
  "A restart of the Content Server service is required.",
8883
8923
  )
8884
8924
  restart_required = True
8925
+
8926
+ if admin_setting.get("restart", False):
8927
+ self.logger.info(
8928
+ "Immediate restart requested - restart of OTCS services...",
8929
+ )
8930
+ # Restart OTCS frontend and backend pods:
8931
+ self._otcs_restart_callback(
8932
+ backend=self._otcs_backend,
8933
+ frontend=self._otcs_frontend,
8934
+ )
8935
+
8936
+ restart_required = False
8937
+
8885
8938
  else:
8886
8939
  self.logger.error(
8887
8940
  "Admin settings file -> '%s' not found.",
@@ -9026,6 +9079,11 @@ class Payload:
9026
9079
  continue
9027
9080
  system_type = external_system["external_system_type"]
9028
9081
 
9082
+ self._log_header_callback(
9083
+ text="Process External System -> '{}' ({})".format(system_name, system_type),
9084
+ char="-",
9085
+ )
9086
+
9029
9087
  # Check if external system has been explicitly disabled in payload
9030
9088
  # (enabled = false). In this case we skip the element:
9031
9089
  if not external_system.get("enabled", True):
@@ -9538,6 +9596,11 @@ class Payload:
9538
9596
  )
9539
9597
  continue
9540
9598
 
9599
+ # We skip also user of type "ServiceUser":
9600
+ if user.get("type", "User") == "ServiceUser":
9601
+ self.logger.info("Skipping service user -> '%s'...", user_name)
9602
+ continue
9603
+
9541
9604
  user_id = user.get("id")
9542
9605
  if not user_id:
9543
9606
  self.logger.error(
@@ -9978,23 +10041,23 @@ class Payload:
9978
10041
  # we assume the nickname of the photo item equals the login name of the user
9979
10042
  # we also assume that the photos have been uploaded / transported into the target system
9980
10043
  for user in self._users:
9981
- if "lastname" not in user or "firstname" not in user:
10044
+ if "name" not in user:
9982
10045
  self.logger.error(
9983
- "User is missing last name or first name. Skipping to next user...",
10046
+ "User is missing login name. Skipping to next user...",
9984
10047
  )
9985
10048
  success = False
9986
10049
  continue
9987
10050
  user_login = user["name"]
9988
- user_last_name = user["lastname"]
9989
- user_first_name = user["firstname"]
9990
- user_name = user_first_name + " " + user_last_name
10051
+ user_last_name = user.get("lastname", "")
10052
+ user_first_name = user.get("firstname", "")
10053
+ user_name = "{} {}".format(user_first_name, user_last_name).strip()
9991
10054
 
9992
10055
  # Check if user has been explicitly disabled in payload
9993
10056
  # (enabled = false). In this case we skip the element:
9994
10057
  if not user.get("enabled", True):
9995
10058
  self.logger.info(
9996
10059
  "Payload for user -> '%s' is disabled. Skipping...",
9997
- user_name,
10060
+ user_login,
9998
10061
  )
9999
10062
  continue
10000
10063
 
@@ -10005,7 +10068,7 @@ class Payload:
10005
10068
  if not user.get("enable_core_share", False):
10006
10069
  self.logger.info(
10007
10070
  "User -> '%s' is not enabled for Core Share. Skipping...",
10008
- user_name,
10071
+ user_login,
10009
10072
  )
10010
10073
  continue
10011
10074
 
@@ -10520,7 +10583,7 @@ class Payload:
10520
10583
 
10521
10584
  if not self._business_object_types:
10522
10585
  self.logger.warning(
10523
- "List of business object types is empty / not initialized! Cannot lookup type with name -> '%s'",
10586
+ "List of business object types is empty / not initialized! Cannot lookup type -> '%s'",
10524
10587
  bo_type_name,
10525
10588
  )
10526
10589
  return None
@@ -10532,7 +10595,7 @@ class Payload:
10532
10595
  )
10533
10596
  if not business_object_type:
10534
10597
  self.logger.warning(
10535
- "Cannot find business object type with name -> '%s'",
10598
+ "Cannot find business object type -> '%s'",
10536
10599
  bo_type_name,
10537
10600
  )
10538
10601
  return None
@@ -10851,7 +10914,7 @@ class Payload:
10851
10914
  if role_id is None:
10852
10915
  # if member_role is None:
10853
10916
  self.logger.error(
10854
- "Workspace template -> '%s' does not have a role with name -> '%s'",
10917
+ "Workspace template -> '%s' does not have a role -> '%s'",
10855
10918
  template_name,
10856
10919
  member_role_name,
10857
10920
  )
@@ -12998,7 +13061,7 @@ class Payload:
12998
13061
  workspace["type_name"],
12999
13062
  )
13000
13063
 
13001
- # now determine the actual node IDs of the workspaces (has been created before):
13064
+ # now determine the actual node ID of the workspace (which should have been created before):
13002
13065
  workspace_node_id = int(self.determine_workspace_id(workspace=workspace))
13003
13066
  if not workspace_node_id:
13004
13067
  self.logger.warning(
@@ -13016,64 +13079,97 @@ class Payload:
13016
13079
 
13017
13080
  success: bool = True
13018
13081
 
13019
- for related_workspace_id in workspace["relationships"]:
13082
+ for related_workspace in workspace["relationships"]:
13020
13083
  # Initialize variable to determine if we found a related workspace:
13021
13084
  related_workspace_node_id = None
13085
+ found_by = ""
13022
13086
 
13023
- #
13024
- # 1. Option: Find the related workspace with the logical ID given in the payload:
13025
- #
13026
- related_workspace = next(
13027
- (item for item in self._workspaces if item["id"] == related_workspace_id),
13028
- None,
13029
- )
13030
- if related_workspace:
13031
- if not related_workspace.get("enabled", True):
13032
- self.logger.info(
13033
- "Payload for Related Workspace -> '%s' is disabled. Skipping...",
13034
- related_workspace["name"],
13087
+ if isinstance(related_workspace, (str, int)):
13088
+ #
13089
+ # 1. Option: Find the related workspace with the logical ID given in the payload:
13090
+ #
13091
+ related_workspace_payload = next(
13092
+ (item for item in self._workspaces if str(item["id"]) == str(related_workspace)),
13093
+ None,
13094
+ )
13095
+ if related_workspace_payload:
13096
+ if not related_workspace_payload.get("enabled", True):
13097
+ self.logger.info(
13098
+ "Payload for Related Workspace -> '%s' is disabled. Skipping...",
13099
+ related_workspace_payload["name"],
13100
+ )
13101
+ continue
13102
+
13103
+ related_workspace_node_id = self.determine_workspace_id(
13104
+ workspace=related_workspace_payload,
13035
13105
  )
13036
- continue
13106
+ if not related_workspace_node_id:
13107
+ self.logger.warning(
13108
+ "Related Workspace -> '%s' (type -> '%s') has no node ID (workspaces creation may have failed or name is different from payload). Skipping to next workspace...",
13109
+ related_workspace_payload["name"],
13110
+ related_workspace_payload["type_name"],
13111
+ )
13112
+ continue
13113
+ found_by = "logical ID -> '{}' in payload".format(related_workspace)
13114
+ # end if related_workspace_payload:
13037
13115
 
13038
- related_workspace_node_id = self.determine_workspace_id(
13039
- workspace=related_workspace,
13040
- )
13041
- if not related_workspace_node_id:
13042
- self.logger.warning(
13043
- "Related Workspace -> '%s' (type -> '%s') has no node ID (workspaces creation may have failed or name is different from payload). Skipping to next workspace...",
13044
- related_workspace["name"],
13045
- related_workspace["type_name"],
13116
+ #
13117
+ # 2. Option: Find the related workspace with nickname:
13118
+ #
13119
+ else:
13120
+ # See if a nickname exists the the provided related_workspace:
13121
+ response = self._otcs.get_node_from_nickname(nickname=related_workspace)
13122
+ related_workspace_node_id = self._otcs.get_result_value(
13123
+ response=response,
13124
+ key="id",
13046
13125
  )
13047
- continue
13048
- self.logger.debug(
13049
- "Related Workspace with logical ID -> %s has node ID -> %s",
13050
- related_workspace_id,
13051
- related_workspace_node_id,
13052
- )
13053
- # end if related_workspace is not None
13126
+ if related_workspace_node_id:
13127
+ found_by = "nickname -> '{}'".format(related_workspace)
13128
+ # end if isinstance(related_workspace_id, (str, int)):
13054
13129
 
13055
13130
  #
13056
- # 2. Option: Find the related workspace with nickname:
13131
+ # 3. Option: Find the related workspace type and name:
13057
13132
  #
13058
- else:
13059
- # See if a nickname exists the the provided related_workspace_id:
13060
- response = self._otcs.get_node_from_nickname(nickname=related_workspace_id)
13133
+ elif isinstance(related_workspace, dict):
13134
+ related_workspace_type = related_workspace.get("type", None)
13135
+ related_workspace_name = related_workspace.get("name", None)
13136
+ if related_workspace_type and related_workspace_name:
13137
+ response = self._otcs.get_workspace_by_type_and_name(
13138
+ type_name=related_workspace_type, name=related_workspace_name
13139
+ )
13140
+ related_workspace_node_id = self._otcs.get_result_value(
13141
+ response=response,
13142
+ key="id",
13143
+ )
13144
+ if related_workspace_node_id:
13145
+ found_by = "type -> '{}' and name -> '{}'".format(
13146
+ related_workspace_type, related_workspace_name
13147
+ )
13148
+ #
13149
+ # 4. Option: Find the related workspace volume and path:
13150
+ #
13151
+ elif isinstance(related_workspace, list):
13152
+ response = self._otcs.get_node_by_volume_and_path(
13153
+ volume_type=self._otcs.VOLUME_TYPE_ENTERPRISE_WORKSPACE, path=related_workspace
13154
+ )
13061
13155
  related_workspace_node_id = self._otcs.get_result_value(
13062
13156
  response=response,
13063
13157
  key="id",
13064
13158
  )
13159
+ if related_workspace_node_id:
13160
+ found_by = "path -> {}".format(related_workspace)
13065
13161
 
13066
13162
  if related_workspace_node_id is None:
13067
13163
  self.logger.error(
13068
- "Related Workspace with logical ID or nickname -> %s not found.",
13069
- related_workspace_id,
13164
+ "Related Workspace -> %s not found.",
13165
+ related_workspace,
13070
13166
  )
13071
13167
  success = False
13072
13168
  continue
13073
13169
 
13074
13170
  self.logger.debug(
13075
- "Related Workspace with logical ID or nickname -> %s has node ID -> %s",
13076
- related_workspace_id,
13171
+ "Related Workspace with %s has node ID -> %s",
13172
+ found_by,
13077
13173
  related_workspace_node_id,
13078
13174
  )
13079
13175
 
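
Because a relationships entry can now be an ID, a nickname, a type/name dictionary, or a path list (options 1 to 4 above), the processing code dispatches on the Python type of the element. A standalone sketch of just that dispatch, without any OTCS calls:

    def classify_relationship(entry) -> str:
        """Return which lookup strategy applies to a relationships payload entry."""
        if isinstance(entry, (str, int)):
            # A logical workspace ID defined in the payload or, failing that,
            # the nickname of an existing workspace.
            return "logical ID or nickname"
        if isinstance(entry, dict):
            # Lookup by workspace type and name.
            return "type and name"
        if isinstance(entry, list):
            # Lookup by top-down path in the Enterprise volume.
            return "volume path"
        return "unsupported"

    for example in [42, "ws_customer_acme", {"type": "Customer", "name": "ACME"}, ["Sales", "ACME"]]:
        print(example, "->", classify_relationship(example))
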
@@ -13345,10 +13441,6 @@ class Payload:
13345
13441
  continue
13346
13442
 
13347
13443
  workspace_id = workspace["id"]
13348
- self.logger.info(
13349
- "Workspace -> '%s' has memberships in payload - establishing...",
13350
- workspace_name,
13351
- )
13352
13444
 
13353
13445
  workspace_node_id = int(self.determine_workspace_id(workspace=workspace))
13354
13446
  if not workspace_node_id:
@@ -13357,6 +13449,10 @@ class Payload:
13357
13449
  )
13358
13450
  continue
13359
13451
 
13452
+ self.logger.info(
13453
+ "Workspace -> '%s' (%s) has memberships in payload - establishing...", workspace_name, workspace_node_id
13454
+ )
13455
+
13360
13456
  # now determine the actual node IDs of the workspaces (have been created by process_workspaces()):
13361
13457
  workspace_node = self._otcs.get_node(node_id=workspace_node_id)
13362
13458
  workspace_owner_id = self._otcs.get_result_value(
@@ -13373,21 +13469,29 @@ class Payload:
13373
13469
  )
13374
13470
  if workspace_roles is None:
13375
13471
  self.logger.debug(
13376
- "Workspace with ID -> %s and node Id -> %s has no roles. Skipping to next workspace...",
13377
- workspace_id,
13472
+ "Workspace -> '%s' (%s) has no roles. Skipping to next workspace...",
13473
+ workspace_name,
13378
13474
  workspace_node_id,
13379
13475
  )
13380
13476
  continue
13381
13477
 
13382
13478
  # We don't want the workspace creator to be in the leader role
13383
13479
  # of automatically created workspaces - this can happen because the
13384
- # creator gets added to the leader role automatically:
13385
- leader_role_id = self._otcs.lookup_result_value(
13386
- response=workspace_roles,
13387
- key="leader",
13388
- value=True,
13389
- return_key="id",
13390
- )
13480
+ # creator gets added to the leader role automatically if
13481
+ # the workspace type advanved configuration setting
13482
+ # "Add the creator of a business workspace to the Lead role" is
13483
+ # enabled:
13484
+ roles_iterator = self._otcs.get_result_values_iterator(response=workspace_roles)
13485
+ for role in roles_iterator:
13486
+ # We can have two leader roles if in a sub-workspaces a leader
13487
+ # roles is inherited from the parent workspace. As we want
13488
+ # don't want to consider leader role of the parent workspace
13489
+ # we check that 'inherited_from_id' is not set:
13490
+ if role["leader"] and role["inherited_from_id"] is None:
13491
+ leader_role_id = role["id"]
13492
+ break
13493
+ else:
13494
+ leader_role_id = None
13391
13495
 
13392
13496
  if leader_role_id:
13393
13497
  leader_role_name = self._otcs.lookup_result_value(
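
The rewritten leader-role lookup above has to cope with sub-workspaces, where a second, inherited leader role can be present; only the role that is not inherited (inherited_from_id is None) counts. A standalone sketch of that selection over plain role dictionaries:

    def find_own_leader_role_id(roles: list[dict]) -> int | None:
        """Return the ID of the workspace's own (non-inherited) leader role, if any."""
        for role in roles:
            if role["leader"] and role["inherited_from_id"] is None:
                return role["id"]
        return None

    # A sub-workspace can see two leader roles; only the non-inherited one counts:
    roles = [
        {"id": 7001, "leader": True, "inherited_from_id": 5000},   # inherited from the parent workspace
        {"id": 7002, "leader": True, "inherited_from_id": None},   # this workspace's own leader role
        {"id": 7003, "leader": False, "inherited_from_id": None},
    ]
    assert find_own_leader_role_id(roles) == 7002
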
@@ -13412,6 +13516,16 @@ class Payload:
13412
13516
  workspace_name,
13413
13517
  workspace_node_id,
13414
13518
  )
13519
+ else:
13520
+ self.logger.info(
13521
+ "Creator user -> '%s' (%s) is not in leader role -> '%s' (%s) of workspace -> '%s' (%s). No need to remove it.",
13522
+ workspace_owner_name,
13523
+ workspace_owner_id,
13524
+ leader_role_name,
13525
+ leader_role_id,
13526
+ workspace_name,
13527
+ workspace_node_id,
13528
+ )
13415
13529
 
13416
13530
  self.logger.info(
13417
13531
  "Adding members to workspace -> '%s' (%s) defined in payload...",
@@ -13428,7 +13542,7 @@ class Payload:
13428
13542
 
13429
13543
  if member_role_name == "": # role name is required
13430
13544
  self.logger.error(
13431
- "Members of workspace -> '%s' is missing the role name.",
13545
+ "Members of workspace -> '%s' is missing the role name in the payload.",
13432
13546
  workspace_name,
13433
13547
  )
13434
13548
  success = False
@@ -13443,12 +13557,29 @@ class Payload:
13443
13557
  if role_id is None:
13444
13558
  # if member_role is None:
13445
13559
  self.logger.error(
13446
- "Workspace -> '%s' does not have a role with name -> '%s'",
13560
+ "Workspace -> '%s' does not have a role -> '%s'",
13447
13561
  workspace_name,
13448
13562
  member_role_name,
13449
13563
  )
13450
13564
  success = False
13451
13565
  continue
13566
+ inherited_role_id = self._otcs.lookup_result_value(
13567
+ response=workspace_roles,
13568
+ key="name",
13569
+ value=member_role_name,
13570
+ return_key="inherited_from_id",
13571
+ )
13572
+ if inherited_role_id is not None:
13573
+ self.logger.error(
13574
+ "The role -> '%s' (%s) of workspace -> '%s' (%s) is inherited from role with ID -> %d and members cannot be set in this sub-workspace.",
13575
+ member_role_name,
13576
+ role_id,
13577
+ workspace_name,
13578
+ workspace_node_id,
13579
+ inherited_role_id,
13580
+ )
13581
+ success = False
13582
+ continue
13452
13583
  self.logger.debug(
13453
13584
  "Role -> '%s' has ID -> %s",
13454
13585
  member_role_name,
@@ -13502,20 +13633,22 @@ class Payload:
13502
13633
  )
13503
13634
  if response is None:
13504
13635
  self.logger.error(
13505
- "Failed to add user -> '%s' (%s) as member to role -> '%s' of workspace -> '%s'",
13636
+ "Failed to add user -> '%s' (%s) as member to role -> '%s' of workspace -> '%s' (%s)",
13506
13637
  member_user,
13507
13638
  user_id,
13508
13639
  member_role_name,
13509
13640
  workspace_name,
13641
+ workspace_node_id,
13510
13642
  )
13511
13643
  success = False
13512
13644
  else:
13513
13645
  self.logger.info(
13514
- "Successfully added user -> '%s' (%s) as member to role -> '%s' of workspace -> '%s'",
13646
+ "Successfully added user -> '%s' (%s) as member to role -> '%s' of workspace -> '%s' (%s)",
13515
13647
  member_user,
13516
13648
  user_id,
13517
13649
  member_role_name,
13518
13650
  workspace_name,
13651
+ workspace_node_id,
13519
13652
  )
13520
13653
 
13521
13654
  # Process groups as workspaces members:
@@ -13719,7 +13852,7 @@ class Payload:
13719
13852
  )
13720
13853
  if role_id is None:
13721
13854
  self.logger.error(
13722
- "Workspace -> '%s' does not have a role with name -> '%s'",
13855
+ "Workspace -> '%s' does not have a role -> '%s'",
13723
13856
  workspace_name,
13724
13857
  member_role_name,
13725
13858
  )
@@ -14197,7 +14330,12 @@ class Payload:
14197
14330
  )
14198
14331
  continue
14199
14332
 
14200
- user_partition = self._otcs.config()["partition"]
14333
+ self._log_header_callback(
14334
+ text="Process settings for user -> '{}'".format(user_name),
14335
+ char="-",
14336
+ )
14337
+
14338
+ user_partition = self._otcs.config().get("partition", None)
14201
14339
  if not user_partition:
14202
14340
  self.logger.error("User partition not found!")
14203
14341
  success = False
@@ -14312,9 +14450,6 @@ class Payload:
14312
14450
  # The following code (for loop) will change the authenticated user - we need to
14313
14451
  # switch it back to admin user later so we safe the admin credentials for this:
14314
14452
 
14315
- # save admin credentials for later switch back to admin user:
14316
- # admin_credentials = self._otcs.credentials() if self._users else {}
14317
-
14318
14453
  for user in self._users:
14319
14454
  user_name = user.get("name")
14320
14455
  if not user_name:
@@ -14323,6 +14458,11 @@ class Payload:
14323
14458
  )
14324
14459
  continue
14325
14460
 
14461
+ self._log_header_callback(
14462
+ text="Process Favorites and Profile for user -> '{}'".format(user_name),
14463
+ char="-",
14464
+ )
14465
+
14326
14466
  # Check if user has been explicitly disabled in payload
14327
14467
  # (enabled = false). In this case we skip the element:
14328
14468
  if not user.get("enabled", True):
@@ -14332,6 +14472,11 @@ class Payload:
14332
14472
  )
14333
14473
  continue
14334
14474
 
14475
+ # We skip also user of type "ServiceUser":
14476
+ if user.get("type", "User") == "ServiceUser":
14477
+ self.logger.info("Skipping service user -> '%s'...", user_name)
14478
+ continue
14479
+
14335
14480
  # Impersonate as the user:
14336
14481
  self.logger.info("Impersonate user -> '%s'...", user_name)
14337
14482
  result = self.start_impersonation(username=user_name)
@@ -15499,17 +15644,37 @@ class Payload:
15499
15644
  if "name" not in renaming:
15500
15645
  self.logger.error("Renamings require the new name!")
15501
15646
  continue
15502
- if "nodeid" not in renaming:
15503
- if "volume" not in renaming:
15504
- self.logger.error(
15505
- "Renamings require either a node ID or a volume! Skipping to next renaming...",
15647
+ if "nodeid" in renaming:
15648
+ node_id = renaming["nodeid"]
15649
+ elif "volume" in renaming:
15650
+ path = renaming.get("path")
15651
+ volume = renaming.get("volume")
15652
+ if path:
15653
+ self.logger.info(
15654
+ "Found path -> '%s' in renaming payload. Determine node ID by volume and path...",
15655
+ path,
15506
15656
  )
15507
- continue
15508
- # Determine object ID of volume:
15509
- volume = self._otcs.get_volume(volume_type=renaming["volume"])
15510
- node_id = self._otcs.get_result_value(response=volume, key="id")
15657
+ node = self._otcs.get_node_by_volume_and_path(
15658
+ volume_type=volume,
15659
+ path=path,
15660
+ )
15661
+ else:
15662
+ # Determine object ID of volume:
15663
+ node = self._otcs.get_volume(volume_type=volume)
15664
+ node_id = self._otcs.get_result_value(response=node, key="id")
15665
+ elif "nickname" in renaming:
15666
+ nickname = renaming["nickname"]
15667
+ self.logger.info(
15668
+ "Found nickname -> '%s' in renaming payload. Determine node ID by nickname...",
15669
+ nickname,
15670
+ )
15671
+ node = self._otcs.get_node_from_nickname(nickname=nickname)
15672
+ node_id = self._otcs.get_result_value(response=node, key="id")
15511
15673
  else:
15512
- node_id = renaming["nodeid"]
15674
+ self.logger.error(
15675
+ "Renamings require either a node ID or a volume (with an optional path) or a nickname! Skipping to next renaming...",
15676
+ )
15677
+ continue
15513
15678
 
15514
15679
  # Check if renaming has been explicitly disabled in payload
15515
15680
  # (enabled = false). In this case we skip this payload element:
@@ -15625,7 +15790,7 @@ class Payload:
15625
15790
  )
15626
15791
  success = False
15627
15792
  continue
15628
- else:
15793
+ elif parent_path is not None: # parent_path can be [] which is valid for top-level items!
15629
15794
  parent_volume = item.get("parent_volume", self._otcs.VOLUME_TYPE_ENTERPRISE_WORKSPACE)
15630
15795
  parent_node = self._otcs.get_node_by_volume_and_path(
15631
15796
  volume_type=parent_volume,
@@ -15636,11 +15801,17 @@ class Payload:
15636
15801
  if not parent_id:
15637
15802
  # if not parent_node:
15638
15803
  self.logger.error(
15639
- "Item -> '%s' has a parent path that does not exist. Skipping...",
15804
+ "Item -> '%s' has a parent path -> %s that does not exist and couldn't be created in volume -> %d. Skipping...",
15640
15805
  item_name,
15806
+ parent_path,
15807
+ self._otcs.VOLUME_TYPE_ENTERPRISE_WORKSPACE,
15641
15808
  )
15642
15809
  success = False
15643
15810
  continue
15811
+ else:
15812
+ self.logger.error("The parent for the item -> '%s' is not specified by nickname nor path!", item_name)
15813
+ success = False
15814
+ continue
15644
15815
 
15645
15816
  # Handling for shortcut items that have an orginal node:
15646
15817
  original_nickname = item.get("original_nickname")
@@ -15663,9 +15834,10 @@ class Payload:
15663
15834
  )
15664
15835
  success = False
15665
15836
  continue
15666
- elif original_path:
15837
+ elif original_path is not None: # original_path can be [] which is valid for top-level items!
15838
+ original_volume = item.get("original_volume", self._otcs.VOLUME_TYPE_ENTERPRISE_WORKSPACE)
15667
15839
  original_node = self._otcs.get_node_by_volume_and_path(
15668
- volume_type=self._otcs.VOLUME_TYPE_ENTERPRISE_WORKSPACE,
15840
+ volume_type=original_volume,
15669
15841
  path=original_path,
15670
15842
  )
15671
15843
  original_id = self._otcs.get_result_value(
@@ -15704,7 +15876,7 @@ class Payload:
15704
15876
  success = False
15705
15877
  continue
15706
15878
  case self._otcs.ITEM_TYPE_SHORTCUT: # Shortcut
15707
- if original_id == 0:
15879
+ if not original_id:
15708
15880
  self.logger.error(
15709
15881
  "Item -> '%s' has type Shortcut but the original item is not in the payload. Skipping...",
15710
15882
  item_name,
@@ -15759,14 +15931,23 @@ class Payload:
15759
15931
  )
15760
15932
  node_id = self._otcs.get_result_value(response=response, key="id")
15761
15933
  if not node_id:
15762
- self.logger.error("Failed to create item -> '%s'.", item_name)
15934
+ self.logger.error(
15935
+ "Failed to create item -> '%s' under parent%s.",
15936
+ item_name,
15937
+ " with nickname -> '{}'".format(parent_nickname)
15938
+ if parent_nickname
15939
+ else " path -> {} in volume -> {}".format(parent_path, parent_volume),
15940
+ )
15763
15941
  success = False
15764
15942
  continue
15765
15943
 
15766
15944
  self.logger.info(
15767
- "Successfully created item -> '%s' with ID -> %s.",
15945
+ "Successfully created item -> '%s' with ID -> %s under parent%s.",
15768
15946
  item_name,
15769
15947
  node_id,
15948
+ " with nickname -> '{}'".format(parent_nickname)
15949
+ if parent_nickname
15950
+ else " path -> {} in volume -> {}".format(parent_path, parent_volume),
15770
15951
  )
15771
15952
 
15772
15953
  # Special handling for scheduled bot items:
@@ -15829,14 +16010,20 @@ class Payload:
15829
16010
  success = False
15830
16011
  continue
15831
16012
 
15832
- # If the Job has start mode manual we start it now:
15833
- if start_mode == "manual":
15834
- self.logger.info("Run scheduled bot -> '%s' now...", item_name)
15835
- response = self._otcs.update_item(node_id=node_id, body=False, actionName="Runnow")
16013
+ # Check if we want to execute an action immediately after creation, like "Runnow":
16014
+ actions = item_details.get("actions", [])
16015
+ for action in actions:
16016
+ self.logger.info("Execute action -> '%s' for scheduled bot -> '%s'...", action, item_name)
16017
+ response = self._otcs.update_item(node_id=node_id, body=False, actionName=action)
15836
16018
  if not response:
15837
- self.logger.error("Failed to run scheduled bot item -> '%s'.", item_name)
16019
+ self.logger.error(
16020
+ "Failed to execute action -> '%s' for scheduled bot item -> '%s'.", action, item_name
16021
+ )
15838
16022
  success = False
15839
16023
  continue
16024
+ if not actions:
16025
+ self.logger.info("No immediate actions specified for scheduled bot -> '%s'.", item_name)
16026
+
15840
16027
  # end if item_type == self._otcs.ITEM_TYPE_SCHEDULED_BOT:
15841
16028
 
15842
16029
  # Special handling for collection items:
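
Scheduled-bot items no longer trigger an immediate run via a manual start mode; instead an explicit list of actions (for example "Runnow") is executed right after creation, one update_item() call per action. Whether the actions list sits directly on the payload item or inside a nested details structure is not visible in this hunk, so the fragment below is only a guess at the shape:

    # Hypothetical scheduled-bot item with an immediate "Runnow" action:
    scheduled_bot_item = {
        "name": "Nightly Cleanup Bot",
        "actions": ["Runnow"],   # each entry is passed to update_item(..., actionName=action)
    }
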
@@ -16035,7 +16222,7 @@ class Payload:
16035
16222
  user_id = self._otcs.get_result_value(response=response, key="id")
16036
16223
  if not user_id:
16037
16224
  self.logger.error(
16038
- "Cannot find user with name -> '%s'; cannot set user permissions. Skipping user...",
16225
+ "Cannot find user -> '%s'; cannot set user permissions. Skipping user...",
16039
16226
  user_name,
16040
16227
  )
16041
16228
  return False
@@ -16090,7 +16277,7 @@ class Payload:
16090
16277
  group_id = self._otcs.get_result_value(response=otcs_group, key="id")
16091
16278
  if not group_id:
16092
16279
  self.logger.error(
16093
- "Cannot find group with name -> '%s'; cannot set group permissions. Skipping group...",
16280
+ "Cannot find group -> '%s'; cannot set group permissions. Skipping group...",
16094
16281
  group_name,
16095
16282
  )
16096
16283
  return False
@@ -16149,7 +16336,7 @@ class Payload:
16149
16336
  )
16150
16337
  if not role_id:
16151
16338
  self.logger.error(
16152
- "Cannot find role with name -> '%s'; cannot set role permissions.",
16339
+ "Cannot find role -> '%s'; cannot set role permissions.",
16153
16340
  role_name,
16154
16341
  )
16155
16342
  return False
@@ -17473,6 +17660,11 @@ class Payload:
17473
17660
  continue
17474
17661
  workspace_type = doc_generator["workspace_type"]
17475
17662
 
17663
+ self._log_header_callback(
17664
+ text="Process Document Generator for workspace type -> '{}'".format(workspace_type),
17665
+ char="-",
17666
+ )
17667
+
17476
17668
  # Check if doc generator has been explicitly disabled in payload
17477
17669
  # (enabled = false). In this case we skip the element:
17478
17670
  if not doc_generator.get("enabled", True):
@@ -17715,7 +17907,7 @@ class Payload:
17715
17907
  )
17716
17908
  if response["results"]:
17717
17909
  self.logger.warning(
17718
- "Node with name -> '%s' does already exist in workspace folder with ID -> %s",
17910
+ "Node -> '%s' does already exist in workspace folder with ID -> %s",
17719
17911
  document_name,
17720
17912
  workspace_folder_id,
17721
17913
  )
@@ -17730,7 +17922,7 @@ class Payload:
17730
17922
  )
17731
17923
  if not response:
17732
17924
  self.logger.error(
17733
- "Failed to generate document -> '%s' in workspace -> '%s' (%s) as user -> %s",
17925
+ "Failed to generate document -> '%s' in workspace -> '%s' (%s) as user -> '%s'",
17734
17926
  document_name,
17735
17927
  workspace_name,
17736
17928
  workspace_id,
@@ -18288,10 +18480,9 @@ class Payload:
18288
18480
  # (enabled = false). In this case we skip this payload element:
18289
18481
  if not browser_automation.get("enabled", True):
18290
18482
  self.logger.info(
18291
- "Payload for %s automation -> '%s'%s is disabled. Skipping...",
18483
+ "Payload for %s automation -> '%s' is disabled. Skipping...",
18292
18484
  automation_type.lower(),
18293
18485
  name,
18294
- " ({})".format(description) if description else "",
18295
18486
  )
18296
18487
  continue
18297
18488
 
@@ -18300,6 +18491,7 @@ class Payload:
18300
18491
  self.logger.error(
18301
18492
  "%s automation -> '%s' is missing 'base_url' parameter. Skipping...", automation_type, name
18302
18493
  )
18494
+ browser_automation["result"] = "failure"
18303
18495
  success = False
18304
18496
  continue
18305
18497
 
@@ -18318,6 +18510,7 @@ class Payload:
18318
18510
  automation_type,
18319
18511
  name,
18320
18512
  )
18513
+ browser_automation["result"] = "failure"
18321
18514
  success = False
18322
18515
  continue
18323
18516
 
@@ -18340,21 +18533,22 @@ class Payload:
18340
18533
  user_password=password,
18341
18534
  automation_name=name,
18342
18535
  take_screenshots=debug_automation,
18343
- headless=self._browser_headless,
18536
+ headless=browser_automation.get("headless", self._browser_headless),
18344
18537
  logger=self.logger,
18345
18538
  wait_until=wait_until,
18539
+ browser=browser_automation.get("browser"), # None is acceptable
18346
18540
  )
18347
- # Wait time is a global setting (for whole brwoser session)
18541
+ # Wait time is a global setting (for whole browser session)
18348
18542
  # This makes sure a page is fully loaded and elements are present
18349
- # before accessing them. We set 15.0 seconds as default if not
18543
+ # before accessing them. We set 30.0 seconds as default if not
18350
18544
  # otherwise specified by "wait_time" in the payload.
18351
- wait_time = browser_automation.get("wait_time", 15.0)
18545
+ wait_time = float(browser_automation.get("wait_time", 30.0))
18352
18546
  browser_automation_object.set_timeout(wait_time=wait_time)
18353
18547
  if "wait_time" in browser_automation:
18354
18548
  self.logger.info(
18355
18549
  "%s Automation wait time -> '%s' configured.",
18356
18550
  automation_type,
18357
- wait_time,
18551
+ str(wait_time),
18358
18552
  )
18359
18553
 
18360
18554
  # Initialize overall result status:
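
Together with the earlier docstring change (wait_time default raised from 15.0 to 30.0 seconds), the hunk above adds two per-automation overrides: "headless" and "browser". A hypothetical top-level browser-automation entry combining the documented keys; concrete values and the "name" key are illustrative:

    browser_automation = {
        "name": "configure-admin-ui",   # key name assumed
        "base_url": "https://otcs.example.org",
        "user_name": "admin",
        "password": "<secret>",
        "wait_time": 30.0,              # page-load timeout in seconds; 30.0 is the new default
        "wait_until": "networkidle",    # load | networkidle | domcontentloaded
        "debug": False,                 # take screenshots when True
        "headless": True,               # new: per-automation override of the global headless setting
        "browser": None,                # new: optional browser choice; None falls back to the default
        "automations": [],              # list of automation steps, see the step example further down
    }
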
@@ -18362,11 +18556,17 @@ class Payload:
18362
18556
  first_step = True
18363
18557
 
18364
18558
  for automation_step in automation_steps:
18365
- if "type" not in automation_step:
18366
- self.logger.error("%s automation step is missing type. Skipping...", automation_type)
18559
+ automation_step_type = automation_step.get("type", "")
18560
+ if not automation_step_type:
18561
+ self.logger.error(
18562
+ "%s automation step -> %s in browser automation -> '%s' is missing 'type' parameter. Stopping automation -> '%s'.",
18563
+ automation_type,
18564
+ str(automation_step),
18565
+ name,
18566
+ name,
18567
+ )
18367
18568
  success = False
18368
18569
  break
18369
- automation_step_type = automation_step.get("type", "")
18370
18570
  dependent = automation_step.get("dependent", True)
18371
18571
  if not dependent and not result:
18372
18572
  self.logger.warning(
@@ -18384,7 +18584,7 @@ class Payload:
18384
18584
  )
18385
18585
  continue
18386
18586
  elif not first_step:
18387
- self.logger.info(
18587
+ self.logger.debug(
18388
18588
  "Current step -> '%s' is %s on proceeding step.",
18389
18589
  automation_step_type,
18390
18590
  "dependent" if dependent else "not dependent",
@@ -18422,6 +18622,7 @@ class Payload:
18422
18622
  "Cannot log into -> %s. Skipping to next automation step...",
18423
18623
  base_url + page,
18424
18624
  )
18625
+ automation_step["result"] = "failure"
18425
18626
  success = False
18426
18627
  continue
18427
18628
  self.logger.info(
@@ -18436,6 +18637,7 @@ class Payload:
18436
18637
  "Automation step type -> '%s' requires 'page' parameter. Stopping automation.",
18437
18638
  automation_step_type,
18438
18639
  )
18640
+ automation_step["result"] = "failure"
18439
18641
  success = False
18440
18642
  break
18441
18643
  volume = automation_step.get("volume", OTCS.VOLUME_TYPE_ENTERPRISE_WORKSPACE)
@@ -18454,6 +18656,7 @@ class Payload:
18454
18656
  automation_type,
18455
18657
  name,
18456
18658
  )
18659
+ automation_step["result"] = "failure"
18457
18660
  success = False
18458
18661
  continue
18459
18662
  self.logger.info(
@@ -18479,6 +18682,7 @@ class Payload:
18479
18682
  "Cannot load page -> %s. Skipping this step...",
18480
18683
  page,
18481
18684
  )
18685
+ automation_step["result"] = "failure"
18482
18686
  success = False
18483
18687
  continue
18484
18688
  self.logger.info(
@@ -18494,17 +18698,22 @@ class Payload:
18494
18698
  "Automation step type -> '%s' requires 'selector' parameter. Stopping automation.",
18495
18699
  automation_step_type,
18496
18700
  )
18701
+ automation_step["result"] = "failure"
18497
18702
  success = False
18498
18703
  break
18499
18704
  # We keep the deprecated "find" syntax supported (for now)
18500
18705
  selector_type = automation_step.get("selector_type", automation_step.get("find", "id"))
18501
18706
  show_error = automation_step.get("show_error", True)
18707
+ # Do we navigate away from the current page with the click?
18502
18708
  navigation = automation_step.get("navigation", False)
18709
+ # Do we open a new browser (popup) window with the click?
18710
+ popup_window = automation_step.get("popup_window", False)
18711
+ # De we close the current (popup) window with the click?
18712
+ close_window = automation_step.get("close_window", False)
18713
+ # Do we have a 'desired' state for clicking a checkbox?
18503
18714
  checkbox_state = automation_step.get("checkbox_state", None)
18504
- # Do we have a step-specific wait mechanism? If not, we pass None
18505
- # then the browser automation will take the default configured for
18506
- # the whole browser automation (see BrowserAutomation() constructor called above):
18507
18715
  wait_until = automation_step.get("wait_until", None)
18716
+ wait_time = automation_step.get("wait_time", 0.0)
18508
18717
  role_type = automation_step.get("role_type", None)
18509
18718
  result = browser_automation_object.find_elem_and_click(
18510
18719
  selector=selector,
@@ -18512,7 +18721,10 @@ class Payload:
18512
18721
  role_type=role_type,
18513
18722
  desired_checkbox_state=checkbox_state,
18514
18723
  is_navigation_trigger=navigation,
18724
+ is_popup_trigger=popup_window,
18725
+ is_page_close_trigger=close_window,
18515
18726
  wait_until=wait_until,
18727
+ wait_time=wait_time,
18516
18728
  show_error=show_error,
18517
18729
  )
18518
18730
  if not result:
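
Combining the step options read in the hunks above, a single click step inside the automations list might look like the following. Only keys that actually appear in the diff are used; the step type name and the selector values are assumptions:

    click_step = {
        "type": "click_elem",              # the exact type name of click steps is not visible in this hunk
        "selector": "#admin-save-button",  # element to click (value illustrative)
        "selector_type": "css",            # value illustrative; defaults to "id" when omitted
        "role_type": None,
        "navigation": False,      # does the click navigate away from the current page?
        "popup_window": False,    # new: does the click open a new (popup) browser window?
        "close_window": False,    # new: does the click close the current (popup) window?
        "checkbox_state": None,   # desired state if the element is a checkbox
        "wait_until": None,       # per-step override of the page-load strategy
        "wait_time": 0.0,         # new: per-step wait time; 0.0 is the default in the hunk
        "show_error": True,       # treat a failed click as an error
    }
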
@@ -18521,15 +18733,17 @@ class Payload:
18521
18733
  )
18522
18734
  if show_error:
18523
18735
  self.logger.error(message)
18736
+ automation_step["result"] = "failure"
18524
18737
  success = False
18525
18738
  else:
18526
18739
  self.logger.warning(message)
18527
18740
  continue
18528
18741
  self.logger.info(
18529
- "Successfully clicked %s element selected by -> '%s' (%s)",
18742
+ "Successfully clicked %s element selected by -> '%s' (%s%s)",
18530
18743
  "navigational" if navigation else "non-navigational",
18531
18744
  selector,
18532
- selector_type,
18745
+ "selector type -> '{}'".format(selector_type),
18746
+ ", role type -> '{}'".format(role_type) if role_type else "",
18533
18747
  )
18534
18748
  case "set_elem":
18535
18749
  # We keep the deprecated "elem" syntax supported (for now)
@@ -18539,6 +18753,7 @@ class Payload:
18539
18753
  "Automation step type -> '%s' requires 'selector' parameter. Stopping automation.",
18540
18754
  automation_step_type,
18541
18755
  )
18756
+ automation_step["result"] = "failure"
18542
18757
  success = False
18543
18758
  break
18544
18759
  # We keep the deprecated "find" syntax supported (for now)
@@ -18552,11 +18767,13 @@ class Payload:
18552
18767
  selector,
18553
18768
  selector_type,
18554
18769
  )
18770
+ automation_step["result"] = "failure"
18555
18771
  success = False
18556
18772
  break
18557
18773
  # we also support replacing placeholders that are
18558
18774
  # enclosed in double % characters like %%OTCS_RESOURCE_ID%%:
18559
- value = self.replace_placeholders(value)
18775
+ if isinstance(value, str):
18776
+ value = self.replace_placeholders(value)
18560
18777
  show_error = automation_step.get("show_error", True)
18561
18778
  result = browser_automation_object.find_elem_and_set(
18562
18779
  selector=selector,
@@ -18566,19 +18783,24 @@ class Payload:
18566
18783
  show_error=show_error,
18567
18784
  )
18568
18785
  if not result:
18569
- message = "Cannot set element selected by -> '{}' ({}) to value -> '{}'. Skipping this step...".format(
18570
- selector, selector_type, value
18786
+ message = "Cannot set element selected by -> '{}' ({}{}) to value -> '{}'. Skipping this step...".format(
18787
+ selector,
18788
+ "selector type -> '{}'".format(selector_type),
18789
+ ", role type -> '{}'".format(role_type) if role_type else "",
18790
+ value,
18571
18791
  )
18572
18792
  if show_error:
18573
18793
  self.logger.error(message)
18794
+ automation_step["result"] = "failure"
18574
18795
  success = False
18575
18796
  else:
18576
18797
  self.logger.warning(message)
18577
18798
  continue
18578
18799
  self.logger.info(
18579
- "Successfully set element selected by -> '%s' (%s) to value -> '%s'.",
18800
+ "Successfully set element selected by -> '%s' (%s%s) to value -> '%s'.",
18580
18801
  selector,
18581
- selector_type,
18802
+ "selector type -> '{}'".format(selector_type),
18803
+ ", role type -> '{}'".format(role_type) if role_type else "",
18582
18804
  value,
18583
18805
  )
18584
18806
  case "check_elem":
@@ -18589,6 +18811,7 @@ class Payload:
18589
18811
  "Automation step type -> '%s' requires 'selector' parameter. Stopping automation.",
18590
18812
  automation_step_type,
18591
18813
  )
18814
+ automation_step["result"] = "failure"
18592
18815
  success = False
18593
18816
  break
18594
18817
  # We keep the deprecated "find" syntax supported (for now)
@@ -18613,20 +18836,33 @@ class Payload:
18613
18836
  substring=substring,
18614
18837
  min_count=min_count,
18615
18838
  wait_time=wait_time, # time to wait before the check is actually done
18839
+ show_error=not want_exist, # not finding an element that we do not want to find is not an error
18616
18840
  )
18617
18841
  # Check if we didn't get what we want:
18618
18842
  if (not result and want_exist) or (result and not want_exist):
18619
18843
  self.logger.error(
18620
18844
  "%s %s%s%s on current page. Test failed.%s",
18621
- "Cannot find" if not result else "Found",
18622
- "{} elements with selector -> '{}' ({})".format(min_count, selector, selector_type)
18623
- if min_count > 1
18624
- else "an element with selector -> '{}' ({})".format(selector, selector_type),
18845
+ "Cannot find" if not result and want_exist else "Found",
18846
+ "{} elements with selector -> '{}' ({}{})".format(
18847
+ min_count if want_exist else count,
18848
+ selector,
18849
+ "selector type -> '{}'".format(selector_type),
18850
+ ", role type -> '{}'".format(role_type) if role_type else "",
18851
+ )
18852
+ if (min_count > 1 and want_exist) or (count > 1 and not want_exist)
18853
+ else "an element with selector -> '{}' ({}{})".format(
18854
+ selector,
18855
+ "selector type -> '{}'".format(selector_type),
18856
+ ", role type -> '{}'".format(role_type) if role_type else "",
18857
+ ),
18625
18858
  " with {}value -> '{}'".format("substring-" if substring else "", value)
18626
18859
  if value
18627
18860
  else "",
18628
18861
  " in attribute -> '{}'".format(attribute) if attribute else "",
18629
- " Found {}{} occurences.".format(count, " undesirable" if not want_exist else ""),
18862
+ " Found {}{} occurences.".format(
18863
+ count,
18864
+ " undesirable" if not want_exist else " from a minimum of {}".format(min_count),
18865
+ ),
18630
18866
  )
18631
18867
  success = False
18632
18868
  continue
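The logging call above packs the whole diagnostic into nested conditional expressions. As a readability aid only, the same message construction is restated below as a standalone function; it is not part of pyxecm, and the sample call at the end uses invented values.

def build_check_elem_message(result, want_exist, count, min_count, selector,
                             selector_type, role_type, value, substring, attribute):
    """Restate the nested conditional formatting above as a plain function (illustrative only)."""
    verb = "Cannot find" if (not result and want_exist) else "Found"
    selector_info = "selector type -> '{}'".format(selector_type)
    if role_type:
        selector_info += ", role type -> '{}'".format(role_type)
    if (min_count > 1 and want_exist) or (count > 1 and not want_exist):
        what = "{} elements with selector -> '{}' ({})".format(
            min_count if want_exist else count, selector, selector_info
        )
    else:
        what = "an element with selector -> '{}' ({})".format(selector, selector_info)
    value_info = (
        " with {}value -> '{}'".format("substring-" if substring else "", value) if value else ""
    )
    attribute_info = " in attribute -> '{}'".format(attribute) if attribute else ""
    found_info = " Found {}{} occurrences.".format(
        count, " undesirable" if not want_exist else " from a minimum of {}".format(min_count)
    )
    return "{} {}{}{} on current page. Test failed.{}".format(
        verb, what, value_info, attribute_info, found_info
    )

print(build_check_elem_message(False, True, 0, 1, "div.result", "css", None, "Done", True, None))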
@@ -18646,12 +18882,20 @@ class Payload:
18646
18882
  automation_step_type,
18647
18883
  automation_type.lower(),
18648
18884
  )
18885
+ automation_step["result"] = "failure"
18649
18886
  success = False
18650
18887
  break
18651
18888
  # end match automation_step_type:
18652
18889
  first_step = False
18653
18890
  # end for automation_step in automation_steps:
18891
+
18892
+ # Clean up the session and remove the reference to the object:
18654
18893
  browser_automation_object.end_session()
18894
+ browser_automation_object = None
18895
+
18896
+ browser_automation["result"] = (
18897
+ "failure" if any(step.get("result", "success") == "failure" for step in automation_steps) else "success"
18898
+ )
18655
18899
  # end for browser_automation in browser_automations:
18656
18900
 
18657
18901
  if check_status:
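The per-step "result" markers added throughout this hunk roll up into a single status per browser automation at the end of the step loop. A minimal standalone illustration of that roll-up pattern (the sample step data is invented for the example):

# Each failed step records result = "failure"; a missing key counts as success.
automation_steps = [
    {"selector": "a#login", "result": "success"},
    {"selector": "input#user"},                      # no "result" key -> treated as success
    {"selector": "button#save", "result": "failure"},
]

overall = (
    "failure"
    if any(step.get("result", "success") == "failure" for step in automation_steps)
    else "success"
)
print(overall)  # failure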
@@ -21630,14 +21874,9 @@ class Payload:
21630
21874
  )
21631
21875
  result["skipped_counter"] += 1
21632
21876
  continue
21633
- # Workspace names for sure are not allowed to have ":":
21634
- workspace_name = workspace_name.replace(":", "")
21635
- # Workspace names for sure should not have leading or trailing spaces:
21636
- workspace_name = workspace_name.strip()
21637
- # Truncate the workspace name to 254 characters which is the maximum
21638
- # allowed length in Content Server
21639
- if len(workspace_name) > 254:
21640
- workspace_name = workspace_name[:254]
21877
+
21878
+ # Cleanse the workspace name (allowed characters, maximum length):
21879
+ workspace_name = OTCS.cleanse_item_name(workspace_name)
21641
21880
 
21642
21881
  # Check if workspace has been created before (either in this run
21643
21882
  # or in a former run of the customizer):
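The inline cleansing removed here (and in the later hunks for documents, items, and classifications) is replaced by a single OTCS.cleanse_item_name() call. Based only on the logic that was removed, a minimal sketch of such a helper could look like the following; the real pyxecm implementation may handle additional characters or rules.

def cleanse_item_name_sketch(name, max_length=254):
    """Sketch of an item-name cleanser based on the removed inline logic.

    The actual OTCS.cleanse_item_name() may differ; this only reproduces what
    the deleted lines did: drop ':', strip whitespace, and truncate to the
    Content Server maximum name length.
    """
    name = name.replace(":", "")   # ':' is not allowed in item names
    name = name.strip()            # no leading / trailing spaces
    return name[:max_length]       # Content Server limits names to 254 characters

print(cleanse_item_name_sketch("  Contract: ACME  "))  # "Contract ACME"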
@@ -22053,7 +22292,7 @@ class Payload:
22053
22292
  if response["results"]:
22054
22293
  # We add the suffix with the key which should be unique:
22055
22294
  self.logger.warning(
22056
- "Workspace with name -> '%s' does already exist in folder with ID -> %s and we need to handle the name clash by using name -> '%s'",
22295
+ "Workspace -> '%s' does already exist in folder with ID -> %s and we need to handle the name clash by using name -> '%s'",
22057
22296
  workspace_name,
22058
22297
  parent_id,
22059
22298
  workspace_name + " (" + key + ")",
@@ -22534,8 +22773,8 @@ class Payload:
22534
22773
  # If not, we load the data source on the fly:
22535
22774
  data_source_data: Data = data_source.get("data")
22536
22775
  if not data_source_data:
22537
- self.logger.warning(
22538
- "Lookup data source -> '%s' has no data yet. Trying to reload...",
22776
+ self.logger.info(
22777
+ "Lookup data source -> '%s' has no data yet. Reloading...",
22539
22778
  data_source_name,
22540
22779
  )
22541
22780
  data_source_data = self.process_bulk_datasource(
@@ -24814,14 +25053,8 @@ class Payload:
24814
25053
  # We keep success = True as this is a data problem and not a config problem!
24815
25054
  return None, success
24816
25055
 
24817
- # Workspace names for sure are not allowed to have ":":
24818
- workspace_name = workspace_name.replace(":", "")
24819
- # Workspace names for sure should not have leading or trailing spaces:
24820
- workspace_name = workspace_name.strip()
24821
- # Truncate the workspace name to 254 characters which is
24822
- # the maximum allowed length in Content Server
24823
- if len(workspace_name) > 254:
24824
- workspace_name = workspace_name[:254]
25056
+ # Cleanse the workspace name (allowed characters, maximum length):
25057
+ workspace_name = OTCS.cleanse_item_name(workspace_name)
24825
25058
 
24826
25059
  # Check if all data conditions to create the workspace are met
24827
25060
  conditions = workspace.get("conditions")
@@ -25151,7 +25384,7 @@ class Payload:
25151
25384
 
25152
25385
  # Create Business Relationship between workspace and sub-workspace:
25153
25386
  if workspace_id and sub_workspace_id:
25154
- # Check if workspace relationship does already exist in Extended ECM
25387
+ # Check if workspace relationship does already exist in OTCS
25155
25388
  # (this is an additional safety measure to avoid errors):
25156
25389
  response = self._otcs_frontend.get_workspace_relationships(
25157
25390
  workspace_id=workspace_id,
@@ -25590,14 +25823,8 @@ class Payload:
25590
25823
  result["skipped_counter"] += 1
25591
25824
  continue
25592
25825
 
25593
- # Document names for sure are not allowed to have ":":
25594
- document_name = document_name.replace(":", "")
25595
- # Document names for sure should not have leading or trailing spaces:
25596
- document_name = document_name.strip()
25597
- # Truncate the document name to 254 characters which is
25598
- # the maximum allowed length in Content Server
25599
- if len(document_name) > 254:
25600
- document_name = document_name[:254]
25826
+ # Cleanse the document name (allowed characters, maximum length):
25827
+ document_name = OTCS.cleanse_item_name(document_name)
25601
25828
 
25602
25829
  download_name = ""
25603
25830
  if download_name_field:
@@ -25835,6 +26062,8 @@ class Payload:
25835
26062
  att_name,
25836
26063
  parent_id,
25837
26064
  )
26065
+ # Keep track that we need to handle a name clash, because based on the key
26066
+ # the document should not exist.
25838
26067
  handle_name_clash = True
25839
26068
  else:
25840
26069
  self.logger.error(
@@ -25870,7 +26099,7 @@ class Payload:
25870
26099
  else:
25871
26100
  # Case 4: no key given + name not found = item does not exist
25872
26101
  self.logger.info(
25873
- "No existing document with name -> '%s' in parent with ID -> %s",
26102
+ "Cannot find document -> '%s' in parent with ID -> %s",
25874
26103
  document_name,
25875
26104
  parent_id,
25876
26105
  )
@@ -25937,7 +26166,7 @@ class Payload:
25937
26166
 
25938
26167
  # We add the suffix with the key which should be unique:
25939
26168
  self.logger.warning(
25940
- "Document with name -> '%s' does already exist in workspace folder with ID -> %s and we need to handle the name clash and use name -> '%s'",
26169
+ "Document -> '%s' does already exist in workspace folder with ID -> %s and we need to handle the name clash and use name -> '%s'",
25941
26170
  document_name,
25942
26171
  parent_id,
25943
26172
  document_name + " (" + key + ")",
@@ -25956,11 +26185,6 @@ class Payload:
25956
26185
  categories=categories,
25957
26186
  replacements=replacements,
25958
26187
  )
25959
- # document_category_data = self.prepare_item_create_form(
25960
- # parent_id=parent_id,
25961
- # categories=worker_categories,
25962
- # subtype=self._otcs_frontend.ITEM_TYPE_DOCUMENT,
25963
- # )
25964
26188
  document_category_data = self.prepare_category_data(
25965
26189
  categories_payload=worker_categories,
25966
26190
  source_node_id=parent_id,
@@ -26062,7 +26286,8 @@ class Payload:
26062
26286
  )
26063
26287
  ):
26064
26288
  # get the specific update operations given in the payload
26065
- # if not specified we do all 4 update operations (name, description, categories and version)
26289
+ # if not specified we do the following update operations.
26290
+ # The 'purge' operation needs to be specified explicitly.
26066
26291
  update_operations = bulk_document.get(
26067
26292
  "update_operations",
26068
26293
  ["name", "description", "categories", "nickname", "version"],
@@ -26129,6 +26354,21 @@ class Payload:
26129
26354
  )
26130
26355
  success = False
26131
26356
  continue
26357
+ if "purge" in update_operations:
26358
+ max_versions = bulk_document.get("max_versions", 1)
26359
+ response = self._otcs_frontend.purge_document_versions(
26360
+ node_id=document_id, versions_to_keep=max_versions
26361
+ )
26362
+ if not response:
26363
+ self.logger.error(
26364
+ "Failed to purge versions of document -> '%s' (%s) to %d version%s!",
26365
+ (document_old_name if document_old_name else document_name),
26366
+ document_id,
26367
+ max_versions,
26368
+ "s" if max_versions > 1 else "",
26369
+ )
26370
+ success = False
26371
+ continue
26132
26372
  response = self._otcs_frontend.update_item(
26133
26373
  node_id=document_id,
26134
26374
  parent_id=None, # None = do not move item
@@ -26874,14 +27114,8 @@ class Payload:
26874
27114
  result["skipped_counter"] += 1
26875
27115
  continue
26876
27116
 
26877
- # Document names for sure are not allowed to have ":":
26878
- item_name = item_name.replace(":", "")
26879
- # Document names for sure should not have leading or trailing spaces:
26880
- item_name = item_name.strip()
26881
- # Truncate the item name to 254 characters which is
26882
- # the maximum allowed length in Content Server
26883
- if len(item_name) > 254:
26884
- item_name = item_name[:254]
27117
+ # Cleanse the item name (allowed characters, maximum length):
27118
+ item_name = OTCS.cleanse_item_name(item_name)
26885
27119
 
26886
27120
  # This is an optimization. We check if the item was created
26887
27121
  # in a former run. This helps if the customizer is re-run:
@@ -27113,7 +27347,8 @@ class Payload:
27113
27347
  )
27114
27348
  success = False
27115
27349
  continue
27116
- # end if key
27350
+ # end if key_attribute:
27351
+ # end if key:
27117
27352
  else:
27118
27353
  # If we haven't a key we try by parent + name
27119
27354
  response = self._otcs_frontend.get_node_by_parent_and_name(
@@ -27140,7 +27375,7 @@ class Payload:
27140
27375
  else:
27141
27376
  # Case 4: no key given + name not found = item does not exist
27142
27377
  self.logger.info(
27143
- "No existing item with name -> '%s' in parent with ID -> %s",
27378
+ "No existing item -> '%s' in parent with ID -> %s",
27144
27379
  item_name,
27145
27380
  parent_id,
27146
27381
  )
@@ -27207,7 +27442,7 @@ class Payload:
27207
27442
 
27208
27443
  # We add the suffix with the key which should be unique:
27209
27444
  self.logger.warning(
27210
- "Item with name -> '%s' does already exist in workspace folder with ID -> %s and we need to handle the name clash and use name -> '%s'",
27445
+ "Item -> '%s' does already exist in workspace folder with ID -> %s and we need to handle the name clash and use name -> '%s'",
27211
27446
  item_name,
27212
27447
  parent_id,
27213
27448
  item_name + " (" + key + ")",
@@ -27499,111 +27734,116 @@ class Payload:
27499
27734
 
27500
27735
  success: bool = True
27501
27736
 
27502
- self._avts.authenticate()
27503
-
27504
- for payload_repo in self._avts_repositories:
27505
- if not payload_repo.get("enabled", True):
27506
- continue
27507
-
27508
- repository = self._avts.get_repo_by_name(name=payload_repo["name"])
27509
-
27510
- if repository is None:
27511
- self.logger.info(
27512
- "Repository -> '%s' does not exist, creating it...",
27513
- payload_repo["name"],
27514
- )
27737
+ token = self._avts.authenticate()
27738
+ if not token:
27739
+ self.logger.error("Cannot authenticate at Aviator Search!")
27740
+ success = False
27741
+ else:
27742
+ for payload_repo in self._avts_repositories:
27743
+ if not payload_repo.get("enabled", True):
27744
+ continue
27515
27745
 
27516
- if payload_repo.get("type", "Extended ECM") == "Extended ECM":
27517
- repository = self._avts.create_extended_ecm_repo(
27518
- name=payload_repo["name"],
27519
- username=payload_repo["username"],
27520
- password=payload_repo["password"],
27521
- otcs_url=payload_repo["otcs_url"],
27522
- otcs_api_url=payload_repo["otcs_api_url"],
27523
- node_id=int(payload_repo["node_id"]),
27524
- )
27525
-
27526
- elif payload_repo["type"] == "Documentum":
27527
- self.logger.warning("Not yet implemented")
27528
- elif payload_repo["type"] == "MSTeams":
27529
- repository = self._avts.create_msteams_repo(
27530
- name=payload_repo["name"],
27531
- client_id=payload_repo["client_id"],
27532
- tenant_id=payload_repo["tenant_id"],
27533
- certificate_file=payload_repo["certificate_file"],
27534
- certificate_password=payload_repo["certificate_password"],
27535
- index_attachments=payload_repo.get("index_attachments", True),
27536
- index_call_recordings=payload_repo.get(
27537
- "index_call_recordings",
27538
- True,
27539
- ),
27540
- index_message_replies=payload_repo.get(
27541
- "index_message_replies",
27542
- True,
27543
- ),
27544
- index_user_chats=payload_repo.get("index_user_chats", True),
27545
- )
27546
- elif payload_repo["type"] == "SharePoint":
27547
- repository = self._avts.create_sharepoint_repo(
27548
- name=payload_repo["name"],
27549
- client_id=payload_repo["client_id"],
27550
- tenant_id=payload_repo["tenant_id"],
27551
- certificate_file=payload_repo["certificate_file"],
27552
- certificate_password=payload_repo["certificate_password"],
27553
- sharepoint_url=payload_repo["sharepoint_url"],
27554
- sharepoint_url_type=payload_repo["sharepoint_url_type"],
27555
- sharepoint_mysite_url=payload_repo["sharepoint_mysite_url"],
27556
- sharepoint_admin_url=payload_repo["sharepoint_admin_url"],
27557
- index_user_profiles=payload_repo.get(
27558
- "index_message_replies",
27559
- False,
27560
- ),
27561
- )
27562
- else:
27563
- self.logger.error(
27564
- "Invalid repository type -> '%s' specified. Valid values are: Extended ECM, Documentum, MSTeams, SharePoint",
27565
- payload_repo["type"],
27566
- )
27567
- success = False
27568
- break
27746
+ repository = self._avts.get_repo_by_name(name=payload_repo["name"])
27569
27747
 
27570
27748
  if repository is None:
27571
- self.logger.error(
27572
- "Creation of Aviator Search repository -> '%s' failed!",
27573
- payload_repo["name"],
27574
- )
27575
- success = False
27576
- else:
27577
27749
  self.logger.info(
27578
- "Successfully created Aviator Search repository -> '%s'",
27750
+ "Repository -> '%s' does not exist, creating it...",
27579
27751
  payload_repo["name"],
27580
27752
  )
27581
- self.logger.debug("%s", repository)
27582
27753
 
27583
- else:
27584
- self.logger.info(
27585
- "Aviator Search repository -> '%s' already exists.",
27586
- payload_repo["name"],
27587
- )
27754
+ if payload_repo.get("type", "Extended ECM") == "Extended ECM":
27755
+ repository = self._avts.create_extended_ecm_repo(
27756
+ name=payload_repo["name"],
27757
+ username=payload_repo["username"],
27758
+ password=payload_repo["password"],
27759
+ otcs_url=payload_repo["otcs_url"],
27760
+ otcs_api_url=payload_repo["otcs_api_url"],
27761
+ node_id=int(payload_repo["node_id"]),
27762
+ )
27588
27763
 
27589
- # Start Crawling
27590
- start_crawling = payload_repo.get("start", False)
27764
+ elif payload_repo["type"] == "Documentum":
27765
+ self.logger.warning("Not yet implemented")
27766
+ elif payload_repo["type"] == "MSTeams":
27767
+ repository = self._avts.create_msteams_repo(
27768
+ name=payload_repo["name"],
27769
+ client_id=payload_repo["client_id"],
27770
+ tenant_id=payload_repo["tenant_id"],
27771
+ certificate_file=payload_repo["certificate_file"],
27772
+ certificate_password=payload_repo["certificate_password"],
27773
+ index_attachments=payload_repo.get("index_attachments", True),
27774
+ index_call_recordings=payload_repo.get(
27775
+ "index_call_recordings",
27776
+ True,
27777
+ ),
27778
+ index_message_replies=payload_repo.get(
27779
+ "index_message_replies",
27780
+ True,
27781
+ ),
27782
+ index_user_chats=payload_repo.get("index_user_chats", True),
27783
+ )
27784
+ elif payload_repo["type"] == "SharePoint":
27785
+ repository = self._avts.create_sharepoint_repo(
27786
+ name=payload_repo["name"],
27787
+ client_id=payload_repo["client_id"],
27788
+ tenant_id=payload_repo["tenant_id"],
27789
+ certificate_file=payload_repo["certificate_file"],
27790
+ certificate_password=payload_repo["certificate_password"],
27791
+ sharepoint_url=payload_repo["sharepoint_url"],
27792
+ sharepoint_url_type=payload_repo["sharepoint_url_type"],
27793
+ sharepoint_mysite_url=payload_repo["sharepoint_mysite_url"],
27794
+ sharepoint_admin_url=payload_repo["sharepoint_admin_url"],
27795
+ index_user_profiles=payload_repo.get(
27796
+ "index_message_replies",
27797
+ False,
27798
+ ),
27799
+ )
27800
+ else:
27801
+ self.logger.error(
27802
+ "Invalid repository type -> '%s' specified. Valid values are: Extended ECM, Documentum, MSTeams, SharePoint",
27803
+ payload_repo["type"],
27804
+ )
27805
+ success = False
27806
+ break
27591
27807
 
27592
- if repository is not None and start_crawling:
27593
- response = self._avts.start_crawling(repo_name=payload_repo["name"])
27808
+ if repository is None:
27809
+ self.logger.error(
27810
+ "Creation of Aviator Search repository -> '%s' failed!",
27811
+ payload_repo["name"],
27812
+ )
27813
+ success = False
27814
+ else:
27815
+ self.logger.info(
27816
+ "Successfully created Aviator Search repository -> '%s'",
27817
+ payload_repo["name"],
27818
+ )
27819
+ self.logger.debug("%s", repository)
27594
27820
 
27595
- if response is None:
27596
- self.logger.error(
27597
- "Aviator Search start crawling on repository failed -> '%s'",
27598
- payload_repo["name"],
27599
- )
27600
- success = False
27601
27821
  else:
27602
27822
  self.logger.info(
27603
- "Aviator Search crawling started on repository -> '%s'",
27823
+ "Aviator Search repository -> '%s' already exists.",
27604
27824
  payload_repo["name"],
27605
27825
  )
27606
- self.logger.debug("%s", response)
27826
+
27827
+ # Start Crawling
27828
+ start_crawling = payload_repo.get("start", False)
27829
+
27830
+ if repository is not None and start_crawling:
27831
+ response = self._avts.start_crawling(repo_name=payload_repo["name"])
27832
+
27833
+ if response is None:
27834
+ self.logger.error(
27835
+ "Aviator Search start crawling on repository failed -> '%s'",
27836
+ payload_repo["name"],
27837
+ )
27838
+ success = False
27839
+ else:
27840
+ self.logger.info(
27841
+ "Aviator Search crawling started on repository -> '%s'",
27842
+ payload_repo["name"],
27843
+ )
27844
+ self.logger.debug("%s", response)
27845
+ # end for payload_repo in self._avts_repositories:
27846
+ # end else:
27607
27847
 
27608
27848
  self.write_status_file(
27609
27849
  success=success,
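The restructured block above adds the authentication check and re-indents the existing repository handling. As a hedged illustration of the data it consumes, here is the shape of the repository definitions as Python data, using only keys that are read in the code above; all values are placeholders, and the name of the surrounding payload section is not shown in this hunk.

# Illustrative Aviator Search repository definitions (keys taken from the code above).
avts_repositories = [
    {
        "enabled": True,
        "name": "Enterprise",
        "type": "Extended ECM",      # one of: Extended ECM, Documentum, MSTeams, SharePoint
        "username": "admin",
        "password": "<secret>",
        "otcs_url": "https://otcs.example.com",       # placeholder URL
        "otcs_api_url": "https://otcs.example.com/api",
        "node_id": 2000,
        "start": True,               # start crawling after creation
    },
    {
        "enabled": False,
        "name": "Teams",
        "type": "MSTeams",
        "client_id": "<client id>",
        "tenant_id": "<tenant id>",
        "certificate_file": "/certs/teams.pfx",       # placeholder path
        "certificate_password": "<secret>",
        "index_attachments": True,
        "index_call_recordings": True,
        "index_message_replies": True,
        "index_user_chats": True,
    },
]

for repo in avts_repositories:
    if repo.get("enabled", True):
        print("Would create repository", repo["name"], "of type", repo.get("type", "Extended ECM"))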
@@ -27646,26 +27886,28 @@ class Payload:
27646
27886
 
27647
27887
  success: bool = True
27648
27888
 
27649
- self._avts.authenticate()
27650
-
27651
27889
  if not self._avts_questions.get("enabled", True):
27652
27890
  self.logger.info(
27653
27891
  "Payload section -> '%s' is not enabled. Skipping...",
27654
27892
  section_name,
27655
27893
  )
27656
27894
  return True
27657
-
27658
27895
  questions = self._avts_questions.get("questions", [])
27659
27896
  self.logger.info("Sample questions -> %s", questions)
27660
27897
 
27661
- response = self._avts.set_questions(questions=questions)
27662
-
27663
- if response is None:
27664
- self.logger.error("Aviator Search setting questions failed")
27898
+ token = self._avts.authenticate()
27899
+ if not token:
27900
+ self.logger.error("Cannot authenticate at Aviator Search!")
27665
27901
  success = False
27666
27902
  else:
27667
- self.logger.info("Aviator Search questions set succesfully")
27668
- self.logger.debug("%s", response)
27903
+ response = self._avts.set_questions(questions=questions)
27904
+
27905
+ if response is None:
27906
+ self.logger.error("Aviator Search setting questions failed")
27907
+ success = False
27908
+ else:
27909
+ self.logger.info("Aviator Search questions set succesfully")
27910
+ self.logger.debug("%s", response)
27669
27911
 
27670
27912
  self.write_status_file(
27671
27913
  success=success,
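For completeness, the sample-questions section that this method consumes only needs the two keys read above ("enabled" and "questions"); a minimal illustration with invented question text:

# Illustrative Aviator Search sample-questions payload (keys taken from the code above).
avts_questions = {
    "enabled": True,
    "questions": [
        "What were the key terms of the latest supplier contract?",
        "Which workspaces were updated this week?",
    ],
}

if avts_questions.get("enabled", True):
    print(avts_questions.get("questions", []))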
@@ -28701,14 +28943,8 @@ class Payload:
28701
28943
  result["skipped_counter"] += 1
28702
28944
  continue
28703
28945
 
28704
- # Classification names for sure are not allowed to have ":":
28705
- classification_name = classification_name.replace(":", "")
28706
- # Classification names for sure should not have leading or trailing spaces:
28707
- classification_name = classification_name.strip()
28708
- # Truncate the classification name to 254 characters which is the maximum
28709
- # allowed length in Content Server
28710
- if len(classification_name) > 254:
28711
- classification_name = classification_name[:254]
28946
+ # Cleanse the classification name (allowed characters, maximum length):
28947
+ classification_name = OTCS.cleanse_item_name(classification_name)
28712
28948
 
28713
28949
  # Check if classification has been created before (either in this run
28714
28950
  # or in a former run of the customizer):
@@ -29126,7 +29362,7 @@ class Payload:
29126
29362
  if response and response["results"]:
29127
29363
  # We add the suffix with the key which should be unique:
29128
29364
  self.logger.warning(
29129
- "Classification with name -> '%s' does already exist in folder with ID -> %s and we need to handle the name clash by using name -> '%s'",
29365
+ "Classification -> '%s' does already exist in folder with ID -> %s and we need to handle the name clash by using name -> '%s'",
29130
29366
  classification_name,
29131
29367
  parent_id,
29132
29368
  classification_name + " (" + key + ")",
@@ -29457,3 +29693,160 @@ class Payload:
29457
29693
  return bool(response)
29458
29694
 
29459
29695
  # end method definition
29696
+
29697
+ def process_nifi_flows(self, section_name: str = "nifi") -> bool:
29698
+ """Process Knowledge Discovery Nifi flows in payload and create them in Nifi.
29699
+
29700
+ Args:
29701
+ section_name (str, optional):
29702
+ The name of the payload section. It can be overridden
29703
+ for cases where multiple sections of the same type
29704
+ are used (e.g. the "Post" sections).
29705
+ This name is also used for the "success" status
29706
+ files written to the Admin Personal Workspace.
29707
+
29708
+ Returns:
29709
+ bool:
29710
+ True, if payload has been processed without errors, False otherwise
29711
+
29712
+ """
29713
+
29714
+ if not self._nifi_flows:
29715
+ self.logger.info(
29716
+ "Payload section -> '%s' is empty. Skipping...",
29717
+ section_name,
29718
+ )
29719
+ return True
29720
+
29721
+ # If this payload section has been processed successfully before we
29722
+ # can return True and skip processing it once more:
29723
+ if self.check_status_file(payload_section_name=section_name):
29724
+ return True
29725
+
29726
+ success: bool = True
29727
+
29728
+ for nifi_flow in self._nifi_flows:
29729
+ if "name" not in nifi_flow:
29730
+ self.logger.error(
29731
+ "Knowledge Discovery Nifi flow needs a name! Skipping to next Nifi flow...",
29732
+ )
29733
+ success = False
29734
+ continue
29735
+ name = nifi_flow["name"]
29736
+
29737
+ # Check if element has been disabled in payload (enabled = false).
29738
+ # In this case we skip the element:
29739
+ if not nifi_flow.get("enabled", True):
29740
+ self.logger.info(
29741
+ "Payload for Knowledge Discovery Nifi flow -> '%s' is disabled. Skipping...",
29742
+ name,
29743
+ )
29744
+ continue
29745
+
29746
+ if "file" not in nifi_flow:
29747
+ self.logger.error(
29748
+ "Knowledge Discovery Nifi flow -> '%s' needs a file! Skipping to next Nifi flow...", name
29749
+ )
29750
+ success = False
29751
+ continue
29752
+ filename = nifi_flow["file"]
29753
+
29754
+ parameters = nifi_flow.get("parameters", [])
29755
+
29756
+ if not self._otkd:
29757
+ self.logger.error("Knowledge Discovery is not initialized. Stop processing Nifi flows.")
29758
+ success = False
29759
+ break
29760
+
29761
+ # Optional layout positions of the flow:
29762
+ position_x = nifi_flow.get("position_x", 0.0)
29763
+ position_y = nifi_flow.get("position_y", 0.0)
29764
+ start = nifi_flow.get("start", False)
29765
+
29766
+ self.logger.info("Processing Knowledge Discovery Nifi flow -> '%s'...", name)
29767
+
29768
+ existing = self._otkd.get_process_group_by_name(name=name)
29769
+ if existing:
29770
+ self.logger.warning("Nifi flow -> '%s' does already exist. Updating parameters only...", name)
29771
+ # Better not to (re)start existing flows, as this may produce errors.
29772
+ start = False
29773
+ else:
29774
+ response = self._otkd.upload_process_group(
29775
+ file_path=filename, name=name, position_x=position_x, position_y=position_y
29776
+ )
29777
+ if not response:
29778
+ self.logger.error("Failed to upload new Nifi flow -> '%s' for Knowledge Discovery!", name)
29779
+ success = False
29780
+ continue
29781
+ self.logger.info("Sucessfully uploaded new Nifi flow -> '%s' for Knowledge Discovery!", name)
29782
+
29783
+ for parameter in parameters:
29784
+ component = parameter.get("component", None)
29785
+ if not component:
29786
+ self.logger.error("Missing component in parameter of Nifi flow -> '%s'!", name)
29787
+ success = False
29788
+ continue
29789
+ parameter_name = parameter.get("name", None)
29790
+ if not parameter_name:
29791
+ self.logger.error(
29792
+ "Missing name in parameter of Nifi flow -> '%s', component -> '%s'!", name, component
29793
+ )
29794
+ success = False
29795
+ continue
29796
+ parameter_description = parameter.get("description", "")
29797
+ parameter_value = parameter.get("value", None)
29798
+ if not parameter_value:
29799
+ self.logger.error(
29800
+ "Missing value in parameter of Nifi flow -> '%s', component -> '%s'", name, component
29801
+ )
29802
+ success = False
29803
+ continue
29804
+ parameter_sensitive = parameter.get("sensitive", False)
29805
+
29806
+ response = self._otkd.update_parameter(
29807
+ component=component,
29808
+ parameter=parameter_name,
29809
+ value=parameter_value,
29810
+ sensitive=parameter_sensitive,
29811
+ description=parameter_description,
29812
+ )
29813
+ if not response:
29814
+ self.logger.error("Failed to update parameter -> '%s' of Nifi flow -> '%s'!", parameter_name, name)
29815
+ success = False
29816
+ continue
29817
+ self.logger.info(
29818
+ "Successfully updated parameter -> '%s' of component -> '%s' in Nifi flow -> '%s' to value -> '%s'.",
29819
+ parameter_name,
29820
+ component,
29821
+ name,
29822
+ parameter_value if not parameter_sensitive else "<sensitive>",
29823
+ )
29824
+ # end for parameter in parameters:
29825
+ if start:
29826
+ response = self._otkd.start_all_processors(name=name)
29827
+ if response:
29828
+ self.logger.info("Successfully started Nifi flow -> '%s'.", name)
29829
+ else:
29830
+ self.logger.error("Failed to start Nifi flow -> '%s'!", name)
29831
+ success = False
29832
+
29833
+ response = self._otkd.set_controller_services_state(name=name, state="ENABLED")
29834
+ if response:
29835
+ self.logger.info("Successfully enabled Nifi Controller Services for Nifi flow -> '%s'.", name)
29836
+ else:
29837
+ self.logger.error("Failed to enable Nifi Controller Services for Nifi flow -> '%s'!", name)
29838
+ success = False
29839
+
29840
+ else:
29841
+ self.logger.info("Don't (re)start Nifi flow -> '%s'.", name)
29842
+ # end for nifi_flow in self._nifi_flows:
29843
+
29844
+ self.write_status_file(
29845
+ success=success,
29846
+ payload_section_name=section_name,
29847
+ payload_section=self._nifi_flows,
29848
+ )
29849
+
29850
+ return success
29851
+
29852
+ # end method definition
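To make the new process_nifi_flows() method easier to follow, here is a hedged sketch of a Nifi flow definition as Python data. It uses only the keys the method reads (name, enabled, file, parameters with component/name/description/value/sensitive, position_x, position_y, start); the file path, component name, and values are placeholders.

# Illustrative Knowledge Discovery Nifi flow definition (keys taken from the method above).
nifi_flows = [
    {
        "name": "Document Ingestion",
        "enabled": True,
        "file": "/payload/nifi/document_ingestion.json",  # placeholder path
        "position_x": 0.0,
        "position_y": 200.0,
        "start": True,  # start processors and enable controller services after upload
        "parameters": [
            {
                "component": "OTCS Connection",  # placeholder component name
                "name": "otcs_password",
                "description": "Password used by the flow to call OTCS",
                "value": "<secret>",
                "sensitive": True,
            },
        ],
    },
]

for flow in nifi_flows:
    for parameter in flow.get("parameters", []):
        shown = "<sensitive>" if parameter.get("sensitive", False) else parameter.get("value")
        print(flow["name"], "->", parameter["name"], "=", shown)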