pyxecm 2.0.3__py3-none-any.whl → 2.0.4__py3-none-any.whl

This diff shows the changes between two publicly released versions of this package, exactly as they appear in their public registry. It is provided for informational purposes only.


@@ -1063,7 +1063,7 @@ class Payload:
  - sort (list, optional, default = []) - list of fields to sort the data frame by
  - operations (list, optional, default = ["create"])
  - update_operations (list, optional, default = ["name", "description", "categories", "nickname", "version"]) - possible values:
- "name", "description", "categories", "nickname", "version"
+ "name", "description", "categories", "nickname", "version", "purge"
  - name (str, mandatory) - can include placeholder surrounded by {...}
  - name_alt (str, optional, default = None) - can include placeholder surrounded by {...}
  - name_regex (str, optional, default = r"") - regex replacement for document names. The pattern and replacement are separated by pipe character |
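
For illustration, a minimal sketch of a bulk-document payload entry that opts into the new "purge" operation, written as a Python dict. Only update_operations, max_versions and the listed values appear in this diff; the surrounding field names are assumptions.

    # Hypothetical bulk-document payload entry (sketch only):
    bulk_document = {
        "name": "{name}",  # may contain placeholders surrounded by {...}
        "operations": ["create", "update"],
        # "purge" is not part of the default list and must be requested explicitly:
        "update_operations": ["name", "description", "categories", "nickname", "version", "purge"],
        "max_versions": 1,  # number of versions to keep when purging (default per a later hunk)
    }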
@@ -1142,7 +1142,7 @@ class Payload:
  - unique (list, optional, default = []) - list of column names which values should be unique -> deduplication
  - sort (list, optional, default = []) - list of fields to sort the data frame by
  - operations (list, optional, default = ["create"])
- - update_operations (list, optional, default = ["name", "description", "categories", "nickname", "version"]) - possible values:
+ - update_operations (list, optional, default = ["name", "description", "categories", "nickname"]) - possible values:
  "name", "description", "categories", "nickname", "url"
  - name (str, mandatory) - can include placeholder surrounded by {...}
  - name_alt (str, optional, default = None) - can include placeholder surrounded by {...}
@@ -7674,6 +7674,7 @@ class Payload:
  group_names,
  user_department,
  )
+
  # Go through all group names:
  for group_name in group_names:
  # Find the group payload item to the parent group name:
@@ -7773,7 +7774,7 @@ class Payload:
  )
  success = False

- # As M365 groups are flat (not nested) we also add the
+ # As M365 groups are flat (not nested), we also add the
  # user as member to the parent groups of the current group
  # if the parent group is enabled for M365:
  parent_group_names = group.get("parent_groups")
@@ -7884,13 +7885,17 @@ class Payload:
  response = self._m365.follow_sharepoint_site(site_id=group_site_id, user_id=m365_user_id)
  if not response:
  self.logger.warning(
- "User -> '%s' cannot follow SharePoint site -> '%s'. ",
+ "User -> '%s' cannot follow SharePoint site -> '%s'.",
  user["email"],
  group_site_name,
  )
  success = False
- # end for group name
+ # end if group_site_id:
+ # end if group_id:
+ # end if group_name == user_department and user["m365_skus"]:
+ # end for group name in group_names:
  # end for user
+
  self.write_status_file(
  success=success,
  payload_section_name=section_name,
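
The hunk above also has each user auto-follow the departmental SharePoint site and only warns (without aborting the run) when that fails. A minimal sketch of that call pattern, assuming an authenticated M365 client object exposing the follow_sharepoint_site() method seen in the diff:

    import logging

    logger = logging.getLogger("payload")

    def follow_site(m365, site_id: str, user_id: str, user_email: str, site_name: str) -> bool:
        """Let a user follow a SharePoint site; treat a failure as a warning only (sketch)."""
        response = m365.follow_sharepoint_site(site_id=site_id, user_id=user_id)
        if not response:
            # Mirrors the warning in the diff - a failed follow does not abort processing:
            logger.warning("User -> '%s' cannot follow SharePoint site -> '%s'.", user_email, site_name)
            return False
        return True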
@@ -8410,8 +8415,18 @@ class Payload:
  by Scheduled Bots (Jobs) from OTCS via the creation of MS teams
  (each MS Team has a SharePoint site behind it)!

- The are the SharePoint sites for the departmental groups such as "Sales",
+ These are the SharePoint sites for the departmental groups such as "Sales",
  "Procurement", "Enterprise Asset Management", ...
+ Only departmental group that have a top-level folder with the exact same
+ name as the Department are configured.
+
+ For each departmental group:
+ 1. Determine a departmental folder in the Enterprise Workspace
+ 2. Determine the M365 Group
+ 3. Determine the SharePoint Site (based on the M365 group ID)
+ 4. Determine the Page in the SharePoint site
+ 5. Determine or create the SharePoint webpart for the OTCS browser
+ 6. Create URL object pointing to SharePoint site inside top level department folder

  Args:
  section_name (str, optional):
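
The new docstring also states that only departmental groups with an exactly matching top-level folder are processed. A hypothetical, self-contained sketch of that matching rule, with the six documented steps noted as comments (the folder and group names below are invented examples, not pyxecm data):

    # Step 1 of the docstring: only departments whose name exactly matches a
    # top-level folder in the Enterprise Workspace are configured at all.
    top_level_folders = ["Sales", "Procurement", "Enterprise Asset Management", "Archive"]
    departmental_groups = ["Sales", "Procurement", "Human Resources"]

    configured = [group for group in departmental_groups if group in top_level_folders]
    print(configured)  # -> ['Sales', 'Procurement']; 'Human Resources' has no matching folder

    # Steps 2-6 then run per configured department: resolve the M365 group, the
    # SharePoint site behind it, the site page and the OTCS-browser webpart, and
    # finally create a URL object to the site inside the department's top-level folder.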
@@ -8769,7 +8784,7 @@ class Payload:
  # end else

  #
- # 6. Create URL object pointing to SharePoint site inside top level ddepartment folder
+ # 6. Create URL object pointing to SharePoint site inside top level department folder
  #

  item_name = (
@@ -13426,10 +13441,6 @@ class Payload:
  continue

  workspace_id = workspace["id"]
- self.logger.info(
- "Workspace -> '%s' has memberships in payload - establishing...",
- workspace_name,
- )

  workspace_node_id = int(self.determine_workspace_id(workspace=workspace))
  if not workspace_node_id:
@@ -13438,6 +13449,10 @@ class Payload:
  )
  continue

+ self.logger.info(
+ "Workspace -> '%s' (%s) has memberships in payload - establishing...", workspace_name, workspace_node_id
+ )
+
  # now determine the actual node IDs of the workspaces (have been created by process_workspaces()):
  workspace_node = self._otcs.get_node(node_id=workspace_node_id)
  workspace_owner_id = self._otcs.get_result_value(
@@ -13454,21 +13469,29 @@ class Payload:
  )
  if workspace_roles is None:
  self.logger.debug(
- "Workspace with ID -> %s and node Id -> %s has no roles. Skipping to next workspace...",
- workspace_id,
+ "Workspace -> '%s' (%s) has no roles. Skipping to next workspace...",
+ workspace_name,
  workspace_node_id,
  )
  continue

  # We don't want the workspace creator to be in the leader role
  # of automatically created workspaces - this can happen because the
- # creator gets added to the leader role automatically:
- leader_role_id = self._otcs.lookup_result_value(
- response=workspace_roles,
- key="leader",
- value=True,
- return_key="id",
- )
+ # creator gets added to the leader role automatically if
+ # the workspace type advanved configuration setting
+ # "Add the creator of a business workspace to the Lead role" is
+ # enabled:
+ roles_iterator = self._otcs.get_result_values_iterator(response=workspace_roles)
+ for role in roles_iterator:
+ # We can have two leader roles if in a sub-workspaces a leader
+ # roles is inherited from the parent workspace. As we want
+ # don't want to consider leader role of the parent workspace
+ # we check that 'inherited_from_id' is not set:
+ if role["leader"] and role["inherited_from_id"] is None:
+ leader_role_id = role["id"]
+ break
+ else:
+ leader_role_id = None

  if leader_role_id:
  leader_role_name = self._otcs.lookup_result_value(
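
The replacement logic above relies on Python's for/else to pick the first leader role that is not inherited from a parent workspace. A small, self-contained sketch of that pattern with plain dicts standing in for the role records (the field names 'id', 'leader' and 'inherited_from_id' are taken from the diff; the values are invented):

    # Role records as plain dicts, mimicking the fields checked in the diff:
    roles = [
        {"id": 101, "leader": True, "inherited_from_id": 99},    # leader role inherited from the parent workspace
        {"id": 102, "leader": True, "inherited_from_id": None},  # leader role defined on this workspace
        {"id": 103, "leader": False, "inherited_from_id": None},
    ]

    for role in roles:
        # Ignore leader roles inherited from a parent (sub-workspace case):
        if role["leader"] and role["inherited_from_id"] is None:
            leader_role_id = role["id"]
            break
    else:
        # The 'else' of a for-loop runs only when the loop finished without 'break':
        leader_role_id = None

    print(leader_role_id)  # -> 102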
@@ -13493,6 +13516,16 @@ class Payload:
  workspace_name,
  workspace_node_id,
  )
+ else:
+ self.logger.info(
+ "Creator user -> '%s' (%s) is not in leader role -> '%s' (%s) of workspace -> '%s' (%s). No need to remove it.",
+ workspace_owner_name,
+ workspace_owner_id,
+ leader_role_name,
+ leader_role_id,
+ workspace_name,
+ workspace_node_id,
+ )

  self.logger.info(
  "Adding members to workspace -> '%s' (%s) defined in payload...",
@@ -13509,7 +13542,7 @@ class Payload:

  if member_role_name == "": # role name is required
  self.logger.error(
- "Members of workspace -> '%s' is missing the role name.",
+ "Members of workspace -> '%s' is missing the role name in the payload.",
  workspace_name,
  )
  success = False
@@ -13530,6 +13563,23 @@ class Payload:
  )
  success = False
  continue
+ inherited_role_id = self._otcs.lookup_result_value(
+ response=workspace_roles,
+ key="name",
+ value=member_role_name,
+ return_key="inherited_from_id",
+ )
+ if inherited_role_id is not None:
+ self.logger.error(
+ "The role -> '%s' (%s) of workspace -> '%s' (%s) is inherited from role with ID -> %d and members cannot be set in this sub-workspace.",
+ member_role_name,
+ role_id,
+ workspace_name,
+ workspace_node_id,
+ inherited_role_id,
+ )
+ success = False
+ continue
  self.logger.debug(
  "Role -> '%s' has ID -> %s",
  member_role_name,
@@ -13583,20 +13633,22 @@ class Payload:
  )
  if response is None:
  self.logger.error(
- "Failed to add user -> '%s' (%s) as member to role -> '%s' of workspace -> '%s'",
+ "Failed to add user -> '%s' (%s) as member to role -> '%s' of workspace -> '%s' (%s)",
  member_user,
  user_id,
  member_role_name,
  workspace_name,
+ workspace_node_id,
  )
  success = False
  else:
  self.logger.info(
- "Successfully added user -> '%s' (%s) as member to role -> '%s' of workspace -> '%s'",
+ "Successfully added user -> '%s' (%s) as member to role -> '%s' of workspace -> '%s' (%s)",
  member_user,
  user_id,
  member_role_name,
  workspace_name,
+ workspace_node_id,
  )

  # Process groups as workspaces members:
@@ -15824,7 +15876,7 @@ class Payload:
  success = False
  continue
  case self._otcs.ITEM_TYPE_SHORTCUT: # Shortcut
- if original_id == 0:
+ if not original_id:
  self.logger.error(
  "Item -> '%s' has type Shortcut but the original item is not in the payload. Skipping...",
  item_name,
@@ -18481,7 +18533,7 @@ class Payload:
  user_password=password,
  automation_name=name,
  take_screenshots=debug_automation,
- headless=self._browser_headless,
+ headless=browser_automation.get("headless", self._browser_headless),
  logger=self.logger,
  wait_until=wait_until,
  browser=browser_automation.get("browser"), # None is acceptable
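
With this change the headless flag can now be overridden per browser-automation payload item, falling back to the global setting when the key is absent. A small sketch of that lookup; the payload dict below is illustrative, not a documented pyxecm structure:

    # Global default, e.g. self._browser_headless in the Payload class:
    global_headless = True

    # Illustrative browser automation payload item:
    browser_automation = {
        "name": "example-automation",
        "browser": None,       # None is acceptable (see diff)
        "headless": False,     # run just this automation with a visible browser
    }

    headless = browser_automation.get("headless", global_headless)
    print(headless)  # -> False; without the "headless" key the global True would be used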
@@ -21822,14 +21874,9 @@ class Payload:
  )
  result["skipped_counter"] += 1
  continue
- # Workspace names for sure are not allowed to have ":":
- workspace_name = workspace_name.replace(":", "")
- # Workspace names for sure should not have leading or trailing spaces:
- workspace_name = workspace_name.strip()
- # Truncate the workspace name to 254 characters which is the maximum
- # allowed length in Content Server
- if len(workspace_name) > 254:
- workspace_name = workspace_name[:254]
+
+ # Cleanse the workspace name (allowed characters, maximum length):
+ workspace_name = OTCS.cleanse_item_name(workspace_name)

  # Check if workspace has been created before (either in this run
  # or in a former run of the customizer):
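
The inlined cleansing code removed above is now centralized in OTCS.cleanse_item_name(). A sketch of what such a helper plausibly does, reconstructed from the removed lines (drop ':' characters, trim whitespace, truncate to 254 characters); the actual pyxecm implementation may differ:

    def cleanse_item_name(name: str, max_length: int = 254) -> str:
        """Sketch of an item-name cleanser based on the removed inline code."""
        name = name.replace(":", "")  # ':' is not allowed in Content Server item names
        name = name.strip()           # no leading or trailing spaces
        return name[:max_length]      # Content Server limits names to 254 characters

    print(cleanse_item_name("  Sales: EMEA  "))  # -> 'Sales EMEA'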
@@ -22726,8 +22773,8 @@ class Payload:
  # If not, we load the data source on the fly:
  data_source_data: Data = data_source.get("data")
  if not data_source_data:
- self.logger.warning(
- "Lookup data source -> '%s' has no data yet. Trying to reload...",
+ self.logger.info(
+ "Lookup data source -> '%s' has no data yet. Reloading...",
  data_source_name,
  )
  data_source_data = self.process_bulk_datasource(
@@ -25006,14 +25053,8 @@ class Payload:
  # We keep success = True as this is a data problem and not a config problem!
  return None, success

- # Workspace names for sure are not allowed to have ":":
- workspace_name = workspace_name.replace(":", "")
- # Workspace names for sure should not have leading or trailing spaces:
- workspace_name = workspace_name.strip()
- # Truncate the workspace name to 254 characters which is
- # the maximum allowed length in Content Server
- if len(workspace_name) > 254:
- workspace_name = workspace_name[:254]
+ # Cleanse the workspace name (allowed characters, maximum length):
+ workspace_name = OTCS.cleanse_item_name(workspace_name)

  # Check if all data conditions to create the workspace are met
  conditions = workspace.get("conditions")
@@ -25782,14 +25823,8 @@ class Payload:
  result["skipped_counter"] += 1
  continue

- # Document names for sure are not allowed to have ":":
- document_name = document_name.replace(":", "")
- # Document names for sure should not have leading or trailing spaces:
- document_name = document_name.strip()
- # Truncate the document name to 254 characters which is
- # the maximum allowed length in Content Server
- if len(document_name) > 254:
- document_name = document_name[:254]
+ # Cleanse the document name (allowed characters, maximum length):
+ document_name = OTCS.cleanse_item_name(document_name)

  download_name = ""
  if download_name_field:
@@ -26027,6 +26062,8 @@ class Payload:
  att_name,
  parent_id,
  )
+ # Keep track that we need to handle a name clash as based on the key
+ # the document should not exist.
  handle_name_clash = True
  else:
  self.logger.error(
@@ -26148,11 +26185,6 @@ class Payload:
  categories=categories,
  replacements=replacements,
  )
- # document_category_data = self.prepare_item_create_form(
- # parent_id=parent_id,
- # categories=worker_categories,
- # subtype=self._otcs_frontend.ITEM_TYPE_DOCUMENT,
- # )
  document_category_data = self.prepare_category_data(
  categories_payload=worker_categories,
  source_node_id=parent_id,
@@ -26254,7 +26286,8 @@ class Payload:
  )
  ):
  # get the specific update operations given in the payload
- # if not specified we do all 4 update operations (name, description, categories and version)
+ # if not specified we do the following update operations.
+ # The 'purge' operation needs to be specified explicitly.
  update_operations = bulk_document.get(
  "update_operations",
  ["name", "description", "categories", "nickname", "version"],
@@ -26321,6 +26354,21 @@ class Payload:
  )
  success = False
  continue
+ if "purge" in update_operations:
+ max_versions = bulk_document.get("max_versions", 1)
+ response = self._otcs_frontend.purge_document_versions(
+ node_id=document_id, versions_to_keep=max_versions
+ )
+ if not response:
+ self.logger.error(
+ "Failed to purge versions of document -> '%s' (%s) to %d version%s!",
+ (document_old_name if document_old_name else document_name),
+ document_id,
+ max_versions,
+ "s" if max_versions > 1 else "",
+ )
+ success = False
+ continue
  response = self._otcs_frontend.update_item(
  node_id=document_id,
  parent_id=None, # None = do not move item
@@ -27066,14 +27114,8 @@ class Payload:
  result["skipped_counter"] += 1
  continue

- # Document names for sure are not allowed to have ":":
- item_name = item_name.replace(":", "")
- # Document names for sure should not have leading or trailing spaces:
- item_name = item_name.strip()
- # Truncate the item name to 254 characters which is
- # the maximum allowed length in Content Server
- if len(item_name) > 254:
- item_name = item_name[:254]
+ # Cleanse the item name (allowed characters, maximum length):
+ item_name = OTCS.cleanse_item_name(item_name)

  # This is an optimization. We check if the item was created
  # in a former run. This helps if the customizer is re-run:
@@ -28901,14 +28943,8 @@ class Payload:
  result["skipped_counter"] += 1
  continue

- # Classification names for sure are not allowed to have ":":
- classification_name = classification_name.replace(":", "")
- # Classification names for sure should not have leading or trailing spaces:
- classification_name = classification_name.strip()
- # Truncate the classification name to 254 characters which is the maximum
- # allowed length in Content Server
- if len(classification_name) > 254:
- classification_name = classification_name[:254]
+ # Cleanse the classification name (allowed characters, maximum length):
+ classification_name = OTCS.cleanse_item_name(classification_name)

  # Check if classification has been created before (either in this run
  # or in a former run of the customizer):
@@ -149,16 +149,20 @@ class Translator:
  request_header = self._headers
  request_url = self.config()["translateUrlV3"]

- response = requests.post(
- url=request_url,
- headers=request_header,
- json=data,
- timeout=REQUEST_TIMEOUT,
- )
-
- if response.status_code != 200:
- self.logger.error("Failed to translate text -> %s", response.content)
- return None
+ try:
+ response = requests.post(
+ url=request_url,
+ headers=request_header,
+ json=data,
+ timeout=REQUEST_TIMEOUT,
+ )
+
+ if response.status_code != 200:
+ self.logger.error("Failed to translate text -> %s", response.content)
+ return None
+
+ except Exception as error:
+ self.logger.error("Failed translation request; error -> %s", str(error))

  translated_text = response.json()["data"]["translations"][0]["translatedText"]
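
The new try/except above catches connection-level failures from requests.post(). A minimal, self-contained variant of the same guard that also returns None from the except branch before the response is accessed (URL, headers, payload and timeout value are placeholders):

    import logging

    import requests

    logger = logging.getLogger("translator")
    REQUEST_TIMEOUT = 60  # seconds; assumed value for illustration

    def translate(request_url: str, request_header: dict, data: dict) -> str | None:
        """POST a translation request and return the translated text, or None on any failure."""
        try:
            response = requests.post(
                url=request_url,
                headers=request_header,
                json=data,
                timeout=REQUEST_TIMEOUT,
            )
        except requests.RequestException as error:
            logger.error("Failed translation request; error -> %s", str(error))
            return None

        if response.status_code != 200:
            logger.error("Failed to translate text -> %s", response.content)
            return None

        return response.json()["data"]["translations"][0]["translatedText"]

Catching requests.RequestException rather than a bare Exception keeps unrelated programming errors visible while still guarding the network call.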
pyxecm/helper/data.py CHANGED
@@ -857,6 +857,10 @@ class Data:
  columns = existing_columns

  # Attempt to save the data frame to Excel:
+ if self._df is None:
+ self.logger.error(
+ "Cannot write Excel file -> '%s' from empty / non-initialized data frame!", excel_path
+ )
  self._df.to_excel(
  excel_path,
  sheet_name=sheet_name,
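
The new check above logs an error when the internal data frame has not been initialized before to_excel() is called. A small sketch of an equivalent guard that also returns early, assuming a pandas DataFrame stored in an attribute named _df:

    import logging

    import pandas as pd

    logger = logging.getLogger("data")

    class DataSketch:
        """Minimal stand-in for the pyxecm Data wrapper (illustration only)."""

        def __init__(self, df: pd.DataFrame | None = None) -> None:
            self._df = df

        def save_excel(self, excel_path: str, sheet_name: str = "Sheet1") -> bool:
            # Guard against an empty / non-initialized data frame before writing:
            if self._df is None:
                logger.error("Cannot write Excel file -> '%s' from empty / non-initialized data frame!", excel_path)
                return False
            try:
                self._df.to_excel(excel_path, sheet_name=sheet_name, index=False)
            except (FileNotFoundError, PermissionError, ValueError, OSError) as error:
                logger.error("Cannot write data frame to Excel file -> '%s'; error -> %s", excel_path, str(error))
                return False
            return True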
@@ -868,29 +872,17 @@ class Data:
  excel_path,
  )

- except FileNotFoundError:
- self.logger.error(
- "Cannot write data frame to Excel file -> '%s'",
- excel_path,
- )
+ except FileNotFoundError as fnf_error:
+ self.logger.error("Cannot write data frame to Excel file -> '%s'; error -> %s", excel_path, str(fnf_error))
  return False
- except PermissionError:
- self.logger.error(
- "Cannot write data frame to Excel file -> '%s'",
- excel_path,
- )
+ except PermissionError as pe:
+ self.logger.error("Cannot write data frame to Excel file -> '%s'; error -> %s", excel_path, str(pe))
  return False
- except ValueError:
- self.logger.error(
- "Cannot write data frame to Excel file -> '%s'",
- excel_path,
- )
+ except ValueError as ve:
+ self.logger.error("Cannot write data frame to Excel file -> '%s'; error -> %s", excel_path, str(ve))
  return False
- except OSError:
- self.logger.error(
- "Cannot write data frame to Excel file -> '%s'",
- excel_path,
- )
+ except OSError as ose:
+ self.logger.error("Cannot write data frame to Excel file -> '%s'; error -> %s", excel_path, str(ose))
  return False

  return True
@@ -1,6 +1,7 @@
  """Maintenance Page that can be enabled by the customizer."""

  import os
+ import threading
  from datetime import datetime

  import uvicorn
@@ -47,5 +48,8 @@ async def http_exception_handler(request: Request, exc: HTTPException) -> Jinja2


  def run_maintenance_page() -> None:
- """Start the FASTAPI Webserver."""
- uvicorn.run(app, host=settings.host, port=settings.port)
+ """Start the FASTAPI Webserver in a dedicated thread."""
+ maint_thread = threading.Thread(
+ target=uvicorn.run, name="MaintenancePage", kwargs={"app": app, "host": settings.host, "port": settings.port}
+ )
+ maint_thread.start()
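
Running uvicorn in a dedicated thread means run_maintenance_page() now returns immediately while the server keeps serving in the background. A hedged usage sketch with a minimal FastAPI app standing in for the real maintenance page (host, port and the daemon flag are assumptions, not pyxecm settings):

    import threading

    import uvicorn
    from fastapi import FastAPI

    app = FastAPI()

    @app.get("/")
    async def maintenance() -> dict:
        return {"status": "maintenance"}

    def run_maintenance_page(host: str = "0.0.0.0", port: int = 8080) -> threading.Thread:
        """Start the web server in a background thread and return the thread (sketch)."""
        thread = threading.Thread(
            target=uvicorn.run,
            name="MaintenancePage",
            kwargs={"app": app, "host": host, "port": port},
            daemon=True,  # assumption: do not block interpreter shutdown on the server thread
        )
        thread.start()
        return thread

    if __name__ == "__main__":
        server_thread = run_maintenance_page()
        server_thread.join()  # keep the main thread alive while the maintenance page is served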