pyxecm 2.0.3__py3-none-any.whl → 2.0.4__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of pyxecm might be problematic. Click here for more details.

pyxecm/coreshare.py CHANGED
@@ -27,6 +27,7 @@ import time
27
27
  import urllib.parse
28
28
  from http import HTTPStatus
29
29
  from importlib.metadata import version
30
+ from urllib.parse import parse_qs, urlparse
30
31
 
31
32
  import requests
32
33
 
@@ -522,7 +523,7 @@ class CoreShare:
522
523
  Can be used to provide a more specific error message
523
524
  in case an error occurs.
524
525
  show_error (bool, optional):
525
- True: write an error to the log file
526
+ True: write an error to the log file (this is the default)
526
527
  False: write a warning to the log file
527
528
 
528
529
  Returns:
@@ -933,6 +934,71 @@ class CoreShare:
933
934
 
934
935
  # end method definition
935
936
 
937
+ def get_groups_iterator(
938
+ self,
939
+ count: int | None = None,
940
+ ) -> iter:
941
+ """Get an iterator object that can be used to traverse all Core Share groups.
942
+
943
+ Returning a generator avoids loading a large number of items into memory at once. Instead you
944
+ can iterate over the potentially large list of Core Share groups.
945
+
946
+ Example usage:
947
+ groups = core_share_object.get_groups_iterator(count=10)
948
+ for group in groups:
949
+ logger.info("Traversing Core Share group -> %s", group["name"])
950
+
951
+ Args:
952
+ count (int | None, optional):
953
+ The chunk size for the number of groups returned by one
954
+ REST API call. If None, then a default of 250 is used.
955
+
956
+ Returns:
957
+ iter:
958
+ A generator yielding one Core Share group per iteration.
959
+ If the REST API fails, returns no value.
960
+
961
+ """
962
+
963
+ offset = 0
964
+
965
+ while True:
966
+ response = self.get_groups(
967
+ offset=offset,
968
+ count=count,
969
+ )
970
+ if not response or not response.get("results", []):
971
+ # Don't return None! Plain return is what we need for iterators.
972
+ # Natural Termination: If the generator does not yield, it behaves
973
+ # like an empty iterable when used in a loop or converted to a list:
974
+ return
975
+
976
+ # Yield groups one at a time:
977
+ yield from response["results"]
978
+
979
+ # See if we have an additional result page.
980
+ # If not terminate the iterator and return
981
+ # no value.
982
+
983
+ next_page_url = response["_links"].get("next")
984
+ if not next_page_url:
985
+ # Don't return None! Plain return is what we need for iterators.
986
+ # Natural Termination: If the generator does not yield, it behaves
987
+ # like an empty iterable when used in a loop or converted to a list:
988
+ return
989
+ next_page_url = next_page_url.get("href")
990
+
991
+ # Extract the query string from the URL
992
+ query = urlparse(next_page_url).query
993
+
994
+ # Parse the query parameters into a dictionary
995
+ params = parse_qs(query)
996
+
997
+ # Get the 'offset' value as an integer (it's a list by default)
998
+ offset = int(params.get("offset", [0])[0])
999
+
1000
+ # end method definition
1001
+
936
1002
  def add_group(
937
1003
  self,
938
1004
  group_name: str,
@@ -1276,7 +1342,7 @@ class CoreShare:
1276
1342
  dict | None:
1277
1343
  Dictionary with the Core Share group data or None if the request fails.
1278
1344
 
1279
- Example result:
1345
+ Example Response:
1280
1346
  {
1281
1347
  'results': [
1282
1348
  {
@@ -1343,15 +1409,15 @@ class CoreShare:
1343
1409
 
1344
1410
  # end method definition
1345
1411
 
1346
- def get_users(self) -> dict | None:
1412
+ def get_users(self) -> list | None:
1347
1413
  """Get Core Share users.
1348
1414
 
1349
1415
  Args:
1350
1416
  None
1351
1417
 
1352
1418
  Returns:
1353
- dict | None:
1354
- Dictionary with the Core Share user data or None if the request fails.
1419
+ list | None:
1420
+ List with the Core Share user data or None if the request fails.
1355
1421
 
1356
1422
  Example response (it is a list!):
1357
1423
  [
@@ -13,7 +13,6 @@ from collections.abc import AsyncGenerator
13
13
  from contextlib import asynccontextmanager
14
14
  from datetime import datetime, timezone
15
15
  from importlib.metadata import version
16
- from threading import Thread
17
16
 
18
17
  import uvicorn
19
18
  from fastapi import FastAPI
@@ -85,17 +84,11 @@ async def lifespan(
85
84
  # Optional Payload
86
85
  import_payload(payload_dir=api_settings.payload_dir_optional)
87
86
 
88
- if api_settings.maintenance_mode:
89
- logger.info("Starting maintenance_page thread...")
90
- maint_thread = Thread(target=run_maintenance_page, name="maintenance_page")
91
- maint_thread.start()
87
+ logger.info("Starting maintenance_page thread...")
88
+ run_maintenance_page()
92
89
 
93
90
  logger.info("Starting processing thread...")
94
- thread = Thread(
95
- target=PAYLOAD_LIST.run_payload_processing,
96
- name="customization_run_api",
97
- )
98
- thread.start()
91
+ PAYLOAD_LIST.run_payload_processing(concurrent=api_settings.concurrent_payloads)
99
92
 
100
93
  yield
101
94
  logger.info("Shutdown")
@@ -104,7 +97,8 @@ async def lifespan(
104
97
 
105
98
  app = FastAPI(
106
99
  docs_url="/api",
107
- title="Customizer API",
100
+ title=api_settings.title,
101
+ description=api_settings.description,
108
102
  openapi_url=api_settings.openapi_url,
109
103
  root_path=api_settings.root_path,
110
104
  lifespan=lifespan,
@@ -609,7 +609,7 @@ class PayloadList:
609
609
 
610
610
  # Log each runnable item
611
611
  for _, row in runnable_df.iterrows():
612
- self.logger.info(
612
+ self.logger.debug(
613
613
  "Added payload file -> '%s' with index -> %s to runnable queue.",
614
614
  row["name"] if row["name"] else row["filename"],
615
615
  row["index"],
@@ -619,12 +619,35 @@ class PayloadList:
619
619
 
620
620
  # end method definition
621
621
 
622
- def process_payload_list(self) -> None:
622
+ def pick_running(self) -> int:
623
+ """Pick all PayloadItems with status "running".
624
+
625
+ Returns:
626
+ int:
627
+ The number of payload items with status "running".
628
+
629
+ """
630
+
631
+ if self.payload_items.empty:
632
+ return 0
633
+
634
+ all_status = self.payload_items["status"].value_counts().to_dict()
635
+
636
+ return all_status.get("running", 0)
637
+
638
+ # end method definition
639
+
640
+ def process_payload_list(self, concurrent: int | None = None) -> None:
623
641
  """Process runnable payloads.
624
642
 
643
+ Args:
644
+ concurrent (int | None, optional):
645
+ The maximum number of concurrent payloads to run at any given time.
646
+
625
647
  Continuously checks for runnable payload items and starts their
626
648
  "process_payload" method in separate threads.
627
649
  Runs as a daemon until the customizer ends.
650
+
628
651
  """
629
652
 
630
653
  def run_and_complete_payload(payload_item: pd.Series) -> None:
@@ -817,6 +840,18 @@ class PayloadList:
817
840
  # Start a thread for each runnable item (item is a pd.Series)
818
841
  if runnable_items is not None:
819
842
  for _, item in runnable_items.iterrows():
843
+ if concurrent and self.pick_running() >= concurrent:
844
+ self.logger.debug(
845
+ "Reached concurrency limit of %s payloads. Waiting for one to finish.",
846
+ )
847
+ break
848
+
849
+ self.logger.info(
850
+ "Added payload file -> '%s' with index -> %s to runnable queue.",
851
+ item["name"] if item["name"] else item["filename"],
852
+ item["index"],
853
+ )
854
+
820
855
  # Update the status to "running" in the data frame to prevent re-processing
821
856
  self.payload_items.loc[
822
857
  self.payload_items["name"] == item["name"],
@@ -837,13 +872,14 @@ class PayloadList:
837
872
 
838
873
  # end method definition
839
874
 
840
- def run_payload_processing(self) -> None:
875
+ def run_payload_processing(self, concurrent: int | None = None) -> None:
841
876
  """Start the `process_payload_list` method in a daemon thread."""
842
877
 
843
878
  scheduler_thread = threading.Thread(
844
879
  target=self.process_payload_list,
845
880
  daemon=True,
846
881
  name="Scheduler",
882
+ kwargs={"concurrent": concurrent},
847
883
  )
848
884
 
849
885
  self.logger.info(
@@ -853,13 +889,6 @@ class PayloadList:
853
889
  self._stopped = False
854
890
  scheduler_thread.start()
855
891
 
856
- self.logger.info(
857
- "Waiting for thread -> '%s' to complete...",
858
- str(scheduler_thread.name),
859
- )
860
- scheduler_thread.join()
861
- self.logger.info("Thread -> '%s' has completed.", str(scheduler_thread.name))
862
-
863
892
  # end method definition
864
893
 
865
894
  def stop_payload_processing(self) -> None:
@@ -8,7 +8,7 @@ import tempfile
8
8
  from http import HTTPStatus
9
9
  from typing import Annotated
10
10
 
11
- from fastapi import APIRouter, Depends, HTTPException, Query, Response
11
+ from fastapi import APIRouter, Depends, HTTPException, Query, Request, Response
12
12
  from fastapi.responses import FileResponse, JSONResponse, RedirectResponse
13
13
 
14
14
  from pyxecm.customizer.api.auth.functions import get_authorized_user
@@ -16,21 +16,23 @@ from pyxecm.customizer.api.auth.models import User
16
16
  from pyxecm.customizer.api.common.functions import PAYLOAD_LIST, list_files_in_directory
17
17
  from pyxecm.customizer.api.common.models import CustomizerStatus
18
18
 
19
- router = APIRouter(tags=["default"])
19
+ router = APIRouter()
20
20
 
21
21
  logger = logging.getLogger("pyxecm.customizer.api.common")
22
22
 
23
23
 
24
24
  @router.get("/", include_in_schema=False)
25
- async def redirect_to_api() -> RedirectResponse:
25
+ async def redirect_to_api(request: Request) -> RedirectResponse:
26
26
  """Redirect from / to /api.
27
27
 
28
28
  Returns:
29
29
  None
30
30
 
31
31
  """
32
- return RedirectResponse(url="/api")
32
+ # Construct the new URL by appending /api
33
+ new_url = f"{request.url.path!s}api"
33
34
 
35
+ return RedirectResponse(url=new_url)
34
36
 
35
37
  @router.get(path="/status", name="Get Status")
36
38
  async def get_status() -> CustomizerStatus:
@@ -74,7 +76,7 @@ def shutdown(user: Annotated[User, Depends(get_authorized_user)]) -> JSONRespons
74
76
  return JSONResponse({"status": "shutdown"}, status_code=HTTPStatus.ACCEPTED)
75
77
 
76
78
 
77
- @router.get(path="/browser_automations/assets")
79
+ @router.get(path="/browser_automations/assets", tags=["payload"])
78
80
  def list_browser_automation_files(
79
81
  user: Annotated[User, Depends(get_authorized_user)], # noqa: ARG001
80
82
  ) -> JSONResponse:
@@ -90,7 +92,7 @@ def list_browser_automation_files(
90
92
  return JSONResponse(result)
91
93
 
92
94
 
93
- @router.get(path="/browser_automations/download")
95
+ @router.get(path="/browser_automations/download", tags=["payload"])
94
96
  def get_browser_automation_file(
95
97
  user: Annotated[User, Depends(get_authorized_user)], # noqa: ARG001
96
98
  file: Annotated[str, Query(description="File name")],
@@ -16,6 +16,12 @@ from pydantic_settings import (
16
16
  class CustomizerAPISettings(BaseSettings):
17
17
  """Settings for the Customizer API."""
18
18
 
19
+ title: str = Field(default="Customizer API", description="Name of the API Service")
20
+ description: str = Field(
21
+ default="API provided by [pyxecm](https://github.com/opentext/pyxecm). The documentation for the payload syntax can be found [here](https://opentext.github.io/pyxecm/payload-syntax/).",
22
+ description="Descriptive text on the SwaggerUI page.",
23
+ )
24
+
19
25
  api_key: str | None = Field(
20
26
  default=None,
21
27
  description="Optional API KEY that can be specified that has access to the Customizer API, bypassing the OTDS authentication.",
@@ -26,6 +32,9 @@ class CustomizerAPISettings(BaseSettings):
26
32
  root_path: str = Field(default="/", description="Root path for the Customizer API")
27
33
  openapi_url: str = Field(default="/api/openapi.json", description="OpenAPI URL")
28
34
 
35
+ concurrent_payloads: int = Field(
36
+ default=3, description="Maximum number of concurrent payloads that are executed at the same time."
37
+ )
29
38
  import_payload: bool = Field(default=False)
30
39
  payload: str = Field(
31
40
  default="/payload/payload.yml.gz.b64",
@@ -35,10 +35,15 @@ async def put_otcs_logs(
35
35
 
36
36
  if "all" in hosts:
37
37
  hosts = []
38
- for sts in ["otcs-admin", "otcs-frontend", "otcs-backend-search"]:
38
+ for sts in ["otcs-admin", "otcs-frontend", "otcs-backend-search", "otcs-da"]:
39
39
  try:
40
- sts_replicas = k8s_object.get_stateful_set_scale(sts).status.replicas
41
- hosts.extend([f"{sts}-{i}" for i in range(sts_replicas)])
40
+ sts_replicas = k8s_object.get_stateful_set_scale(sts)
41
+
42
+ if sts_replicas is None:
43
+ logger.debug("Cannot get statefulset {sts}")
44
+ continue
45
+
46
+ hosts.extend([f"{sts}-{i}" for i in range(sts_replicas.status.replicas)])
42
47
  except Exception as e:
43
48
  raise HTTPException(status_code=HTTPStatus.INTERNAL_SERVER_ERROR) from e
44
49
 
@@ -118,10 +123,15 @@ async def get_otcs_log_files(
118
123
  response = {"status": {host: bool(otcs_logs_lock[host].locked()) for host in otcs_logs_lock}, "files": files}
119
124
 
120
125
  # Extend response with all hosts
121
- for sts in ["otcs-admin", "otcs-frontend", "otcs-backend-search"]:
126
+ for sts in ["otcs-admin", "otcs-frontend", "otcs-backend-search", "otcs-da"]:
122
127
  try:
123
- sts_replicas = k8s_object.get_stateful_set_scale(sts).status.replicas
124
- for i in range(sts_replicas):
128
+ sts_replicas = k8s_object.get_stateful_set_scale(sts)
129
+
130
+ if sts_replicas is None:
131
+ logger.debug("Cannot get statefulset {sts}")
132
+ continue
133
+
134
+ for i in range(sts_replicas.status.replicas):
125
135
  host = f"{sts}-{i}"
126
136
 
127
137
  if host in otcs_logs_lock: