pyxecm 1.5-py3-none-any.whl → 2.0.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of pyxecm might be problematic.
- pyxecm/__init__.py +6 -2
- pyxecm/avts.py +1492 -0
- pyxecm/coreshare.py +1075 -960
- pyxecm/customizer/__init__.py +16 -4
- pyxecm/customizer/__main__.py +58 -0
- pyxecm/customizer/api/__init__.py +5 -0
- pyxecm/customizer/api/__main__.py +6 -0
- pyxecm/customizer/api/app.py +914 -0
- pyxecm/customizer/api/auth.py +154 -0
- pyxecm/customizer/api/metrics.py +92 -0
- pyxecm/customizer/api/models.py +13 -0
- pyxecm/customizer/api/payload_list.py +865 -0
- pyxecm/customizer/api/settings.py +103 -0
- pyxecm/customizer/browser_automation.py +332 -139
- pyxecm/customizer/customizer.py +1075 -1057
- pyxecm/customizer/exceptions.py +35 -0
- pyxecm/customizer/guidewire.py +322 -0
- pyxecm/customizer/k8s.py +787 -338
- pyxecm/customizer/log.py +107 -0
- pyxecm/customizer/m365.py +3424 -2270
- pyxecm/customizer/nhc.py +1169 -0
- pyxecm/customizer/openapi.py +258 -0
- pyxecm/customizer/payload.py +18201 -7030
- pyxecm/customizer/pht.py +1047 -210
- pyxecm/customizer/salesforce.py +836 -727
- pyxecm/customizer/sap.py +58 -41
- pyxecm/customizer/servicenow.py +851 -383
- pyxecm/customizer/settings.py +442 -0
- pyxecm/customizer/successfactors.py +408 -346
- pyxecm/customizer/translate.py +83 -48
- pyxecm/helper/__init__.py +5 -2
- pyxecm/helper/assoc.py +98 -38
- pyxecm/helper/data.py +2482 -742
- pyxecm/helper/logadapter.py +27 -0
- pyxecm/helper/web.py +229 -101
- pyxecm/helper/xml.py +528 -172
- pyxecm/maintenance_page/__init__.py +5 -0
- pyxecm/maintenance_page/__main__.py +6 -0
- pyxecm/maintenance_page/app.py +51 -0
- pyxecm/maintenance_page/settings.py +28 -0
- pyxecm/maintenance_page/static/favicon.avif +0 -0
- pyxecm/maintenance_page/templates/maintenance.html +165 -0
- pyxecm/otac.py +234 -140
- pyxecm/otawp.py +2689 -0
- pyxecm/otcs.py +12344 -7547
- pyxecm/otds.py +3166 -2219
- pyxecm/otiv.py +36 -21
- pyxecm/otmm.py +1363 -296
- pyxecm/otpd.py +231 -127
- pyxecm-2.0.0.dist-info/METADATA +145 -0
- pyxecm-2.0.0.dist-info/RECORD +54 -0
- {pyxecm-1.5.dist-info → pyxecm-2.0.0.dist-info}/WHEEL +1 -1
- pyxecm-1.5.dist-info/METADATA +0 -51
- pyxecm-1.5.dist-info/RECORD +0 -30
- {pyxecm-1.5.dist-info → pyxecm-2.0.0.dist-info/licenses}/LICENSE +0 -0
- {pyxecm-1.5.dist-info → pyxecm-2.0.0.dist-info}/top_level.txt +0 -0
pyxecm/customizer/customizer.py
CHANGED
@@ -1,47 +1,7 @@
-"""
-
-Data classes to handle settings read from environment variables
-* CustomizerSettings: Class to manage settings
-* CustomizerSettingsOTDS: Class for OTDS related settings
-* CustomizerSettingsOTCS: Class for OTCS related settings
-* CustomizerSettingsOTAC: Class for OTAC related settings
-* CustomizerSettingsOTPD: Class for OTPD related settings
-* CustomizerSettingsOTIV: Class for OTIV related settings
-* CustomizerSettingsK8S: Class for K8s related settings
-* CustomizerSettingsOTAWP: Class for OTAWP related settings
-* CustomizerSettingsM365: Class for O365 related settings
-* CustomizerSettingsAviator: Class for Aviator related settings
-
-Methods of class Customizer:
-
-__init__: object initializer for class Customizer
-log_header: Helper method to output a section header in the log file
-init_browser_automation: initialize browser automation for Content Aviator
-init_m365: initialize the Microsoft 365 object
-init_k8s: initialize the Kubernetes object we use to talk to the Kubernetes API
-init_otds: initialize the OTDS object
-init_otac: initialize the OTAC object
-init_otcs: initialize the OTCS (Extended ECM) object
-init_otiv: initialize the OTIV (Intelligent Viewing) object and its OTDS settings
-init_otpd: initialize the PowerDocs object
-init_otawp: initialize OTDS settings for AppWorks Platform
-
-restart_otcs_service: restart the OTCS backend and frontend pods -
-required to make certain configurations effective
-restart_otac_service: restart spawner process in Archive Center
-restart_otawp_pod: restart the AppWorks Platform Pod to make settings effective
-consolidate_otds: consolidate OTDS users / groups (to get to a fully synchronized state)
-
-import_powerdocs_configuration: import PowerDocs database
-
-set_maintenance_mode: Enable or Disable Maintenance Mode
-
-customization_run: Central function to initiate the customization
-
-"""
+"""Module to automate Directory Services (OTDS) and Content Server (OTCS) configurations."""
 
 __author__ = "Dr. Marc Diefenbruch"
-__copyright__ = "Copyright 2024, OpenText"
+__copyright__ = "Copyright (C) 2024-2025, OpenText"
 __credits__ = ["Kai-Philip Gatzweiler"]
 __maintainer__ = "Dr. Marc Diefenbruch"
 __email__ = "mdiefenb@opentext.com"
@@ -49,272 +9,58 @@ __email__ = "mdiefenb@opentext.com"
 import logging
 import os
 import sys
+import tempfile
 import time
-from
-from
-
-# from packaging.version import Version
+from datetime import datetime, timezone
+from typing import TYPE_CHECKING
 
 import requests
 
 # OpenText specific modules:
 import yaml
-from
+from pydantic import HttpUrl
+
+from pyxecm import AVTS, OTAC, OTAWP, OTCS, OTDS, OTIV, OTPD, CoreShare
 from pyxecm.customizer.k8s import K8s
 from pyxecm.customizer.m365 import M365
 from pyxecm.customizer.payload import Payload
+from pyxecm.customizer.settings import Settings
 
-
-
-
-
-
-@dataclass
-class CustomizerSettings:
-    """Class to manage settings"""
-
-    placeholder_values: dict = field(default_factory=dict)
-    stop_on_error: bool = os.environ.get("STOP_ON_ERROR", "false").lower() == "true"
-    cust_log_file: str = "/tmp/customizing.log"
-    customizer_start_time = customizer_end_time = datetime.now()
-
-    # The following CUST artifacts are created by the main.tf in the python module:
-    cust_settings_dir: str = "/settings/"
-    cust_payload_dir: str = "/payload/"
-    cust_payload: str = cust_payload_dir + "payload.yaml"
-    cust_payload_gz: str = cust_payload_dir + "payload.yml.gz.b64"
-    cust_payload_external: str = "/payload-external/"
-
-    cust_target_folder_nickname: str = (
-        "deployment" # nickname of folder to upload payload and log files
-    )
-    # CUST_RM_SETTINGS_DIR = "/opt/opentext/cs/appData/supportasset/Settings/"
-    cust_rm_settings_dir = cust_settings_dir
-
-
-@dataclass
-class CustomizerSettingsOTDS:
-    """Class for OTDS related settings"""
-
-    protocol: str = os.environ.get("OTDS_PROTOCOL", "http")
-    public_protocol: str = os.environ.get("OTDS_PUBLIC_PROTOCOL", "https")
-    hostname: str = os.environ.get("OTDS_HOSTNAME", "otds")
-    port: int = os.environ.get("OTDS_SERVICE_PORT_OTDS", 80)
-    username: str = os.environ.get("OTDS_ADMIN", "admin")
-    otds_ticket: str | None = None
-    admin_partition: str = "otds.admin"
-    public_url: str = os.environ.get("OTDS_PUBLIC_URL")
-    password: str = os.environ.get("OTDS_PASSWORD")
-    disable_password_policy: bool = True
-    enable_audit: bool = True
-
-
-@dataclass
-class CustomizerSettingsOTCS:
-    """Class for OTCS related settings"""
-
-    # Content Server Constants:
-    protocol: str = os.environ.get("OTCS_PROTOCOL", "http")
-    public_protocol: str = os.environ.get("OTCS_PUBLIC_PROTOCOL", "https")
-    hostname: str = os.environ.get("OTCS_HOSTNAME", "otcs-admin-0")
-    hostname_backend: str = os.environ.get("OTCS_HOSTNAME", "otcs-admin-0")
-    hostname_frontend: str = os.environ.get("OTCS_HOSTNAME_FRONTEND", "otcs-frontend")
-    public_url: str = os.environ.get("OTCS_PUBLIC_URL", "otcs.public-url.undefined")
-    port: int = os.environ.get("OTCS_SERVICE_PORT_OTCS", 8080)
-    port_backend: int = os.environ.get("OTCS_SERVICE_PORT_OTCS", 8080)
-    port_frontend: int = 80
-    base_path: str = "/cs/cs"
-    admin: str = os.environ.get("OTCS_ADMIN", "admin")
-    password: str = os.environ.get("OTCS_PASSWORD")
-    partition: str = os.environ.get("OTCS_PARTITION", "Content Server Members")
-    resource_name: str = "cs"
-    k8s_statefulset_frontend: str = "otcs-frontend"
-    k8s_statefulset_backend: str = "otcs-admin"
-    k8s_ingress: str = "otxecm-ingress"
-    maintenance_mode: bool = (
-        os.environ.get("OTCS_MAINTENANCE_MODE", "true").lower() == "true"
-    )
-    license_feature: str = "X3"
-
-    # K8s service name and port for maintenance pod
-    maintenance_service_name: str = "otxecm-customizer"
-    mainteance_service_port: int = 5555
-
-    replicas_frontend = 0
-    replicas_backend = 0
-
-    # Add configuration options for Customizer behaviour
-    update_admin_user: bool = True
-    upload_config_files: bool = True
-    upload_status_files: bool = True
-    upload_log_file: bool = True
-
-
-@dataclass
-class CustomizerSettingsOTAC:
-    """Class for OTAC related settings"""
-
-    enabled: bool = os.environ.get("OTAC_ENABLED", "false").lower() == "true"
-    hostname: str = os.environ.get("OTAC_SERVICE_HOST", "otac-0")
-    port: int = os.environ.get("OTAC_SERVICE_PORT", 8080)
-    protocol: str = os.environ.get("OTAC_PROTOCOL", "http")
-    public_url: str = os.environ.get("OTAC_PUBLIC_URL")
-    admin: str = os.environ.get("OTAC_ADMIN", "dsadmin")
-    password: str = os.environ.get("OTAC_PASSWORD", "")
-    known_server: str = os.environ.get("OTAC_KNOWN_SERVER", "")
-    k8s_pod_name: str = "otac-0"
-
-
-@dataclass
-class CustomizerSettingsOTPD:
-    """Class for OTPD related settings"""
-
-    enabled: bool = os.environ.get("OTPD_ENABLED", "false").lower() == "true"
-    hostname: str = os.environ.get("OTPD_SERVICE_HOST", "otpd")
-    port: int = os.environ.get("OTPD_SERVICE_PORT", 8080)
-    protocol: str = os.environ.get("OTPD_PROTOCOL", "http")
-    db_importfile: str = os.environ.get(
-        "OTPD_DBIMPORTFILE", "URL://url.download.location/file.zip"
-    )
-    tenant: str = os.environ.get("OTPD_TENANT", "Successfactors")
-    user: str = os.environ.get("OTPD_USER", "powerdocsapiuser")
-    password: str = os.environ.get(
-        "OTPD_PASSWORD",
-    )
-    k8s_pod_name: str = "otpd-0"
-
-
-@dataclass
-class CustomizerSettingsOTIV:
-    """Class for OTIV related settings"""
-
-    enabled: bool = os.environ.get("OTIV_ENABLED", "false").lower() == "true"
-    license_file: str = "/payload/otiv-license.lic"
-    license_feature: str = "FULLTIME_USERS_REGULAR"
-    product_name: str = "Viewing"
-    product_description: str = "OpenText Intelligent Viewing"
-    resource_name: str = "iv"
-
-
-@dataclass
-class CustomizerSettingsK8S:
-    """Class for K8s related settings"""
-
-    enabled: bool = os.environ.get("K8S_ENABLED", "true").lower() == "true"
-    in_cluster: bool = True
-    kubeconfig_file: str = "~/.kube/config"
-    namespace: str = "default"
-
-
-@dataclass
-class CustomizerSettingsOTAWP:
-    """Class for OTAWP related settings"""
-
-    enabled: bool = os.environ.get("OTAWP_ENABLED", "false").lower() == "true"
-    license_file: str = "/payload/otawp-license.lic"
-    product_name: str = "APPWORKS_PLATFORM"
-    product_description: str = "OpenText Appworks Platform"
-    resource_name: str = "awp"
-    access_role_name: str = "Access to " + resource_name
-    admin: str = os.environ.get("OTAWP_ADMIN", "sysadmin")
-    password: str = os.environ.get("OTAWP_PASSWORD")
-    public_protocol: str = os.environ.get("OTAWP_PROTOCOL", "https")
-    public_url: str = os.environ.get("OTAWP_PUBLIC_URL")
-    k8s_statefulset: str = "appworks"
-    k8s_configmap: str = "appworks-config-ymls"
-
-
-@dataclass
-class CustomizerSettingsM365:
-    """Class for O365 related settings"""
-
-    enabled: bool = os.environ.get("O365_ENABLED", "false").lower() == "true"
-    tenant_id: str = os.environ.get("O365_TENANT_ID", "")
-    client_id: str = os.environ.get("O365_CLIENT_ID", "")
-    client_secret: str = os.environ.get("O365_CLIENT_SECRET", "")
-    user: str = os.environ.get("O365_USER", "")
-    password: str = os.environ.get("O365_PASSWORD", "")
-    domain: str = os.environ.get("O365_DOMAIN", "")
-    sku_id: str = os.environ.get("O365_SKU_ID", "c7df2760-2c81-4ef7-b578-5b5392b571df")
-    teams_app_name: str = os.environ.get("O365_TEAMS_APP_NAME", "OpenText Extended ECM")
-    teams_app_external_id: str = os.environ.get(
-        "O365_TEAMS_APP_ID", "dd4af790-d8ff-47a0-87ad-486318272c7a"
-    )
-
-
-@dataclass
-class CustomizerSettingsCoreShare:
-    """Class for Core Share related settings"""
-
-    enabled: bool = os.environ.get("CORE_SHARE_ENABLED", "false").lower() == "true"
-    base_url: str = os.environ.get("CORE_SHARE_BASE_URL", "https://core.opentext.com")
-    sso_url: str = os.environ.get("CORE_SHARE_SSO_URL", "https://sso.core.opentext.com")
-    client_id: str = os.environ.get("CORE_SHARE_CLIENT_ID", "")
-    client_secret = os.environ.get("CORE_SHARE_CLIENT_SECRET", "")
-    username: str = os.environ.get("CORE_SHARE_USERNAME", "")
-    password: str = os.environ.get("CORE_SHARE_PASSWORD", "")
-
-
-@dataclass
-class CustomizerSettingsAviator:
-    """Class for Aviator related settings"""
-
-    enabled: bool = os.environ.get("AVIATOR_ENABLED", "false").lower() == "true"
+if TYPE_CHECKING:
+    from pyxecm.customizer.browser_automation import BrowserAutomation
+
+default_logger = logging.getLogger("pyxecm.customizer")
 
 
 class Customizer:
-    """Customizer Class to control the cusomization automation
+    """Customizer Class to control the cusomization automation."""
 
-
-
+    logger: logging.Logger = default_logger
+    customizer_start_time: datetime | None
+    customizer_stop_time: datetime | None
 
     def __init__(
         self,
-        settings:
-
-
-
-        otpd: CustomizerSettingsOTPD = CustomizerSettingsOTPD(),
-        otiv: CustomizerSettingsOTIV = CustomizerSettingsOTIV(),
-        k8s: CustomizerSettingsK8S = CustomizerSettingsK8S(),
-        otawp: CustomizerSettingsOTAWP = CustomizerSettingsOTAWP(),
-        m365: CustomizerSettingsM365 = CustomizerSettingsM365(),
-        core_share: CustomizerSettingsCoreShare = CustomizerSettingsCoreShare(),
-        aviator: CustomizerSettingsAviator = CustomizerSettingsAviator(),
-    ):
-        self.settings = settings
-
-        # OTDS Constants:
-        self.otds_settings = otds
-
-        # Content Server Constants:
-        self.otcs_settings = otcs
-
-        # Archive Center constants:
-        self.otac_settings = otac
-
-        # PowerDocs constants:
-        self.otpd_settings = otpd
-
-        # Intelligent Viewing constants:
-        self.otiv_settings = otiv
+        settings: dict | None = None,
+        logger: logging.Logger = default_logger,
+    ) -> None:
+        """Initialize Customzer object.
 
-
-
-
-
-
+        Args:
+            settings (dict | None, optional):
+                Customizer settings. Defaults to None.
+            logger (logging.Logger, optional):
+                The loggoing object to be used for all log messages.
+                Defaults to default_logger.
 
-
-        self.m365_settings = m365
+        """
 
-
-        self.core_share_settings = core_share
+        self.logger = logger
 
-        #
-        self.
+        # Create Settings class, raise ValidationError if settings are invalid
+        self.settings = Settings(**settings) if settings is not None else Settings()
 
-        # Initialize Objects
+        # Initialize Objects:
         self.otds_object: OTDS | None = None
         self.otcs_object: OTCS | None = None
         self.otcs_backend_object: OTCS | None = None
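The hunk above replaces the 1.5 per-component settings dataclasses with a single pydantic-based Settings model and an injectable logger. A minimal sketch of the resulting 2.0.0 constructor call, assuming the module path shown in this diff (pyxecm.customizer.customizer); the nested settings keys are illustrative assumptions inferred from the attribute paths used later in the diff (self.settings.otds.*, self.settings.otcs.*), not a documented schema, and any values not supplied here would have to come from the Settings model's own defaults or environment handling:

import logging

from pyxecm.customizer.customizer import Customizer

# One plain dict is validated by the pydantic-based Settings class inside
# Customizer.__init__ (raising ValidationError if it is invalid), and a
# logging.Logger can be injected instead of relying on the module-level
# default_logger. The keys below are illustrative assumptions.
run_logger = logging.getLogger("customizer.run")

customizer = Customizer(
    settings={
        "otds": {"username": "admin"},
        "otcs": {"partition": "Content Server Members"},
    },
    logger=run_logger,
)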
@@ -326,23 +72,31 @@ class Customizer:
         self.m365_object: M365 | None = None
         self.core_share_object: CoreShare | None = None
         self.browser_automation_object: BrowserAutomation | None = None
+        self.otawp_object: OTAWP | None = None
+        self.avts_object: AVTS | None = None
 
     # end initializer
 
-    def log_header(self, text: str, char: str = "=", length: int =
-        """
+    def log_header(self, text: str, char: str = "=", length: int = 120) -> None:
+        """Output a section header in the log file.
 
         Args:
-            text (str):
-
-
+            text (str):
+                Headline text to output into the log file.
+            char (str, optional):
+                The header line character. Defaults to "=".
+            length (int, optional):
+                The maximum line length. Defaults to 120.
+
         Returns:
             None
+
         """
 
         # Calculate the remaining space for the text after adding spaces
         available_space = max(
-            0,
+            0,
+            length - len(text) - 2,
         ) # 2 accounts for the spaces each side of the text
 
         # Calculate the number of characters needed on each side
@@ -353,8 +107,11 @@ class Customizer:
         char_count = max(3, char_count)
 
         # Build the header string, extra_char is either 0 or 1
-        logger.info(
-            "%s %s %s",
+        self.logger.info(
+            "%s %s %s",
+            char * char_count,
+            text,
+            char * (char_count + extra_char),
         )
 
     # end method definition
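The two hunks above rework log_header() to pass the padding pieces to self.logger.info() as separate arguments. A standalone sketch of the centering arithmetic involved, assuming the even split plus one optional extra character implied by the "extra_char is either 0 or 1" comment; format_header is a hypothetical helper for illustration, not part of the package:

def format_header(text: str, char: str = "=", length: int = 120) -> str:
    # Space left for padding once the text and its two surrounding spaces are placed.
    available_space = max(0, length - len(text) - 2)
    # Split the padding across both sides; extra_char absorbs an odd remainder.
    char_count = max(3, available_space // 2)
    extra_char = available_space % 2
    return f"{char * char_count} {text} {char * (char_count + extra_char)}"

print(format_header("Initialize OTDS", length=40))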
@@ -364,235 +121,248 @@ class Customizer:
|
|
|
364
121
|
|
|
365
122
|
Args:
|
|
366
123
|
None
|
|
124
|
+
|
|
367
125
|
Returns:
|
|
368
|
-
|
|
369
|
-
|
|
126
|
+
M365 object:
|
|
127
|
+
M365 object or None if the object couldn't be created or
|
|
128
|
+
the authentication fails.
|
|
129
|
+
|
|
370
130
|
"""
|
|
371
131
|
|
|
372
|
-
logger.info(
|
|
373
|
-
"Microsoft 365 Tenant ID = %s",
|
|
132
|
+
self.logger.info(
|
|
133
|
+
"Microsoft 365 Tenant ID = %s",
|
|
134
|
+
self.settings.m365.tenant_id,
|
|
374
135
|
)
|
|
375
|
-
logger.
|
|
376
|
-
"Microsoft 365 Client ID = %s",
|
|
136
|
+
self.logger.debug(
|
|
137
|
+
"Microsoft 365 Client ID = %s",
|
|
138
|
+
self.settings.m365.client_id,
|
|
377
139
|
)
|
|
378
|
-
logger.debug(
|
|
379
|
-
"Microsoft 365 Client Secret = %s",
|
|
140
|
+
self.logger.debug(
|
|
141
|
+
"Microsoft 365 Client Secret = %s",
|
|
142
|
+
self.settings.m365.client_secret,
|
|
380
143
|
)
|
|
381
|
-
logger.info(
|
|
382
|
-
"Microsoft 365
|
|
383
|
-
|
|
384
|
-
self.m365_settings.user
|
|
385
|
-
if self.m365_settings.user != ""
|
|
386
|
-
else "<not configured>"
|
|
387
|
-
),
|
|
388
|
-
)
|
|
389
|
-
logger.debug(
|
|
390
|
-
"Microsoft 365 Password = %s",
|
|
391
|
-
(
|
|
392
|
-
self.m365_settings.password
|
|
393
|
-
if self.m365_settings.password != ""
|
|
394
|
-
else "<not configured>"
|
|
395
|
-
),
|
|
396
|
-
)
|
|
397
|
-
logger.info(
|
|
398
|
-
"Microsoft 365 Domain = %s", self.m365_settings.domain
|
|
144
|
+
self.logger.info(
|
|
145
|
+
"Microsoft 365 Domain = %s",
|
|
146
|
+
self.settings.m365.domain,
|
|
399
147
|
)
|
|
400
|
-
logger.info(
|
|
401
|
-
"Microsoft 365 Default License SKU = %s",
|
|
148
|
+
self.logger.info(
|
|
149
|
+
"Microsoft 365 Default License SKU = %s",
|
|
150
|
+
self.settings.m365.sku_id,
|
|
402
151
|
)
|
|
403
|
-
logger.info(
|
|
152
|
+
self.logger.info(
|
|
404
153
|
"Microsoft 365 Teams App Name = %s",
|
|
405
|
-
self.
|
|
154
|
+
self.settings.m365.teams_app_name,
|
|
406
155
|
)
|
|
407
|
-
logger.info(
|
|
156
|
+
self.logger.info(
|
|
408
157
|
"Microsoft 365 Teams App External ID = %s",
|
|
409
|
-
self.
|
|
158
|
+
self.settings.m365.teams_app_external_id,
|
|
159
|
+
)
|
|
160
|
+
self.logger.info(
|
|
161
|
+
"Microsoft 365 SharePoint App Root Site = %s",
|
|
162
|
+
self.settings.m365.sharepoint_app_root_site,
|
|
163
|
+
)
|
|
164
|
+
self.logger.info(
|
|
165
|
+
"Microsoft 365 SharePoint App Client ID = %s",
|
|
166
|
+
self.settings.m365.sharepoint_app_client_id,
|
|
167
|
+
)
|
|
168
|
+
self.logger.debug(
|
|
169
|
+
"Microsoft 365 SharePoint App Client Secret = %s",
|
|
170
|
+
self.settings.m365.sharepoint_app_client_secret,
|
|
410
171
|
)
|
|
411
172
|
|
|
412
173
|
m365_object = M365(
|
|
413
|
-
tenant_id=self.
|
|
414
|
-
client_id=self.
|
|
415
|
-
client_secret=self.
|
|
416
|
-
domain=self.
|
|
417
|
-
sku_id=self.
|
|
418
|
-
teams_app_name=self.
|
|
419
|
-
teams_app_external_id=self.
|
|
174
|
+
tenant_id=self.settings.m365.tenant_id,
|
|
175
|
+
client_id=self.settings.m365.client_id,
|
|
176
|
+
client_secret=self.settings.m365.client_secret,
|
|
177
|
+
domain=self.settings.m365.domain,
|
|
178
|
+
sku_id=self.settings.m365.sku_id,
|
|
179
|
+
teams_app_name=self.settings.m365.teams_app_name,
|
|
180
|
+
teams_app_external_id=self.settings.m365.teams_app_external_id,
|
|
181
|
+
sharepoint_app_root_site=self.settings.m365.sharepoint_app_root_site,
|
|
182
|
+
sharepoint_app_client_id=self.settings.m365.sharepoint_app_client_id,
|
|
183
|
+
sharepoint_app_client_secret=self.settings.m365.sharepoint_app_client_secret,
|
|
184
|
+
logger=self.logger,
|
|
420
185
|
)
|
|
421
186
|
|
|
422
187
|
if m365_object and m365_object.authenticate():
|
|
423
|
-
logger.info("Connected to Microsoft Graph API.")
|
|
188
|
+
self.logger.info("Connected to Microsoft Graph API.")
|
|
424
189
|
else:
|
|
425
|
-
logger.error("Failed to connect to Microsoft Graph API.")
|
|
190
|
+
self.logger.error("Failed to connect to Microsoft Graph API.")
|
|
426
191
|
return m365_object
|
|
427
192
|
|
|
428
|
-
|
|
429
|
-
|
|
430
|
-
self.
|
|
431
|
-
|
|
432
|
-
|
|
433
|
-
|
|
434
|
-
|
|
435
|
-
|
|
436
|
-
|
|
437
|
-
|
|
438
|
-
"
|
|
439
|
-
"
|
|
440
|
-
|
|
441
|
-
|
|
442
|
-
|
|
443
|
-
|
|
444
|
-
|
|
445
|
-
|
|
446
|
-
|
|
447
|
-
|
|
448
|
-
|
|
449
|
-
|
|
450
|
-
|
|
451
|
-
|
|
452
|
-
|
|
453
|
-
|
|
454
|
-
|
|
455
|
-
|
|
456
|
-
self.m365_settings.teams_app_external_id,
|
|
457
|
-
)
|
|
458
|
-
response = m365_object.get_teams_apps(
|
|
459
|
-
filter_expression="externalId eq '{}'".format(
|
|
460
|
-
self.m365_settings.teams_app_external_id
|
|
193
|
+
# Check if the Teams App should be updated, we don't do this always due to the bug described below
|
|
194
|
+
if self.settings.m365.update_teams_app:
|
|
195
|
+
self.logger.info(
|
|
196
|
+
"Download M365 Teams App -> '%s' (external ID = %s) from Extended ECM (OTCS)...",
|
|
197
|
+
self.settings.m365.teams_app_name,
|
|
198
|
+
self.settings.m365.teams_app_external_id,
|
|
199
|
+
)
|
|
200
|
+
|
|
201
|
+
# Download MS Teams App from OTCS (this has with 23.2 a nasty side-effect
|
|
202
|
+
# of unsetting 2 checkboxes on that config page - we reset these checkboxes
|
|
203
|
+
# with the settings file "O365Settings.xml"):
|
|
204
|
+
file_path = os.path.join(tempfile.gettempdir(), "ot.xecm.teams.zip")
|
|
205
|
+
response = self.otcs_frontend_object.download_config_file(
|
|
206
|
+
otcs_url_suffix="/cs/cs?func=officegroups.DownloadTeamsPackage",
|
|
207
|
+
file_path=file_path,
|
|
208
|
+
)
|
|
209
|
+
|
|
210
|
+
# Check if the app is already installed in the apps catalog
|
|
211
|
+
# ideally we want to use the
|
|
212
|
+
app_exist = False
|
|
213
|
+
|
|
214
|
+
# If the App External ID is provided via Env variable then we
|
|
215
|
+
# prefer to use it instead of the App name:
|
|
216
|
+
if self.settings.m365.teams_app_external_id:
|
|
217
|
+
self.logger.info(
|
|
218
|
+
"Check if M365 Teams App -> '%s' (%s) is already installed in catalog using external app ID...",
|
|
219
|
+
self.settings.m365.teams_app_name,
|
|
220
|
+
self.settings.m365.teams_app_external_id,
|
|
461
221
|
)
|
|
462
|
-
|
|
463
|
-
|
|
464
|
-
|
|
465
|
-
|
|
466
|
-
key="externalId",
|
|
467
|
-
value=self.m365_settings.teams_app_external_id,
|
|
468
|
-
)
|
|
469
|
-
# If the app could not be found via the external ID we fall back to
|
|
470
|
-
# search for the app by name:
|
|
471
|
-
if not app_exist:
|
|
472
|
-
if self.m365_settings.teams_app_external_id:
|
|
473
|
-
logger.info(
|
|
474
|
-
"Could not find M365 Teams App using the external ID -> %s. Try to lookup the app by name -> '%s' instead...",
|
|
475
|
-
self.m365_settings.teams_app_external_id,
|
|
476
|
-
self.m365_settings.teams_app_name,
|
|
222
|
+
response = m365_object.get_teams_apps(
|
|
223
|
+
filter_expression="externalId eq '{}'".format(
|
|
224
|
+
self.settings.m365.teams_app_external_id,
|
|
225
|
+
),
|
|
477
226
|
)
|
|
478
|
-
|
|
479
|
-
|
|
480
|
-
|
|
481
|
-
|
|
482
|
-
|
|
483
|
-
filter_expression="contains(displayName, '{}')".format(
|
|
484
|
-
self.m365_settings.teams_app_name
|
|
227
|
+
# this should always be True as ID is unique:
|
|
228
|
+
app_exist = m365_object.exist_result_item(
|
|
229
|
+
response=response,
|
|
230
|
+
key="externalId",
|
|
231
|
+
value=self.settings.m365.teams_app_external_id,
|
|
485
232
|
)
|
|
486
|
-
|
|
487
|
-
|
|
488
|
-
|
|
489
|
-
|
|
490
|
-
|
|
491
|
-
|
|
492
|
-
|
|
493
|
-
|
|
494
|
-
|
|
495
|
-
|
|
496
|
-
|
|
497
|
-
|
|
498
|
-
app_catalog_name = m365_object.get_result_value(response, "displayName")
|
|
499
|
-
if app_catalog_name != self.m365_settings.teams_app_name:
|
|
500
|
-
logger.warning(
|
|
501
|
-
"The Extended ECM app name -> '%s' in the M365 Teams catalog does not match the defined app name '%s'! Somebody must have manually installed the app with the wrong name!",
|
|
502
|
-
app_catalog_name,
|
|
503
|
-
self.m365_settings.teams_app_name,
|
|
233
|
+
# If the app could not be found via the external ID we fall back to
|
|
234
|
+
# search for the app by name:
|
|
235
|
+
if not app_exist:
|
|
236
|
+
if self.settings.m365.teams_app_external_id:
|
|
237
|
+
self.logger.info(
|
|
238
|
+
"Could not find M365 Teams App by external ID -> %s. Try to lookup the app by name -> '%s' instead...",
|
|
239
|
+
self.settings.m365.teams_app_external_id,
|
|
240
|
+
self.settings.m365.teams_app_name,
|
|
241
|
+
)
|
|
242
|
+
self.logger.info(
|
|
243
|
+
"Check if M365 Teams App -> '%s' is already installed in catalog (using app name)...",
|
|
244
|
+
self.settings.m365.teams_app_name,
|
|
504
245
|
)
|
|
505
|
-
|
|
506
|
-
|
|
507
|
-
|
|
508
|
-
|
|
509
|
-
|
|
510
|
-
|
|
511
|
-
|
|
512
|
-
|
|
513
|
-
|
|
514
|
-
|
|
515
|
-
|
|
516
|
-
|
|
517
|
-
|
|
518
|
-
|
|
519
|
-
|
|
520
|
-
|
|
521
|
-
|
|
522
|
-
self.
|
|
523
|
-
|
|
524
|
-
|
|
525
|
-
|
|
526
|
-
|
|
527
|
-
|
|
528
|
-
|
|
529
|
-
|
|
530
|
-
|
|
531
|
-
|
|
532
|
-
|
|
246
|
+
response = m365_object.get_teams_apps(
|
|
247
|
+
filter_expression="contains(displayName, '{}')".format(
|
|
248
|
+
self.settings.m365.teams_app_name,
|
|
249
|
+
),
|
|
250
|
+
)
|
|
251
|
+
app_exist = m365_object.exist_result_item(
|
|
252
|
+
response=response,
|
|
253
|
+
key="displayName",
|
|
254
|
+
value=self.settings.m365.teams_app_name,
|
|
255
|
+
)
|
|
256
|
+
if app_exist:
|
|
257
|
+
# We double check that we have the effective name of the app
|
|
258
|
+
# in the catalog to avoid errors when the app is looked up
|
|
259
|
+
# by its wrong name in the customizer automation. This can
|
|
260
|
+
# happen if the app is installed manually or the environment
|
|
261
|
+
# variable is set to a wrong name.
|
|
262
|
+
app_catalog_name = m365_object.get_result_value(response, "displayName")
|
|
263
|
+
if app_catalog_name != self.settings.m365.teams_app_name:
|
|
264
|
+
self.logger.warning(
|
|
265
|
+
"The Extended ECM app name -> '%s' in the M365 Teams catalog does not match the defined app name -> '%s'!",
|
|
266
|
+
app_catalog_name,
|
|
267
|
+
self.settings.m365.teams_app_name,
|
|
268
|
+
)
|
|
269
|
+
# Align the name in the settings dict with the existing name in the catalog.
|
|
270
|
+
self.settings.m365.teams_app_name = app_catalog_name
|
|
271
|
+
# Align the name in the M365 object config dict with the existing name in the catalog.
|
|
272
|
+
m365_object.config()["teamsAppName"] = app_catalog_name
|
|
273
|
+
app_internal_id = m365_object.get_result_value(
|
|
274
|
+
response=response,
|
|
275
|
+
key="id",
|
|
276
|
+
index=0,
|
|
277
|
+
) # 0 = Index = first item
|
|
278
|
+
# Store the internal ID for later use
|
|
279
|
+
m365_object.config()["teamsAppInternalId"] = app_internal_id
|
|
280
|
+
app_catalog_version = m365_object.get_result_value(
|
|
281
|
+
response=response,
|
|
282
|
+
key="version",
|
|
283
|
+
index=0,
|
|
284
|
+
sub_dict_name="appDefinitions",
|
|
285
|
+
)
|
|
286
|
+
self.logger.info(
|
|
287
|
+
"M365 Teams App -> '%s' (external ID = %s) is already in app catalog with app internal ID -> %s and version -> %s. Check if we have a newer version to upload...",
|
|
288
|
+
self.settings.m365.teams_app_name,
|
|
289
|
+
self.settings.m365.teams_app_external_id,
|
|
290
|
+
app_internal_id,
|
|
533
291
|
app_catalog_version,
|
|
534
|
-
app_download_version,
|
|
535
292
|
)
|
|
293
|
+
app_path = os.path.join(tempfile.gettempdir(), "ot.xecm.teams.zip")
|
|
294
|
+
app_download_version = m365_object.extract_version_from_app_manifest(
|
|
295
|
+
app_path=app_path,
|
|
296
|
+
)
|
|
297
|
+
if app_catalog_version < app_download_version:
|
|
298
|
+
self.logger.info(
|
|
299
|
+
"Upgrading Extended ECM Teams App in catalog from version -> %s to version -> %s...",
|
|
300
|
+
app_catalog_version,
|
|
301
|
+
app_download_version,
|
|
302
|
+
)
|
|
303
|
+
app_path = os.path.join(tempfile.gettempdir(), "ot.xecm.teams.zip")
|
|
304
|
+
response = m365_object.upload_teams_app(
|
|
305
|
+
app_path=app_path,
|
|
306
|
+
update_existing_app=True,
|
|
307
|
+
app_catalog_id=app_internal_id,
|
|
308
|
+
)
|
|
309
|
+
app_internal_id = m365_object.get_result_value(
|
|
310
|
+
response=response,
|
|
311
|
+
key="teamsAppId",
|
|
312
|
+
)
|
|
313
|
+
if app_internal_id:
|
|
314
|
+
self.logger.info(
|
|
315
|
+
"Successfully upgraded Extended ECM Teams App -> '%s' (external ID = %s). Internal App ID -> %s",
|
|
316
|
+
self.settings.m365.teams_app_name,
|
|
317
|
+
self.settings.m365.teams_app_external_id,
|
|
318
|
+
app_internal_id,
|
|
319
|
+
)
|
|
320
|
+
# Store the internal ID for later use
|
|
321
|
+
m365_object.config()["teamsAppInternalId"] = app_internal_id
|
|
322
|
+
else:
|
|
323
|
+
self.logger.error(
|
|
324
|
+
"Failed to upgrade Extended ECM Teams App -> '%s' (external ID = %s).",
|
|
325
|
+
self.settings.m365.teams_app_name,
|
|
326
|
+
self.settings.m365.teams_app_external_id,
|
|
327
|
+
)
|
|
328
|
+
else:
|
|
329
|
+
self.logger.info(
|
|
330
|
+
"No upgrade required. The downloaded version -> %s is not newer than the version -> %s which is already in the M365 app catalog.",
|
|
331
|
+
app_download_version,
|
|
332
|
+
app_catalog_version,
|
|
333
|
+
)
|
|
334
|
+
else: # Extended ECM M365 Teams app is not yet installed...
|
|
335
|
+
self.logger.info(
|
|
336
|
+
"Extended Teams ECM App -> '%s' (external ID = %s) is not yet in app catalog. Installing as new app...",
|
|
337
|
+
self.settings.m365.teams_app_name,
|
|
338
|
+
self.settings.m365.teams_app_external_id,
|
|
339
|
+
)
|
|
340
|
+
app_path = os.path.join(tempfile.gettempdir(), "ot.xecm.teams.zip")
|
|
536
341
|
response = m365_object.upload_teams_app(
|
|
537
|
-
app_path=
|
|
538
|
-
update_existing_app=
|
|
539
|
-
app_catalog_id=app_internal_id,
|
|
342
|
+
app_path=app_path,
|
|
343
|
+
update_existing_app=False,
|
|
540
344
|
)
|
|
541
345
|
app_internal_id = m365_object.get_result_value(
|
|
542
346
|
response=response,
|
|
543
|
-
key="teamsAppId"
|
|
347
|
+
key="id", # for new installs it is NOT "teamsAppId" but "id" as we use a different M365 Graph API endpoint !!!
|
|
544
348
|
)
|
|
545
349
|
if app_internal_id:
|
|
546
|
-
logger.info(
|
|
547
|
-
"Successfully
|
|
548
|
-
self.
|
|
549
|
-
self.
|
|
350
|
+
self.logger.info(
|
|
351
|
+
"Successfully installed Extended ECM Teams App -> '%s' (external ID = %s). Internal App ID -> %s",
|
|
352
|
+
self.settings.m365.teams_app_name,
|
|
353
|
+
self.settings.m365.teams_app_external_id,
|
|
550
354
|
app_internal_id,
|
|
551
355
|
)
|
|
552
356
|
# Store the internal ID for later use
|
|
553
357
|
m365_object.config()["teamsAppInternalId"] = app_internal_id
|
|
554
358
|
else:
|
|
555
|
-
logger.error(
|
|
556
|
-
"Failed to
|
|
557
|
-
self.
|
|
558
|
-
self.
|
|
359
|
+
self.logger.error(
|
|
360
|
+
"Failed to install Extended ECM Teams App -> '%s' (external ID = %s).",
|
|
361
|
+
self.settings.m365.teams_app_name,
|
|
362
|
+
self.settings.m365.teams_app_external_id,
|
|
559
363
|
)
|
|
560
|
-
else:
|
|
561
|
-
logger.info(
|
|
562
|
-
"No upgrade required. The downloaded version -> %s is not newer than the version -> %s which is already in the M365 app catalog.",
|
|
563
|
-
app_download_version,
|
|
564
|
-
app_catalog_version,
|
|
565
|
-
)
|
|
566
|
-
else: # Extended ECM M365 Teams app is not yet installed...
|
|
567
|
-
logger.info(
|
|
568
|
-
"Extended Teams ECM App -> '%s' (external ID = %s) is not yet in app catalog. Installing as new app...",
|
|
569
|
-
self.m365_settings.teams_app_name,
|
|
570
|
-
self.m365_settings.teams_app_external_id,
|
|
571
|
-
)
|
|
572
|
-
response = m365_object.upload_teams_app(
|
|
573
|
-
app_path="/tmp/ot.xecm.teams.zip", update_existing_app=False
|
|
574
|
-
)
|
|
575
|
-
app_internal_id = m365_object.get_result_value(
|
|
576
|
-
response=response,
|
|
577
|
-
key="id", # for new installs it is NOT "teamsAppId" but "id" as we use a different M365 Graph API endpoint !!!
|
|
578
|
-
)
|
|
579
|
-
if app_internal_id:
|
|
580
|
-
logger.info(
|
|
581
|
-
"Successfully installed Extended ECM Teams App -> '%s' (external ID = %s). Internal App ID -> %s",
|
|
582
|
-
self.m365_settings.teams_app_name,
|
|
583
|
-
self.m365_settings.teams_app_external_id,
|
|
584
|
-
app_internal_id,
|
|
585
|
-
)
|
|
586
|
-
# Store the internal ID for later use
|
|
587
|
-
m365_object.config()["teamsAppInternalId"] = app_internal_id
|
|
588
|
-
else:
|
|
589
|
-
logger.error(
|
|
590
|
-
"Failed to install Extended ECM Teams App -> '%s' (external ID = %s).",
|
|
591
|
-
self.m365_settings.teams_app_name,
|
|
592
|
-
self.m365_settings.teams_app_external_id,
|
|
593
|
-
)
|
|
594
364
|
|
|
595
|
-
# logger.info("======== Upload Outlook Add-In ============")
|
|
365
|
+
# self.logger.info("======== Upload Outlook Add-In ============")
|
|
596
366
|
|
|
597
367
|
# # Download MS Outlook Add-In from OTCS:
|
|
598
368
|
# MANIFEST_FILE = "/tmp/BusinessWorkspace.Manifest.xml"
|
|
@@ -600,77 +370,125 @@ class Customizer:
|
|
|
600
370
|
# "/cs/cs?func=outlookaddin.DownloadManifest",
|
|
601
371
|
# MANIFEST_FILE,
|
|
602
372
|
# "DeployedContentServer",
|
|
603
|
-
# self.
|
|
373
|
+
# self.settings.otcs.public_url,
|
|
604
374
|
# ):
|
|
605
|
-
# logger.error("Failed to download M365 Outlook Add-In from Extended ECM!")
|
|
375
|
+
# self.logger.error("Failed to download M365 Outlook Add-In from Extended ECM!")
|
|
606
376
|
# else:
|
|
607
377
|
# # THIS IS NOT IMPLEMENTED DUE TO LACK OF M365 GRAPH API SUPPORT!
|
|
608
378
|
# # Do it manually for now: https://admin.microsoft.com/#/Settings/IntegratedApps
|
|
609
|
-
# logger.info("Successfully downloaded M365 Outlook Add-In from Extended ECM to %s", MANIFEST_FILE)
|
|
379
|
+
# self.logger.info("Successfully downloaded M365 Outlook Add-In from Extended ECM to %s", MANIFEST_FILE)
|
|
610
380
|
# m365_object.upload_outlook_app(MANIFEST_FILE)
|
|
611
381
|
|
|
612
382
|
return m365_object
|
|
613
383
|
|
|
614
384
|
# end method definition
|
|
615
385
|
|
|
616
|
-
def
|
|
386
|
+
def init_avts(self) -> AVTS:
|
|
617
387
|
"""Initialize the Core Share object we use to talk to the Core Share API.
|
|
618
388
|
|
|
619
389
|
Args:
|
|
620
390
|
None
|
|
391
|
+
|
|
621
392
|
Returns:
|
|
622
|
-
object:
|
|
623
|
-
|
|
393
|
+
AVTS object:
|
|
394
|
+
Aviator Search object or None if the object couldn't be created or
|
|
395
|
+
the authentication fails.
|
|
396
|
+
|
|
624
397
|
"""
|
|
625
398
|
|
|
626
|
-
logger.info(
|
|
627
|
-
"
|
|
399
|
+
self.logger.info(
|
|
400
|
+
"Aviator Search Base URL = %s",
|
|
401
|
+
self.settings.avts.base_url,
|
|
402
|
+
)
|
|
403
|
+
self.logger.info(
|
|
404
|
+
"Aviator Search OTDS URL = %s",
|
|
405
|
+
self.settings.avts.otds_url,
|
|
628
406
|
)
|
|
629
|
-
logger.info(
|
|
630
|
-
"
|
|
407
|
+
self.logger.info(
|
|
408
|
+
"Aviator Search Client ID = %s",
|
|
409
|
+
self.settings.avts.client_id,
|
|
631
410
|
)
|
|
632
|
-
logger.
|
|
633
|
-
"
|
|
411
|
+
self.logger.debug(
|
|
412
|
+
"Aviator Search Client Secret = %s",
|
|
413
|
+
self.settings.avts.client_secret,
|
|
634
414
|
)
|
|
635
|
-
logger.
|
|
415
|
+
self.logger.info(
|
|
416
|
+
"Aviator Search User ID = %s",
|
|
417
|
+
self.settings.avts.username,
|
|
418
|
+
)
|
|
419
|
+
self.logger.debug(
|
|
420
|
+
"Aviator Search User Password = %s",
|
|
421
|
+
self.settings.avts.password,
|
|
422
|
+
)
|
|
423
|
+
|
|
424
|
+
return AVTS(
|
|
425
|
+
otds_url=str(self.settings.avts.otds_url),
|
|
426
|
+
base_url=str(self.settings.avts.base_url),
|
|
427
|
+
client_id=self.settings.avts.client_id,
|
|
428
|
+
client_secret=self.settings.avts.client_secret,
|
|
429
|
+
username=self.settings.avts.username,
|
|
430
|
+
password=self.settings.avts.password,
|
|
431
|
+
logger=self.logger,
|
|
432
|
+
)
|
|
433
|
+
|
|
434
|
+
# end method definition
|
|
435
|
+
|
|
436
|
+
def init_coreshare(self) -> CoreShare:
|
|
437
|
+
"""Initialize the Core Share object we use to talk to the Core Share API.
|
|
438
|
+
|
|
439
|
+
Args:
|
|
440
|
+
None
|
|
441
|
+
Returns:
|
|
442
|
+
CoreShare object:
|
|
443
|
+
Core Share object or None if the object couldn't be created or
|
|
444
|
+
the authentication fails.
|
|
445
|
+
|
|
446
|
+
"""
|
|
447
|
+
|
|
448
|
+
self.logger.info(
|
|
449
|
+
"Core Share Base URL = %s",
|
|
450
|
+
self.settings.coreshare.base_url,
|
|
451
|
+
)
|
|
452
|
+
self.logger.info(
|
|
453
|
+
"Core Share SSO URL = %s",
|
|
454
|
+
self.settings.coreshare.sso_url,
|
|
455
|
+
)
|
|
456
|
+
self.logger.info(
|
|
457
|
+
"Core Share Client ID = %s",
|
|
458
|
+
self.settings.coreshare.client_id,
|
|
459
|
+
)
|
|
460
|
+
self.logger.debug(
|
|
636
461
|
"Core Share Client Secret = %s",
|
|
637
|
-
self.
|
|
462
|
+
self.settings.coreshare.client_secret,
|
|
638
463
|
)
|
|
639
|
-
logger.info(
|
|
464
|
+
self.logger.info(
|
|
640
465
|
"Core Share User = %s",
|
|
641
|
-
(
|
|
642
|
-
self.core_share_settings.username
|
|
643
|
-
if self.core_share_settings.username != ""
|
|
644
|
-
else "<not configured>"
|
|
645
|
-
),
|
|
466
|
+
(self.settings.coreshare.username if self.settings.coreshare.username != "" else "<not configured>"),
|
|
646
467
|
)
|
|
647
|
-
logger.debug(
|
|
468
|
+
self.logger.debug(
|
|
648
469
|
"Core Share Password = %s",
|
|
649
|
-
(
|
|
650
|
-
self.core_share_settings.password
|
|
651
|
-
if self.core_share_settings.password != ""
|
|
652
|
-
else "<not configured>"
|
|
653
|
-
),
|
|
470
|
+
(self.settings.coreshare.password if self.settings.coreshare.password != "" else "<not configured>"),
|
|
654
471
|
)
|
|
655
472
|
|
|
656
473
|
core_share_object = CoreShare(
|
|
657
|
-
base_url=self.
|
|
658
|
-
sso_url=self.
|
|
659
|
-
client_id=self.
|
|
660
|
-
client_secret=self.
|
|
661
|
-
username=self.
|
|
662
|
-
password=self.
|
|
474
|
+
base_url=self.settings.coreshare.base_url,
|
|
475
|
+
sso_url=self.settings.coreshare.sso_url,
|
|
476
|
+
client_id=self.settings.coreshare.client_id,
|
|
477
|
+
client_secret=self.settings.coreshare.client_secret,
|
|
478
|
+
username=self.settings.coreshare.username,
|
|
479
|
+
password=self.settings.coreshare.password.get_secret_value(),
|
|
480
|
+
logger=self.logger,
|
|
663
481
|
)
|
|
664
482
|
|
|
665
483
|
if core_share_object and core_share_object.authenticate_admin():
|
|
666
|
-
logger.info("Connected to Core Share as Tenant Admin.")
|
|
484
|
+
self.logger.info("Connected to Core Share as Tenant Admin.")
|
|
667
485
|
else:
|
|
668
|
-
logger.error("Failed to connect to Core Share as Tenant Admin.")
|
|
486
|
+
self.logger.error("Failed to connect to Core Share as Tenant Admin.")
|
|
669
487
|
|
|
670
488
|
if core_share_object and core_share_object.authenticate_user():
|
|
671
|
-
logger.info("Connected to Core Share as Tenant Service User.")
|
|
489
|
+
self.logger.info("Connected to Core Share as Tenant Service User.")
|
|
672
490
|
else:
|
|
673
|
-
logger.error("Failed to connect to Core Share as Tenant Service User.")
|
|
491
|
+
self.logger.error("Failed to connect to Core Share as Tenant Service User.")
|
|
674
492
|
|
|
675
493
|
return core_share_object
|
|
676
494
|
|
|
@@ -681,64 +499,66 @@ class Customizer:
|
|
|
681
499
|
|
|
682
500
|
Args:
|
|
683
501
|
None
|
|
502
|
+
|
|
684
503
|
Returns:
|
|
685
504
|
K8s: K8s object
|
|
505
|
+
|
|
686
506
|
Side effects:
|
|
687
507
|
The global variables otcs_replicas_frontend and otcs_replicas_backend are initialized
|
|
508
|
+
|
|
688
509
|
"""
|
|
689
510
|
|
|
690
|
-
logger.info("Connection parameters Kubernetes (K8s):")
|
|
691
|
-
logger.info("K8s
|
|
692
|
-
logger.info(
|
|
693
|
-
logger.info(
|
|
511
|
+
self.logger.info("Connection parameters Kubernetes (K8s):")
|
|
512
|
+
self.logger.info("K8s namespace = %s", self.settings.k8s.namespace)
|
|
513
|
+
self.logger.info(
|
|
694
514
|
"K8s kubeconfig file = %s",
|
|
695
|
-
self.
|
|
515
|
+
self.settings.k8s.kubeconfig_file,
|
|
696
516
|
)
|
|
697
517
|
|
|
698
518
|
k8s_object = K8s(
|
|
699
|
-
|
|
700
|
-
|
|
701
|
-
|
|
519
|
+
kubeconfig_file=self.settings.k8s.kubeconfig_file,
|
|
520
|
+
namespace=self.settings.k8s.namespace,
|
|
521
|
+
logger=self.logger,
|
|
702
522
|
)
|
|
703
523
|
if k8s_object:
|
|
704
|
-
logger.info("Kubernetes API is ready now.")
|
|
524
|
+
self.logger.info("Kubernetes API is ready now.")
|
|
705
525
|
else:
|
|
706
|
-
logger.error("Cannot establish connection to Kubernetes.")
|
|
526
|
+
self.logger.error("Cannot establish connection to Kubernetes.")
|
|
707
527
|
|
|
708
528
|
# Get number of replicas for frontend:
|
|
709
529
|
otcs_frontend_scale = k8s_object.get_stateful_set_scale(
|
|
710
|
-
self.
|
|
530
|
+
sts_name=self.settings.k8s.sts_otcs_frontend,
|
|
711
531
|
)
|
|
712
532
|
if not otcs_frontend_scale:
|
|
713
|
-
logger.error(
|
|
533
|
+
self.logger.error(
|
|
714
534
|
"Cannot find Kubernetes Stateful Set -> '%s' for OTCS Frontends!",
|
|
715
|
-
self.
|
|
535
|
+
self.settings.k8s.sts_otcs_frontend,
|
|
716
536
|
)
|
|
717
537
|
sys.exit()
|
|
718
538
|
|
|
719
|
-
self.
|
|
720
|
-
logger.info(
|
|
539
|
+
self.settings.k8s.sts_otcs_frontend_replicas = otcs_frontend_scale.spec.replicas
|
|
540
|
+
self.logger.info(
|
|
721
541
|
"Stateful Set -> '%s' has -> %s replicas",
|
|
722
|
-
self.
|
|
723
|
-
self.
|
|
542
|
+
self.settings.k8s.sts_otcs_frontend,
|
|
543
|
+
self.settings.k8s.sts_otcs_frontend_replicas,
|
|
724
544
|
)
|
|
725
545
|
|
|
726
546
|
# Get number of replicas for backend:
|
|
727
547
|
otcs_backend_scale = k8s_object.get_stateful_set_scale(
|
|
728
|
-
self.
|
|
548
|
+
sts_name=self.settings.k8s.sts_otcs_admin,
|
|
729
549
|
)
|
|
730
550
|
if not otcs_backend_scale:
|
|
731
|
-
logger.error(
|
|
551
|
+
self.logger.error(
|
|
732
552
|
"Cannot find Kubernetes Stateful Set -> '%s' for OTCS Backends!",
|
|
733
|
-
self.
|
|
553
|
+
self.settings.k8s.sts_otcs_admin,
|
|
734
554
|
)
|
|
735
555
|
sys.exit()
|
|
736
556
|
|
|
737
|
-
self.
|
|
738
|
-
logger.info(
|
|
557
|
+
self.settings.k8s.sts_otcs_admin_replicas = otcs_backend_scale.spec.replicas
|
|
558
|
+
self.logger.info(
|
|
739
559
|
"Stateful Set -> '%s' has -> %s replicas",
|
|
740
|
-
self.
|
|
741
|
-
self.
|
|
560
|
+
self.settings.k8s.sts_otcs_admin,
|
|
561
|
+
self.settings.k8s.sts_otcs_admin_replicas,
|
|
742
562
|
)
|
|
743
563
|
|
|
744
564
|
return k8s_object
|
|
@@ -750,52 +570,67 @@ class Customizer:
|
|
|
750
570
|
|
|
751
571
|
Args:
|
|
752
572
|
None
|
|
573
|
+
|
|
753
574
|
Returns:
|
|
754
|
-
|
|
575
|
+
OTDS:
|
|
576
|
+
The OTDS object
|
|
577
|
+
|
|
755
578
|
"""
|
|
756
579
|
|
|
757
|
-
logger.info("Connection parameters OTDS:")
|
|
758
|
-
logger.info("OTDS Protocol = %s", self.
|
|
759
|
-
logger.info(
|
|
760
|
-
|
|
761
|
-
|
|
762
|
-
|
|
763
|
-
logger.info(
|
|
764
|
-
|
|
765
|
-
|
|
766
|
-
|
|
580
|
+
self.logger.info("Connection parameters OTDS:")
|
|
581
|
+
self.logger.info("OTDS Protocol = %s", self.settings.otds.url.scheme)
|
|
582
|
+
self.logger.info(
|
|
583
|
+
"OTDS Hostname = %s",
|
|
584
|
+
self.settings.otds.url_internal.host,
|
|
585
|
+
)
|
|
586
|
+
self.logger.info(
|
|
587
|
+
"OTDS Port = %s",
|
|
588
|
+
str(self.settings.otds.url.port),
|
|
589
|
+
)
|
|
590
|
+
self.logger.info("OTDS Public Protocol = %s", self.settings.otds.url.scheme)
|
|
591
|
+
self.logger.info("OTDS Public URL = %s", self.settings.otds.url.host)
|
|
592
|
+
self.logger.info("OTDS Public Port = %s", self.settings.otds.url.port)
|
|
593
|
+
self.logger.info("OTDS Admin User = %s", self.settings.otds.username)
|
|
594
|
+
self.logger.debug("OTDS Admin Password = %s", self.settings.otds.password)
|
|
595
|
+
self.logger.debug("OTDS Ticket = %s", self.settings.otds.ticket)
|
|
596
|
+
self.logger.info(
|
|
597
|
+
"OTDS Admin Partition = %s",
|
|
598
|
+
self.settings.otds.admin_partition,
|
|
599
|
+
)
|
|
767
600
|
|
|
768
601
|
otds_object = OTDS(
|
|
769
|
-
protocol=self.
|
|
770
|
-
hostname=self.
|
|
771
|
-
port=self.
|
|
772
|
-
username=self.
|
|
773
|
-
password=self.
|
|
774
|
-
otds_ticket=self.
|
|
602
|
+
protocol=self.settings.otds.url_internal.scheme,
|
|
603
|
+
hostname=self.settings.otds.url_internal.host,
|
|
604
|
+
port=self.settings.otds.url_internal.port,
|
|
605
|
+
username=self.settings.otds.username,
|
|
606
|
+
password=self.settings.otds.password.get_secret_value(),
|
|
607
|
+
otds_ticket=self.settings.otds.ticket,
|
|
608
|
+
bind_password=self.settings.otds.bind_password.get_secret_value(),
|
|
609
|
+
logger=self.logger,
|
|
775
610
|
)
|
|
776
611
|
|
|
777
|
-
logger.info("Authenticating to OTDS...")
|
|
612
|
+
self.logger.info("Authenticating to OTDS...")
|
|
778
613
|
otds_cookie = otds_object.authenticate()
|
|
779
614
|
while otds_cookie is None:
|
|
780
|
-
logger.
|
|
615
|
+
self.logger.info("Waiting 30 seconds for OTDS to become ready...")
|
|
781
616
|
time.sleep(30)
|
|
782
617
|
otds_cookie = otds_object.authenticate()
|
|
783
|
-
logger.info("OTDS is ready now.")
|
|
618
|
+
self.logger.info("OTDS is ready now.")
|
|
784
619
|
|
|
785
|
-
logger.info("Enable OTDS audit...")
|
|
620
|
+
self.logger.info("Enable OTDS audit...")
|
|
786
621
|
|
|
787
|
-
if self.
|
|
622
|
+
if self.settings.otds.enable_audit:
|
|
788
623
|
otds_object.enable_audit()
|
|
789
624
|
|
|
790
|
-
if self.
|
|
791
|
-
logger.info("Disable OTDS password expiry...")
|
|
625
|
+
if self.settings.otds.disable_password_policy:
|
|
626
|
+
self.logger.info("Disable OTDS password expiry...")
|
|
792
627
|
# Setting the value to 0 disables password expiry.
|
|
793
628
|
# The default is 90 days and we may have Terrarium
|
|
794
629
|
# instances that are running longer than that. This
|
|
795
630
|
# avoids problems with customerizer re-runs of
|
|
796
631
|
# instances that are > 90 days old.
|
|
797
632
|
otds_object.update_password_policy(
|
|
798
|
-
update_values={"passwordMaximumDuration": 0}
|
|
633
|
+
update_values={"passwordMaximumDuration": 0},
|
|
799
634
|
)
|
|
800
635
|
|
|
801
636
|
return otds_object
|
|
@@ -804,76 +639,83 @@ class Customizer:
|
|
|
804
639
|
|
|
805
640
|
def init_otac(self) -> OTAC:
|
|
806
641
|
"""Initialize the OTAC object and parameters.
|
|
807
|
-
|
|
808
|
-
|
|
642
|
+
|
|
643
|
+
Configure the Archive Server as a known server
|
|
644
|
+
if environment variable OTAC_KNOWN_SERVER is set.
|
|
809
645
|
|
|
810
646
|
Args: None
|
|
811
|
-
|
|
812
|
-
|
|
647
|
+
|
|
648
|
+
Returns:
|
|
649
|
+
The OTAC object.
|
|
650
|
+
|
|
813
651
|
"""
|
|
814
652
|
|
|
815
|
-
logger.info("Connection parameters OTAC:")
|
|
816
|
-
logger.info("OTAC
|
|
817
|
-
logger.info("OTAC
|
|
818
|
-
logger.info("OTAC
|
|
819
|
-
logger.
|
|
820
|
-
logger.info(
|
|
821
|
-
logger.debug("OTAC Admin Password = %s", self.otac_settings.password)
|
|
822
|
-
logger.info(
|
|
653
|
+
self.logger.info("Connection parameters OTAC:")
|
|
654
|
+
self.logger.info("OTAC URL = %s", str(self.settings.otac.url))
|
|
655
|
+
self.logger.info("OTAC URL internal = %s", str(self.settings.otac.url_internal))
|
|
656
|
+
self.logger.info("OTAC Admin User = %s", self.settings.otac.username)
|
|
657
|
+
self.logger.debug("OTAC Admin Password = %s", self.settings.otac.password)
|
|
658
|
+
self.logger.info(
|
|
823
659
|
"OTAC Known Server = %s",
|
|
824
|
-
(
|
|
825
|
-
self.otac_settings.known_server
|
|
826
|
-
if self.otac_settings.known_server != ""
|
|
827
|
-
else "<not configured>"
|
|
828
|
-
),
|
|
660
|
+
(self.settings.otac.known_server if self.settings.otac.known_server != "" else "<not configured>"),
|
|
829
661
|
)
|
|
830
662
|
|
|
831
663
|
otac_object = OTAC(
|
|
832
|
-
self.
|
|
833
|
-
self.
|
|
834
|
-
int(self.
|
|
835
|
-
self.
|
|
836
|
-
self.
|
|
837
|
-
self.
|
|
838
|
-
self.
|
|
664
|
+
self.settings.otac.url_internal.scheme,
|
|
665
|
+
self.settings.otac.url_internal.host,
|
|
666
|
+
int(self.settings.otac.url_internal.port),
|
|
667
|
+
self.settings.otac.username,
|
|
668
|
+
self.settings.otac.password.get_secret_value(),
|
|
669
|
+
self.settings.otds.username,
|
|
670
|
+
self.settings.otds.password.get_secret_value(),
|
|
671
|
+
logger=self.logger,
|
|
839
672
|
)
|
|
840
673
|
|
|
674
|
+
self.logger.info("Authenticating to OTAC...")
|
|
675
|
+
otac_cookie = otac_object.authenticate()
|
|
676
|
+
while otac_cookie is None:
|
|
677
|
+
self.logger.info("Waiting 30 seconds for OTAC to become ready...")
|
|
678
|
+
time.sleep(30)
|
|
679
|
+
otac_cookie = otac_object.authenticate()
|
|
680
|
+
self.logger.info("OTAC is ready now.")
|
|
681
|
+
|
|
841
682
|
# This is a work-around as OTCS container automation is not
|
|
842
683
|
# enabling the certificate reliable.
|
|
843
684
|
response = otac_object.enable_certificate(
|
|
844
|
-
cert_name="SP_otcs-admin-0",
|
|
685
|
+
cert_name="SP_otcs-admin-0",
|
|
686
|
+
cert_type="ARC",
|
|
845
687
|
)
|
|
846
688
|
if not response:
|
|
847
|
-
logger.error("Failed to enable OTAC certificate for Extended ECM!")
|
|
689
|
+
self.logger.error("Failed to enable OTAC certificate for Extended ECM!")
|
|
848
690
|
else:
|
|
849
|
-
logger.info("Successfully enabled OTAC certificate for Extended ECM!")
|
|
691
|
+
self.logger.info("Successfully enabled OTAC certificate for Extended ECM!")
|
|
850
692
|
|
|
851
693
|
# is there a known server configured for Archive Center (to sync content with)
|
|
852
|
-
if otac_object and self.
|
|
694
|
+
if otac_object and self.settings.otac.known_server != "":
|
|
853
695
|
# wait until the OTAC pod is in ready state
|
|
854
|
-
logger.info("Waiting for Archive Center to become ready...")
|
|
855
|
-
self.k8s_object.wait_pod_condition(self.
|
|
696
|
+
self.logger.info("Waiting for Archive Center to become ready...")
|
|
697
|
+
self.k8s_object.wait_pod_condition(self.settings.k8s.pod_otac, "Ready")
|
|
856
698
|
|
|
857
|
-
logger.info("Configure known host for Archive Center...")
|
|
699
|
+
self.logger.info("Configure known host for Archive Center...")
|
|
858
700
|
response = otac_object.exec_command(
|
|
859
|
-
f"cf_create_host {self.
|
|
701
|
+
f"cf_create_host {self.settings.otac.known_server} 0 /archive 8080 8090",
|
|
860
702
|
)
|
|
861
703
|
if not response or not response.ok:
|
|
862
|
-
logger.error("Failed to configure known host for Archive Center!")
|
|
704
|
+
self.logger.error("Failed to configure known host for Archive Center!")
|
|
863
705
|
|
|
864
|
-
logger.info("Configure host alias for Archive Center...")
|
|
706
|
+
self.logger.info("Configure host alias for Archive Center...")
|
|
865
707
|
response = otac_object.exec_command(
|
|
866
|
-
f"cf_set_variable MY_HOST_ALIASES {self.
|
|
708
|
+
f"cf_set_variable MY_HOST_ALIASES {self.settings.k8s.pod_otac},{self.settings.otac.url.host},otac DS",
|
|
867
709
|
)
|
|
868
710
|
if not response or not response.ok:
|
|
869
|
-
logger.error("Failed to configure host alias for Archive Center!")
|
|
711
|
+
self.logger.error("Failed to configure host alias for Archive Center!")
|
|
870
712
|
|
|
871
713
|
# Restart the spawner in Archive Center:
|
|
872
|
-
logger.info("Restart Archive Center Spawner...")
|
|
714
|
+
self.logger.info("Restart Archive Center Spawner...")
|
|
873
715
|
self.restart_otac_service()
|
|
874
716
|
else:
|
|
875
|
-
logger.info(
|
|
876
|
-
"Skip configuration of known host for Archive Center (OTAC_KNOWN_SERVER is not set)."
|
|
717
|
+
self.logger.info(
|
|
718
|
+
"Skip configuration of known host for Archive Center (OTAC_KNOWN_SERVER is not set).",
|
|
877
719
|
)
|
|
878
720
|
|
|
879
721
|
return otac_object
|
|
@@ -882,111 +724,118 @@ class Customizer:

 def init_otcs(
 self,
-
-port: int,
-partition_name: str,
-resource_name: str,
+url: HttpUrl,
 ) -> OTCS:
 """Initialize the OTCS class and parameters and authenticate at OTCS once it is ready.

 Args:
-
-
-
-resource_name (str): name of OTDS resource for Extended ECM
+url (HttpURL):
+The OTCS URL.
+
 Returns:
-OTCS:
+OTCS:
+The OTCS object
+
 """

-logger.info("Connection parameters OTCS (Extended ECM):")
-logger.info("OTCS
-logger.info(
-"OTCS
-
-
-logger.info(
-
-
-
-logger.info("OTCS User
-logger.
-
-
-)
-logger.info(
+self.logger.info("Connection parameters OTCS (Extended ECM):")
+self.logger.info("OTCS URL = %s", str(self.settings.otcs.url))
+self.logger.info(
+"OTCS Frontend URL = %s",
+str(self.settings.otcs.url_frontend),
+)
+self.logger.info(
+"OTCS Backend URL = %s",
+str(self.settings.otcs.url_backend),
+)
+self.logger.info("OTCS Admin User = %s", self.settings.otcs.username)
+self.logger.debug(
+"OTCS Admin Password = %s",
+self.settings.otcs.password,
+)
+self.logger.info(
+"OTCS User Partition = %s",
+self.settings.otcs.partition,
+)
+self.logger.info(
+"OTCS Resource Name = %s",
+self.settings.otcs.resource_name,
+)
+self.logger.info(
+"OTCS User Default License = %s",
+self.settings.otcs.license_feature,
+)
+self.logger.info(
 "OTCS K8s Frontend Pods = %s",
-self.
+self.settings.k8s.sts_otcs_frontend,
 )
-logger.info(
+self.logger.info(
 "OTCS K8s Backend Pods = %s",
-self.
+self.settings.k8s.sts_otcs_admin,
+)
+self.logger.info(
+"FEME URI = %s",
+self.settings.otcs.feme_uri,
 )

-logger.debug("Checking if OTCS object has already been initialized")
+self.logger.debug("Checking if OTCS object has already been initialized")

-otds_ticket = (
-self.otds_object.cookie()["OTDSTicket"] if self.otds_object else None
-)
+otds_ticket = self.otds_object.cookie()["OTDSTicket"] if self.otds_object else None
 otcs_object = OTCS(
-
-
-
-self.
-self.
-self.
-
-resource_name,
+url.scheme,
+url.host,
+url.port,
+self.settings.otcs.url.scheme + "://" + self.settings.otcs.url.host,
+self.settings.otcs.username,
+self.settings.otcs.password.get_secret_value(),
+self.settings.otcs.partition,
+self.settings.otcs.resource_name,
 otds_ticket=otds_ticket,
-base_path=self.
+base_path=self.settings.otcs.base_path,
+feme_uri=self.settings.otcs.feme_uri,
+logger=self.logger,
 )

 # It is important to wait for OTCS to be configured - otherwise we
 # may interfere with the OTCS container automation and run into errors
-logger.info("Wait for OTCS to be configured...")
+self.logger.info("Wait for OTCS to be configured...")
 otcs_configured = otcs_object.is_configured()
 while not otcs_configured:
-logger.warning("OTCS is not configured yet. Waiting 30 seconds...")
+self.logger.warning("OTCS is not configured yet. Waiting 30 seconds...")
 time.sleep(30)
 otcs_configured = otcs_object.is_configured()
-logger.info("OTCS is configured now.")
+self.logger.info("OTCS is configured now.")

-logger.info("Authenticating to OTCS...")
+self.logger.info("Authenticating to OTCS...")
 otcs_cookie = otcs_object.authenticate()
 while otcs_cookie is None:
-logger.
+self.logger.info("Waiting 30 seconds for OTCS to become ready...")
 time.sleep(30)
 otcs_cookie = otcs_object.authenticate()
-logger.info("OTCS is ready now.")
-
-# if self.otcs_settings.update_admin_user:
-# Set first name and last name of Admin user (ID = 1000):
-# otcs_object.update_user(1000, field="first_name", value="Terrarium")
-# otcs_object.update_user(1000, field="last_name", value="Admin")
+self.logger.info("OTCS is ready now.")

 if "OTCS_RESSOURCE_ID" not in self.settings.placeholder_values:
-self.settings.placeholder_values["OTCS_RESSOURCE_ID"] = (
-self.
-
-
-
-logger.debug(
-"Placeholder values after OTCS init = %s",
+self.settings.placeholder_values["OTCS_RESSOURCE_ID"] = self.otds_object.get_resource(
+self.settings.otcs.resource_name,
+)["resourceID"]
+self.logger.debug(
+"Placeholder values after OTCS init -> %s",
 self.settings.placeholder_values,
 )

-if self.
+if self.settings.otawp.enabled:
 otcs_resource = self.otds_object.get_resource(
-self.
+self.settings.otcs.resource_name,
 )
 otcs_resource["logoutURL"] = (
-f"{self.
+f"{self.settings.otawp.public_protocol}://{self.settings.otawp.public_url}/home/system/wcp/sso/sso_logout.htm"
 )
 otcs_resource["logoutMethod"] = "GET"

 self.otds_object.update_resource(name="cs", resource=otcs_resource)

 # Allow impersonation of the resource for all users:
-self.otds_object.impersonate_resource(resource_name)
+self.otds_object.impersonate_resource(self.settings.otcs.resource_name)

 return otcs_object

@@ -996,49 +845,67 @@ class Customizer:
 """Initialize the OTIV (Intelligent Viewing) object and its OTDS settings.

 Args:
+None
+
 Returns:
-
+OTIV:
+The OTIV object.
+
 """

-logger.info("Parameters for OTIV (Intelligent Viewing):")
-logger.info(
-
-
-
-
+self.logger.info("Parameters for OTIV (Intelligent Viewing):")
+self.logger.info(
+"OTDS Resource Name = %s",
+self.settings.otiv.resource_name,
+)
+self.logger.info(
+"OTIV License File = %s",
+self.settings.otiv.license_file,
+)
+self.logger.info(
+"OTIV Product Name = %s",
+self.settings.otiv.product_name,
+)
+self.logger.info(
+"OTIV Product Description = %s",
+self.settings.otiv.product_description,
+)
+self.logger.info(
+"OTIV License Feature = %s",
+self.settings.otiv.license_feature,
 )
-logger.info("OTIV License Feature = %s", self.otiv_settings.license_feature)

 otiv_object = OTIV(
-resource_name=self.
-product_name=self.
-product_description=self.
-license_file=self.
-default_license=self.
+resource_name=self.settings.otiv.resource_name,
+product_name=self.settings.otiv.product_name,
+product_description=self.settings.otiv.product_description,
+license_file=self.settings.otiv.license_file,
+default_license=self.settings.otiv.license_feature,
+logger=self.logger,
 )

-otiv_resource = self.otds_object.get_resource(self.
+otiv_resource = self.otds_object.get_resource(self.settings.otiv.resource_name)
 while otiv_resource is None:
-logger.
+self.logger.info(
 "OTDS Resource -> %s for Intelligent Viewing not found. OTIV may not be ready. Wait 30 sec...",
-self.
+self.settings.otiv.resource_name,
 )
 time.sleep(30)
 otiv_resource = self.otds_object.get_resource(
-self.
+self.settings.otiv.resource_name,
 )

 otiv_license = self.otds_object.add_license_to_resource(
-self.
-self.
-self.
+self.settings.otiv.license_file,
+self.settings.otiv.product_name,
+self.settings.otiv.product_description,
 otiv_resource["resourceID"],
 )
 if not otiv_license:
-logger.info(
+self.logger.info(
 "Couldn't apply license -> %s for product -> %s. Intelligent Viewing may not be deployed!",
-self.
-self.
+self.settings.otiv.license_file,
+self.settings.otiv.product_name,
 )
 return None

@@ -1058,7 +925,7 @@ class Customizer:
 )
 time.sleep(30)

-logger.info("OTDS user iv-publisher -> updating oTType=ServiceUser")
+self.logger.info("OTDS user iv-publisher -> updating oTType=ServiceUser")

 return otiv_object

@@ -1069,103 +936,118 @@ class Customizer:

 Args:
 None
+
 Returns:
-
+OTPD:
+The OTPD (PowerDocs) object.
+
 """

-logger.info("Connection parameters OTPD (PowerDocs):")
-logger.info(
-
-
-
-logger.info("OTPD
-logger.info(
+self.logger.info("Connection parameters OTPD (PowerDocs):")
+self.logger.info(
+"OTPD Protocol = %s",
+self.settings.otpd.url.scheme,
+)
+self.logger.info("OTPD Hostname = %s", self.settings.otpd.url.host)
+self.logger.info("OTPD Port = %s", self.settings.otpd.url.port)
+self.logger.info("OTPD API User = %s", self.settings.otpd.username)
+self.logger.info("OTPD Tenant = %s", self.settings.otpd.tenant)
+self.logger.info(
 "OTPD Database Import File = %s",
-(
-self.otpd_settings.db_importfile
-if self.otpd_settings.db_importfile != ""
-else "<not configured>"
-),
+(self.settings.otpd.db_importfile if self.settings.otpd.db_importfile != "" else "<not configured>"),
 )
-logger.info("OTPD K8s Pod Name = %s", self.
+self.logger.info("OTPD K8s Pod Name = %s", self.settings.k8s.pod_otpd)

 otpd_object = OTPD(
-self.
-self.
-
-self.
-self.
+self.settings.otpd.url.scheme,
+self.settings.otpd.url.host,
+self.settings.otpd.url.port,
+self.settings.otpd.username,
+self.settings.otpd.password,
+logger=self.logger,
 )

 # wait until the OTPD pod is in ready state
-self.k8s_object.wait_pod_condition(self.
+self.k8s_object.wait_pod_condition(self.settings.k8s.pod_otpd, "Ready")

 # We have a race condition here. Even if the pod is ready
 # it may not yet have fully initialized its database.
 # Then the "apply_setting()" calls below may fail with
 # an error. This should be improved in the future. For now
 # we just wait a minute hoping that the DB is initialized then.
-logger.info("Wait some time for PowerDocs database to be initialized...")
-time.sleep(60)
-logger.info("Configure some basic PowerDocs settings...")
+# self.logger.info("Wait some time for PowerDocs database to be initialized...")
+# time.sleep(60)
+# self.logger.info("Configure some basic PowerDocs settings...")

 # Fix settings for local Kubernetes deployments.
 # Unclear why this is not the default.
-if otpd_object:
-
-
-
-
-
-
+# if otpd_object:
+# otpd_object.apply_setting("LocalOtdsUrl", "http://otds/otdsws")
+# otpd_object.apply_setting(
+# "LocalApplicationServerUrlForContentManager",
+# "http://localhost:8080/c4ApplicationServer",
+# self.settings.otpd.tenant,
+# )

 return otpd_object

 # end function definition

-def init_otawp(self):
-"""Initialize OTDS for Appworks Platform
-
-
+def init_otawp(self) -> OTAWP:
+"""Initialize OTDS for Appworks Platform.
+
+Returns:
+OTAWP:
+The AppWorks Platform object.
+
 """

-logger.info("Connection parameters OTAWP:")
-logger.info(
-
-
-
-logger.
-
-
+self.logger.info("Connection parameters OTAWP:")
+self.logger.info(
+"OTAWP Enabled = %s",
+str(self.settings.otawp.enabled),
+)
+self.logger.info(
+"OTAWP Resource = %s",
+self.settings.otawp.resource_name,
+)
+self.logger.info(
+"OTAWP Access Role = %s",
+self.settings.otawp.access_role_name,
+)
+self.logger.info("OTAWP Admin User = %s", self.settings.otawp.username)
+self.logger.debug("OTAWP Password = %s", self.settings.otawp.password)
+self.logger.info("OTAWP K8s Stateful Set = %s", self.settings.k8s.sts_otawp)
+self.logger.info("OTAWP K8s Config Map = %s", self.settings.k8s.cm_otawp)

-logger.info(
+self.logger.info(
 "Wait for OTCS to create its OTDS resource with name -> '%s'...",
-self.
+self.settings.otcs.resource_name,
 )

 # Loop to wait for OTCS to create its OTDS resource
 # (we need it to update the AppWorks K8s Config Map):
-otcs_resource = self.otds_object.get_resource(self.
+otcs_resource = self.otds_object.get_resource(self.settings.otcs.resource_name)
 while otcs_resource is None:
-logger.warning(
+self.logger.warning(
 "OTDS resource for Content Server with name -> '%s' does not exist yet. Waiting...",
-self.
+self.settings.otcs.resource_name,
 )
 time.sleep(30)
 otcs_resource = self.otds_object.get_resource(
-self.
+self.settings.otcs.resource_name,
 )

 otcs_resource_id = otcs_resource["resourceID"]

-logger.info("OTDS resource ID
+self.logger.info("Found Content Server OTDS resource ID -> %s", otcs_resource_id)

 # make sure code is idempotent and only try to add ressource if it doesn't exist already:
-awp_resource = self.otds_object.get_resource(self.
+awp_resource = self.otds_object.get_resource(self.settings.otawp.resource_name)
 if not awp_resource:
-logger.info(
-"OTDS resource -> %s for AppWorks Platform does not yet exist. Creating...",
-self.
+self.logger.info(
+"OTDS resource -> '%s' for AppWorks Platform does not yet exist. Creating...",
+self.settings.otawp.resource_name,
 )
 # Create a Python dict with the special payload we need for AppWorks:
 additional_payload = {}
@@ -1372,170 +1254,177 @@ class Customizer:
 "name": "fBaseURL",
 "value": "http://appworks:8080/home/system/app/otdspush",
 },
-{"name": "fUsername", "value": self.
-{
+{"name": "fUsername", "value": self.settings.otawp.username},
+{
+"name": "fPassword",
+"value": self.settings.otawp.password.get_secret_value(),
+},
 ]

 awp_resource = self.otds_object.add_resource(
-self.
-"AppWorks Platform",
-"AppWorks Platform",
-additional_payload,
+name=self.settings.otawp.resource_name,
+description="AppWorks Platform",
+display_name="AppWorks Platform",
+additional_payload=additional_payload,
 )
 else:
-logger.info(
-"OTDS resource -> %s for AppWorks Platform does already exist.",
-self.
+self.logger.info(
+"OTDS resource -> '%s' for AppWorks Platform does already exist.",
+self.settings.otawp.resource_name,
 )

 awp_resource_id = awp_resource["resourceID"]

-logger.info(
+self.logger.info(
+"OTDS resource ID for AppWorks Platform -> %s",
+awp_resource_id,
+)

 self.settings.placeholder_values["OTAWP_RESOURCE_ID"] = str(awp_resource_id)

-logger.debug(
-"Placeholder values after OTAWP init = %s",
+self.logger.debug(
+"Placeholder values after OTAWP init = %s",
+self.settings.placeholder_values,
 )

-logger.info(
+self.logger.info(
+"Update AppWorks Kubernetes Config Map with OTDS resource IDs...",
+)

-config_map = self.k8s_object.get_config_map(self.
+config_map = self.k8s_object.get_config_map(self.settings.k8s.cm_otawp)
 if not config_map:
-logger.error(
+self.logger.error(
 "Failed to retrieve AppWorks Kubernetes Config Map -> %s",
-self.
+self.settings.k8s.cm_otawp,
 )
 else:
-solution = yaml.safe_load(config_map.data["solution.yaml"])
+solution = yaml.safe_load(config_map.data["solution.yaml"])

 # Change values as required
-solution["platform"]["organizations"]["system"]["otds"][
-
-
-
-
-
-
-
-] = f"{self.otcs_settings.public_protocol}://{self.otcs_settings.public_url}/cssupport"
-solution["platform"]["content"]["ContentServer"][
-"otdsResourceId"
-] = otcs_resource_id
+solution["platform"]["organizations"]["system"]["otds"]["resourceId"] = awp_resource_id
+solution["platform"]["content"]["ContentServer"]["contentServerUrl"] = (
+f"{self.settings.otcs.url!s}{self.settings.otcs.base_path}"
+)
+solution["platform"]["content"]["ContentServer"]["contentServerSupportDirectoryUrl"] = (
+f"{self.settings.otcs.url!s}/cssupport"
+)
+solution["platform"]["content"]["ContentServer"]["otdsResourceId"] = otcs_resource_id
 solution["platform"]["authenticators"]["OTDS_auth"]["publicLoginUrl"] = (
-self.
-+ "://"
-+ self.otds_settings.public_url
-+ "/otdsws/login"
+str(self.settings.otds.url) + "/otdsws/login"
 )
-solution["platform"]["security"]["contentSecurityPolicy"] = (
-
-+ self.otcs_settings.public_protocol
-+ "://"
-+ self.otcs_settings.public_url
+solution["platform"]["security"]["contentSecurityPolicy"] = "frame-ancestors 'self' " + str(
+self.settings.otcs.url,
 )
-data
+config_map.data["solution.yaml"] = yaml.dump(solution)
 result = self.k8s_object.replace_config_map(
-self.
+self.settings.k8s.cm_otawp,
+config_map.data,
 )
 if result:
-logger.info("Successfully updated AppWorks
+self.logger.info("Successfully updated AppWorks solution YAML.")
 else:
-logger.error("Failed to update AppWorks Solution YAML.")
-logger.debug("Solution YAML for AppWorks -> %s", solution)
+self.logger.error("Failed to update AppWorks Solution YAML.")
+self.logger.debug("Solution YAML for AppWorks -> %s", solution)

-logger.info("Scale AppWorks Kubernetes Stateful Set to 1...")
+self.logger.info("Scale AppWorks Kubernetes Stateful Set to 1...")
 self.k8s_object.scale_stateful_set(
-sts_name=self.
+sts_name=self.settings.k8s.sts_otawp,
+scale=1,
 )

 # Add the OTCS Admin user to the AppWorks Access Role in OTDS
 self.otds_object.add_user_to_access_role(
-"Access to " + self.
+"Access to " + self.settings.otawp.resource_name,
+"otadmin@otds.admin",
 )

 # Loop to wait for OTCS to create its OTDS user partition:
 otcs_partition = self.otds_object.get_partition(
-self.
+self.settings.otcs.partition,
+show_error=False,
 )
 while otcs_partition is None:
-logger.warning(
+self.logger.warning(
 "OTDS user partition for Content Server with name -> '%s' does not exist yet. Waiting...",
-self.
+self.settings.otcs.partition,
 )

 time.sleep(30)
 otcs_partition = self.otds_object.get_partition(
-self.
+self.settings.otcs.partition,
+show_error=False,
 )

 # Add the OTDS user partition for OTCS to the AppWorks Platform Access Role in OTDS.
 # This will effectvely sync all OTCS users with AppWorks Platform:
 self.otds_object.add_partition_to_access_role(
-self.
+self.settings.otawp.access_role_name,
+self.settings.otcs.partition,
 )

 # Add the OTDS admin partition to the AppWorks Platform Access Role in OTDS.
 self.otds_object.add_partition_to_access_role(
-self.
+self.settings.otawp.access_role_name,
+self.settings.otds.admin_partition,
 )

 # Set Group inclusion for Access Role for OTAWP to "True":
 self.otds_object.update_access_role_attributes(
-self.
+self.settings.otawp.access_role_name,
 [{"name": "pushAllGroups", "values": ["True"]}],
 )

 # Add ResourceID User to OTDSAdmin to allow push
 self.otds_object.add_user_to_group(
-user=str(awp_resource_id) + "@otds.admin",
+user=str(awp_resource_id) + "@otds.admin",
+group="otdsadmins@otds.admin",
 )

 # Allow impersonation for all users:
-self.otds_object.impersonate_resource(self.
+self.otds_object.impersonate_resource(self.settings.otawp.resource_name)

 # Add SPS license for OTAWP
 # check if the license file exists, otherwise skip for versions pre 24.1
-if os.path.isfile(self.
-logger.info(
+if os.path.isfile(self.settings.otawp.license_file):
+self.logger.info(
 "Found OTAWP license file -> '%s', assiging it to ressource '%s'...",
-self.
-self.
+self.settings.otawp.license_file,
+self.settings.otawp.resource_name,
 )

 otawp_license = self.otds_object.add_license_to_resource(
-self.
-self.
-self.
+self.settings.otawp.license_file,
+self.settings.otawp.product_name,
+self.settings.otawp.product_description,
 awp_resource["resourceID"],
 )
 if not otawp_license:
-logger.error(
+self.logger.error(
 "Couldn't apply license -> '%s' for product -> '%s' to OTDS resource -> '%s'",
-self.
-self.
+self.settings.otawp.license_file,
+self.settings.otawp.product_name,
 awp_resource["resourceID"],
 )
 else:
-logger.info(
+self.logger.info(
 "Successfully applied license -> '%s' for product -> '%s' to OTDS resource -> '%s'",
-self.
-self.
+self.settings.otawp.license_file,
+self.settings.otawp.product_name,
 awp_resource["resourceID"],
 )

 # Assign AppWorks license to Content Server Members Partiton and otds.admin:
-for partition_name in ["otds.admin", self.
+for partition_name in ["otds.admin", self.settings.otcs.partition]:
 if self.otds_object.is_partition_licensed(
 partition_name=partition_name,
 resource_id=awp_resource["resourceID"],
 license_feature="USERS",
-license_name=self.
+license_name=self.settings.otawp.product_name,
 ):
-logger.info(
-"Partition -> %s is already licensed for -> %s (%s)",
+self.logger.info(
+"Partition -> '%s' is already licensed for -> '%s' (%s)",
 partition_name,
-self.
+self.settings.otawp.product_name,
 "USERS",
 )
 else:
@@ -1543,393 +1432,498 @@ class Customizer:
 partition_name,
 awp_resource["resourceID"],
 "USERS",
-self.
+self.settings.otawp.product_name,
 )
 if not assigned_license:
-logger.error(
+self.logger.error(
 "Partition -> '%s' could not be assigned to license -> '%s' (%s)",
 partition_name,
-self.
+self.settings.otawp.product_name,
 "USERS",
 )
 else:
-logger.info(
+self.logger.info(
 "Partition -> '%s' successfully assigned to license -> '%s' (%s)",
 partition_name,
-self.
+self.settings.otawp.product_name,
 "USERS",
 )
+otawp_object = OTAWP(
+self.settings.otawp.protocol,
+self.settings.k8s.sts_otawp,
+str(self.settings.otawp.port),
+"sysadmin",
+self.settings.otawp.password.get_secret_value(),
+"",
+self.settings.otcs.partition,
+self.settings.otds.admin_partition,
+self.settings.k8s.cm_otawp,
+otcs_resource_id,
+self.settings.otds.url,
+self.settings.otcs.url,
+self.settings.otcs.base_path,
+self.settings.otawp.license_file,
+self.settings.otawp.product_name,
+self.settings.otawp.product_description,
+logger=self.logger,
+)
+return otawp_object

 # end method definition

-def restart_otcs_service(
-
+def restart_otcs_service(
+self,
+backend: OTCS,
+frontend: OTCS,
+extra_wait_time: int = 60,
+) -> None:
+"""Restart the Content Server service in all OTCS pods.

 Args:
-
+backend:
+OTCS object of the backend.
+frontend:
+OTCS object of the frontend.
+extra_wait_time (int):
+Extra wait time after the restart to make sure pods are responsive again.
+
 Returns:
 None
+
 """

 if not self.k8s_object:
-logger.warning(
-"Kubernetes integration not available, skipping restart of services"
+self.logger.warning(
+"Kubernetes integration not available, skipping restart of services",
 )
 return

-logger.info("Restart OTCS frontend and backend pods...")
+self.logger.info("Restart OTCS frontend and backend pods...")

 # Restart all frontends:
-for x in range(
-pod_name = self.
+for x in range(self.settings.k8s.sts_otcs_frontend_replicas):
+pod_name = self.settings.k8s.sts_otcs_frontend + "-" + str(x)

-logger.info("Deactivate Liveness probe for pod -> '%s'", pod_name)
+self.logger.info("Deactivate Liveness probe for pod -> '%s'", pod_name)
 self.k8s_object.exec_pod_command(
-pod_name,
+pod_name,
+["/bin/sh", "-c", "touch /tmp/keepalive"],
+container="otcs-frontend-container",
 )
-logger.info("Restarting pod -> '%s'", pod_name)
+self.logger.info("Restarting pod -> '%s'", pod_name)
 self.k8s_object.exec_pod_command(
-pod_name,
+pod_name,
+["/bin/sh", "-c", "/opt/opentext/cs/stop_csserver"],
+container="otcs-frontend-container",
 )
 self.k8s_object.exec_pod_command(
-pod_name,
+pod_name,
+["/bin/sh", "-c", "/opt/opentext/cs/start_csserver"],
+container="otcs-frontend-container",
 )

 # Restart all backends:
-for x in range(
-pod_name = self.
+for x in range(self.settings.k8s.sts_otcs_admin_replicas):
+pod_name = self.settings.k8s.sts_otcs_admin + "-" + str(x)

-logger.info("Deactivate Liveness probe for pod -> '%s'", pod_name)
+self.logger.info("Deactivate Liveness probe for pod -> '%s'", pod_name)
 self.k8s_object.exec_pod_command(
-pod_name,
+pod_name,
+["/bin/sh", "-c", "touch /tmp/keepalive"],
+container="otcs-admin-container",
 )
-logger.info("Restarting pod -> '%s'", pod_name)
+self.logger.info("Restarting pod -> '%s'", pod_name)
 self.k8s_object.exec_pod_command(
-pod_name,
+pod_name,
+["/bin/sh", "-c", "/opt/opentext/cs/stop_csserver"],
+container="otcs-admin-container",
 )
 self.k8s_object.exec_pod_command(
-pod_name,
+pod_name,
+["/bin/sh", "-c", "/opt/opentext/cs/start_csserver"],
+container="otcs-admin-container",
 )

-
-
+# Reauthenticate at frontend:
+self.logger.info(
+"Re-Authenticating to OTCS frontend after restart of frontend pods...",
+)
+otcs_cookie = frontend.authenticate(revalidate=True)
+while otcs_cookie is None:
+self.logger.info("Waiting 30 seconds for OTCS frontend to become ready...")
+time.sleep(30)
+otcs_cookie = frontend.authenticate(revalidate=True)
+self.logger.info("OTCS frontend is ready again.")
+
+# Reauthenticate at backend:
+self.logger.info(
+"Re-Authenticating to OTCS backend after restart of backend pods...",
+)
+otcs_cookie = backend.authenticate(revalidate=True)
 while otcs_cookie is None:
-logger.
+self.logger.info("Waiting 30 seconds for OTCS backend to become ready...")
 time.sleep(30)
-otcs_cookie =
-logger.info("OTCS is ready again.")
+otcs_cookie = backend.authenticate(revalidate=True)
+self.logger.info("OTCS backend is ready again.")

 # Reactivate Liveness probes in all pods:
-for x in range(
-pod_name = self.
+for x in range(self.settings.k8s.sts_otcs_frontend_replicas):
+pod_name = self.settings.k8s.sts_otcs_frontend + "-" + str(x)

-logger.info("Reactivate Liveness probe for pod -> '%s'", pod_name)
+self.logger.info("Reactivate Liveness probe for pod -> '%s'", pod_name)
 self.k8s_object.exec_pod_command(
-pod_name,
+pod_name,
+["/bin/sh", "-c", "rm /tmp/keepalive"],
+container="otcs-frontend-container",
 )

-for x in range(
-pod_name = self.
+for x in range(self.settings.k8s.sts_otcs_admin_replicas):
+pod_name = self.settings.k8s.sts_otcs_admin + "-" + str(x)

-logger.info("Reactivate Liveness probe for pod -> '%s'", pod_name)
+self.logger.info("Reactivate Liveness probe for pod -> '%s'", pod_name)
 self.k8s_object.exec_pod_command(
-pod_name,
+pod_name,
+["/bin/sh", "-c", "rm /tmp/keepalive"],
+container="otcs-admin-container",
 )

-logger.info("Restart OTCS frontend and backend pods has been completed.")
+self.logger.info("Restart OTCS frontend and backend pods has been completed.")

 # optional, give some additional time to make sure service is responsive
 if extra_wait_time > 0:
-logger.info(
+self.logger.info(
 "Wait %s seconds to make sure OTCS is responsive again...",
 str(extra_wait_time),
 )
 time.sleep(extra_wait_time)
-logger.info("Continue customizing...")
+self.logger.info("Continue customizing...")

 # end method definition

 def restart_otac_service(self) -> bool:
-"""Restart the Archive Center spawner service in OTAC pod
+"""Restart the Archive Center spawner service in OTAC pod.

-Args:
-None
 Returns:
-bool: True if restart was done, False if error occured
+bool: True if restart was done, False if error occured.
+
 """

-if not self.
+if not self.settings.otac.enabled:
 return False

-logger.info(
+self.logger.info(
 "Restarting spawner service in Archive Center pod -> '%s'",
-self.
+self.settings.k8s.pod_otac,
 )
 # The Archive Center Spawner needs to be run in "interactive" mode - otherwise the command will "hang":
 # The "-c" parameter is not required in this case
 # False is given as parameter as OTAC writes non-errors to stderr
 response = self.k8s_object.exec_pod_command_interactive(
-self.
-["/bin/sh", "/etc/init.d/spawner restart"],
-60,
-False,
+pod_name=self.settings.k8s.pod_otac,
+commands=["/bin/sh", "/etc/init.d/spawner restart"],
+timeout=60,
+write_stderr_to_error_log=False,
 )

-
-return True
-else:
-return False
+return bool(response)

 # end method definition

-def restart_otawp_pod(self):
-"""Delete the AppWorks Platform Pod to make Kubernetes restart it.
+def restart_otawp_pod(self) -> None:
+"""Delete the AppWorks Platform Pod to make Kubernetes restart it."""

-
-Returns:
-None
-"""
-
-self.k8s_object.delete_pod(self.otawp_settings.k8s_statefulset + "-0")
+self.k8s_object.delete_pod(self.settings.k8s.sts_otawp + "-0")

 # end method definition

-def consolidate_otds(self):
-"""Consolidate OTDS resources
-Args:
-Return: None
-"""
+def consolidate_otds(self) -> None:
+"""Consolidate OTDS resources."""

-self.otds_object.consolidate(self.
+self.otds_object.consolidate(self.settings.otcs.resource_name)

-if self.
-self.otds_object.consolidate(self.
+if self.settings.otawp.enabled: # is AppWorks Platform deployed?
+self.otds_object.consolidate(self.settings.otawp.resource_name)

 # end method definition

-def import_powerdocs_configuration(self, otpd_object: OTPD):
-"""Import a database export (zip file) into the PowerDocs database
+def import_powerdocs_configuration(self, otpd_object: OTPD) -> None:
+"""Import a database export (zip file) into the PowerDocs database.

 Args:
-otpd_object (
+otpd_object (OTPD):
+The PowerDocs object.
+
 """

-if self.
+if self.settings.otpd.db_importfile.startswith("http"):
 # Download file from remote location specified by the OTPD_DBIMPORTFILE
 # this must be a public place without authentication:
-logger.info(
+self.logger.info(
 "Download PowerDocs database file from URL -> '%s'",
-self.
+self.settings.otpd.db_importfile,
 )

 try:
-package = requests.get(self.
+package = requests.get(self.settings.otpd.db_importfile, timeout=60)
 package.raise_for_status()
-logger.info(
+self.logger.info(
 "Successfully downloaded PowerDocs database file -> '%s'; status code -> %s",
-self.
+self.settings.otpd.db_importfile,
 package.status_code,
 )
-filename = "
+filename = os.path.join(tempfile.gettempdir(), "otpd_db_import.zip")
 with open(filename, mode="wb") as localfile:
 localfile.write(package.content)

-logger.info(
+self.logger.info(
 "Starting import on %s://%s:%s of %s",
-self.
-self.
-self.
-self.
+self.settings.otpd.url.scheme,
+self.settings.otpd.url.host,
+self.settings.otpd.url.port,
+self.settings.otpd.db_importfile,
 )
-response = otpd_object.import_database(
-logger.info("Response -> %s", response)
+response = otpd_object.import_database(file_path=filename)
+self.logger.info("Response -> %s", response)

-except requests.exceptions.HTTPError
-logger.error("
+except requests.exceptions.HTTPError:
+self.logger.error("HTTP request error!")

 # end method definition

-def set_maintenance_mode(self, enable: bool = True):
-"""Enable or Disable Maintenance Mode
+def set_maintenance_mode(self, enable: bool = True) -> None:
+"""Enable or Disable Maintenance Mode.
+
+This redirects the Kubernetes Ingress to a maintenace web page.

 Args:
-enable (bool, optional):
+enable (bool, optional):
+Whether or not to activate the maintenance mode web page.
+Defaults to True.
+
 """
-
+
+if enable and self.settings.k8s.enabled:
 self.log_header("Enable Maintenance Mode")
-logger.info(
-"Put OTCS frontends in Maitenance Mode by changing the Kubernetes Ingress backend service..."
+self.logger.info(
+"Put OTCS frontends in Maitenance Mode by changing the Kubernetes Ingress backend service...",
 )
 self.k8s_object.update_ingress_backend_services(
-self.
+self.settings.k8s.ingress_otxecm,
 "otcs",
-self.
-self.
+self.settings.k8s.maintenance_service_name,
+self.settings.k8s.maintenance_service_port,
 )
-logger.info("OTCS frontend is now in Maintenance Mode!")
-elif not self.
-logger.warning(
-"Kubernetes Integration disabled - Cannot Enable/Disable Maintenance Mode"
+self.logger.info("OTCS frontend is now in Maintenance Mode!")
+elif not self.settings.k8s.enabled:
+self.logger.warning(
+"Kubernetes Integration disabled - Cannot Enable/Disable Maintenance Mode",
 )
 self.k8s_object = None
 else:
 # Changing the Ingress backend service to OTCS frontend service:
-logger.info(
-"Put OTCS frontend back in Production Mode by changing the Kubernetes Ingress backend service..."
+self.logger.info(
+"Put OTCS frontend back in Production Mode by changing the Kubernetes Ingress backend service...",
 )
 self.k8s_object.update_ingress_backend_services(
-self.
+self.settings.k8s.ingress_otxecm,
 "otcs",
-self.
-self.
+self.settings.otcs.url_frontend.host,
+self.settings.otcs.url_frontend.port,
 )
-logger.info("OTCS frontend is now back in Production Mode!")
+self.logger.info("OTCS frontend is now back in Production Mode!")

 # end method definition

-def
-"""
-
-
-
-)
+def init_customizer(self) -> bool:
+"""Initialize all objects used by the customizer.
+
+This includes:
+* OTDS
+* Kubernetes (K8S)
+* AppWorks Platform
+* OTCS (frontend + backend)
+* OTAC (Archive Center)
+* OTIV (Intelligent Viewing)
+* OTPD (PowerDocs)
+* Core Share
+* Microsoft 365
+* Aviator Search

-
-
+Returns:
+bool:
+True = success. False = error.
+
+"""

 self.log_header("Initialize OTDS")

 self.otds_object = self.init_otds()
 if not self.otds_object:
-logger.error("Failed to initialize OTDS - exiting...")
-
+self.logger.error("Failed to initialize OTDS - exiting...")
+return False

 # Establish in-cluster Kubernetes connection
 self.log_header("Initialize Kubernetes")
-if self.
-
-
-
-
-
-
-
-
-
-
+if self.settings.k8s.enabled:
+try:
+self.k8s_object = self.init_k8s()
+
+if not self.k8s_object:
+self.logger.error("Failed to initialize Kubernetes - exiting...")
+return False
+except Exception as err:
+self.logger.error(
+"Failed to initialize Kubernetes, disabling Kubernetes integration...",
+)
+self.logger.debug(err)
+self.settings.k8s.enabled = False

-if self.
+if self.settings.otawp.enabled: # is AppWorks Platform deployed?
 self.log_header("Initialize OTAWP")

 # Configure required OTDS resources as AppWorks doesn't do this on its own:
-self.init_otawp()
+self.otawp_object = self.init_otawp()
 else:
 self.settings.placeholder_values["OTAWP_RESOURCE_ID"] = ""

 self.log_header("Initialize OTCS backend")
 self.otcs_backend_object = self.init_otcs(
-self.
-int(self.otcs_settings.port_backend),
-self.otcs_settings.partition,
-self.otcs_settings.resource_name,
+url=self.settings.otcs.url_backend,
 )
 if not self.otcs_backend_object:
-logger.error("Failed to initialize OTCS backend - exiting...")
+self.logger.error("Failed to initialize OTCS backend - exiting...")
 sys.exit()

 self.log_header("Initialize OTCS frontend")
 self.otcs_frontend_object = self.init_otcs(
-self.
-int(self.otcs_settings.port_frontend),
-self.otcs_settings.partition,
-self.otcs_settings.resource_name,
+url=self.settings.otcs.url_frontend,
 )
 if not self.otcs_frontend_object:
-logger.error("Failed to initialize OTCS frontend - exiting...")
-
+self.logger.error("Failed to initialize OTCS frontend - exiting...")
+return False

-if self.
+if self.settings.otac.enabled: # is Archive Center deployed?
 self.log_header("Initialize OTAC")

 self.otac_object = self.init_otac()
 if not self.otac_object:
-logger.error("Failed to initialize OTAC - exiting...")
-
+self.logger.error("Failed to initialize OTAC - exiting...")
+return False
 else:
 self.otac_object = None

-if self.
+if self.settings.otiv.enabled: # is Intelligent Viewing deployed?
 self.log_header("Initialize OTIV")

 self.otiv_object = self.init_otiv()
 else:
 self.otiv_object = None

-if self.
+if self.settings.otpd.enabled: # is PowerDocs deployed?
 self.log_header("Initialize OTPD")

 self.otpd_object = self.init_otpd()
 if not self.otpd_object:
-logger.error("Failed to initialize OTPD - exiting...")
-
+self.logger.error("Failed to initialize OTPD - exiting...")
+return False
 else:
 self.otpd_object = None

-if self.
+if self.settings.coreshare.enabled: # is Core Share enabled?
 self.log_header("Initialize Core Share")

 self.core_share_object = self.init_coreshare()
 if not self.core_share_object:
-logger.error("Failed to initialize Core Share - exiting...")
-
+self.logger.error("Failed to initialize Core Share - exiting...")
+return False
 else:
 self.core_share_object = None

 if (
-self.
-and self.m365_settings.user != ""
-and self.m365_settings.password != ""
+self.settings.m365.enabled and self.settings.m365.username != "" and self.settings.m365.password != ""
 ): # is M365 enabled?
 self.log_header("Initialize Microsoft 365")

 # Initialize the M365 object and connection to M365 Graph API:
 self.m365_object = self.init_m365()
 if not self.m365_object:
-logger.error("Failed to initialize Microsoft 365!")
-
+self.logger.error("Failed to initialize Microsoft 365!")
+return False
+
+if self.settings.avts.enabled:
+self.log_header("Initialize Aviator Search")
+self.avts_object = self.init_avts()
+if not self.avts_object:
+self.logger.error("Failed to initialize Aviator Search")
+return False
+else:
+self.avts_object = None
+
+return True

-
+# end method definition
+
+def customization_run(self) -> bool:
+"""Central method to initiate the customization."""
+
+success = True
+
+# Set Timer for duration calculation
+self.customizer_start_time = datetime.now(timezone.utc)
+
+if not self.init_customizer():
+self.logger.error("Initialization of customizer failed!")
+return False
+
+# Put Frontend in Maintenance mode to make sure nobody interferes
+# during customization:
+if self.settings.otcs.maintenance_mode:
+self.set_maintenance_mode(enable=True)
+
+self.log_header("Collect payload files to process")

 cust_payload_list = []
 # Is uncompressed payload provided?
-if os.path.exists(self.settings.cust_payload):
-logger.info("Found payload file -> '%s'", self.settings.cust_payload)
+if self.settings.cust_payload and os.path.exists(self.settings.cust_payload):
+self.logger.info("Found payload file -> '%s'", self.settings.cust_payload)
 cust_payload_list.append(self.settings.cust_payload)
 # Is compressed payload provided?
-if os.path.exists(
-
-
+if self.settings.cust_payload_gz and os.path.exists(
+self.settings.cust_payload_gz,
+):
+self.logger.info(
+"Found compressed payload file -> '%s'",
+self.settings.cust_payload_gz,
 )
 cust_payload_list.append(self.settings.cust_payload_gz)

 # do we have additional payload as an external file?
-if os.path.exists(
-
+if self.settings.cust_payload_external and os.path.exists(
+self.settings.cust_payload_external,
+):
+for filename in sorted(
+os.scandir(self.settings.cust_payload_external),
+key=lambda e: e.name,
+):
 if filename.is_file() and os.path.getsize(filename) > 0:
-logger.info(
+self.logger.info(
+"Found external payload file -> '%s'",
+filename.path,
+)
 cust_payload_list.append(filename.path)
-
-logger.
-"
+elif self.settings.cust_payload_external:
+self.logger.warning(
+"External payload file -> '%s' does not exist!",
+self.settings.cust_payload_external,
 )

 for cust_payload in cust_payload_list:
-
-logger.info("Starting processing of payload -> '%s'", cust_payload)
+self.log_header("Start processing of payload -> '{}'".format(cust_payload))

 # Set startTime for duration calculation
-start_time = datetime.now()
+start_time = datetime.now(timezone.utc)

+# Create payload object:
 payload_object = Payload(
 payload_source=cust_payload,
 custom_settings_dir=self.settings.cust_settings_dir,
@@ -1940,20 +1934,26 @@ class Customizer:
 otcs_frontend_object=self.otcs_frontend_object,
 otcs_restart_callback=self.restart_otcs_service,
 otiv_object=self.otiv_object,
+otpd_object=self.otpd_object,
 m365_object=self.m365_object,
 core_share_object=self.core_share_object,
 browser_automation_object=self.browser_automation_object,
 placeholder_values=self.settings.placeholder_values, # this dict includes placeholder replacements for the Ressource IDs of OTAWP and OTCS
 log_header_callback=self.log_header,
 stop_on_error=self.settings.stop_on_error,
-aviator_enabled=self.
-upload_status_files=self.
+aviator_enabled=self.settings.aviator.enabled,
+upload_status_files=self.settings.otcs.upload_status_files,
+otawp_object=self.otawp_object,
+avts_object=self.avts_object,
+logger=self.logger,
 )
 # Load the payload file and initialize the payload sections:
 if not payload_object.init_payload():
-logger.error(
-"Failed to initialize payload -> %s - skipping...",
+self.logger.error(
+"Failed to initialize payload -> '%s' - skipping payload file...",
+cust_payload,
 )
+success = False
 continue

 # Now process the payload in the defined ordering:
@@ -1963,119 +1963,134 @@ class Customizer:
             self.consolidate_otds()
 
             # Upload payload file for later review to Enterprise Workspace
-            if self.
+            if self.settings.otcs.upload_config_files:
                 self.log_header("Upload Payload file to Extended ECM")
                 response = self.otcs_backend_object.get_node_from_nickname(
-                    self.settings.cust_target_folder_nickname
+                    nickname=self.settings.cust_target_folder_nickname,
                 )
                 target_folder_id = self.otcs_backend_object.get_result_value(
-                    response,
+                    response=response,
+                    key="id",
                 )
                 if not target_folder_id:
                     target_folder_id = 2000  # use Enterprise Workspace as fallback
                 # Write YAML file with upadated payload (including IDs, etc.).
-                # We need to write to
+                # We need to write to a temporary location as initial location is read-only:
                 payload_file = os.path.basename(cust_payload)
-                payload_file = (
-
-
-
+                payload_file = payload_file.removesuffix(".gz.b64")
+                payload_file = payload_file.replace(".tfvars", ".yaml").replace(
+                    ".tf",
+                    ".yaml",
                 )
-                cust_payload =
+                cust_payload = os.path.join(tempfile.gettempdir(), payload_file)
 
                 with open(cust_payload, "w", encoding="utf-8") as file:
-                    yaml.dump(
+                    yaml.dump(
+                        data=payload_object.get_payload(
+                            drop_bulk_datasources_data=True,
+                        ),
+                        stream=file,
+                    )
 
                 # Check if the payload file has been uploaded before.
                 # This can happen if we re-run the python container.
                 # In this case we add a version to the existing document:
                 response = self.otcs_backend_object.get_node_by_parent_and_name(
-                    int(target_folder_id),
+                    parent_id=int(target_folder_id),
+                    name=os.path.basename(cust_payload),
                 )
                 target_document_id = self.otcs_backend_object.get_result_value(
-                    response,
+                    response=response,
+                    key="id",
                 )
                 if target_document_id:
                     response = self.otcs_backend_object.add_document_version(
-                        int(target_document_id),
-                        cust_payload,
-                        os.path.basename(cust_payload),
-                        "text/plain",
-                        "Updated payload file after re-run of customization",
+                        node_id=int(target_document_id),
+                        file_url=cust_payload,
+                        file_name=os.path.basename(cust_payload),
+                        mime_type="text/plain",
+                        description="Updated payload file after re-run of customization",
                     )
                 else:
                     response = self.otcs_backend_object.upload_file_to_parent(
-                        cust_payload,
-                        os.path.basename(cust_payload),
-                        "text/plain",
-                        int(target_folder_id),
+                        file_url=cust_payload,
+                        file_name=os.path.basename(cust_payload),
+                        mime_type="text/plain",
+                        parent_id=int(target_folder_id),
                     )
 
-            duration = datetime.now() - start_time
+            duration = datetime.now(timezone.utc) - start_time
             self.log_header(
                 "Customizer completed processing of payload -> {} in {}".format(
                     cust_payload,
                     duration,
-                )
+                ),
             )
+        # end for cust_payload in cust_payload_list
 
-        if self.
-            self.set_maintenance_mode(False)
+        if self.settings.otcs.maintenance_mode:
+            self.set_maintenance_mode(enable=False)
 
         # Restart AppWorksPlatform pod if it is deployed (to make settings effective):
-        if self.
+        if self.settings.otawp.enabled:  # is AppWorks Platform deployed?
             otawp_resource = self.otds_object.get_resource(
-                self.
+                name=self.settings.otawp.resource_name,
             )
-            if
-                not "allowImpersonation" in otawp_resource
-                or not otawp_resource["allowImpersonation"]
-            ):
+            if "allowImpersonation" not in otawp_resource or not otawp_resource["allowImpersonation"]:
                 # Allow impersonation for all users:
-                logger.warning(
-                    "OTAWP impersonation is not correct in OTDS before OTAWP pod restart!"
+                self.logger.warning(
+                    "OTAWP impersonation is not correct in OTDS before OTAWP pod restart!",
                 )
             else:
-                logger.info(
-                    "OTAWP impersonation is correct in OTDS before OTAWP pod restart!"
+                self.logger.info(
+                    "OTAWP impersonation is correct in OTDS before OTAWP pod restart!",
                 )
-            logger.info("Restart OTAWP pod...")
+            self.logger.info("Restart OTAWP pod...")
             self.restart_otawp_pod()
-            # For some reason we need to double-check that the impersonation
-            # and if not set it again:
+            # For some reason we need to double-check that the impersonation
+            # for OTAWP has been set correctly and if not set it again:
             otawp_resource = self.otds_object.get_resource(
-                self.
+                name=self.settings.otawp.resource_name,
            )
-            if
-                not "allowImpersonation" in otawp_resource
-                or not otawp_resource["allowImpersonation"]
-            ):
+            if "allowImpersonation" not in otawp_resource or not otawp_resource["allowImpersonation"]:
                 # Allow impersonation for all users:
-                logger.warning(
-                    "OTAWP impersonation is not correct in OTDS - set it once more..."
+                self.logger.warning(
+                    "OTAWP impersonation is not correct in OTDS - set it once more...",
+                )
+                self.otds_object.impersonate_resource(
+                    resource_name=self.settings.otawp.resource_name,
                 )
-                self.otds_object.impersonate_resource(self.otawp_settings.resource_name)
 
-        #
-        if
-
-
-
+        # Restart Aviator Search (Omnigroup) to ensure group synchronisation is working
+        if self.settings.avts.enabled:  # is Aviator Search deployed?
+            self.logger.info(
+                "Restarting Aviator Search Omnigroup server after creation of OTDS ClientID/ClientSecret...",
+            )
+            self.k8s_object.restart_stateful_set(sts_name="idol-omnigroupserver")
+
+        # Upload log file for later review to "Deployment" folder
+        # in "Administration" folder in OTCS Enterprise volume:
+        if os.path.exists(self.settings.cust_log_file) and self.settings.otcs.upload_log_file:
             self.log_header("Upload log file to Extended ECM")
             response = self.otcs_backend_object.get_node_from_nickname(
-                self.settings.cust_target_folder_nickname
+                nickname=self.settings.cust_target_folder_nickname,
+            )
+            target_folder_id = self.otcs_backend_object.get_result_value(
+                response=response,
+                key="id",
             )
-            target_folder_id = self.otcs_backend_object.get_result_value(response, "id")
             if not target_folder_id:
                 target_folder_id = 2000  # use Enterprise Workspace as fallback
             # Check if the log file has been uploaded before.
             # This can happen if we re-run the python container:
             # In this case we add a version to the existing document:
             response = self.otcs_backend_object.get_node_by_parent_and_name(
-                int(target_folder_id),
+                parent_id=int(target_folder_id),
+                name=os.path.basename(self.settings.cust_log_file),
            )
             target_document_id = self.otcs_backend_object.get_result_value(
-                response,
+                response=response,
+                key="id",
             )
             if target_document_id:
                 response = self.otcs_backend_object.add_document_version(
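The second half of the hunk above restarts the OTAWP pod and then re-checks that impersonation is still allowed for the OTAWP resource in OTDS, re-enabling it if the restart dropped the setting. A minimal sketch of that re-check pattern, assuming an OTDS client object with the get_resource() and impersonate_resource() keyword signatures that appear in the diff; the wrapper function and its parameter names are illustrative:

    def ensure_impersonation(otds, resource_name: str) -> None:
        """Re-check an OTDS resource after a pod restart and re-enable impersonation if it got lost."""
        resource = otds.get_resource(name=resource_name)
        if "allowImpersonation" not in resource or not resource["allowImpersonation"]:
            # Impersonation is not (or no longer) allowed - set it once more:
            otds.impersonate_resource(resource_name=resource_name)

    # Mirrors the call sequence in the diff, e.g.:
    # ensure_impersonation(self.otds_object, self.settings.otawp.resource_name)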
@@ -2094,11 +2109,14 @@ class Customizer:
                     description="Initial Python Log after first run of customization",
                 )
 
-        self.
+        self.customizer_end_time = datetime.now(timezone.utc)
         self.log_header(
             "Customizer completed in {}".format(
-                self.
-            )
+                self.customizer_end_time - self.customizer_start_time,
+            ),
         )
 
+        # Return the success status:
+        return success
+
     # end method definition
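Version 2.0.0 also switches the timing code from naive datetime.now() to timezone-aware datetime.now(timezone.utc) and returns the accumulated success flag to the caller. A minimal sketch of the aware timing pattern, with print() standing in for the log_header() call of the real code:

    from datetime import datetime, timezone

    start_time = datetime.now(timezone.utc)  # timezone-aware start timestamp
    # ... run the customization payloads ...
    end_time = datetime.now(timezone.utc)
    print("Customizer completed in {}".format(end_time - start_time))  # timedelta of two aware datetimes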