mcli-framework 7.0.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of mcli-framework might be problematic.

Files changed (186)
  1. mcli/app/chat_cmd.py +42 -0
  2. mcli/app/commands_cmd.py +226 -0
  3. mcli/app/completion_cmd.py +216 -0
  4. mcli/app/completion_helpers.py +288 -0
  5. mcli/app/cron_test_cmd.py +697 -0
  6. mcli/app/logs_cmd.py +419 -0
  7. mcli/app/main.py +492 -0
  8. mcli/app/model/model.py +1060 -0
  9. mcli/app/model_cmd.py +227 -0
  10. mcli/app/redis_cmd.py +269 -0
  11. mcli/app/video/video.py +1114 -0
  12. mcli/app/visual_cmd.py +303 -0
  13. mcli/chat/chat.py +2409 -0
  14. mcli/chat/command_rag.py +514 -0
  15. mcli/chat/enhanced_chat.py +652 -0
  16. mcli/chat/system_controller.py +1010 -0
  17. mcli/chat/system_integration.py +1016 -0
  18. mcli/cli.py +25 -0
  19. mcli/config.toml +20 -0
  20. mcli/lib/api/api.py +586 -0
  21. mcli/lib/api/daemon_client.py +203 -0
  22. mcli/lib/api/daemon_client_local.py +44 -0
  23. mcli/lib/api/daemon_decorator.py +217 -0
  24. mcli/lib/api/mcli_decorators.py +1032 -0
  25. mcli/lib/auth/auth.py +85 -0
  26. mcli/lib/auth/aws_manager.py +85 -0
  27. mcli/lib/auth/azure_manager.py +91 -0
  28. mcli/lib/auth/credential_manager.py +192 -0
  29. mcli/lib/auth/gcp_manager.py +93 -0
  30. mcli/lib/auth/key_manager.py +117 -0
  31. mcli/lib/auth/mcli_manager.py +93 -0
  32. mcli/lib/auth/token_manager.py +75 -0
  33. mcli/lib/auth/token_util.py +1011 -0
  34. mcli/lib/config/config.py +47 -0
  35. mcli/lib/discovery/__init__.py +1 -0
  36. mcli/lib/discovery/command_discovery.py +274 -0
  37. mcli/lib/erd/erd.py +1345 -0
  38. mcli/lib/erd/generate_graph.py +453 -0
  39. mcli/lib/files/files.py +76 -0
  40. mcli/lib/fs/fs.py +109 -0
  41. mcli/lib/lib.py +29 -0
  42. mcli/lib/logger/logger.py +611 -0
  43. mcli/lib/performance/optimizer.py +409 -0
  44. mcli/lib/performance/rust_bridge.py +502 -0
  45. mcli/lib/performance/uvloop_config.py +154 -0
  46. mcli/lib/pickles/pickles.py +50 -0
  47. mcli/lib/search/cached_vectorizer.py +479 -0
  48. mcli/lib/services/data_pipeline.py +460 -0
  49. mcli/lib/services/lsh_client.py +441 -0
  50. mcli/lib/services/redis_service.py +387 -0
  51. mcli/lib/shell/shell.py +137 -0
  52. mcli/lib/toml/toml.py +33 -0
  53. mcli/lib/ui/styling.py +47 -0
  54. mcli/lib/ui/visual_effects.py +634 -0
  55. mcli/lib/watcher/watcher.py +185 -0
  56. mcli/ml/api/app.py +215 -0
  57. mcli/ml/api/middleware.py +224 -0
  58. mcli/ml/api/routers/admin_router.py +12 -0
  59. mcli/ml/api/routers/auth_router.py +244 -0
  60. mcli/ml/api/routers/backtest_router.py +12 -0
  61. mcli/ml/api/routers/data_router.py +12 -0
  62. mcli/ml/api/routers/model_router.py +302 -0
  63. mcli/ml/api/routers/monitoring_router.py +12 -0
  64. mcli/ml/api/routers/portfolio_router.py +12 -0
  65. mcli/ml/api/routers/prediction_router.py +267 -0
  66. mcli/ml/api/routers/trade_router.py +12 -0
  67. mcli/ml/api/routers/websocket_router.py +76 -0
  68. mcli/ml/api/schemas.py +64 -0
  69. mcli/ml/auth/auth_manager.py +425 -0
  70. mcli/ml/auth/models.py +154 -0
  71. mcli/ml/auth/permissions.py +302 -0
  72. mcli/ml/backtesting/backtest_engine.py +502 -0
  73. mcli/ml/backtesting/performance_metrics.py +393 -0
  74. mcli/ml/cache.py +400 -0
  75. mcli/ml/cli/main.py +398 -0
  76. mcli/ml/config/settings.py +394 -0
  77. mcli/ml/configs/dvc_config.py +230 -0
  78. mcli/ml/configs/mlflow_config.py +131 -0
  79. mcli/ml/configs/mlops_manager.py +293 -0
  80. mcli/ml/dashboard/app.py +532 -0
  81. mcli/ml/dashboard/app_integrated.py +738 -0
  82. mcli/ml/dashboard/app_supabase.py +560 -0
  83. mcli/ml/dashboard/app_training.py +615 -0
  84. mcli/ml/dashboard/cli.py +51 -0
  85. mcli/ml/data_ingestion/api_connectors.py +501 -0
  86. mcli/ml/data_ingestion/data_pipeline.py +567 -0
  87. mcli/ml/data_ingestion/stream_processor.py +512 -0
  88. mcli/ml/database/migrations/env.py +94 -0
  89. mcli/ml/database/models.py +667 -0
  90. mcli/ml/database/session.py +200 -0
  91. mcli/ml/experimentation/ab_testing.py +845 -0
  92. mcli/ml/features/ensemble_features.py +607 -0
  93. mcli/ml/features/political_features.py +676 -0
  94. mcli/ml/features/recommendation_engine.py +809 -0
  95. mcli/ml/features/stock_features.py +573 -0
  96. mcli/ml/features/test_feature_engineering.py +346 -0
  97. mcli/ml/logging.py +85 -0
  98. mcli/ml/mlops/data_versioning.py +518 -0
  99. mcli/ml/mlops/experiment_tracker.py +377 -0
  100. mcli/ml/mlops/model_serving.py +481 -0
  101. mcli/ml/mlops/pipeline_orchestrator.py +614 -0
  102. mcli/ml/models/base_models.py +324 -0
  103. mcli/ml/models/ensemble_models.py +675 -0
  104. mcli/ml/models/recommendation_models.py +474 -0
  105. mcli/ml/models/test_models.py +487 -0
  106. mcli/ml/monitoring/drift_detection.py +676 -0
  107. mcli/ml/monitoring/metrics.py +45 -0
  108. mcli/ml/optimization/portfolio_optimizer.py +834 -0
  109. mcli/ml/preprocessing/data_cleaners.py +451 -0
  110. mcli/ml/preprocessing/feature_extractors.py +491 -0
  111. mcli/ml/preprocessing/ml_pipeline.py +382 -0
  112. mcli/ml/preprocessing/politician_trading_preprocessor.py +569 -0
  113. mcli/ml/preprocessing/test_preprocessing.py +294 -0
  114. mcli/ml/scripts/populate_sample_data.py +200 -0
  115. mcli/ml/tasks.py +400 -0
  116. mcli/ml/tests/test_integration.py +429 -0
  117. mcli/ml/tests/test_training_dashboard.py +387 -0
  118. mcli/public/oi/oi.py +15 -0
  119. mcli/public/public.py +4 -0
  120. mcli/self/self_cmd.py +1246 -0
  121. mcli/workflow/daemon/api_daemon.py +800 -0
  122. mcli/workflow/daemon/async_command_database.py +681 -0
  123. mcli/workflow/daemon/async_process_manager.py +591 -0
  124. mcli/workflow/daemon/client.py +530 -0
  125. mcli/workflow/daemon/commands.py +1196 -0
  126. mcli/workflow/daemon/daemon.py +905 -0
  127. mcli/workflow/daemon/daemon_api.py +59 -0
  128. mcli/workflow/daemon/enhanced_daemon.py +571 -0
  129. mcli/workflow/daemon/process_cli.py +244 -0
  130. mcli/workflow/daemon/process_manager.py +439 -0
  131. mcli/workflow/daemon/test_daemon.py +275 -0
  132. mcli/workflow/dashboard/dashboard_cmd.py +113 -0
  133. mcli/workflow/docker/docker.py +0 -0
  134. mcli/workflow/file/file.py +100 -0
  135. mcli/workflow/gcloud/config.toml +21 -0
  136. mcli/workflow/gcloud/gcloud.py +58 -0
  137. mcli/workflow/git_commit/ai_service.py +328 -0
  138. mcli/workflow/git_commit/commands.py +430 -0
  139. mcli/workflow/lsh_integration.py +355 -0
  140. mcli/workflow/model_service/client.py +594 -0
  141. mcli/workflow/model_service/download_and_run_efficient_models.py +288 -0
  142. mcli/workflow/model_service/lightweight_embedder.py +397 -0
  143. mcli/workflow/model_service/lightweight_model_server.py +714 -0
  144. mcli/workflow/model_service/lightweight_test.py +241 -0
  145. mcli/workflow/model_service/model_service.py +1955 -0
  146. mcli/workflow/model_service/ollama_efficient_runner.py +425 -0
  147. mcli/workflow/model_service/pdf_processor.py +386 -0
  148. mcli/workflow/model_service/test_efficient_runner.py +234 -0
  149. mcli/workflow/model_service/test_example.py +315 -0
  150. mcli/workflow/model_service/test_integration.py +131 -0
  151. mcli/workflow/model_service/test_new_features.py +149 -0
  152. mcli/workflow/openai/openai.py +99 -0
  153. mcli/workflow/politician_trading/commands.py +1790 -0
  154. mcli/workflow/politician_trading/config.py +134 -0
  155. mcli/workflow/politician_trading/connectivity.py +490 -0
  156. mcli/workflow/politician_trading/data_sources.py +395 -0
  157. mcli/workflow/politician_trading/database.py +410 -0
  158. mcli/workflow/politician_trading/demo.py +248 -0
  159. mcli/workflow/politician_trading/models.py +165 -0
  160. mcli/workflow/politician_trading/monitoring.py +413 -0
  161. mcli/workflow/politician_trading/scrapers.py +966 -0
  162. mcli/workflow/politician_trading/scrapers_california.py +412 -0
  163. mcli/workflow/politician_trading/scrapers_eu.py +377 -0
  164. mcli/workflow/politician_trading/scrapers_uk.py +350 -0
  165. mcli/workflow/politician_trading/scrapers_us_states.py +438 -0
  166. mcli/workflow/politician_trading/supabase_functions.py +354 -0
  167. mcli/workflow/politician_trading/workflow.py +852 -0
  168. mcli/workflow/registry/registry.py +180 -0
  169. mcli/workflow/repo/repo.py +223 -0
  170. mcli/workflow/scheduler/commands.py +493 -0
  171. mcli/workflow/scheduler/cron_parser.py +238 -0
  172. mcli/workflow/scheduler/job.py +182 -0
  173. mcli/workflow/scheduler/monitor.py +139 -0
  174. mcli/workflow/scheduler/persistence.py +324 -0
  175. mcli/workflow/scheduler/scheduler.py +679 -0
  176. mcli/workflow/sync/sync_cmd.py +437 -0
  177. mcli/workflow/sync/test_cmd.py +314 -0
  178. mcli/workflow/videos/videos.py +242 -0
  179. mcli/workflow/wakatime/wakatime.py +11 -0
  180. mcli/workflow/workflow.py +37 -0
  181. mcli_framework-7.0.0.dist-info/METADATA +479 -0
  182. mcli_framework-7.0.0.dist-info/RECORD +186 -0
  183. mcli_framework-7.0.0.dist-info/WHEEL +5 -0
  184. mcli_framework-7.0.0.dist-info/entry_points.txt +7 -0
  185. mcli_framework-7.0.0.dist-info/licenses/LICENSE +21 -0
  186. mcli_framework-7.0.0.dist-info/top_level.txt +1 -0
mcli/lib/auth/token_util.py
@@ -0,0 +1,1011 @@
1
+ #!/usr/bin/env python3
2
+
3
+ import base64
4
+ import json
5
+ import math
6
+ import os
7
+ import sys
8
+ import time
9
+ from urllib.request import urlopen
10
+
11
+ DEV_SECRETS_ROOT = "~/.mcli_dev/"
12
+ IDP_URL_FILE = DEV_SECRETS_ROOT + "idp"
13
+ USER_INFO_FILE = DEV_SECRETS_ROOT + "user"
14
+ PRIVATE_KEY_PATH = DEV_SECRETS_ROOT + "keys/private-key.pem"
15
+
16
+ ALLOWED_ACTIONS = [
17
+ "REVOKE_AZURE",
18
+ "REVOKE_AWS",
19
+ "PROVISION_AZURE",
20
+ "REVOKE_GCP",
21
+ "PROVISION_AWS",
22
+ "PROVISION_GCP",
23
+ "PROVISION_THIRDPARTY",
24
+ ]
25
+
26
+
27
+ def help(action, args):
28
+ logger.info("Allowed actions are:")
29
+ for action in ALLOWED_ACTIONS:
30
+ logger.info("-", action)
31
+
32
+
33
+ def execute(action, args):
34
+ """Delegates the execution of the action to the appropriate handler."""
35
+
36
+ if action == "PROVISION_AZURE":
37
+ mcli = mcli_as_idp_user()
38
+ ensure_directory_exists(DEV_SECRETS_ROOT + "azure")
39
+ # persist_azure_storage_creds(resp.get('storage_account_name'), resp.get('storage_access_key'))
40
+
41
+ elif action == "PROVISION_AWS":
42
+ mcli = mcli_as_idp_user()
43
+ ensure_directory_exists(DEV_SECRETS_ROOT + "aws")
44
+
45
+ elif action == "PROVISION_GCP":
46
+ mcli = mcli_as_idp_user()
47
+ ensure_directory_exists(DEV_SECRETS_ROOT + "gcp")
48
+
49
+ elif action == "REVOKE_AZURE":
50
+ mcli = mcli_as_idp_user()
51
+ delete_file(DEV_SECRETS_ROOT + "azure/azure.json")
52
+ delete_directory(DEV_SECRETS_ROOT + "azure")
53
+ logger.info("Deleted locally persisted secrets... ")
54
+ logger.info("Successfully revoked provisioned Azure resources...")
55
+
56
+ elif action == "REVOKE_AWS":
57
+ mcli = mcli_as_idp_user()
58
+ delete_file(DEV_SECRETS_ROOT + "aws/aws.json")
59
+ delete_directory(DEV_SECRETS_ROOT + "aws")
60
+ logger.info("Deleted locally persisted secrets... ")
61
+ logger.info("Successfully revoked provisioned Aws resources...")
62
+
63
+ elif action == "REVOKE_GCP":
64
+ mcli = mcli_as_idp_user()
65
+ delete_file(DEV_SECRETS_ROOT + "gcp/gcp.json")
66
+ delete_directory(DEV_SECRETS_ROOT + "gcp")
67
+ logger.info("Deleted locally persisted secrets... ")
68
+ logger.info("Successfully revoked provisioned Gcp resources...")
69
+
70
+ elif action == "PROVISION_THIRDPARTY":
71
+ ensure_directory_exists(DEV_SECRETS_ROOT + "thirdParty")
72
+ mcli = mcli_as_basic_user()
73
+
74
+ else:
75
+ help(action, args)
76
+
77
+
78
+ def persist_thirdParty_creds(thirdPartyApiKind, creds):
79
+ filepath = get_absolute_path(DEV_SECRETS_ROOT + "thirdParty/" + thirdPartyApiKind + ".txt")
80
+ with open(filepath, "w") as f:
81
+ f.write(json.dumps(str(creds)))
82
+ logger.info("%s secrets have been persisted into: %s", thirdPartyApiKind, filepath)
83
+
84
+
85
+ def persist_azure_storage_creds(account_name, access_key):
86
+ filepath = get_absolute_path(DEV_SECRETS_ROOT + "azure/azure.json")
87
+ with open(filepath, "w") as f:
88
+ json.dump({"storage_account_name": account_name, "storage_access_key": access_key}, f)
89
+ logger.info("Azure secrets have been persisted into:", filepath)
90
+
91
+
92
+ def persist_aws_storage_creds(access_key, secret_key):
93
+ filepath = get_absolute_path(DEV_SECRETS_ROOT + "aws/aws.json")
94
+ with open(filepath, "w") as f:
95
+ json.dump({"access_key": access_key, "secret_key": secret_key}, f)
96
+ logger.info("Aws secrets have been persisted into:", filepath)
97
+
98
+
99
+ def persist_gcp_storage_creds(account_id, account_email, access_key, secret_key):
100
+ filepath = get_absolute_path(DEV_SECRETS_ROOT + "gcp/gcp.json")
101
+ with open(filepath, "w") as f:
102
+ json.dump(
103
+ {
104
+ "accountId": account_id,
105
+ "accountEmail": account_email,
106
+ "accessKey": access_key,
107
+ "secretKey": secret_key,
108
+ },
109
+ f,
110
+ )
111
+ logger.info("Gcp secrets have been persisted into:", filepath)
112
+
113
+
114
+ def ensure_directory_exists(dirpath):
115
+ dirpath = get_absolute_path(dirpath)
116
+ os.makedirs(dirpath, exist_ok=True)
117
+
118
+
119
+ def delete_directory(dirpath):
120
+ dirpath = get_absolute_path(dirpath)
121
+ if os.path.exists(dirpath):
122
+ os.rmdir(dirpath)
123
+
124
+
125
+ def delete_file(filepath):
126
+ filepath = get_absolute_path(filepath)
127
+ if os.path.exists(filepath):
128
+ os.remove(filepath)
129
+
130
+
131
+ def get_absolute_path(pth):
132
+ pth = os.path.expanduser(pth)
133
+ pth = os.path.abspath(pth)
134
+ return pth
135
+
136
+
137
+ def mcli_as_basic_user():
138
+ url = get_mcli_url()
139
+ token = _create_basic_auth_token("BA", "BA")
140
+ basicAuthHeader = "Basic " + token
141
+ return _fetch_remote_mcli_with_custom_auth(url, basicAuthHeader)
142
+
143
+
144
+ def mcli_as_idp_user():
145
+ url = get_mcli_url()
146
+ user_id = get_user_id()
147
+ token = _create_key_auth_token(user_id, PRIVATE_KEY_PATH)
148
+ keyAuthHeader = "mcli_key " + token
149
+ return _fetch_remote_mcli_with_custom_auth(url, keyAuthHeader)
150
+
151
+
152
+ def get_user_id():
153
+ return _read_line_from_file(USER_INFO_FILE)
154
+
155
+
156
+ def get_mcli_url():
157
+ return _read_line_from_file(IDP_URL_FILE)
158
+
159
+
160
+ def _create_basic_auth_token(user, password):
161
+ basic_content_bytes = (user + ":" + password).encode("ASCII")
162
+ basic_token_b64 = base64.b64encode(basic_content_bytes).decode("ASCII")
163
+ return basic_token_b64
164
+
165
+
166
+ def _create_key_auth_token(user_id, private_key_path):
167
+ sig, hex_nonce = _generate_signature(private_key_path)
168
+ mcli_key = user_id + ":" + hex_nonce + ":" + sig
169
+ mcli_key_bytes = mcli_key.encode("utf-8")
170
+ mcli_key_b64 = base64.b64encode(mcli_key_bytes).decode("ascii")
171
+ return mcli_key_b64
172
+
173
+
174
+ def _fetch_remote_mcli_with_custom_auth(url, authHeader):
175
+ """Loads and returns the mcli type system."""
176
+ src = urlopen(url + "/remote/mcli.py").read()
177
+ exec_scope = {}
178
+ exec(src, exec_scope)
179
+ return exec_scope["get_mcli"](url=url, authz=authHeader)
180
+
181
+
182
+ def _generate_signature(private_key_path):
183
+ private_key_path = get_absolute_path(private_key_path)
184
+ if not os.path.exists(private_key_path):
185
+ raise Exception("Private key does not exist at path:" + private_key_path)
186
+ nonce = str(math.floor(time.time() * 1000))
187
+ # Generate the signature using the private key
188
+ sig = os.popen(
189
+ "logger.infof "
190
+ + nonce
191
+ + " | openssl dgst -hex -sigopt rsa_padding_mode:pss -sha256 -sign "
192
+ + private_key_path
193
+ ).read()
194
+ # Remove the '(stdin)=' prefix from the output
195
+ sig = sig[len("SHA2-256(stdin)=") :].strip()
196
+ # Encode the nonce in hexadecimal format
197
+ hex_nonce = nonce.encode("ascii").hex()
198
+ return (sig, hex_nonce)
199
+
200
+
201
+ def _read_line_from_file(filepath):
202
+ filepath = get_absolute_path(filepath)
203
+ if not os.path.exists(filepath):
204
+ raise Exception("File does not exist at: " + filepath)
205
+ with open(filepath) as f:
206
+ return f.readline().strip()
207
+
208
+
209
+ def _parse_args(args):
210
+ """Parses the args passed into the python script leaving out the first argument (the script name)."""
211
+ action = args[0]
212
+ return (action, args[1:])
213
+
214
+
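
For reference, a minimal sketch of how the mcli_key token built by _create_key_auth_token above could be produced with the cryptography package instead of shelling out to openssl. The padding and digest mirror the `openssl dgst -sigopt rsa_padding_mode:pss -sha256` invocation; the helper name sign_mcli_key and the salt-length choice are assumptions, not part of the package.

    import base64
    import time

    from cryptography.hazmat.primitives import hashes, serialization
    from cryptography.hazmat.primitives.asymmetric import padding


    def sign_mcli_key(user_id: str, private_key_path: str) -> str:
        # Millisecond timestamp used as the nonce, as in _generate_signature.
        nonce = str(int(time.time() * 1000))
        with open(private_key_path, "rb") as f:
            key = serialization.load_pem_private_key(f.read(), password=None)
        # RSA-PSS over SHA-256, hex encoded to match `openssl dgst -hex`.
        # Salt length is assumed to equal the digest size (openssl's signing default).
        sig = key.sign(
            nonce.encode("ascii"),
            padding.PSS(mgf=padding.MGF1(hashes.SHA256()), salt_length=hashes.SHA256().digest_size),
            hashes.SHA256(),
        ).hex()
        # user_id : hex(nonce) : hex(signature), base64-encoded, as in _create_key_auth_token.
        mcli_key = user_id + ":" + nonce.encode("ascii").hex() + ":" + sig
        return base64.b64encode(mcli_key.encode("utf-8")).decode("ascii")
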
215
+ #!/usr/bin/env python3
216
+
217
+ import json
218
+ import os
219
+ import sys
220
+ from enum import Enum
221
+
222
+ mcli_DIR = os.getenv("mcli_DIR")
223
+ DEV_SECRETS_ROOT = os.path.expanduser("~/.mclidev/")
224
+ CLOUD_CREDENTIALS_ROOT_SUBDIR = "server/vault/_/cloud/"
225
+ FILE_SYSTEM_CONFIG_SUBDIR = "/server/config/_cluster_/local/FileSystemConfig/"
226
+ FILE_SYSTEM_CONFIG_FILE_NAME = "FileSystemConfig.json"
227
+
228
+
229
+ def resetConfig(cloud_name):
230
+ cloud = None
231
+ if cloud_name == CloudName.AWS.value:
232
+ cloud = Cloud(CloudName.AWS.value, "s3", "aws.json", "aws/aws.json")
233
+ cloud.writeCredentials()
234
+ elif cloud_name == CloudName.AZURE.value:
235
+ cloud = Cloud(CloudName.AZURE.value, "adl", "azure.json", "azure/azure.json")
236
+ # TODO PLAT-42946: write azure credentials
237
+ if cloud:
238
+ cloud.writeFileSystemConfig()
239
+
240
+
241
+ def writeJsonToFile(parent_dir, file_path, json_content):
242
+ # create parent directories up to file_path if they do not already exist
243
+ os.makedirs(parent_dir, exist_ok=True)
244
+ with open(file_path, "w") as f:
245
+ json.dump(json_content, f)
246
+
247
+
248
+ class CloudName(Enum):
249
+ AWS = "aws"
250
+ AZURE = "azure"
251
+
252
+
253
+ class Cloud:
254
+
255
+ def __init__(
256
+ self, name, file_system_name, credentials_file_name, provisioned_credentials_subpath
257
+ ):
258
+ # name of the Cloud
259
+ self.name = name
260
+ # the name of the file system to set mounts to. e.g. 's3'
261
+ self.file_system_name = file_system_name
262
+ # the file name containing CloudCredentials to write into the Config Framework. e.g. 'aws.json'
263
+ self.credentials_file_name = credentials_file_name
264
+ # the subpath to the provisioned credentials file. e.g. '/aws/aws.json'
265
+ self.provisioned_credentials_subpath = provisioned_credentials_subpath
266
+
267
+ def writeFileSystemConfig(self):
268
+ logger.info(
269
+ f"Setting {self.file_system_name} as default filesystem at the _cluster_ override"
270
+ )
271
+ file_system_config_dir = mcli_DIR + FILE_SYSTEM_CONFIG_SUBDIR
272
+ file_system_config_path = file_system_config_dir + FILE_SYSTEM_CONFIG_FILE_NAME
273
+ file_system_config_json_map = {}
274
+ file_system_config_json_map["default"] = self.file_system_name
275
+ writeJsonToFile(
276
+ file_system_config_dir, file_system_config_path, file_system_config_json_map
277
+ )
278
+
279
+ def writeCredentials(self):
280
+ # write provisioned credentials to the config framework
281
+ provisioned_credentials_path = os.path.join(
282
+ DEV_SECRETS_ROOT, self.provisioned_credentials_subpath
283
+ )
284
+ if not os.path.exists(provisioned_credentials_path):
285
+ logger.info("No provisioned credentials found, please follow documentation")
286
+ return
287
+ with open(provisioned_credentials_path, "r") as provisioned_credentials_file:
288
+ provisioned_credentials_mapping = json.load(provisioned_credentials_file)
289
+ config_credentials_dir = os.path.join(mcli_DIR, CLOUD_CREDENTIALS_ROOT_SUBDIR)
290
+ config_credentials_path = config_credentials_dir + self.credentials_file_name
291
+ logger.info(f"Moving credentials to {config_credentials_path}")
292
+ # CloudCredentials field name -> Cloud dependent values
293
+ credentials_map = {}
294
+ if self.name == CloudName.AWS.value:
295
+ credentials_map["type"] = "AwsCredentials"
296
+ credentials_map["region"] = "us-east-1"
297
+ credentials_map["accessKey"] = provisioned_credentials_mapping["access_key"]
298
+ credentials_map["secretKey"] = provisioned_credentials_mapping["secret_key"]
299
+ elif self.name == CloudName.AZURE.value:
300
+ # TODO: PLAT-42946 need to add remaining Azure fields
301
+ credentials_map["type"] = "AzureCredentials"
302
+ credentials_map["region"] = "eastus2"
303
+ storageCredentials = {}
304
+ storageCredentials["accountName"] = provisioned_credentials_mapping[
305
+ "storage_account_name"
306
+ ]
307
+ storageCredentials["accessKey"] = provisioned_credentials_mapping[
308
+ "storage_access_key"
309
+ ]
310
+ credentials_map["storageCredentials"] = storageCredentials
311
+
312
+ config_map = {}
313
+ config_map["credentials"] = credentials_map
314
+ writeJsonToFile(config_credentials_dir, config_credentials_path, config_map)
315
+
316
+
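
To make the data flow in resetConfig concrete, here is a hedged sketch of the JSON shapes involved for the AWS case, with placeholder values; the paths and field names come from the constants and mappings above. Note that the provisioning helpers earlier in this file write under ~/.mcli_dev/ while this script reads from ~/.mclidev/.

    # Input read from DEV_SECRETS_ROOT + "aws/aws.json" (placeholder values):
    provisioned = {"access_key": "AKIA...", "secret_key": "..."}

    # Written by Cloud("aws", "s3", "aws.json", "aws/aws.json").writeCredentials()
    # to mcli_DIR + "server/vault/_/cloud/aws.json":
    credentials_config = {
        "credentials": {
            "type": "AwsCredentials",
            "region": "us-east-1",
            "accessKey": provisioned["access_key"],
            "secretKey": provisioned["secret_key"],
        }
    }

    # Written by writeFileSystemConfig() to
    # mcli_DIR + "/server/config/_cluster_/local/FileSystemConfig/FileSystemConfig.json":
    file_system_config = {"default": "s3"}
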
317
+ #!/usr/bin/env python3
318
+
319
+ # Copyright 2009-2022 mcli AI. All Rights Reserved.
320
+ # This material, including without limitation any software, is the confidential trade secret and proprietary
321
+ # information of mcli and its licensors. Reproduction, use and/or distribution of this material in any form is
322
+ # strictly prohibited except as set forth in a written license agreement with mcli and/or its authorized distributors.
323
+ # This material may be covered by one or more patents or pending patent applications.
324
+
325
+ import base64
326
+ import builtins
327
+ import json
328
+ import logging
329
+ import os
330
+ import platform
331
+ import socket
332
+ import subprocess
333
+ import sys
334
+ import time
335
+ import urllib.error
336
+
337
+ try:
338
+ import kubernetes as k8s
339
+
340
+ K8S_CLI_AVAILABLE = True
341
+ except ImportError:
342
+ K8S_CLI_AVAILABLE = False
343
+
344
+
345
+ # Configure logger
346
+ class FormatWithColors(logging.Formatter):
347
+ COLOR_MAP = {
348
+ logging.DEBUG: "\x1b[34;20m", # blue
349
+ logging.INFO: "\x1b[38;20m", # white
350
+ logging.INFO + 1: "\x1b[32;20m", # green
351
+ logging.WARNING: "\x1b[33;20m", # yellow
352
+ logging.ERROR: "\x1b[31;20m", # red
353
+ logging.CRITICAL: "\x1b[31;1m", # bold red
354
+ }
355
+
356
+ def __init__(self, record_format):
357
+ super().__init__()
358
+ self._colors = True
359
+ self._default_formatter = logging.Formatter(record_format)
360
+ self._formatters = {
361
+ level: logging.Formatter(color + record_format + "\x1b[0m")
362
+ for level, color in self.COLOR_MAP.items()
363
+ }
364
+
365
+ def no_colors(self, flag):
366
+ self._colors = not flag
367
+
368
+ def _formatter(self, level):
369
+ return (
370
+ self._formatters.get(level, self._default_formatter)
371
+ if self._colors
372
+ else self._default_formatter
373
+ )
374
+
375
+ def format(self, record):
376
+ return self._formatter(record.levelno).format(record)
377
+
378
+
379
+ logger = logging.getLogger()
380
+ ch = logging.StreamHandler()
381
+ ch.setLevel(logging.DEBUG)
382
+ formatter = FormatWithColors("[%(levelname)s] %(message)s")
383
+ ch.setFormatter(formatter)
384
+ logger.addHandler(ch)
385
+ logger.setLevel(logging.INFO)
386
+
387
+ mcli_SERVER_ROOT = os.getenv("mcli_SERVER_ROOT")
388
+ V8_INSTALL_HINT = "Reapplying configuration by running `v8 setup` may fix this issue."
389
+
390
+
391
+ is_macos = platform.system() == "Darwin"
392
+ is_linux = platform.system() == "Linux"
393
+
394
+
395
+ def fatal_error(msg):
396
+ logger.critical(msg + " Unable to recover from the error, exiting.")
397
+ if not logger.isEnabledFor(logging.DEBUG):
398
+ logger.error(
399
+ "Debug output may help you to fix this issue or will be useful for maintainers of this tool."
400
+ " Please try to rerun tool with `-d` flag to enable debug output"
401
+ )
402
+ sys.exit(1)
403
+
404
+
405
+ def execute_os_command(command, fail_on_error=True, stdin=None):
406
+ logger.debug("Executing command '%s'", command)
407
+ process = subprocess.Popen(
408
+ command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE, stdin=subprocess.PIPE
409
+ )
410
+ if stdin is not None:
411
+ stdin = stdin.encode()
412
+ stdout, stderr = [stream.decode().strip() for stream in process.communicate(input=stdin)]
413
+
414
+ logger.debug("rc > %s", process.returncode)
415
+ if stdout:
416
+ logger.debug("stdout> %s", stdout)
417
+ if stderr:
418
+ logger.debug("stderr> %s", stderr)
419
+
420
+ if process.returncode:
421
+ msg = f'Failed to execute command "{command}", error:\n{stdout}{stderr}'
422
+ if fail_on_error:
423
+ fatal_error(msg)
424
+ else:
425
+ raise RuntimeError(msg)
426
+
427
+ return stdout
428
+
429
+
430
+ def service_account_secret_name(context, namespace):
431
+ assert context == "dev", f'Only "dev" context is supported, got "{context}"'
432
+ return f"{namespace}-admin"
433
+
434
+
435
+ def k8s_token(context, namespace, secret_name):
436
+ logger.debug(
437
+ f"Retrieving token from secret {secret_name} in context {context} for namespace {namespace}"
438
+ )
439
+ encoded_token = execute_os_command(
440
+ f"kubectl"
441
+ f" --context {context}"
442
+ f" -n {namespace} get secret {secret_name}"
443
+ f" -o jsonpath='{{.data.token}}'"
444
+ )
445
+ assert (
446
+ encoded_token
447
+ ), f"Failed to retrieve token from secret {secret_name} in context {context} for namespace {namespace}; run `v8 setup` and try again"
448
+ return base64.b64decode(encoded_token).decode()
449
+
450
+
451
+ def k8s_context_name():
452
+ logger.debug("Requesting current k8s context name")
453
+ context = execute_os_command("kubectl config current-context")
454
+ logger.info('Found k8s context "%s"', context)
455
+ return context
456
+
457
+
458
+ def k8s_api_server_url(context_name):
459
+ logger.debug('Looking for a K8s ApiServer url by context name "%s"', context_name)
460
+ url = execute_os_command(
461
+ f"kubectl config view -o"
462
+ f" jsonpath='{{.clusters[?(@.name==\"{context_name}\")].cluster.server}}'"
463
+ )
464
+ if not url:
465
+ fatal_error(f'Cannot determine K8s APIServer url for context "{context_name}"')
466
+ logger.debug('Current K8s APIServer url for context "%s" is %s', context_name, url)
467
+ return url
468
+
469
+
470
+ def mcli_cluster_url(host):
471
+ # noinspection HttpUrlsUsage
472
+ return f"http://{host}/mcli/mcli"
473
+
474
+
475
+ def configure_K8sApiServer(namespace, context):
476
+ url = k8s_api_server_url(context)
477
+ dsa = service_account_secret_name(context, namespace)
478
+ token = k8s_token(context, namespace, dsa)
479
+
480
+ mcli.K8sApiServer().config().clearConfigAndSecretAllOverrides()
481
+ mcli.K8sApiServer.setApiUrlAndAuth(url, f"Bearer {token}", mcli.ConfigOverride.CLUSTER)
482
+ logger.info("mcli K8sApiServer configured!")
483
+
484
+
485
+ def ask_user(prompt):
486
+ return input(f"{prompt} (yes/NO) ").lower() in ["yes", "y", "1", "ye"]
487
+
488
+
489
+ def delete_namespace(context, namespace):
490
+ if namespace == "default":
491
+ logger.debug("Skipping removal for the default namespace")
492
+ return
493
+ logger.info(
494
+ 'Deleting namespace "%s" please wait '
495
+ "(It may take some time to ensure all resources are cleaned)",
496
+ namespace,
497
+ )
498
+ try:
499
+ execute_os_command(
500
+ f"kubectl --context={context} delete ns {namespace}", fail_on_error=False
501
+ )
502
+ except BaseException as e:
503
+ if "Error from server (NotFound): namespaces" in str(e):
504
+ return # no need to report if no namespace found
505
+ logger.warning("Failed to delete namespace. See error:\n%s", str(e))
506
+
507
+
508
+ def configure_k8s_context(namespace, context):
509
+ # This assumes K8s context and minikube profile name are same.
510
+
511
+ logger.debug(
512
+ 'Configuring mcli Server to use k8s namespace "%s" in context %s', namespace, context
513
+ )
514
+ context_name = k8s_context_name()
515
+
516
+ if context_name != context:
517
+ logger.warning(
518
+ f'K8s context configured to different context ("{context_name}") than requested '
519
+ f'context ("{context}").'
520
+ )
521
+ if not ask_user(
522
+ f"Would you like to set context to ({context})"
523
+ f" & namespace to ({namespace}) and proceed forward?"
524
+ ):
525
+ sys.exit(1)
526
+ # noinspection PyBroadException
527
+ try:
528
+ execute_os_command(f"kubectl config use-context {context}")
529
+ logger.info(
530
+ "Configured successfully to Namespace (%s) and Context (%s)", namespace, context
531
+ )
532
+ except BaseException:
533
+ fatal_error(
534
+ f'No context exists with the name: "{context}"'
535
+ f" Run the following command to start minikube:\n"
536
+ f" minikube -p {context} start"
537
+ )
538
+
539
+
540
+ def load_mcli(host):
541
+
542
+ if getattr(builtins, "mcli", None) is not None:
543
+ return # already configured.
544
+
545
+ url = f"{mcli_cluster_url(host)}"
546
+
547
+ # noinspection PyBroadException
548
+ try:
549
+ from urllib.request import urlopen
550
+
551
+ src = urlopen(mcli_cluster_url(host) + "/remote/mcli.py").read()
552
+ exec_scope = {}
553
+ exec(src, exec_scope) # pylint: disable=exec-used
554
+ builtins.mcli = exec_scope["get_mcli"](url)
555
+ except (urllib.error.HTTPError, urllib.error.URLError, ConnectionRefusedError):
556
+ logger.error(
557
+ f"Cannot connect to mcli server on {url}\nPlease, ensure mcli server is running and try again."
558
+ )
559
+ sys.exit(1)
560
+ except BaseException:
561
+ logger.exception("Failed to load mcli from local server.")
562
+ fatal_error("Please try again.")
563
+
564
+
565
+ def get_next_debug_port():
566
+
567
+ def is_port_in_use(port_):
568
+ with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
569
+ return s.connect_ex(("localhost", port_)) == 0
570
+
571
+ port = 7702
572
+ while is_port_in_use(port):
573
+ port += 1
574
+
575
+ return port
576
+
577
+
578
+ def container_access_token(container_registry) -> str:
579
+ """Returns access token for the given container registry"""
580
+
581
+ az_login()
582
+ return execute_os_command(
583
+ f"az acr login --name {container_registry} --expose-token --output tsv --query accessToken"
584
+ )
585
+
586
+
587
+ def imagepull_secret_name(container_registry: str) -> str:
588
+ """Returns image pull secret name for the given container registry"""
589
+
590
+ return f"{container_registry}-secret"
591
+
592
+
593
+ def ensure_namespace(namespace, context=None):
594
+ context = "" if context is None else f" --context {context}"
595
+ namespaces = json.loads(execute_os_command(f"kubectl{context} get ns -o json")).get("items")
596
+ namespace_exists = any(n.get("metadata").get("name") == namespace for n in namespaces)
597
+
598
+ if not namespace_exists:
599
+ logger.debug(f"Creating namespace:${namespace}")
600
+ execute_os_command(f"kubectl{context} create ns {namespace}")
601
+
602
+
603
+ def patch_service_account(
604
+ namespace, container_registry, context=None, service_account_name="default"
605
+ ):
606
+ """Patches the provided service account with the image pull secret for the given container registry"""
607
+
608
+ imagepull_secrets = (
609
+ f'{{"imagePullSecrets": [{{"name": "{imagepull_secret_name(container_registry)}"}}]}}'
610
+ )
611
+ execute_os_command(
612
+ f"kubectl{context} -n {namespace} patch serviceaccount {service_account_name} -p '{imagepull_secrets}' --type=merge"
613
+ )
614
+
615
+
616
+ def configure_registry_secret(namespace, container_registry, context=None):
617
+ ensure_namespace(namespace, context)
618
+
619
+ logger.debug(f"Configuring image pull credentials for {container_registry}")
620
+
621
+ # https://learn.microsoft.com/en-us/azure/container-registry/container-registry-authentication?tabs=azure-cli#az-acr-login-with---expose-token
622
+ json_credentials = {
623
+ "auths": {
624
+ container_registry: {
625
+ "username": "00000000-0000-0000-0000-000000000000",
626
+ "password": container_access_token(container_registry),
627
+ }
628
+ }
629
+ }
630
+
631
+ base64_json_encoded_credentials = base64.b64encode(
632
+ json.dumps(json_credentials).encode("utf-8")
633
+ ).decode()
634
+
635
+ context = "" if context is None else f" --context {context}"
636
+ secret = f"""
637
+ cat <<EOF | kubectl{context} -n {namespace} apply -f -
638
+ apiVersion: v1
639
+ data:
640
+ .dockerconfigjson: {base64_json_encoded_credentials}
641
+ kind: Secret
642
+ metadata:
643
+ name: {imagepull_secret_name(container_registry)}
644
+ type: kubernetes.io/dockerconfigjson
645
+ EOF"""
646
+
647
+ execute_os_command(secret)
648
+ time.sleep(10)
649
+ patch_service_account(namespace, container_registry, context)
650
+
651
+
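
The heredoc above pipes a dockerconfigjson Secret manifest into kubectl apply. Since the module already imports the kubernetes client as k8s when available, an equivalent call through that client might look like the sketch below; this is illustrative only (the helper name create_pull_secret is not part of the package), and the payload mirrors what configure_registry_secret builds.

    import base64
    import json

    import kubernetes as k8s


    def create_pull_secret(namespace, container_registry, access_token, context="dev"):
        # Same .dockerconfigjson payload as the shell heredoc above.
        dockerconfig = {
            "auths": {
                container_registry: {
                    "username": "00000000-0000-0000-0000-000000000000",
                    "password": access_token,
                }
            }
        }
        body = k8s.client.V1Secret(
            metadata=k8s.client.V1ObjectMeta(name=f"{container_registry}-secret"),
            type="kubernetes.io/dockerconfigjson",
            data={".dockerconfigjson": base64.b64encode(json.dumps(dockerconfig).encode("utf-8")).decode()},
        )
        api_client = k8s.config.new_client_from_config(context=context)
        k8s.client.CoreV1Api(api_client).create_namespaced_secret(namespace=namespace, body=body)
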
652
+ def az_login():
653
+ logger.info("Logging into Azure")
654
+ # noinspection PyBroadException
655
+ try:
656
+ execute_os_command("az account show")
657
+ except BaseException:
658
+ fatal_error(
659
+ "Please run `az login` and try again. Run `./v8 setup` if `az` (Azure CLI) is missing"
660
+ )
661
+
662
+
663
+ def uninstall_helm(namespace, release, context):
664
+ logger.info(f"Uninstalling {release} helm chart")
665
+ try:
666
+ execute_os_command(
667
+ "helm uninstall" f" --namespace {namespace}" f" --kube-context {context}" f" {release}",
668
+ fail_on_error=False,
669
+ )
670
+ except BaseException as e:
671
+ logger.warning(f"Failed to uninstall helm chart:\n{str(e)}")
672
+
673
+
674
+ class K8sClient:
675
+ """K8s client for managing resources in a K8s cluster given a context"""
676
+
677
+ from contextlib import contextmanager
678
+
679
+ def __init__(self, context="dev") -> None:
680
+
681
+ if not K8S_CLI_AVAILABLE:
682
+ fatal_error(f"kubernetes package is missing; {V8_INSTALL_HINT}")
683
+
684
+ self._context = context
685
+ self._client = k8s.config.new_client_from_config(context=context)
686
+
687
+ _, active_context = k8s.config.list_kube_config_contexts()
688
+ self._namespace = active_context["context"]["namespace"]
689
+
690
+ assert self._namespace, f"Namespace is not set in K8s context {context}"
691
+
692
+ @contextmanager
693
+ def api(self):
694
+ """Context manager for K8s client"""
695
+
696
+ with self._client as api:
697
+ try:
698
+ yield api
699
+ except k8s.client.ApiException as e:
700
+ fatal_error("Exception in K8s client: %s\n" % e)
701
+
702
+ def delete_resources(self, selector="mcli__cluster-0=0local0"):
703
+ """Delete all resources in the namespace with the provided label selector"""
704
+
705
+ # https://github.com/kubernetes-client/python/blob/master/kubernetes/README.md
706
+ resource_apis = {
707
+ "CoreV1Api": ["service", "config_map", "secret", "pod", "persistent_volume_claim"],
708
+ "AppsV1Api": ["deployment"],
709
+ "NetworkingV1Api": ["ingress", "network_policy"],
710
+ }
711
+
712
+ with self.api() as api_client:
713
+ for api, resources in resource_apis.items():
714
+ cli = getattr(k8s.client, api)(api_client)
715
+
716
+ for resource in resources:
717
+ destructor = getattr(cli, f"delete_collection_namespaced_{resource}")
718
+ destructor(namespace=self._namespace, label_selector=selector)
719
+
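
A short usage sketch for the K8sClient wrapper above, assuming a kubeconfig whose "dev" context has a namespace set:

    # Deletes every labelled mcli resource in the namespace bound to the "dev" context.
    client = K8sClient(context="dev")
    client.delete_resources(selector="mcli__cluster-0=0local0")
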
720
+
721
+ PKG_JSN_EXT = ".mclipkg.json"
722
+ VERSION_FILE_PATH = "platform/platform/src/main/resources/mcli/server/version.txt"
723
+
724
+ # Paths and subpaths used in exclusion / inclusion
725
+ JAVA_PKG_JSN_SUBPATH = "/src/main/mcli/"
726
+ PLAT_ZOO_SUBPATH = "platform/zoo" + JAVA_PKG_JSN_SUBPATH
727
+ PLAT_ZOO_PKG_JSN_PATH = PLAT_ZOO_SUBPATH + "zoo/zoo" + PKG_JSN_EXT
728
+ PLAT_REPO_SERVER_SUBPATH = "platform/repo/server"
729
+
730
+ # BUILD and TEST RESOURCES (Mostly to exclude when finding pkg json files)
731
+ IDE_PKG_JSN_RSRCS = "/out/production/resources/"
732
+ GRADLE_PKG_JSN_RSRCS = "/build/resources/main/"
733
+ TEST_RSRCS = "/src/test/resources/"
734
+
735
+ # Keys of Pkg Decl
736
+ K_NAME = "name"
737
+ K_VERSION = "version"
738
+ K_DEPS = "dependencies"
739
+ K_COMPAT = "compatibleToVersion"
740
+
741
+ CSS_LIB = "cssLibrary"
742
+
743
+
744
+ def get_current_version(file_path=VERSION_FILE_PATH):
745
+ """
746
+ Read the version from VERSION_FILE_PATH and return.
747
+ The file is assumed to contain a single word which should be a valid <major>.<minor>.<patch>
748
+ """
749
+ import os
750
+
751
+ if not os.path.exists(file_path):
752
+ raise ValueError(f"File '{file_path}' does not exist.")
753
+
754
+ try:
755
+ with open(file_path, "r", encoding="utf-8") as file:
756
+ content = file.read().strip()
757
+ version = content.split()[0] if content else None
758
+ except IOError as e:
759
+ logger.info(f"Error reading file {file_path}: {e}")
760
+ return None
761
+
762
+ if version == None:
763
+ raise ValueError(f"File '{file_path}' does not contain version.")
764
+ elif is_major_minor_patch(version):
765
+ return version
766
+ else:
767
+ raise ValueError(
768
+ f"File '{file_path}' does not have a valid <major>.<minor>.<patch> version. Version: '{version}'"
769
+ )
770
+
771
+
772
+ def is_major_minor_patch(version):
773
+ """
774
+ Checks if the provided string is a valid <major>.<minor>.<patch>. i.e three integers separated by dots.
775
+ """
776
+ import re
777
+
778
+ pattern = r"^\d+\.\d+\.\d+$"
779
+ return bool(re.match(pattern, version))
780
+
781
+
782
+ def previous_version(version):
783
+ """
784
+ Assuming that the provided version is a valid <major>.<minor>, return the
785
+ previous version (<major>.<minor>) from it.
786
+ """
787
+ f_version = float(version)
788
+ return f"{f_version - 0.1:.1f}"  # format to one decimal to avoid float noise like 8.200000000000001
789
+
790
+
791
+ def get_pkg_jsn_files(base_dir=".", excludes=[], force_allowed=[]):
792
+ """
793
+ Traverses the directory and collects paths of pkg json files.
794
+
795
+
796
+ Args:
797
+ base_dir (str): The starting directory for the traversal.
798
+ excludes (list): Exclude all files which contain any of the provided paths / subpaths
799
+ force_allowed (list): Forcefully allow these files in the list even if they get excludes using excludes
800
+ """
801
+ import os
802
+
803
+ matched_files = []
804
+
805
+ for root, dirs, files in os.walk(base_dir):
806
+ # Exclude directories
807
+ dirs[:] = [d for d in dirs if os.path.join(root, d) not in excludes]
808
+
809
+ for file in files:
810
+ file_path = os.path.join(root, file)
811
+
812
+ # Check for extension and exclusion rules
813
+ if file.endswith(PKG_JSN_EXT):
814
+ if any(e in file_path for e in excludes):
815
+ # Include the file only if it's in the force_allowed
816
+ if not any(allowed in file_path for allowed in force_allowed):
817
+ continue
818
+ matched_files.append(file_path)
819
+
820
+ return sorted(matched_files)
821
+
822
+
823
+ def get_platform_pkgs(files):
824
+ """
825
+ Iterate over the file paths and extract the pkg name from each, returning a list of pkgs.
826
+ """
827
+ pkgs = []
828
+ # logger.info(files)
829
+ for file_path in files:
830
+ # Include only those files which follows the platform pkg convention
831
+ if any(p in file_path for p in [JAVA_PKG_JSN_SUBPATH, PLAT_REPO_SERVER_SUBPATH]):
832
+ pkgs.append(file_path.split("/")[-1].replace(PKG_JSN_EXT, ""))
833
+ return pkgs
834
+
835
+
836
+ def align_version(
837
+ files,
838
+ platform_pkgs,
839
+ version,
840
+ compatible_to_version,
841
+ ui_pkgs=set(),
842
+ ui_compatible_to_version=None,
843
+ is_platform=False,
844
+ ):
845
+ """
846
+ Parse all provided mclipkg.json files and update various fields in the package json.
847
+ 1. All the platform_pkgs, if they exist in the dependencies, should be updated to the provided version <major>.<minor>
848
+ 2. The version field should be changed to the provided version
849
+ 3. compatibleToVersion field should be changed to the previous major minor
850
+ 4. For near term, ui_pkgs will have their own "compatibleToVersion"
851
+ 5. Version and compatibleToVersion of the Zoo Test pkgs will not be updated.
852
+ """
853
+ import json
854
+
855
+ maj_min = version.rsplit(".", 1)[0]
856
+ desired_key_order = ["name", "description", "author", "icon", K_VERSION, K_COMPAT]
857
+
858
+ for file_path in files:
859
+ with open(file_path, "r", encoding="utf-8") as file:
860
+ try:
861
+ data = json.load(file)
862
+ modified = False
863
+
864
+ # update dependencies to major.minor
865
+ if K_DEPS in data and isinstance(data[K_DEPS], dict):
866
+ for key in platform_pkgs:
867
+ if key in data[K_DEPS]:
868
+ val = data[K_DEPS][key]
869
+ if val == None or val != maj_min:
870
+ data[K_DEPS][key] = maj_min
871
+ modified = True
872
+
873
+ # If the provided pkg json files are for platform pkgs, only then update version and compatibleToVersion
874
+ if is_platform:
875
+ # update version to major.minor.patch
876
+ if (
877
+ K_VERSION not in data
878
+ or data[K_VERSION] == None
879
+ or data[K_VERSION] != version
880
+ ):
881
+ data[K_VERSION] = version
882
+ modified = True
883
+
884
+ # Change the compatibleToVersion based on the ui pkgs
885
+ # TODO: PLAT-108921 - Remove special logic for UI pkgs
886
+ if data[K_NAME] in ui_pkgs:
887
+ compat_version = ui_compatible_to_version
888
+ else:
889
+ compat_version = compatible_to_version
890
+
891
+ # update compatibleToVersion to previous major.minor
892
+ if (
893
+ K_COMPAT not in data
894
+ or data[K_COMPAT] == None
895
+ or data[K_COMPAT] != compat_version
896
+ ):
897
+ data[K_COMPAT] = compat_version
898
+ modified = True
899
+
900
+ if modified:
901
+ with open(file_path, "w", encoding="utf-8") as file:
902
+ json.dump(ensure_key_order(data, desired_key_order), file, indent=2)
903
+ logger.info(f"Updated file: {file_path}")
904
+
905
+ except (json.JSONDecodeError, IOError) as e:
906
+ logger.info(f"Error processing file {file_path}: {e}")
907
+
908
+
909
+ def ensure_key_order(json_obj, ordered_keys):
910
+ """
911
+ Ensures that certain keys in the JSON object occur in the specified order.
912
+
913
+ Args:
914
+ json_obj (dict): The JSON object to reorder.
915
+ ordered_keys (list): List of keys specifying the desired order.
916
+
917
+ Returns:
918
+ OrderedDict: A new JSON object with the specified keys reordered.
919
+ """
920
+ from collections import OrderedDict
921
+
922
+ if not isinstance(json_obj, dict):
923
+ raise TypeError("Input must be a JSON object (dictionary).")
924
+
925
+ if not isinstance(ordered_keys, list):
926
+ raise TypeError("ordered_keys must be a list of keys.")
927
+
928
+ # Create an ordered dictionary
929
+ reordered_json = OrderedDict()
930
+
931
+ # Add keys in the specified order if they exist in the JSON object
932
+ for key in ordered_keys:
933
+ if key in json_obj:
934
+ reordered_json[key] = json_obj[key]
935
+
936
+ # Add remaining keys in their original order
937
+ for key, value in json_obj.items():
938
+ if key not in reordered_json:
939
+ reordered_json[key] = value
940
+
941
+ return reordered_json
942
+
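
For example, ensure_key_order applied to a package declaration with placeholder values behaves like this:

    pkg = {"dependencies": {"zoo": "8.3"}, "version": "8.3.1", "name": "examplePkg"}
    ordered = ensure_key_order(pkg, ["name", "description", "author", "icon", "version", "compatibleToVersion"])
    # OrderedDict([('name', 'examplePkg'), ('version', '8.3.1'), ('dependencies', {'zoo': '8.3'})])
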
943
+
944
+ def main():
945
+ import argparse
946
+
947
+ try:
948
+ version = get_current_version()
949
+
950
+ parser = argparse.ArgumentParser(
951
+ description="Script to align the platform pkgDecl versions with version.txt"
952
+ )
953
+
954
+ parser.add_argument(
955
+ "--compatibleToVersion",
956
+ type=str,
957
+ help='The compatibleToVersion for ALL platform pkgs. Default is "platform version" - 1.',
958
+ )
959
+ parser.add_argument(
960
+ "--uiCompatibleToVersion",
961
+ type=str,
962
+ help="The compatiableVersionTo for ALL UI pkgs. Default is the same as --compatibleToVersion. (optional)",
963
+ )
964
+
965
+ # Parse arguments
966
+ args = parser.parse_args()
967
+
968
+ # Look in root directory (".") and exclude test & build resources and zoo pkgs (but force allow zoo pkg json)
969
+ excludes = [IDE_PKG_JSN_RSRCS, GRADLE_PKG_JSN_RSRCS, TEST_RSRCS, PLAT_ZOO_SUBPATH]
970
+ force_allowed = [PLAT_ZOO_PKG_JSN_PATH]
971
+ pkg_jsn_files = get_pkg_jsn_files(".", excludes, force_allowed)
972
+
973
+ # Extract platform pkg names from it
974
+ pkgs = get_platform_pkgs(pkg_jsn_files)
975
+ # TODO - PLAT-108921 - Remove any specialization of ui pkgs in compatibleToVersion
976
+ ui_pkgs = {p for p in pkgs if p.startswith("ui") or p == CSS_LIB}
977
+
978
+ # Only look in Zoo but exclude test resources and zoo pkg json
979
+ base_dir = PLAT_ZOO_SUBPATH
980
+ excludes = [PLAT_ZOO_PKG_JSN_PATH, TEST_RSRCS]
981
+ test_pkg_jsn_files = get_pkg_jsn_files(base_dir, excludes)
982
+
983
+ compatible_to_version = (
984
+ args.compatibleToVersion
985
+ if args.compatibleToVersion is not None
986
+ else previous_version(version.rsplit(".", 1)[0])
987
+ )
988
+ ui_compatible_to_version = (
989
+ args.uiCompatibleToVersion
990
+ if args.uiCompatibleToVersion is not None
991
+ else compatible_to_version
992
+ )
993
+
994
+ # Ensure Version in platform pkgs (align version, compatibleVersionTo and dependencies)
995
+ align_version(
996
+ pkg_jsn_files,
997
+ pkgs,
998
+ version,
999
+ compatible_to_version,
1000
+ ui_pkgs,
1001
+ ui_compatible_to_version,
1002
+ True,
1003
+ )
1004
+ # Ensure Version in zoo test pkgs (align dependencies only)
1005
+ align_version(test_pkg_jsn_files, pkgs, version, None, set(), None, False)
1006
+ except ValueError as e:
1007
+ logger.info(f"Error: {e}")
1008
+
1009
+
1010
+ if __name__ == "__main__":
1011
+ main()
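
As a worked example of the alignment performed above (a sketch; the package name and version numbers are illustrative), with version.txt containing "8.3.1" and --compatibleToVersion passed as "8.2", a platform package declaration would be rewritten roughly as follows:

    before = {
        "name": "examplePkg",
        "version": "8.2.0",
        "compatibleToVersion": "8.1",
        "dependencies": {"zoo": "8.2", "thirdPartyLib": "1.4"},
    }

    # align_version(files, platform_pkgs=["zoo", "examplePkg"], version="8.3.1",
    #               compatible_to_version="8.2", is_platform=True) produces:
    after = {
        "name": "examplePkg",
        "version": "8.3.1",            # aligned to <major>.<minor>.<patch> from version.txt
        "compatibleToVersion": "8.2",  # the provided compatibleToVersion
        "dependencies": {"zoo": "8.3", "thirdPartyLib": "1.4"},  # platform deps pinned to <major>.<minor>
    }
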