zenml-nightly 0.58.2.dev20240615__py3-none-any.whl → 0.58.2.dev20240622__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (313)
  1. zenml/VERSION +1 -1
  2. zenml/_hub/client.py +8 -5
  3. zenml/actions/base_action.py +8 -10
  4. zenml/artifact_stores/base_artifact_store.py +20 -15
  5. zenml/artifact_stores/local_artifact_store.py +3 -2
  6. zenml/artifacts/artifact_config.py +34 -19
  7. zenml/artifacts/external_artifact.py +18 -8
  8. zenml/artifacts/external_artifact_config.py +14 -6
  9. zenml/artifacts/unmaterialized_artifact.py +2 -11
  10. zenml/cli/__init__.py +6 -0
  11. zenml/cli/artifact.py +20 -2
  12. zenml/cli/base.py +1 -1
  13. zenml/cli/served_model.py +0 -1
  14. zenml/cli/server.py +3 -3
  15. zenml/cli/utils.py +36 -40
  16. zenml/cli/web_login.py +2 -2
  17. zenml/client.py +198 -24
  18. zenml/client_lazy_loader.py +20 -14
  19. zenml/config/base_settings.py +5 -6
  20. zenml/config/build_configuration.py +1 -1
  21. zenml/config/compiler.py +3 -3
  22. zenml/config/docker_settings.py +27 -28
  23. zenml/config/global_config.py +33 -37
  24. zenml/config/pipeline_configurations.py +8 -11
  25. zenml/config/pipeline_run_configuration.py +6 -2
  26. zenml/config/pipeline_spec.py +3 -4
  27. zenml/config/resource_settings.py +8 -9
  28. zenml/config/schedule.py +16 -20
  29. zenml/config/secret_reference_mixin.py +6 -3
  30. zenml/config/secrets_store_config.py +16 -23
  31. zenml/config/server_config.py +50 -46
  32. zenml/config/settings_resolver.py +1 -1
  33. zenml/config/source.py +45 -35
  34. zenml/config/step_configurations.py +53 -31
  35. zenml/config/step_run_info.py +3 -0
  36. zenml/config/store_config.py +20 -19
  37. zenml/config/strict_base_model.py +2 -6
  38. zenml/constants.py +26 -2
  39. zenml/container_registries/base_container_registry.py +3 -2
  40. zenml/container_registries/default_container_registry.py +3 -3
  41. zenml/event_hub/base_event_hub.py +1 -1
  42. zenml/event_sources/base_event_source.py +11 -16
  43. zenml/exceptions.py +4 -0
  44. zenml/integrations/airflow/__init__.py +2 -6
  45. zenml/integrations/airflow/flavors/airflow_orchestrator_flavor.py +6 -7
  46. zenml/integrations/airflow/orchestrators/airflow_orchestrator.py +13 -249
  47. zenml/integrations/airflow/orchestrators/dag_generator.py +5 -3
  48. zenml/integrations/argilla/flavors/argilla_annotator_flavor.py +5 -4
  49. zenml/integrations/aws/__init__.py +1 -1
  50. zenml/integrations/aws/flavors/aws_container_registry_flavor.py +3 -2
  51. zenml/integrations/aws/flavors/sagemaker_orchestrator_flavor.py +11 -5
  52. zenml/integrations/aws/flavors/sagemaker_step_operator_flavor.py +6 -2
  53. zenml/integrations/aws/service_connectors/aws_service_connector.py +5 -4
  54. zenml/integrations/aws/step_operators/sagemaker_step_operator.py +1 -1
  55. zenml/integrations/azure/flavors/azureml_step_operator_flavor.py +4 -4
  56. zenml/integrations/azure/service_connectors/azure_service_connector.py +4 -3
  57. zenml/integrations/azure/step_operators/azureml_step_operator.py +2 -1
  58. zenml/integrations/bentoml/steps/bentoml_deployer.py +1 -1
  59. zenml/integrations/bitbucket/plugins/event_sources/bitbucket_webhook_event_source.py +8 -12
  60. zenml/integrations/comet/flavors/comet_experiment_tracker_flavor.py +1 -1
  61. zenml/integrations/constants.py +0 -1
  62. zenml/integrations/deepchecks/__init__.py +1 -0
  63. zenml/integrations/evidently/__init__.py +5 -3
  64. zenml/integrations/evidently/column_mapping.py +11 -3
  65. zenml/integrations/evidently/data_validators/evidently_data_validator.py +21 -3
  66. zenml/integrations/evidently/metrics.py +5 -6
  67. zenml/integrations/evidently/tests.py +5 -6
  68. zenml/integrations/facets/models.py +2 -6
  69. zenml/integrations/feast/__init__.py +3 -1
  70. zenml/integrations/feast/feature_stores/feast_feature_store.py +0 -23
  71. zenml/integrations/gcp/__init__.py +1 -1
  72. zenml/integrations/gcp/flavors/vertex_orchestrator_flavor.py +1 -1
  73. zenml/integrations/gcp/flavors/vertex_step_operator_flavor.py +1 -1
  74. zenml/integrations/gcp/orchestrators/vertex_orchestrator.py +234 -103
  75. zenml/integrations/gcp/service_connectors/gcp_service_connector.py +57 -42
  76. zenml/integrations/gcp/step_operators/vertex_step_operator.py +1 -0
  77. zenml/integrations/github/code_repositories/github_code_repository.py +1 -1
  78. zenml/integrations/github/plugins/event_sources/github_webhook_event_source.py +9 -13
  79. zenml/integrations/great_expectations/__init__.py +1 -1
  80. zenml/integrations/great_expectations/data_validators/ge_data_validator.py +44 -44
  81. zenml/integrations/great_expectations/flavors/great_expectations_data_validator_flavor.py +35 -2
  82. zenml/integrations/great_expectations/ge_store_backend.py +24 -11
  83. zenml/integrations/great_expectations/materializers/ge_materializer.py +3 -3
  84. zenml/integrations/great_expectations/utils.py +5 -5
  85. zenml/integrations/huggingface/__init__.py +3 -0
  86. zenml/integrations/huggingface/flavors/huggingface_model_deployer_flavor.py +1 -1
  87. zenml/integrations/huggingface/steps/__init__.py +3 -0
  88. zenml/integrations/huggingface/steps/accelerate_runner.py +149 -0
  89. zenml/integrations/huggingface/steps/huggingface_deployer.py +2 -2
  90. zenml/integrations/hyperai/flavors/hyperai_orchestrator_flavor.py +1 -1
  91. zenml/integrations/hyperai/service_connectors/hyperai_service_connector.py +4 -3
  92. zenml/integrations/kubeflow/__init__.py +1 -1
  93. zenml/integrations/kubeflow/flavors/kubeflow_orchestrator_flavor.py +48 -81
  94. zenml/integrations/kubeflow/orchestrators/kubeflow_orchestrator.py +295 -245
  95. zenml/integrations/kubernetes/flavors/kubernetes_orchestrator_flavor.py +1 -1
  96. zenml/integrations/kubernetes/orchestrators/kubernetes_orchestrator_entrypoint.py +11 -2
  97. zenml/integrations/kubernetes/pod_settings.py +17 -31
  98. zenml/integrations/kubernetes/service_connectors/kubernetes_service_connector.py +8 -7
  99. zenml/integrations/label_studio/__init__.py +1 -3
  100. zenml/integrations/label_studio/annotators/label_studio_annotator.py +3 -4
  101. zenml/integrations/label_studio/flavors/label_studio_annotator_flavor.py +2 -2
  102. zenml/integrations/langchain/__init__.py +5 -1
  103. zenml/integrations/langchain/materializers/document_materializer.py +44 -8
  104. zenml/integrations/mlflow/__init__.py +9 -3
  105. zenml/integrations/mlflow/experiment_trackers/mlflow_experiment_tracker.py +1 -1
  106. zenml/integrations/mlflow/flavors/mlflow_experiment_tracker_flavor.py +29 -37
  107. zenml/integrations/mlflow/model_registries/mlflow_model_registry.py +4 -4
  108. zenml/integrations/mlflow/steps/mlflow_deployer.py +1 -1
  109. zenml/integrations/neptune/flavors/neptune_experiment_tracker_flavor.py +1 -1
  110. zenml/integrations/neural_prophet/__init__.py +5 -1
  111. zenml/integrations/pigeon/flavors/pigeon_annotator_flavor.py +1 -1
  112. zenml/integrations/s3/flavors/s3_artifact_store_flavor.py +9 -8
  113. zenml/integrations/seldon/seldon_client.py +52 -67
  114. zenml/integrations/seldon/services/seldon_deployment.py +3 -3
  115. zenml/integrations/seldon/steps/seldon_deployer.py +4 -4
  116. zenml/integrations/skypilot/flavors/skypilot_orchestrator_base_vm_config.py +15 -5
  117. zenml/integrations/skypilot_aws/__init__.py +1 -1
  118. zenml/integrations/skypilot_aws/flavors/skypilot_orchestrator_aws_vm_flavor.py +1 -1
  119. zenml/integrations/skypilot_azure/__init__.py +1 -1
  120. zenml/integrations/skypilot_azure/flavors/skypilot_orchestrator_azure_vm_flavor.py +1 -1
  121. zenml/integrations/skypilot_gcp/__init__.py +2 -1
  122. zenml/integrations/skypilot_gcp/flavors/skypilot_orchestrator_gcp_vm_flavor.py +1 -1
  123. zenml/integrations/skypilot_lambda/flavors/skypilot_orchestrator_lambda_vm_flavor.py +2 -2
  124. zenml/integrations/spark/flavors/spark_step_operator_flavor.py +1 -1
  125. zenml/integrations/spark/step_operators/spark_step_operator.py +2 -0
  126. zenml/integrations/tekton/__init__.py +1 -1
  127. zenml/integrations/tekton/flavors/tekton_orchestrator_flavor.py +66 -23
  128. zenml/integrations/tekton/orchestrators/tekton_orchestrator.py +547 -233
  129. zenml/integrations/tensorboard/__init__.py +1 -12
  130. zenml/integrations/tensorboard/services/tensorboard_service.py +3 -5
  131. zenml/integrations/tensorboard/visualizers/tensorboard_visualizer.py +6 -6
  132. zenml/integrations/tensorflow/__init__.py +2 -10
  133. zenml/integrations/tensorflow/materializers/keras_materializer.py +17 -9
  134. zenml/integrations/wandb/flavors/wandb_experiment_tracker_flavor.py +9 -14
  135. zenml/integrations/whylogs/flavors/whylogs_data_validator_flavor.py +1 -1
  136. zenml/lineage_graph/lineage_graph.py +1 -1
  137. zenml/logging/step_logging.py +15 -7
  138. zenml/materializers/built_in_materializer.py +3 -3
  139. zenml/materializers/pydantic_materializer.py +2 -2
  140. zenml/metadata/lazy_load.py +4 -4
  141. zenml/metadata/metadata_types.py +64 -4
  142. zenml/model/model.py +79 -54
  143. zenml/model_deployers/base_model_deployer.py +14 -12
  144. zenml/model_registries/base_model_registry.py +17 -15
  145. zenml/models/__init__.py +79 -206
  146. zenml/models/v2/base/base.py +54 -41
  147. zenml/models/v2/base/base_plugin_flavor.py +2 -6
  148. zenml/models/v2/base/filter.py +91 -76
  149. zenml/models/v2/base/page.py +2 -12
  150. zenml/models/v2/base/scoped.py +4 -7
  151. zenml/models/v2/core/api_key.py +22 -8
  152. zenml/models/v2/core/artifact.py +2 -2
  153. zenml/models/v2/core/artifact_version.py +74 -40
  154. zenml/models/v2/core/code_repository.py +37 -10
  155. zenml/models/v2/core/component.py +65 -16
  156. zenml/models/v2/core/device.py +14 -4
  157. zenml/models/v2/core/event_source.py +1 -2
  158. zenml/models/v2/core/flavor.py +74 -8
  159. zenml/models/v2/core/logs.py +68 -8
  160. zenml/models/v2/core/model.py +8 -4
  161. zenml/models/v2/core/model_version.py +25 -6
  162. zenml/models/v2/core/model_version_artifact.py +51 -21
  163. zenml/models/v2/core/model_version_pipeline_run.py +45 -13
  164. zenml/models/v2/core/pipeline.py +37 -72
  165. zenml/models/v2/core/pipeline_build.py +29 -17
  166. zenml/models/v2/core/pipeline_deployment.py +18 -6
  167. zenml/models/v2/core/pipeline_namespace.py +113 -0
  168. zenml/models/v2/core/pipeline_run.py +50 -22
  169. zenml/models/v2/core/run_metadata.py +59 -36
  170. zenml/models/v2/core/schedule.py +37 -24
  171. zenml/models/v2/core/secret.py +31 -12
  172. zenml/models/v2/core/service.py +64 -36
  173. zenml/models/v2/core/service_account.py +24 -11
  174. zenml/models/v2/core/service_connector.py +219 -44
  175. zenml/models/v2/core/stack.py +45 -17
  176. zenml/models/v2/core/step_run.py +28 -8
  177. zenml/models/v2/core/tag.py +8 -4
  178. zenml/models/v2/core/trigger.py +2 -2
  179. zenml/models/v2/core/trigger_execution.py +1 -0
  180. zenml/models/v2/core/user.py +18 -21
  181. zenml/models/v2/core/workspace.py +13 -3
  182. zenml/models/v2/misc/build_item.py +3 -3
  183. zenml/models/v2/misc/external_user.py +2 -6
  184. zenml/models/v2/misc/hub_plugin_models.py +9 -9
  185. zenml/models/v2/misc/loaded_visualization.py +2 -2
  186. zenml/models/v2/misc/service_connector_type.py +8 -17
  187. zenml/models/v2/misc/user_auth.py +7 -2
  188. zenml/new/pipelines/build_utils.py +3 -3
  189. zenml/new/pipelines/pipeline.py +17 -13
  190. zenml/new/pipelines/run_utils.py +103 -1
  191. zenml/orchestrators/base_orchestrator.py +10 -7
  192. zenml/orchestrators/local_docker/local_docker_orchestrator.py +1 -1
  193. zenml/orchestrators/step_launcher.py +28 -4
  194. zenml/orchestrators/step_runner.py +3 -6
  195. zenml/orchestrators/utils.py +1 -1
  196. zenml/plugins/base_plugin_flavor.py +6 -10
  197. zenml/plugins/plugin_flavor_registry.py +3 -7
  198. zenml/secret/base_secret.py +7 -8
  199. zenml/service_connectors/docker_service_connector.py +4 -3
  200. zenml/service_connectors/service_connector.py +5 -12
  201. zenml/service_connectors/service_connector_registry.py +2 -4
  202. zenml/services/container/container_service.py +1 -1
  203. zenml/services/container/container_service_endpoint.py +1 -1
  204. zenml/services/local/local_service.py +1 -1
  205. zenml/services/local/local_service_endpoint.py +1 -1
  206. zenml/services/service.py +16 -10
  207. zenml/services/service_type.py +4 -5
  208. zenml/services/terraform/terraform_service.py +1 -1
  209. zenml/stack/flavor.py +1 -5
  210. zenml/stack/flavor_registry.py +4 -4
  211. zenml/stack/stack.py +4 -1
  212. zenml/stack/stack_component.py +55 -31
  213. zenml/step_operators/step_operator_entrypoint_configuration.py +1 -0
  214. zenml/steps/base_step.py +34 -28
  215. zenml/steps/entrypoint_function_utils.py +3 -5
  216. zenml/steps/utils.py +12 -14
  217. zenml/utils/cuda_utils.py +50 -0
  218. zenml/utils/deprecation_utils.py +18 -20
  219. zenml/utils/dict_utils.py +1 -1
  220. zenml/utils/filesync_model.py +65 -28
  221. zenml/utils/function_utils.py +260 -0
  222. zenml/utils/json_utils.py +131 -0
  223. zenml/utils/mlstacks_utils.py +2 -2
  224. zenml/utils/pipeline_docker_image_builder.py +9 -0
  225. zenml/utils/pydantic_utils.py +270 -62
  226. zenml/utils/secret_utils.py +65 -12
  227. zenml/utils/source_utils.py +2 -2
  228. zenml/utils/typed_model.py +5 -3
  229. zenml/utils/typing_utils.py +243 -0
  230. zenml/utils/yaml_utils.py +1 -1
  231. zenml/zen_server/auth.py +2 -2
  232. zenml/zen_server/cloud_utils.py +6 -6
  233. zenml/zen_server/deploy/base_provider.py +1 -1
  234. zenml/zen_server/deploy/deployment.py +6 -8
  235. zenml/zen_server/deploy/docker/docker_zen_server.py +3 -4
  236. zenml/zen_server/deploy/local/local_provider.py +0 -1
  237. zenml/zen_server/deploy/local/local_zen_server.py +6 -6
  238. zenml/zen_server/deploy/terraform/terraform_zen_server.py +4 -6
  239. zenml/zen_server/exceptions.py +4 -1
  240. zenml/zen_server/feature_gate/zenml_cloud_feature_gate.py +1 -1
  241. zenml/zen_server/pipeline_deployment/utils.py +48 -68
  242. zenml/zen_server/rbac/models.py +2 -5
  243. zenml/zen_server/rbac/utils.py +11 -14
  244. zenml/zen_server/routers/auth_endpoints.py +2 -2
  245. zenml/zen_server/routers/pipeline_builds_endpoints.py +1 -1
  246. zenml/zen_server/routers/runs_endpoints.py +1 -1
  247. zenml/zen_server/routers/secrets_endpoints.py +3 -2
  248. zenml/zen_server/routers/server_endpoints.py +1 -1
  249. zenml/zen_server/routers/steps_endpoints.py +1 -1
  250. zenml/zen_server/routers/workspaces_endpoints.py +1 -1
  251. zenml/zen_stores/base_zen_store.py +46 -9
  252. zenml/zen_stores/migrations/utils.py +42 -46
  253. zenml/zen_stores/migrations/versions/0701da9951a0_added_service_table.py +1 -1
  254. zenml/zen_stores/migrations/versions/1041bc644e0d_remove_secrets_manager.py +5 -3
  255. zenml/zen_stores/migrations/versions/10a907dad202_delete_mlmd_tables.py +1 -1
  256. zenml/zen_stores/migrations/versions/26b776ad583e_redesign_artifacts.py +8 -10
  257. zenml/zen_stores/migrations/versions/37835ce041d2_optimizing_database.py +3 -3
  258. zenml/zen_stores/migrations/versions/46506f72f0ed_add_server_settings.py +10 -12
  259. zenml/zen_stores/migrations/versions/5994f9ad0489_introduce_role_permissions.py +3 -2
  260. zenml/zen_stores/migrations/versions/6917bce75069_add_pipeline_run_unique_constraint.py +4 -4
  261. zenml/zen_stores/migrations/versions/728c6369cfaa_add_name_column_to_input_artifact_pk.py +3 -2
  262. zenml/zen_stores/migrations/versions/743ec82b1b3c_update_size_of_build_images.py +2 -2
  263. zenml/zen_stores/migrations/versions/7500f434b71c_remove_shared_columns.py +3 -2
  264. zenml/zen_stores/migrations/versions/7834208cc3f6_artifact_project_scoping.py +8 -7
  265. zenml/zen_stores/migrations/versions/7b651bf6822e_track_secrets_in_db.py +6 -4
  266. zenml/zen_stores/migrations/versions/7e4a481d17f7_add_identity_table.py +2 -2
  267. zenml/zen_stores/migrations/versions/7f603e583dd7_fixed_migration.py +1 -1
  268. zenml/zen_stores/migrations/versions/a39c4184c8ce_remove_secrets_manager_flavors.py +2 -2
  269. zenml/zen_stores/migrations/versions/a91762e6be36_artifact_version_table.py +4 -4
  270. zenml/zen_stores/migrations/versions/alembic_start.py +1 -1
  271. zenml/zen_stores/migrations/versions/fbd7f18ced1e_increase_step_run_field_lengths.py +4 -4
  272. zenml/zen_stores/rest_zen_store.py +109 -49
  273. zenml/zen_stores/schemas/api_key_schemas.py +1 -1
  274. zenml/zen_stores/schemas/artifact_schemas.py +8 -8
  275. zenml/zen_stores/schemas/artifact_visualization_schemas.py +3 -3
  276. zenml/zen_stores/schemas/code_repository_schemas.py +1 -1
  277. zenml/zen_stores/schemas/component_schemas.py +8 -3
  278. zenml/zen_stores/schemas/device_schemas.py +8 -6
  279. zenml/zen_stores/schemas/event_source_schemas.py +3 -4
  280. zenml/zen_stores/schemas/flavor_schemas.py +5 -3
  281. zenml/zen_stores/schemas/model_schemas.py +26 -1
  282. zenml/zen_stores/schemas/pipeline_build_schemas.py +1 -1
  283. zenml/zen_stores/schemas/pipeline_deployment_schemas.py +4 -4
  284. zenml/zen_stores/schemas/pipeline_run_schemas.py +6 -6
  285. zenml/zen_stores/schemas/pipeline_schemas.py +5 -2
  286. zenml/zen_stores/schemas/run_metadata_schemas.py +2 -2
  287. zenml/zen_stores/schemas/secret_schemas.py +8 -5
  288. zenml/zen_stores/schemas/server_settings_schemas.py +3 -1
  289. zenml/zen_stores/schemas/service_connector_schemas.py +1 -1
  290. zenml/zen_stores/schemas/service_schemas.py +11 -2
  291. zenml/zen_stores/schemas/stack_schemas.py +1 -1
  292. zenml/zen_stores/schemas/step_run_schemas.py +11 -11
  293. zenml/zen_stores/schemas/tag_schemas.py +6 -2
  294. zenml/zen_stores/schemas/trigger_schemas.py +2 -2
  295. zenml/zen_stores/schemas/user_schemas.py +2 -2
  296. zenml/zen_stores/schemas/workspace_schemas.py +3 -1
  297. zenml/zen_stores/secrets_stores/aws_secrets_store.py +19 -20
  298. zenml/zen_stores/secrets_stores/azure_secrets_store.py +17 -20
  299. zenml/zen_stores/secrets_stores/base_secrets_store.py +79 -12
  300. zenml/zen_stores/secrets_stores/gcp_secrets_store.py +17 -20
  301. zenml/zen_stores/secrets_stores/hashicorp_secrets_store.py +4 -8
  302. zenml/zen_stores/secrets_stores/service_connector_secrets_store.py +10 -7
  303. zenml/zen_stores/secrets_stores/sql_secrets_store.py +5 -6
  304. zenml/zen_stores/sql_zen_store.py +196 -120
  305. zenml/zen_stores/zen_store_interface.py +33 -0
  306. {zenml_nightly-0.58.2.dev20240615.dist-info → zenml_nightly-0.58.2.dev20240622.dist-info}/METADATA +9 -7
  307. {zenml_nightly-0.58.2.dev20240615.dist-info → zenml_nightly-0.58.2.dev20240622.dist-info}/RECORD +310 -307
  308. zenml/integrations/kubeflow/utils.py +0 -95
  309. zenml/models/v2/base/internal.py +0 -37
  310. zenml/models/v2/base/update.py +0 -44
  311. {zenml_nightly-0.58.2.dev20240615.dist-info → zenml_nightly-0.58.2.dev20240622.dist-info}/LICENSE +0 -0
  312. {zenml_nightly-0.58.2.dev20240615.dist-info → zenml_nightly-0.58.2.dev20240622.dist-info}/WHEEL +0 -0
  313. {zenml_nightly-0.58.2.dev20240615.dist-info → zenml_nightly-0.58.2.dev20240622.dist-info}/entry_points.txt +0 -0
zenml/integrations/tekton/orchestrators/tekton_orchestrator.py
@@ -14,25 +14,32 @@
 """Implementation of the Tekton orchestrator."""
 
 import os
-import sys
-from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Type, cast
+from types import FunctionType
+from typing import (
+    TYPE_CHECKING,
+    Any,
+    Dict,
+    List,
+    Optional,
+    Tuple,
+    Type,
+    cast,
+)
 
-import yaml
+import kfp
+import requests
+import urllib3
 from kfp import dsl
-from kfp_tekton.compiler import TektonCompiler
-from kfp_tekton.compiler.pipeline_utils import TektonPipelineConf
+from kfp.client import Client as KFPClient
+from kfp.compiler import Compiler as KFPCompiler
+from kfp_server_api.exceptions import ApiException
 from kubernetes import client as k8s_client
 from kubernetes import config as k8s_config
 
-from zenml.client import Client
-from zenml.config.global_config import GlobalConfiguration
-from zenml.constants import (
-    ENV_ZENML_LOCAL_STORES_PATH,
-)
+from zenml.config.resource_settings import ResourceSettings
 from zenml.entrypoints import StepEntrypointConfiguration
 from zenml.enums import StackComponentType
 from zenml.environment import Environment
-from zenml.integrations.kubeflow.utils import apply_pod_settings
 from zenml.integrations.tekton.flavors.tekton_orchestrator_flavor import (
     TektonOrchestratorConfig,
     TektonOrchestratorSettings,
@@ -42,18 +49,78 @@ from zenml.logger import get_logger
 from zenml.orchestrators import ContainerizedOrchestrator
 from zenml.orchestrators.utils import get_orchestrator_run_name
 from zenml.stack import StackValidator
-from zenml.utils import io_utils
+from zenml.utils import io_utils, yaml_utils
 
 if TYPE_CHECKING:
     from zenml.config.base_settings import BaseSettings
     from zenml.models import PipelineDeploymentResponse
     from zenml.stack import Stack
-    from zenml.steps import ResourceSettings
 
 
 logger = get_logger(__name__)
 
 ENV_ZENML_TEKTON_RUN_ID = "ZENML_TEKTON_RUN_ID"
+KFP_ACCELERATOR_NODE_SELECTOR_CONSTRAINT_LABEL = "accelerator"
+
+
+class KubeClientKFPClient(kfp.Client):  # type: ignore[misc]
+    """KFP client initialized from a Kubernetes client.
+
+    This is a workaround for the fact that the native KFP client does not
+    support initialization from an existing Kubernetes client.
+    """
+
+    def __init__(
+        self, client: k8s_client.ApiClient, *args: Any, **kwargs: Any
+    ) -> None:
+        """Initializes the KFP client from a Kubernetes client.
+
+        Args:
+            client: pre-configured Kubernetes client.
+            args: standard KFP client positional arguments.
+            kwargs: standard KFP client keyword arguments.
+        """
+        self._k8s_client = client
+        super().__init__(*args, **kwargs)
+
+    def _load_config(self, *args: Any, **kwargs: Any) -> Any:
+        """Loads the KFP configuration.
+
+        Initializes the KFP configuration from the Kubernetes client.
+
+        Args:
+            args: standard KFP client positional arguments.
+            kwargs: standard KFP client keyword arguments.
+
+        Returns:
+            The KFP configuration.
+        """
+        from kfp_server_api.configuration import Configuration
+
+        kube_config = self._k8s_client.configuration
+
+        host = (
+            kube_config.host
+            + "/"
+            + self._KUBE_PROXY_PATH.format(kwargs.get("namespace", "kubeflow"))
+        )
+
+        config = Configuration(
+            host=host,
+            api_key=kube_config.api_key,
+            api_key_prefix=kube_config.api_key_prefix,
+            username=kube_config.username,
+            password=kube_config.password,
+            discard_unknown_keys=kube_config.discard_unknown_keys,
+        )
+
+        # Extra attributes not present in the Configuration constructor
+        keys = ["ssl_ca_cert", "cert_file", "key_file", "verify_ssl"]
+        for key in keys:
+            if key in kube_config.__dict__:
+                setattr(config, key, getattr(kube_config, key))
+
+        return config
 
 
 class TektonOrchestrator(ContainerizedOrchestrator):
@@ -61,21 +128,31 @@ class TektonOrchestrator(ContainerizedOrchestrator):
 
     _k8s_client: Optional[k8s_client.ApiClient] = None
 
-    @property
-    def kube_client(self) -> k8s_client.ApiClient:
-        """Getter for the Kubernetes API client.
+    def _get_kfp_client(
+        self,
+        settings: TektonOrchestratorSettings,
+    ) -> kfp.Client:
+        """Creates a KFP client instance.
+
+        Args:
+            settings: Settings which can be used to
+                configure the client instance.
 
         Returns:
-            The Kubernetes API client.
+            A KFP client instance.
 
         Raises:
-            RuntimeError: if the Kubernetes connector behaves unexpectedly.
+            RuntimeError: If the linked Kubernetes connector behaves
+                unexpectedly.
         """
-        # Refresh the client also if the connector has expired
-        if self._k8s_client and not self.connector_has_expired():
-            return self._k8s_client
-
         connector = self.get_connector()
+        client_args = settings.client_args.copy()
+
+        # The kube_context, host and namespace are stack component
+        # configurations that refer to the Tekton deployment. We don't want
+        # these overwritten on a run by run basis by user settings
+        client_args["namespace"] = self.config.kubernetes_namespace
+
         if connector:
             client = connector.connect()
             if not isinstance(client, k8s_client.ApiClient):
@@ -83,12 +160,97 @@ class TektonOrchestrator(ContainerizedOrchestrator):
                     f"Expected a k8s_client.ApiClient while trying to use the "
                     f"linked connector, but got {type(client)}."
                 )
-            self._k8s_client = client
-        else:
-            k8s_config.load_kube_config(context=self.config.kubernetes_context)
-            self._k8s_client = k8s_client.ApiClient()
+            return KubeClientKFPClient(
+                client=client,
+                **client_args,
+            )
+
+        elif self.config.kubernetes_context:
+            client_args["kube_context"] = self.config.kubernetes_context
+
+        elif self.config.tekton_hostname:
+            client_args["host"] = self.config.tekton_hostname
+
+        # Handle username and password, ignore the case if one is passed and
+        # not the other. Also do not attempt to get cookie if cookie is
+        # already passed in client_args
+        if settings.client_username and settings.client_password:
+            # If cookie is already set, then ignore
+            if "cookie" in client_args:
+                logger.warning(
+                    "Cookie already set in `client_args`, ignoring "
+                    "`client_username` and `client_password`..."
+                )
+            else:
+                session_cookie = self._get_session_cookie(
+                    username=settings.client_username,
+                    password=settings.client_password,
+                )
+
+                client_args["cookies"] = session_cookie
+        return KFPClient(**client_args)
+
+    def _get_session_cookie(self, username: str, password: str) -> str:
+        """Gets session cookie from username and password.
+
+        Args:
+            username: Username for tekoton host.
+            password: Password for tekoton host.
+
+        Raises:
+            RuntimeError: If the cookie fetching failed.
+
+        Returns:
+            Cookie with the prefix `authsession=`.
+        """
+        if self.config.tekton_hostname is None:
+            raise RuntimeError(
+                "You must configure the tekoton orchestrator "
+                "with the `tekton_hostname` parameter which usually ends "
+                "with `/pipeline` (e.g. `https://mykubeflow.com/pipeline`). "
+                "Please update the current tekoton orchestrator with: "
+                f"`zenml orchestrator update {self.name} "
+                "--tekton_hostname=<MY_KUBEFLOW_HOST>`"
+            )
+
+        # Get cookie
+        logger.info(
+            f"Attempting to fetch session cookie from {self.config.tekton_hostname} "
+            "with supplied username and password..."
+        )
+        session = requests.Session()
+        try:
+            response = session.get(self.config.tekton_hostname)
+            response.raise_for_status()
+        except (
+            requests.exceptions.HTTPError,
+            requests.exceptions.ConnectionError,
+            requests.exceptions.Timeout,
+            requests.exceptions.RequestException,
+        ) as e:
+            raise RuntimeError(
+                f"Error while trying to fetch tekoton cookie: {e}"
+            )
+
+        headers = {
+            "Content-Type": "application/x-www-form-urlencoded",
+        }
+        data = {"login": username, "password": password}
+        try:
+            response = session.post(response.url, headers=headers, data=data)
+            response.raise_for_status()
+        except requests.exceptions.HTTPError as errh:
+            raise RuntimeError(
+                f"Error while trying to fetch tekoton cookie: {errh}"
+            )
+        cookie_dict: Dict[str, str] = session.cookies.get_dict()  # type: ignore[no-untyped-call]
 
-        return self._k8s_client
+        if "authservice_session" not in cookie_dict:
+            raise RuntimeError("Invalid username and/or password!")
+
+        logger.info("Session cookie fetched successfully!")
+
+        return "authservice_session=" + str(cookie_dict["authservice_session"])
 
     @property
     def config(self) -> TektonOrchestratorConfig:
@@ -148,9 +310,9 @@ class TektonOrchestrator(ContainerizedOrchestrator):
                         f"{msg}you must either link this stack component to a "
                         "Kubernetes service connector (see the 'zenml "
                         "orchestrator connect' CLI command) or explicitly set "
-                        "the `kubernetes_context` attribute to the name of the "
-                        "Kubernetes config context pointing to the cluster "
-                        "where you would like to run pipelines."
+                        "the `kubernetes_context` attribute to the name of "
+                        "the Kubernetes config context pointing to the "
+                        "cluster where you would like to run pipelines."
                     )
 
                 contexts, active_context = self.get_kubernetes_contexts()
@@ -160,9 +322,9 @@ class TektonOrchestrator(ContainerizedOrchestrator):
                         f"{msg}could not find a Kubernetes context named "
                         f"'{kubernetes_context}' in the local "
                         "Kubernetes configuration. Please make sure that the "
-                        "Kubernetes cluster is running and that the kubeconfig "
-                        "file is configured correctly. To list all configured "
-                        "contexts, run:\n\n"
+                        "Kubernetes cluster is running and that the "
+                        "kubeconfig file is configured correctly. To list all "
+                        "configured contexts, run:\n\n"
                         " `kubectl config get-contexts`\n"
                     )
                 if kubernetes_context != active_context:
@@ -192,16 +354,13 @@ class TektonOrchestrator(ContainerizedOrchestrator):
                         f"--skip_local_validations=True'\n"
                     )
 
-            if (
-                not self.config.skip_local_validations
-                and not self.config.is_local
-            ):
+            if not self.config.is_local:
                 # if the orchestrator is not running in a local k3d cluster,
                 # we cannot have any other local components in our stack,
                 # because we cannot mount the local path into the container.
-                # This may result in problems when running the pipeline, because
-                # the local components will not be available inside the
-                # Tekton containers.
+                # This may result in problems when running the pipeline, "
+                # because the local components will not be available inside
+                # the Tekton containers.
 
                 # go through all stack components and identify those that
                 # advertise a local path where they persist information that
@@ -252,95 +411,49 @@ class TektonOrchestrator(ContainerizedOrchestrator):
             custom_validation_function=_validate,
         )
 
-    def _configure_container_op(
+    def _create_dynamic_component(
         self,
-        container_op: dsl.ContainerOp,
-    ) -> None:
-        """Makes changes in place to the configuration of the container op.
-
-        Configures persistent mounted volumes for each stack component that
-        writes to a local path.
+        image: str,
+        command: List[str],
+        arguments: List[str],
+        component_name: str,
+    ) -> dsl.PipelineTask:
+        """Creates a dynamic container component for a Tekton pipeline.
 
         Args:
-            container_op: The Tekton container operation to configure.
-        """
-        volumes: Dict[str, k8s_client.V1Volume] = {}
-
-        stack = Client().active_stack
+            image: The image to use for the component.
+            command: The command to use for the component.
+            arguments: The arguments to use for the component.
+            component_name: The name of the component.
 
-        if self.config.is_local:
-            stack.check_local_paths()
-
-            local_stores_path = GlobalConfiguration().local_stores_path
-
-            host_path = k8s_client.V1HostPathVolumeSource(
-                path=local_stores_path, type="Directory"
-            )
-
-            volumes[local_stores_path] = k8s_client.V1Volume(
-                name="local-stores",
-                host_path=host_path,
-            )
-            logger.debug(
-                "Adding host path volume for the local ZenML stores (path: %s) "
-                "in Tekton pipelines container.",
-                local_stores_path,
-            )
+        Returns:
+            The dynamic container component.
+        """
 
-            if sys.platform == "win32":
-                # File permissions are not checked on Windows. This if clause
-                # prevents mypy from complaining about unused 'type: ignore'
-                # statements
-                pass
-            else:
-                # Run KFP containers in the context of the local UID/GID
-                # to ensure that the local stores can be shared
-                # with the local pipeline runs.
-                container_op.container.security_context = (
-                    k8s_client.V1SecurityContext(
-                        run_as_user=os.getuid(),
-                        run_as_group=os.getgid(),
-                    )
-                )
-                logger.debug(
-                    "Setting security context UID and GID to local user/group "
-                    "in Tekton pipelines container."
-                )
+        def dynamic_container_component() -> dsl.ContainerSpec:
+            """Dynamic container component.
 
-            container_op.container.add_env_variable(
-                k8s_client.V1EnvVar(
-                    name=ENV_ZENML_LOCAL_STORES_PATH,
-                    value=local_stores_path,
-                )
+            Returns:
+                The dynamic container component.
+            """
+            _component = dsl.ContainerSpec(
+                image=image,
+                command=command,
+                args=arguments,
             )
 
-            container_op.add_pvolumes(volumes)
+            _component.__name__ = component_name
+            return _component
 
-    @staticmethod
-    def _configure_container_resources(
-        container_op: dsl.ContainerOp,
-        resource_settings: "ResourceSettings",
-    ) -> None:
-        """Adds resource requirements to the container.
-
-        Args:
-            container_op: The container operation to configure.
-            resource_settings: The resource settings to use for this
-                container.
-        """
-        if resource_settings.cpu_count is not None:
-            container_op = container_op.set_cpu_limit(
-                str(resource_settings.cpu_count)
-            )
-
-        if resource_settings.gpu_count is not None:
-            container_op = container_op.set_gpu_limit(
-                resource_settings.gpu_count
-            )
+        dynamic_func = FunctionType(
+            dynamic_container_component.__code__,
+            dynamic_container_component.__globals__,
+            name=component_name,
+            argdefs=dynamic_container_component.__defaults__,
+            closure=dynamic_container_component.__closure__,
+        )
 
-        if resource_settings.memory is not None:
-            memory_limit = resource_settings.memory[:-1]
-            container_op = container_op.set_memory_limit(memory_limit)
+        return dsl.container_component(dynamic_func)
 
     def prepare_or_run_pipeline(
         self,
@@ -378,83 +491,146 @@ class TektonOrchestrator(ContainerizedOrchestrator):
 
         orchestrator_run_name = get_orchestrator_run_name(
             pipeline_name=deployment.pipeline_configuration.name
-        )
-
-        def _construct_kfp_pipeline() -> None:
-            """Create a container_op for each step.
+        ).replace("_", "-")
 
-            This should contain the name of the docker image and configures the
-            entrypoint of the docker image to run the step.
+        def _create_dynamic_pipeline() -> Any:
+            """Create a dynamic pipeline including each step.
 
-            Additionally, this gives each container_op information about its
-            direct downstream steps.
+            Returns:
+                pipeline_func
             """
-            # Dictionary of container_ops index by the associated step name
-            step_name_to_container_op: Dict[str, dsl.ContainerOp] = {}
+            step_name_to_dynamic_component: Dict[str, Any] = {}
 
             for step_name, step in deployment.step_configurations.items():
                 image = self.get_image(
-                    deployment=deployment, step_name=step_name
+                    deployment=deployment,
+                    step_name=step_name,
                 )
-
                 command = StepEntrypointConfiguration.get_entrypoint_command()
                 arguments = (
                     StepEntrypointConfiguration.get_entrypoint_arguments(
-                        step_name=step_name, deployment_id=deployment.id
+                        step_name=step_name,
+                        deployment_id=deployment.id,
                     )
                 )
-
-                container_op = dsl.ContainerOp(
-                    name=step_name,
-                    image=image,
-                    command=command,
-                    arguments=arguments,
+                dynamic_component = self._create_dynamic_component(
+                    image, command, arguments, step_name
                 )
-
-                settings = cast(
+                step_settings = cast(
                     TektonOrchestratorSettings, self.get_settings(step)
                 )
-                self._configure_container_op(
-                    container_op=container_op,
-                )
-
-                if settings.pod_settings:
-                    apply_pod_settings(
-                        container_op=container_op,
-                        settings=settings.pod_settings,
-                    )
+                node_selector_constraint: Optional[Tuple[str, str]] = None
+                pod_settings = step_settings.pod_settings
+                if pod_settings:
+                    if pod_settings.host_ipc:
+                        logger.warning(
+                            "Host IPC is set to `True` but not supported in "
+                            "this orchestrator. Ignoring..."
+                        )
+                    if pod_settings.affinity:
+                        logger.warning(
+                            "Affinity is set but not supported in Tekton with "
+                            "Tekton Pipelines 2.x. Ignoring..."
+                        )
+                    if pod_settings.tolerations:
+                        logger.warning(
+                            "Tolerations are set but not supported in "
+                            "Tekton with Tekton Pipelines 2.x. Ignoring..."
+                        )
+                    if pod_settings.volumes:
+                        logger.warning(
+                            "Volumes are set but not supported in Tekton with "
+                            "Tekton Pipelines 2.x. Ignoring..."
+                        )
+                    if pod_settings.volume_mounts:
+                        logger.warning(
+                            "Volume mounts are set but not supported in "
+                            "Tekton with Tekton Pipelines 2.x. Ignoring..."
+                        )
+                    # apply pod settings
+                    if (
+                        KFP_ACCELERATOR_NODE_SELECTOR_CONSTRAINT_LABEL
+                        in pod_settings.node_selectors.keys()
+                    ):
+                        node_selector_constraint = (
+                            KFP_ACCELERATOR_NODE_SELECTOR_CONSTRAINT_LABEL,
+                            pod_settings.node_selectors[
+                                KFP_ACCELERATOR_NODE_SELECTOR_CONSTRAINT_LABEL
+                            ],
+                        )
 
-                container_op.container.add_env_variable(
-                    k8s_client.V1EnvVar(
-                        name=ENV_ZENML_TEKTON_RUN_ID,
-                        value="$(context.pipelineRun.name)",
-                    )
-                )
+                step_name_to_dynamic_component[step_name] = dynamic_component
 
-                for key, value in environment.items():
-                    container_op.container.add_env_variable(
-                        k8s_client.V1EnvVar(
-                            name=key,
-                            value=value,
+            @dsl.pipeline(  # type: ignore[misc]
+                display_name=orchestrator_run_name,
+            )
+            def dynamic_pipeline() -> None:
+                """Dynamic pipeline."""
+                # iterate through the components one by one
+                # (from step_name_to_dynamic_component)
+                for (
+                    component_name,
+                    component,
+                ) in step_name_to_dynamic_component.items():
+                    # for each component, check to see what other steps are
+                    # upstream of it
+                    step = deployment.step_configurations[component_name]
+                    upstream_step_components = [
+                        step_name_to_dynamic_component[upstream_step_name]
+                        for upstream_step_name in step.spec.upstream_steps
+                    ]
+                    task = (
+                        component()
+                        .set_display_name(
+                            name=component_name,
+                        )
+                        .set_caching_options(enable_caching=False)
+                        .set_env_variable(
+                            name=ENV_ZENML_TEKTON_RUN_ID,
+                            value=dsl.PIPELINE_JOB_NAME_PLACEHOLDER,
                         )
+                        .after(*upstream_step_components)
                     )
-
-                if self.requires_resources_in_orchestration_environment(step):
                     self._configure_container_resources(
-                        container_op=container_op,
-                        resource_settings=step.config.resource_settings,
+                        task,
+                        step.config.resource_settings,
+                        node_selector_constraint,
                     )
 
-                # Find the upstream container ops of the current step and
-                # configure the current container op to run after them
-                for upstream_step_name in step.spec.upstream_steps:
-                    upstream_container_op = step_name_to_container_op[
-                        upstream_step_name
-                    ]
-                    container_op.after(upstream_container_op)
+            return dynamic_pipeline
+
+        def _update_yaml_with_environment(
+            yaml_file_path: str, environment: Dict[str, str]
+        ) -> None:
+            """Updates the env section of the steps in the YAML file with the given environment variables.
 
-                # Update dictionary of container ops with the current one
-                step_name_to_container_op[step_name] = container_op
+            Args:
+                yaml_file_path: The path to the YAML file to update.
+                environment: A dictionary of environment variables to add.
+            """
+            pipeline_definition = yaml_utils.read_yaml(pipeline_file_path)
+
+            # Iterate through each component and add the environment variables
+            for executor in pipeline_definition["deploymentSpec"]["executors"]:
+                if (
+                    "container"
+                    in pipeline_definition["deploymentSpec"]["executors"][
+                        executor
+                    ]
+                ):
+                    container = pipeline_definition["deploymentSpec"][
+                        "executors"
+                    ][executor]["container"]
+                    if "env" not in container:
+                        container["env"] = []
+                    for key, value in environment.items():
+                        container["env"].append({"name": key, "value": value})
+
+            yaml_utils.write_yaml(pipeline_file_path, pipeline_definition)
+
+            print(
+                f"Updated YAML file with environment variables at {yaml_file_path}"
+            )
 
         # Get a filepath to use to save the finished yaml to
         fileio.makedirs(self.pipeline_directory)
@@ -462,73 +638,158 @@ class TektonOrchestrator(ContainerizedOrchestrator):
             self.pipeline_directory, f"{orchestrator_run_name}.yaml"
         )
 
-        # Set the run name, which Tekton reads from this attribute of the
-        # pipeline function
-        setattr(
-            _construct_kfp_pipeline,
-            "_component_human_name",
-            orchestrator_run_name,
-        )
-        pipeline_config = TektonPipelineConf()
-        pipeline_config.add_pipeline_label(
-            "pipelines.kubeflow.org/cache_enabled", "false"
-        )
-        TektonCompiler().compile(
-            _construct_kfp_pipeline,
-            pipeline_file_path,
-            tekton_pipeline_conf=pipeline_config,
+        KFPCompiler().compile(
+            pipeline_func=_create_dynamic_pipeline(),
+            package_path=pipeline_file_path,
+            pipeline_name=orchestrator_run_name,
         )
+
+        # Let's update the YAML file with the environment variables
+        _update_yaml_with_environment(pipeline_file_path, environment)
+
         logger.info(
             "Writing Tekton workflow definition to `%s`.", pipeline_file_path
         )
 
-        if deployment.schedule:
-            logger.warning(
-                "The Tekton Orchestrator currently does not support the "
-                "use of schedules. The `schedule` will be ignored "
-                "and the pipeline will be run immediately."
-            )
+        # using the kfp client uploads the pipeline to Tekton pipelines and
+        # runs it there
+        self._upload_and_run_pipeline(
+            deployment=deployment,
+            pipeline_file_path=pipeline_file_path,
+            run_name=orchestrator_run_name,
+        )
 
-        kubernetes_context = self.config.kubernetes_context
-        if kubernetes_context:
-            logger.info(
-                "Running Tekton pipeline in kubernetes context '%s' and "
-                "namespace '%s'.",
-                kubernetes_context,
-                self.config.kubernetes_namespace,
-            )
-        elif self.connector:
-            connector = self.get_connector()
-            assert connector is not None
-            logger.info(
-                "Running Tekton pipeline with Kubernetes credentials from "
-                "connector '%s'.",
-                connector.name or str(connector),
-            )
+    def _upload_and_run_pipeline(
+        self,
+        deployment: "PipelineDeploymentResponse",
+        pipeline_file_path: str,
+        run_name: str,
+    ) -> None:
+        """Tries to upload and run a KFP pipeline.
 
-        # Read the Tekton pipeline resource from the generated YAML file
-        with open(pipeline_file_path, "r") as f:
-            tekton_resource = yaml.safe_load(f)
+        Args:
+            deployment: The pipeline deployment.
+            pipeline_file_path: Path to the pipeline definition file.
+            run_name: The Tekton run name.
 
-        # Upload the Tekton pipeline to the Kubernetes cluster
-        custom_objects_api = k8s_client.CustomObjectsApi(self.kube_client)
+        Raises:
+            RuntimeError: If Tekton API returns an error.
+        """
+        pipeline_name = deployment.pipeline_configuration.name
+        settings = cast(
+            TektonOrchestratorSettings, self.get_settings(deployment)
+        )
+        user_namespace = settings.user_namespace
 
+        kubernetes_context = self.config.kubernetes_context
         try:
-            logger.debug("Creating Tekton resource ...")
-            response = custom_objects_api.create_namespaced_custom_object(
-                group=tekton_resource["apiVersion"].split("/")[0],
-                version=tekton_resource["apiVersion"].split("/")[1],
-                namespace=self.config.kubernetes_namespace,
-                plural=tekton_resource["kind"].lower() + "s",
-                body=tekton_resource,
-            )
-            logger.debug("Tekton API response: %s", response)
-        except k8s_client.rest.ApiException as e:
-            logger.error("Exception when creating Tekton resource: %s", str(e))
-            raise RuntimeError(
-                f"Failed to upload Tekton pipeline: {str(e)}. "
-                f"Please make sure your Kubernetes cluster is running and "
-                f"accessible.",
+            if kubernetes_context:
+                logger.info(
+                    "Running in kubernetes context '%s'.",
+                    kubernetes_context,
+                )
+            elif self.config.tekton_hostname:
+                logger.info(
+                    "Running on Tekton deployment '%s'.",
+                    self.config.tekton_hostname,
+                )
+            elif self.connector:
+                logger.info(
+                    "Running with Kubernetes credentials from connector '%s'.",
+                    str(self.connector),
+                )
+
+            # upload the pipeline to Tekton and start it
+
+            client = self._get_kfp_client(settings=settings)
+            if deployment.schedule:
+                try:
+                    experiment = client.get_experiment(
+                        pipeline_name, namespace=user_namespace
+                    )
+                    logger.info(
+                        "A recurring run has already been created with this "
+                        "pipeline. Creating new recurring run now.."
+                    )
+                except (ValueError, ApiException):
+                    experiment = client.create_experiment(
+                        pipeline_name, namespace=user_namespace
+                    )
+                    logger.info(
+                        "Creating a new recurring run for pipeline '%s'.. ",
+                        pipeline_name,
+                    )
+                logger.info(
+                    "You can see all recurring runs under the '%s' experiment.",
+                    pipeline_name,
+                )
+
+                interval_seconds = (
+                    deployment.schedule.interval_second.seconds
+                    if deployment.schedule.interval_second
+                    else None
+                )
+                result = client.create_recurring_run(
+                    experiment_id=experiment.experiment_id,
+                    job_name=run_name,
+                    pipeline_package_path=pipeline_file_path,
+                    enable_caching=False,
+                    cron_expression=deployment.schedule.cron_expression,
+                    start_time=deployment.schedule.utc_start_time,
+                    end_time=deployment.schedule.utc_end_time,
+                    interval_second=interval_seconds,
+                    no_catchup=not deployment.schedule.catchup,
+                )
+
+                logger.info(
+                    "Started recurring run with ID '%s'.",
+                    result.recurring_run_id,
+                )
+            else:
+                logger.info(
+                    "No schedule detected. Creating a one-off pipeline run.."
+                )
+                try:
+                    result = client.create_run_from_pipeline_package(
+                        pipeline_file_path,
+                        arguments={},
+                        run_name=run_name,
+                        enable_caching=False,
+                        namespace=user_namespace,
+                    )
+                except ApiException:
+                    raise RuntimeError(
+                        f"Failed to create {run_name} on Tekton! "
+                        "Please check stack component settings and "
+                        "configuration!"
+                    )
+
+                logger.info(
+                    "Started one-off pipeline run with ID '%s'.", result.run_id
+                )
+
+                if settings.synchronous:
+                    client.wait_for_run_completion(
+                        run_id=result.run_id, timeout=settings.timeout
+                    )
+        except urllib3.exceptions.HTTPError as error:
+            if kubernetes_context:
+                msg = (
+                    f"Please make sure your kubernetes config is present and "
+                    f"the '{kubernetes_context}' kubernetes context is "
+                    "configured correctly."
+                )
+            elif self.connector:
+                msg = (
+                    f"Please check that the '{self.connector}' connector "
+                    f"linked to this component is configured correctly with "
+                    "valid credentials."
+                )
+            else:
+                msg = ""
+
+            logger.warning(
+                f"Failed to upload Tekton pipeline: {error}. {msg}",
             )
 
     def get_orchestrator_run_id(self) -> str:
@@ -543,15 +804,15 @@ class TektonOrchestrator(ContainerizedOrchestrator):
         """
         try:
            return os.environ[ENV_ZENML_TEKTON_RUN_ID]
-        except KeyError:
+        except KeyError as e:
            raise RuntimeError(
                "Unable to read run id from environment variable "
                f"{ENV_ZENML_TEKTON_RUN_ID}."
-            )
+            ) from e
 
     @property
     def root_directory(self) -> str:
-        """Returns path to the root directory for all files concerning this orchestrator.
+        """Returns path to the root directory.
 
         Returns:
             Path to the root directory.
@@ -588,3 +849,56 @@ class TektonOrchestrator(ContainerizedOrchestrator):
             Path of the daemon log file.
         """
         return os.path.join(self.root_directory, "tekton_daemon.log")
+
+    def _configure_container_resources(
+        self,
+        dynamic_component: dsl.PipelineTask,
+        resource_settings: "ResourceSettings",
+        node_selector_constraint: Optional[Tuple[str, str]] = None,
+    ) -> dsl.PipelineTask:
+        """Adds resource requirements to the container.
+
+        Args:
+            dynamic_component: The dynamic component to add the resource
+                settings to.
+            resource_settings: The resource settings to use for this
+                container.
+            node_selector_constraint: Node selector constraint to apply to
+                the container.
+
+        Returns:
+            The dynamic component with the resource settings applied.
+        """
+        # Set optional CPU, RAM and GPU constraints for the pipeline
+        if resource_settings:
+            cpu_limit = resource_settings.cpu_count or None
+
+        if cpu_limit is not None:
+            dynamic_component = dynamic_component.set_cpu_limit(str(cpu_limit))
+
+        memory_limit = resource_settings.get_memory() or None
+        if memory_limit is not None:
+            dynamic_component = dynamic_component.set_memory_limit(
+                memory_limit
+            )
+
+        gpu_limit = (
+            resource_settings.gpu_count
+            if resource_settings.gpu_count is not None
+            else 0
+        )
+
+        if node_selector_constraint:
+            (constraint_label, value) = node_selector_constraint
+            if gpu_limit is not None and gpu_limit > 0:
+                dynamic_component = (
+                    dynamic_component.set_accelerator_type(value)
+                    .set_accelerator_limit(gpu_limit)
+                    .set_gpu_limit(gpu_limit)
+                )
+            elif constraint_label == "accelerator" and gpu_limit == 0:
+                logger.warning(
+                    "GPU limit is set to 0 but a GPU type is specified. Ignoring GPU settings."
+                )
+
+        return dynamic_component
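
For orientation, a minimal, hypothetical configuration sketch follows. The setting names (synchronous, timeout, user_namespace, pod_settings with node_selectors) appear in the new orchestrator code above; the "orchestrator.tekton" settings key and the import paths follow ZenML's usual conventions and should be verified against the released TektonOrchestratorSettings flavor before use.

# Hypothetical usage sketch: field names are inferred from the diff above and
# should be checked against the released TektonOrchestratorSettings.
from zenml import pipeline, step
from zenml.integrations.kubernetes.pod_settings import KubernetesPodSettings
from zenml.integrations.tekton.flavors.tekton_orchestrator_flavor import (
    TektonOrchestratorSettings,
)

tekton_settings = TektonOrchestratorSettings(
    synchronous=True,  # wait for the KFP run via wait_for_run_completion()
    timeout=3600,  # seconds passed to wait_for_run_completion()
    user_namespace="team-a",  # namespace used for experiments and runs
    pod_settings=KubernetesPodSettings(
        # the new implementation appears to translate only the "accelerator"
        # node selector; other pod settings are logged and ignored
        node_selectors={"accelerator": "nvidia-tesla-t4"},
    ),
)


@step
def train() -> None:
    ...


# Settings key assumed to follow ZenML's "orchestrator.<flavor>" convention.
@pipeline(settings={"orchestrator.tekton": tekton_settings})
def training_pipeline() -> None:
    train()

With an "accelerator" node selector set, the orchestrator maps it onto set_accelerator_type()/set_accelerator_limit() for each pipeline task, as shown in _configure_container_resources above.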