zenml-nightly 0.64.0.dev20240811__py3-none-any.whl → 0.66.0.dev20240910__py3-none-any.whl
This diff shows the content changes between two publicly released versions of the package, as they appear in their respective public registries. It is provided for informational purposes only.
- README.md +1 -1
- RELEASE_NOTES.md +126 -4
- zenml/VERSION +1 -1
- zenml/artifacts/utils.py +13 -6
- zenml/cli/__init__.py +1 -1
- zenml/cli/base.py +4 -4
- zenml/cli/integration.py +48 -9
- zenml/cli/pipeline.py +9 -2
- zenml/cli/stack.py +39 -27
- zenml/cli/utils.py +13 -0
- zenml/client.py +15 -17
- zenml/config/compiler.py +34 -0
- zenml/config/server_config.py +30 -0
- zenml/config/source.py +3 -7
- zenml/constants.py +5 -3
- zenml/entrypoints/base_entrypoint_configuration.py +41 -27
- zenml/entrypoints/step_entrypoint_configuration.py +5 -2
- zenml/enums.py +2 -0
- zenml/environment.py +31 -0
- zenml/feature_stores/base_feature_store.py +4 -6
- zenml/integrations/__init__.py +3 -0
- zenml/integrations/airflow/flavors/airflow_orchestrator_flavor.py +9 -0
- zenml/integrations/aws/__init__.py +2 -2
- zenml/integrations/aws/orchestrators/sagemaker_orchestrator.py +2 -1
- zenml/integrations/azure/__init__.py +2 -2
- zenml/integrations/azure/azureml_utils.py +201 -0
- zenml/integrations/azure/flavors/azureml.py +139 -0
- zenml/integrations/azure/flavors/azureml_orchestrator_flavor.py +20 -118
- zenml/integrations/azure/flavors/azureml_step_operator_flavor.py +67 -14
- zenml/integrations/azure/orchestrators/azureml_orchestrator.py +58 -172
- zenml/integrations/azure/orchestrators/azureml_orchestrator_entrypoint_config.py +1 -0
- zenml/integrations/azure/service_connectors/azure_service_connector.py +4 -0
- zenml/integrations/azure/step_operators/azureml_step_operator.py +78 -177
- zenml/integrations/constants.py +3 -0
- zenml/integrations/databricks/__init__.py +22 -4
- zenml/integrations/databricks/flavors/databricks_orchestrator_flavor.py +9 -0
- zenml/integrations/deepchecks/__init__.py +29 -11
- zenml/integrations/deepchecks/materializers/deepchecks_dataset_materializer.py +3 -1
- zenml/integrations/deepchecks/validation_checks.py +0 -30
- zenml/integrations/evidently/__init__.py +17 -2
- zenml/integrations/facets/__init__.py +21 -5
- zenml/integrations/feast/__init__.py +19 -6
- zenml/integrations/gcp/__init__.py +2 -2
- zenml/integrations/gcp/flavors/vertex_orchestrator_flavor.py +9 -0
- zenml/integrations/gcp/orchestrators/vertex_orchestrator.py +10 -1
- zenml/integrations/great_expectations/__init__.py +21 -7
- zenml/integrations/huggingface/__init__.py +39 -15
- zenml/integrations/huggingface/materializers/__init__.py +3 -0
- zenml/integrations/huggingface/materializers/huggingface_datasets_materializer.py +3 -1
- zenml/integrations/huggingface/materializers/huggingface_pt_model_materializer.py +1 -1
- zenml/integrations/huggingface/materializers/huggingface_t5_materializer.py +107 -0
- zenml/integrations/huggingface/materializers/huggingface_tf_model_materializer.py +1 -1
- zenml/integrations/huggingface/materializers/huggingface_tokenizer_materializer.py +2 -2
- zenml/integrations/huggingface/steps/accelerate_runner.py +108 -85
- zenml/integrations/hyperai/flavors/hyperai_orchestrator_flavor.py +9 -0
- zenml/integrations/kubeflow/flavors/kubeflow_orchestrator_flavor.py +9 -0
- zenml/integrations/kubeflow/orchestrators/kubeflow_orchestrator.py +10 -1
- zenml/integrations/kubernetes/flavors/kubernetes_orchestrator_flavor.py +9 -0
- zenml/integrations/kubernetes/orchestrators/kubernetes_orchestrator.py +10 -1
- zenml/integrations/lightning/__init__.py +48 -0
- zenml/integrations/lightning/flavors/__init__.py +23 -0
- zenml/integrations/lightning/flavors/lightning_orchestrator_flavor.py +148 -0
- zenml/integrations/lightning/orchestrators/__init__.py +23 -0
- zenml/integrations/lightning/orchestrators/lightning_orchestrator.py +596 -0
- zenml/integrations/lightning/orchestrators/lightning_orchestrator_entrypoint.py +307 -0
- zenml/integrations/lightning/orchestrators/lightning_orchestrator_entrypoint_configuration.py +77 -0
- zenml/integrations/lightning/orchestrators/utils.py +67 -0
- zenml/integrations/mlflow/__init__.py +43 -5
- zenml/integrations/mlflow/services/mlflow_deployment.py +26 -0
- zenml/integrations/numpy/__init__.py +32 -0
- zenml/integrations/numpy/materializers/__init__.py +18 -0
- zenml/integrations/numpy/materializers/numpy_materializer.py +246 -0
- zenml/integrations/pandas/__init__.py +32 -0
- zenml/integrations/pandas/materializers/__init__.py +18 -0
- zenml/integrations/pandas/materializers/pandas_materializer.py +192 -0
- zenml/integrations/prodigy/annotators/prodigy_annotator.py +1 -1
- zenml/integrations/seldon/__init__.py +18 -3
- zenml/integrations/sklearn/__init__.py +1 -1
- zenml/integrations/skypilot_azure/__init__.py +1 -1
- zenml/integrations/tensorboard/__init__.py +1 -1
- zenml/integrations/tensorflow/__init__.py +2 -2
- zenml/integrations/tensorflow/materializers/tf_dataset_materializer.py +2 -2
- zenml/integrations/whylogs/__init__.py +18 -2
- zenml/logging/step_logging.py +9 -2
- zenml/materializers/__init__.py +0 -4
- zenml/materializers/base_materializer.py +4 -0
- zenml/materializers/numpy_materializer.py +23 -234
- zenml/materializers/pandas_materializer.py +22 -179
- zenml/model/model.py +91 -2
- zenml/model/utils.py +5 -5
- zenml/models/__init__.py +16 -3
- zenml/models/v2/core/model_version.py +1 -1
- zenml/models/v2/core/pipeline_run.py +31 -1
- zenml/models/v2/core/stack.py +51 -20
- zenml/models/v2/core/step_run.py +28 -0
- zenml/models/v2/misc/info_models.py +78 -0
- zenml/new/pipelines/pipeline.py +65 -25
- zenml/new/pipelines/run_utils.py +57 -136
- zenml/new/steps/step_context.py +17 -6
- zenml/orchestrators/base_orchestrator.py +9 -0
- zenml/orchestrators/step_launcher.py +37 -14
- zenml/orchestrators/step_runner.py +14 -13
- zenml/orchestrators/utils.py +107 -7
- zenml/service_connectors/service_connector_utils.py +2 -2
- zenml/stack/utils.py +11 -2
- zenml/stack_deployments/azure_stack_deployment.py +2 -1
- zenml/steps/base_step.py +62 -25
- zenml/steps/utils.py +115 -3
- zenml/utils/cloud_utils.py +8 -8
- zenml/utils/code_utils.py +130 -32
- zenml/utils/function_utils.py +7 -7
- zenml/utils/notebook_utils.py +14 -0
- zenml/utils/pipeline_docker_image_builder.py +1 -11
- zenml/utils/pydantic_utils.py +3 -3
- zenml/utils/secret_utils.py +2 -2
- zenml/utils/settings_utils.py +1 -1
- zenml/utils/source_utils.py +67 -21
- zenml/utils/string_utils.py +29 -0
- zenml/zen_server/dashboard/assets/{404-CRAA_Lew.js → 404-iO8vpun1.js} +1 -1
- zenml/zen_server/dashboard/assets/{@radix-BXWm7HOa.js → @radix-DnFH_oo1.js} +1 -1
- zenml/zen_server/dashboard/assets/{@react-router-l3lMcXA2.js → @react-router-APVeuk-U.js} +1 -1
- zenml/zen_server/dashboard/assets/{@reactflow-CeVxyqYT.js → @reactflow-B6kq9fJZ.js} +2 -2
- zenml/zen_server/dashboard/assets/{@tanstack-FmcYZMuX.js → @tanstack-QbMbTrh5.js} +1 -1
- zenml/zen_server/dashboard/assets/AlertDialogDropdownItem-BXeSvmMY.js +1 -0
- zenml/zen_server/dashboard/assets/{CodeSnippet-D0VLxT2A.js → CodeSnippet-DNWdQmbo.js} +2 -2
- zenml/zen_server/dashboard/assets/CollapsibleCard-B2OVjWYE.js +1 -0
- zenml/zen_server/dashboard/assets/Commands-DsoaVElZ.js +1 -0
- zenml/zen_server/dashboard/assets/CopyButton-BqE_-PHO.js +2 -0
- zenml/zen_server/dashboard/assets/{CsvVizualization-D3kAypDj.js → CsvVizualization-Dyasr2jU.js} +6 -6
- zenml/zen_server/dashboard/assets/{edit-C0MVvPD2.js → DialogItem-Cz1VLRwa.js} +1 -1
- zenml/zen_server/dashboard/assets/{DisplayDate-DizbSeT-.js → DisplayDate-DkCy54Bp.js} +1 -1
- zenml/zen_server/dashboard/assets/EditSecretDialog-Du423_3U.js +1 -0
- zenml/zen_server/dashboard/assets/{EmptyState-BHblM39I.js → EmptyState-Cs3DEmso.js} +1 -1
- zenml/zen_server/dashboard/assets/{Error-C6LeJSER.js → Error-DorJD_va.js} +1 -1
- zenml/zen_server/dashboard/assets/ExecutionStatus-CIfQTutR.js +1 -0
- zenml/zen_server/dashboard/assets/{Helpbox-aAB2XP-z.js → Helpbox-CmfvtNeq.js} +1 -1
- zenml/zen_server/dashboard/assets/Infobox-BL9NOS37.js +1 -0
- zenml/zen_server/dashboard/assets/{InlineAvatar-DpTLgM3Q.js → InlineAvatar-Ds2ZFHPc.js} +1 -1
- zenml/zen_server/dashboard/assets/{Lock-CNyJvf2r.js → Lock-CmIn0szs.js} +1 -1
- zenml/zen_server/dashboard/assets/{MarkdownVisualization-Bajxn0HY.js → MarkdownVisualization-DS05sfBm.js} +1 -1
- zenml/zen_server/dashboard/assets/{NumberBox-BmKE0qnO.js → NumberBox-CrN0_kqI.js} +1 -1
- zenml/zen_server/dashboard/assets/Partials-DX-8iEa1.js +1 -0
- zenml/zen_server/dashboard/assets/{PasswordChecker-yGGoJSB-.js → PasswordChecker-DE71J_3F.js} +1 -1
- zenml/zen_server/dashboard/assets/ProviderIcon-BOQJgapd.js +1 -0
- zenml/zen_server/dashboard/assets/ProviderRadio-BsYBw9YA.js +1 -0
- zenml/zen_server/dashboard/assets/SearchField-W3GXpLlI.js +1 -0
- zenml/zen_server/dashboard/assets/SetPassword-B-0a8UCj.js +1 -0
- zenml/zen_server/dashboard/assets/{Tick-uxv80Q6a.js → Tick-i1DYsVcX.js} +1 -1
- zenml/zen_server/dashboard/assets/{UpdatePasswordSchemas-oN4G3sKz.js → UpdatePasswordSchemas-C6Zb7ASL.js} +1 -1
- zenml/zen_server/dashboard/assets/UsageReason-CCnzmwS8.js +1 -0
- zenml/zen_server/dashboard/assets/WizardFooter-BHbO7zOa.js +1 -0
- zenml/zen_server/dashboard/assets/all-pipeline-runs-query-BBEe6I9-.js +1 -0
- zenml/zen_server/dashboard/assets/{check-circle-1_I207rW.js → check-circle-DOoS4yhF.js} +1 -1
- zenml/zen_server/dashboard/assets/{chevron-down-BpaF8JqM.js → chevron-down-Cwb-W_B_.js} +1 -1
- zenml/zen_server/dashboard/assets/{chevron-right-double-Dk8e2L99.js → chevron-right-double-c9H46Kl8.js} +1 -1
- zenml/zen_server/dashboard/assets/{cloud-only-BkUuI0lZ.js → cloud-only-BuP4Kt_7.js} +1 -1
- zenml/zen_server/dashboard/assets/code-browser-BJYErIjr.js +1 -0
- zenml/zen_server/dashboard/assets/codespaces-BitYDX9d.gif +0 -0
- zenml/zen_server/dashboard/assets/{copy-f3XGPPxt.js → copy-CaGlDsUy.js} +1 -1
- zenml/zen_server/dashboard/assets/create-stack-B2x2d4r1.js +1 -0
- zenml/zen_server/dashboard/assets/{docker-8uj__HHK.js → docker-BFAFXr2_.js} +1 -1
- zenml/zen_server/dashboard/assets/{dots-horizontal-sKQlWEni.js → dots-horizontal-C6K59vUm.js} +1 -1
- zenml/zen_server/dashboard/assets/flyte-Cj-xy_8I.svg +10 -0
- zenml/zen_server/dashboard/assets/form-schemas-Bap0f854.js +1 -0
- zenml/zen_server/dashboard/assets/gcp-Dj6ntk0L.js +1 -0
- zenml/zen_server/dashboard/assets/{help-FuHlZwn0.js → help-CwN931fX.js} +1 -1
- zenml/zen_server/dashboard/assets/{index-Bd1xgUQG.js → index-5GJ5ysEZ.js} +1 -1
- zenml/zen_server/dashboard/assets/{index-DaGknux4.css → index-6DYjZgDn.css} +1 -1
- zenml/zen_server/dashboard/assets/index-B9wVwe7u.js +55 -0
- zenml/zen_server/dashboard/assets/index-DFi8BroH.js +1 -0
- zenml/zen_server/dashboard/assets/{index.esm-DT4uyn2i.js → index.esm-BE1uqCX5.js} +1 -1
- zenml/zen_server/dashboard/assets/kubernetes-BjbR6D-1.js +1 -0
- zenml/zen_server/dashboard/assets/{layout-D6oiSbfd.js → layout-Dru15_XR.js} +1 -1
- zenml/zen_server/dashboard/assets/link-external-BT2L8hAQ.js +1 -0
- zenml/zen_server/dashboard/assets/{login-mutation-13A_JSVA.js → login-mutation-DwxUz8VA.js} +1 -1
- zenml/zen_server/dashboard/assets/{logs-CgeE2vZP.js → logs-GiDJXbLS.js} +1 -1
- zenml/zen_server/dashboard/assets/metaflow-weOkWNyT.svg +10 -0
- zenml/zen_server/dashboard/assets/{not-found-B0Mmb90p.js → not-found-D5i9DunU.js} +1 -1
- zenml/zen_server/dashboard/assets/{package-DdkziX79.js → package-DYKZ5jKW.js} +1 -1
- zenml/zen_server/dashboard/assets/page-BFuJICXM.js +9 -0
- zenml/zen_server/dashboard/assets/{page-BGwA9B1M.js → page-BiF8hLbO.js} +1 -1
- zenml/zen_server/dashboard/assets/{page-DugsjcQ_.js → page-BitfWsiW.js} +1 -1
- zenml/zen_server/dashboard/assets/page-CDOQLrPC.js +1 -0
- zenml/zen_server/dashboard/assets/page-CEJWu1YO.js +1 -0
- zenml/zen_server/dashboard/assets/page-CIbehp7V.js +1 -0
- zenml/zen_server/dashboard/assets/page-CLiRGfWo.js +1 -0
- zenml/zen_server/dashboard/assets/page-CV44mQn9.js +1 -0
- zenml/zen_server/dashboard/assets/page-CrSdkteO.js +2 -0
- zenml/zen_server/dashboard/assets/page-D5F3DJjm.js +1 -0
- zenml/zen_server/dashboard/assets/page-DE03uZZR.js +1 -0
- zenml/zen_server/dashboard/assets/page-DFCK65G9.js +1 -0
- zenml/zen_server/dashboard/assets/{page-RnG-qhv9.js → page-DGMa3ZQL.js} +1 -1
- zenml/zen_server/dashboard/assets/page-DI-qTWrm.js +1 -0
- zenml/zen_server/dashboard/assets/page-DQGCHKrQ.js +1 -0
- zenml/zen_server/dashboard/assets/{page-DSTQnBk-.js → page-DQdwZZ9x.js} +1 -1
- zenml/zen_server/dashboard/assets/page-DgM-N9RL.js +1 -0
- zenml/zen_server/dashboard/assets/page-Dt8VgzbE.js +1 -0
- zenml/zen_server/dashboard/assets/{page-DLpOnf7u.js → page-J0s8Sq3N.js} +1 -1
- zenml/zen_server/dashboard/assets/page-WCQ659by.js +1 -0
- zenml/zen_server/dashboard/assets/page-bimkItOg.js +1 -0
- zenml/zen_server/dashboard/assets/{page-hQaiQXfg.js → page-iwoJnwPv.js} +1 -1
- zenml/zen_server/dashboard/assets/{page-YiF_fNbe.js → page-oS4hqS8M.js} +1 -1
- zenml/zen_server/dashboard/assets/page-oSqx9dkH.js +1 -0
- zenml/zen_server/dashboard/assets/page-p3GqEAUW.js +1 -0
- zenml/zen_server/dashboard/assets/page-qvcUVPE-.js +1 -0
- zenml/zen_server/dashboard/assets/page-xQG6GmFJ.js +1 -0
- zenml/zen_server/dashboard/assets/{persist-3-5nOJ6m.js → persist-mEZN_fgH.js} +1 -1
- zenml/zen_server/dashboard/assets/persist-xsYgVtR1.js +1 -0
- zenml/zen_server/dashboard/assets/{plus-FB9-lEq_.js → plus-Bc8eLSDM.js} +1 -1
- zenml/zen_server/dashboard/assets/{refresh-COb6KYDi.js → refresh-hfgWPeto.js} +1 -1
- zenml/zen_server/dashboard/assets/rocket-SESCGQXm.js +1 -0
- zenml/zen_server/dashboard/assets/sharedSchema-BfZcy7aP.js +14 -0
- zenml/zen_server/dashboard/assets/stack-detail-query-CU4egfhp.js +1 -0
- zenml/zen_server/dashboard/assets/templates-1S_8WeSK.webp +0 -0
- zenml/zen_server/dashboard/assets/{trash-Cd5CSFqA.js → trash-DUWZWzse.js} +1 -1
- zenml/zen_server/dashboard/assets/{update-server-settings-mutation-B8GB_ubU.js → update-server-settings-mutation-DNqmQXDM.js} +1 -1
- zenml/zen_server/dashboard/assets/{url-hcMJkz8p.js → url-DwbuKk1b.js} +1 -1
- zenml/zen_server/dashboard/assets/{zod-CnykDKJj.js → zod-uFd1wBcd.js} +1 -1
- zenml/zen_server/dashboard/index.html +7 -7
- zenml/zen_server/dashboard_legacy/asset-manifest.json +4 -4
- zenml/zen_server/dashboard_legacy/index.html +1 -1
- zenml/zen_server/dashboard_legacy/{precache-manifest.9c473c96a43298343a7ce1256183123b.js → precache-manifest.290b95d5b43efa3368b3dc63d20c4782.js} +4 -4
- zenml/zen_server/dashboard_legacy/service-worker.js +1 -1
- zenml/zen_server/dashboard_legacy/static/js/{main.463c90b9.chunk.js → main.840d1bf0.chunk.js} +2 -2
- zenml/zen_server/dashboard_legacy/static/js/{main.463c90b9.chunk.js.map → main.840d1bf0.chunk.js.map} +1 -1
- zenml/zen_server/deploy/helm/Chart.yaml +1 -1
- zenml/zen_server/deploy/helm/README.md +2 -2
- zenml/zen_server/routers/service_connectors_endpoints.py +2 -4
- zenml/zen_server/routers/workspaces_endpoints.py +20 -66
- zenml/zen_server/secure_headers.py +120 -0
- zenml/zen_server/template_execution/runner_entrypoint_configuration.py +0 -2
- zenml/zen_server/template_execution/utils.py +1 -0
- zenml/zen_server/utils.py +0 -100
- zenml/zen_server/zen_server_api.py +4 -2
- zenml/zen_stores/migrations/versions/0.65.0_release.py +23 -0
- zenml/zen_stores/migrations/versions/0.66.0_release.py +23 -0
- zenml/zen_stores/migrations/versions/bf2120261b5a_add_configured_model_version_id.py +74 -0
- zenml/zen_stores/rest_zen_store.py +4 -21
- zenml/zen_stores/schemas/constants.py +16 -0
- zenml/zen_stores/schemas/model_schemas.py +9 -3
- zenml/zen_stores/schemas/pipeline_run_schemas.py +22 -8
- zenml/zen_stores/schemas/step_run_schemas.py +23 -12
- zenml/zen_stores/sql_zen_store.py +312 -300
- zenml/zen_stores/zen_store_interface.py +0 -16
- {zenml_nightly-0.64.0.dev20240811.dist-info → zenml_nightly-0.66.0.dev20240910.dist-info}/METADATA +10 -12
- {zenml_nightly-0.64.0.dev20240811.dist-info → zenml_nightly-0.66.0.dev20240910.dist-info}/RECORD +249 -217
- zenml/models/v2/misc/full_stack.py +0 -129
- zenml/new/pipelines/model_utils.py +0 -72
- zenml/zen_server/dashboard/assets/AlertDialogDropdownItem-ErO9aOgK.js +0 -1
- zenml/zen_server/dashboard/assets/AwarenessChannel-CLXo5rKM.js +0 -1
- zenml/zen_server/dashboard/assets/CollapsibleCard-BaUPiVg0.js +0 -1
- zenml/zen_server/dashboard/assets/Commands-JrcZK-3j.js +0 -1
- zenml/zen_server/dashboard/assets/CopyButton-Dbo52T1K.js +0 -2
- zenml/zen_server/dashboard/assets/EditSecretDialog-Bd7mFLS4.js +0 -1
- zenml/zen_server/dashboard/assets/ExecutionStatus-jH4OrWBq.js +0 -1
- zenml/zen_server/dashboard/assets/Infobox-BQ0aty32.js +0 -1
- zenml/zen_server/dashboard/assets/ProviderRadio-BBqkIuTd.js +0 -1
- zenml/zen_server/dashboard/assets/RadioItem-xLhXoiFV.js +0 -1
- zenml/zen_server/dashboard/assets/SearchField-C9R0mdaX.js +0 -1
- zenml/zen_server/dashboard/assets/SetPassword-52sNxNiO.js +0 -1
- zenml/zen_server/dashboard/assets/SuccessStep-DlkItqYG.js +0 -1
- zenml/zen_server/dashboard/assets/aws-0_3UsPif.js +0 -1
- zenml/zen_server/dashboard/assets/database-cXYNX9tt.js +0 -1
- zenml/zen_server/dashboard/assets/file-text-B9JibxTs.js +0 -1
- zenml/zen_server/dashboard/assets/index-DhIZtpxB.js +0 -55
- zenml/zen_server/dashboard/assets/page-7-v2OBm-.js +0 -1
- zenml/zen_server/dashboard/assets/page-B3ozwdD1.js +0 -1
- zenml/zen_server/dashboard/assets/page-BkjAUyTA.js +0 -1
- zenml/zen_server/dashboard/assets/page-BnacgBiy.js +0 -1
- zenml/zen_server/dashboard/assets/page-BxF_KMQ3.js +0 -2
- zenml/zen_server/dashboard/assets/page-C4POHC0K.js +0 -1
- zenml/zen_server/dashboard/assets/page-C9kudd44.js +0 -9
- zenml/zen_server/dashboard/assets/page-CA1j3GpJ.js +0 -1
- zenml/zen_server/dashboard/assets/page-CCY6yfmu.js +0 -1
- zenml/zen_server/dashboard/assets/page-CgTe7Bme.js +0 -1
- zenml/zen_server/dashboard/assets/page-Cgn-6v2Y.js +0 -1
- zenml/zen_server/dashboard/assets/page-CxQmQqDw.js +0 -1
- zenml/zen_server/dashboard/assets/page-D2Goey3H.js +0 -1
- zenml/zen_server/dashboard/assets/page-DTysUGOy.js +0 -1
- zenml/zen_server/dashboard/assets/page-D_EXUFJb.js +0 -1
- zenml/zen_server/dashboard/assets/page-Db15QzsM.js +0 -1
- zenml/zen_server/dashboard/assets/page-OFKSPyN7.js +0 -1
- zenml/zen_server/dashboard/assets/page-T2BtjwPl.js +0 -1
- zenml/zen_server/dashboard/assets/page-TXe1Eo3Z.js +0 -1
- zenml/zen_server/dashboard/assets/play-circle-XSkLR12B.js +0 -1
- zenml/zen_server/dashboard/assets/sharedSchema-BoYx_B_L.js +0 -14
- zenml/zen_server/dashboard/assets/stack-detail-query-B-US_-wa.js +0 -1
- zenml/zen_server/dashboard/assets/terminal-grtjrIEJ.js +0 -1
- {zenml_nightly-0.64.0.dev20240811.dist-info → zenml_nightly-0.66.0.dev20240910.dist-info}/LICENSE +0 -0
- {zenml_nightly-0.64.0.dev20240811.dist-info → zenml_nightly-0.66.0.dev20240910.dist-info}/WHEEL +0 -0
- {zenml_nightly-0.64.0.dev20240811.dist-info → zenml_nightly-0.66.0.dev20240910.dist-info}/entry_points.txt +0 -0
zenml/integrations/numpy/materializers/numpy_materializer.py
ADDED
@@ -0,0 +1,246 @@
+# Copyright (c) ZenML GmbH 2024. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at:
+#
+#       https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
+# or implied. See the License for the specific language governing
+# permissions and limitations under the License.
+"""Implementation of the ZenML NumPy materializer."""
+
+import os
+from collections import Counter
+from typing import TYPE_CHECKING, Any, ClassVar, Dict, Tuple, Type
+
+import numpy as np
+
+from zenml.enums import ArtifactType, VisualizationType
+from zenml.logger import get_logger
+from zenml.materializers.base_materializer import BaseMaterializer
+from zenml.metadata.metadata_types import DType, MetadataType
+
+if TYPE_CHECKING:
+    from numpy.typing import NDArray
+
+logger = get_logger(__name__)
+
+
+NUMPY_FILENAME = "data.npy"
+
+DATA_FILENAME = "data.parquet"
+SHAPE_FILENAME = "shape.json"
+DATA_VAR = "data_var"
+
+
+class NumpyMaterializer(BaseMaterializer):
+    """Materializer to read data to and from pandas."""
+
+    ASSOCIATED_TYPES: ClassVar[Tuple[Type[Any], ...]] = (np.ndarray,)
+    ASSOCIATED_ARTIFACT_TYPE: ClassVar[ArtifactType] = ArtifactType.DATA
+
+    def load(self, data_type: Type[Any]) -> "Any":
+        """Reads a numpy array from a `.npy` file.
+
+        Args:
+            data_type: The type of the data to read.
+
+
+        Raises:
+            ImportError: If pyarrow is not installed.
+
+        Returns:
+            The numpy array.
+        """
+        numpy_file = os.path.join(self.uri, NUMPY_FILENAME)
+
+        if self.artifact_store.exists(numpy_file):
+            with self.artifact_store.open(numpy_file, "rb") as f:
+                return np.load(f, allow_pickle=True)
+        elif self.artifact_store.exists(os.path.join(self.uri, DATA_FILENAME)):
+            logger.warning(
+                "A legacy artifact was found. "
+                "This artifact was created with an older version of "
+                "ZenML. You can still use it, but it will be "
+                "converted to the new format on the next materialization."
+            )
+            try:
+                # Import old materializer dependencies
+                import pyarrow as pa  # type: ignore
+                import pyarrow.parquet as pq  # type: ignore
+
+                from zenml.utils import yaml_utils
+
+                # Read numpy array from parquet file
+                shape_dict = yaml_utils.read_json(
+                    os.path.join(self.uri, SHAPE_FILENAME)
+                )
+                shape_tuple = tuple(shape_dict.values())
+                with self.artifact_store.open(
+                    os.path.join(self.uri, DATA_FILENAME), "rb"
+                ) as f:
+                    input_stream = pa.input_stream(f)
+                    data = pq.read_table(input_stream)
+                vals = getattr(data.to_pandas(), DATA_VAR).values
+                return np.reshape(vals, shape_tuple)
+            except ImportError:
+                raise ImportError(
+                    "You have an old version of a `NumpyMaterializer` ",
+                    "data artifact stored in the artifact store ",
+                    "as a `.parquet` file, which requires `pyarrow` for reading. ",
+                    "You can install `pyarrow` by running `pip install pyarrow`.",
+                )
+
+    def save(self, arr: "NDArray[Any]") -> None:
+        """Writes a np.ndarray to the artifact store as a `.npy` file.
+
+        Args:
+            arr: The numpy array to write.
+        """
+        with self.artifact_store.open(
+            os.path.join(self.uri, NUMPY_FILENAME), "wb"
+        ) as f:
+            np.save(f, arr)
+
+    def save_visualizations(
+        self, arr: "NDArray[Any]"
+    ) -> Dict[str, VisualizationType]:
+        """Saves visualizations for a numpy array.
+
+        If the array is 1D, a histogram is saved. If the array is 2D or 3D with
+        3 or 4 channels, an image is saved.
+
+        Args:
+            arr: The numpy array to visualize.
+
+        Returns:
+            A dictionary of visualization URIs and their types.
+        """
+        if not np.issubdtype(arr.dtype, np.number):
+            return {}
+
+        try:
+            # Save histogram for 1D arrays
+            if len(arr.shape) == 1:
+                histogram_path = os.path.join(self.uri, "histogram.png")
+                histogram_path = histogram_path.replace("\\", "/")
+                self._save_histogram(histogram_path, arr)
+                return {histogram_path: VisualizationType.IMAGE}
+
+            # Save as image for 3D arrays with 3 or 4 channels
+            if len(arr.shape) == 3 and arr.shape[2] in [3, 4]:
+                image_path = os.path.join(self.uri, "image.png")
+                image_path = image_path.replace("\\", "/")
+                self._save_image(image_path, arr)
+                return {image_path: VisualizationType.IMAGE}
+
+        except ImportError:
+            logger.info(
+                "Skipping visualization of numpy array because matplotlib "
+                "is not installed. To install matplotlib, run "
+                "`pip install matplotlib`."
+            )
+
+        return {}
+
+    def _save_histogram(self, output_path: str, arr: "NDArray[Any]") -> None:
+        """Saves a histogram of a numpy array.
+
+        Args:
+            output_path: The path to save the histogram to.
+            arr: The numpy array of which to save the histogram.
+        """
+        import matplotlib.pyplot as plt
+
+        plt.hist(arr)
+        with self.artifact_store.open(output_path, "wb") as f:
+            plt.savefig(f)
+        plt.close()
+
+    def _save_image(self, output_path: str, arr: "NDArray[Any]") -> None:
+        """Saves a numpy array as an image.
+
+        Args:
+            output_path: The path to save the image to.
+            arr: The numpy array to save.
+        """
+        from matplotlib.image import imsave
+
+        with self.artifact_store.open(output_path, "wb") as f:
+            imsave(f, arr)
+
+    def extract_metadata(
+        self, arr: "NDArray[Any]"
+    ) -> Dict[str, "MetadataType"]:
+        """Extract metadata from the given numpy array.
+
+        Args:
+            arr: The numpy array to extract metadata from.
+
+        Returns:
+            The extracted metadata as a dictionary.
+        """
+        if np.issubdtype(arr.dtype, np.number):
+            return self._extract_numeric_metadata(arr)
+        elif np.issubdtype(arr.dtype, np.unicode_) or np.issubdtype(
+            arr.dtype, np.object_
+        ):
+            return self._extract_text_metadata(arr)
+        else:
+            return {}
+
+    def _extract_numeric_metadata(
+        self, arr: "NDArray[Any]"
+    ) -> Dict[str, "MetadataType"]:
+        """Extracts numeric metadata from a numpy array.
+
+        Args:
+            arr: The numpy array to extract metadata from.
+
+        Returns:
+            A dictionary of metadata.
+        """
+        min_val = np.min(arr).item()
+        max_val = np.max(arr).item()
+
+        numpy_metadata: Dict[str, "MetadataType"] = {
+            "shape": tuple(arr.shape),
+            "dtype": DType(arr.dtype.type),
+            "mean": np.mean(arr).item(),
+            "std": np.std(arr).item(),
+            "min": min_val,
+            "max": max_val,
+        }
+        return numpy_metadata
+
+    def _extract_text_metadata(
+        self, arr: "NDArray[Any]"
+    ) -> Dict[str, "MetadataType"]:
+        """Extracts text metadata from a numpy array.
+
+        Args:
+            arr: The numpy array to extract metadata from.
+
+        Returns:
+            A dictionary of metadata.
+        """
+        text = " ".join(arr)
+        words = text.split()
+        word_counts = Counter(words)
+        unique_words = len(word_counts)
+        total_words = len(words)
+        most_common_word, most_common_count = word_counts.most_common(1)[0]
+
+        text_metadata: Dict[str, "MetadataType"] = {
+            "shape": tuple(arr.shape),
+            "dtype": DType(arr.dtype.type),
+            "unique_words": unique_words,
+            "total_words": total_words,
+            "most_common_word": most_common_word,
+            "most_common_count": most_common_count,
+        }
+        return text_metadata
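For orientation, and not part of the diff itself: once the numpy integration is active, this materializer is what ZenML selects for step outputs typed as `np.ndarray`. A minimal, hypothetical sketch of a pipeline that would exercise it (the step and pipeline names are invented for illustration):

```python
# Hypothetical usage sketch: a step output typed as np.ndarray is saved by
# NumpyMaterializer as data.npy, and numeric metadata (shape, dtype, mean,
# std, min, max) is attached to the stored artifact.
import numpy as np

from zenml import pipeline, step


@step
def make_array() -> np.ndarray:
    return np.random.default_rng(42).normal(size=100)


@step
def summarize(arr: np.ndarray) -> float:
    return float(arr.mean())


@pipeline
def numpy_materializer_demo() -> None:
    summarize(make_array())


if __name__ == "__main__":
    numpy_materializer_demo()
```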
zenml/integrations/pandas/__init__.py
ADDED
@@ -0,0 +1,32 @@
+# Copyright (c) ZenML GmbH 2024. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at:
+#
+#       https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
+# or implied. See the License for the specific language governing
+# permissions and limitations under the License.
+"""Initialization of the Pandas integration."""
+
+from zenml.integrations.constants import PANDAS
+from zenml.integrations.integration import Integration
+
+
+class PandasIntegration(Integration):
+    """Definition of Pandas integration for ZenML."""
+
+    NAME = PANDAS
+    REQUIREMENTS = ["pandas>=2.0.0"]
+
+    @classmethod
+    def activate(cls) -> None:
+        """Activates the integration."""
+        from zenml.integrations.pandas import materializers  # noqa
+
+
+PandasIntegration.check_installation()
zenml/integrations/pandas/materializers/__init__.py
ADDED
@@ -0,0 +1,18 @@
+# Copyright (c) ZenML GmbH 2024. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at:
+#
+#       https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
+# or implied. See the License for the specific language governing
+# permissions and limitations under the License.
+"""Initialization of the Pandas materializer."""
+
+from zenml.integrations.pandas.materializers.pandas_materializer import (  # noqa
+    PandasMaterializer,
+)
zenml/integrations/pandas/materializers/pandas_materializer.py
ADDED
@@ -0,0 +1,192 @@
+# Copyright (c) ZenML GmbH 2024. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at:
+#
+#       https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
+# or implied. See the License for the specific language governing
+# permissions and limitations under the License.
+"""Materializer for Pandas."""
+
+import os
+from typing import Any, ClassVar, Dict, Optional, Tuple, Type, Union
+
+import pandas as pd
+
+from zenml.artifact_stores.base_artifact_store import BaseArtifactStore
+from zenml.enums import ArtifactType, VisualizationType
+from zenml.logger import get_logger
+from zenml.materializers.base_materializer import BaseMaterializer
+from zenml.metadata.metadata_types import DType, MetadataType
+
+logger = get_logger(__name__)
+
+PARQUET_FILENAME = "df.parquet.gzip"
+COMPRESSION_TYPE = "gzip"
+
+CSV_FILENAME = "df.csv"
+
+
+class PandasMaterializer(BaseMaterializer):
+    """Materializer to read data to and from pandas."""
+
+    ASSOCIATED_TYPES: ClassVar[Tuple[Type[Any], ...]] = (
+        pd.DataFrame,
+        pd.Series,
+    )
+    ASSOCIATED_ARTIFACT_TYPE: ClassVar[ArtifactType] = ArtifactType.DATA
+
+    def __init__(
+        self, uri: str, artifact_store: Optional[BaseArtifactStore] = None
+    ):
+        """Define `self.data_path`.
+
+        Args:
+            uri: The URI where the artifact data is stored.
+            artifact_store: The artifact store where the artifact data is stored.
+        """
+        super().__init__(uri, artifact_store)
+        try:
+            import pyarrow  # type: ignore # noqa
+
+            self.pyarrow_exists = True
+        except ImportError:
+            self.pyarrow_exists = False
+            logger.warning(
+                "By default, the `PandasMaterializer` stores data as a "
+                "`.csv` file. If you want to store data more efficiently, "
+                "you can install `pyarrow` by running "
+                "'`pip install pyarrow`'. This will allow `PandasMaterializer` "
+                "to automatically store the data as a `.parquet` file instead."
+            )
+        finally:
+            self.parquet_path = os.path.join(self.uri, PARQUET_FILENAME)
+            self.csv_path = os.path.join(self.uri, CSV_FILENAME)
+
+    def load(self, data_type: Type[Any]) -> Union[pd.DataFrame, pd.Series]:
+        """Reads `pd.DataFrame` or `pd.Series` from a `.parquet` or `.csv` file.
+
+        Args:
+            data_type: The type of the data to read.
+
+        Raises:
+            ImportError: If pyarrow or fastparquet is not installed.
+
+        Returns:
+            The pandas dataframe or series.
+        """
+        if self.artifact_store.exists(self.parquet_path):
+            if self.pyarrow_exists:
+                with self.artifact_store.open(
+                    self.parquet_path, mode="rb"
+                ) as f:
+                    df = pd.read_parquet(f)
+            else:
+                raise ImportError(
+                    "You have an old version of a `PandasMaterializer` "
+                    "data artifact stored in the artifact store "
+                    "as a `.parquet` file, which requires `pyarrow` "
+                    "for reading, You can install `pyarrow` by running "
+                    "'`pip install pyarrow fastparquet`'."
+                )
+        else:
+            with self.artifact_store.open(self.csv_path, mode="rb") as f:
+                df = pd.read_csv(f, index_col=0, parse_dates=True)
+
+        # validate the type of the data.
+        def is_dataframe_or_series(
+            df: Union[pd.DataFrame, pd.Series],
+        ) -> Union[pd.DataFrame, pd.Series]:
+            """Checks if the data is a `pd.DataFrame` or `pd.Series`.
+
+            Args:
+                df: The data to check.
+
+            Returns:
+                The data if it is a `pd.DataFrame` or `pd.Series`.
+            """
+            if issubclass(data_type, pd.Series):
+                # Taking the first column if its a series as the assumption
+                # is that there will only be one
+                assert len(df.columns) == 1
+                df = df[df.columns[0]]
+                return df
+            else:
+                return df
+
+        return is_dataframe_or_series(df)
+
+    def save(self, df: Union[pd.DataFrame, pd.Series]) -> None:
+        """Writes a pandas dataframe or series to the specified filename.
+
+        Args:
+            df: The pandas dataframe or series to write.
+        """
+        if isinstance(df, pd.Series):
+            df = df.to_frame(name="series")
+
+        if self.pyarrow_exists:
+            with self.artifact_store.open(self.parquet_path, mode="wb") as f:
+                df.to_parquet(f, compression=COMPRESSION_TYPE)
+        else:
+            with self.artifact_store.open(self.csv_path, mode="wb") as f:
+                df.to_csv(f, index=True)
+
+    def save_visualizations(
+        self, df: Union[pd.DataFrame, pd.Series]
+    ) -> Dict[str, VisualizationType]:
+        """Save visualizations of the given pandas dataframe or series.
+
+        Args:
+            df: The pandas dataframe or series to visualize.
+
+        Returns:
+            A dictionary of visualization URIs and their types.
+        """
+        describe_uri = os.path.join(self.uri, "describe.csv")
+        describe_uri = describe_uri.replace("\\", "/")
+        with self.artifact_store.open(describe_uri, mode="wb") as f:
+            df.describe().to_csv(f)
+        return {describe_uri: VisualizationType.CSV}
+
+    def extract_metadata(
+        self, df: Union[pd.DataFrame, pd.Series]
+    ) -> Dict[str, "MetadataType"]:
+        """Extract metadata from the given pandas dataframe or series.
+
+        Args:
+            df: The pandas dataframe or series to extract metadata from.
+
+        Returns:
+            The extracted metadata as a dictionary.
+        """
+        pandas_metadata: Dict[str, "MetadataType"] = {"shape": df.shape}
+
+        if isinstance(df, pd.Series):
+            pandas_metadata["dtype"] = DType(df.dtype.type)
+            pandas_metadata["mean"] = float(df.mean().item())
+            pandas_metadata["std"] = float(df.std().item())
+            pandas_metadata["min"] = float(df.min().item())
+            pandas_metadata["max"] = float(df.max().item())
+
+        else:
+            pandas_metadata["dtype"] = {
+                str(key): DType(value.type) for key, value in df.dtypes.items()
+            }
+            for stat_name, stat in {
+                "mean": df.mean,
+                "std": df.std,
+                "min": df.min,
+                "max": df.max,
+            }.items():
+                pandas_metadata[stat_name] = {
+                    str(key): float(value)
+                    for key, value in stat(numeric_only=True).to_dict().items()
+                }
+
+        return pandas_metadata
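Likewise for orientation only: with the new pandas integration, a step that returns a DataFrame or Series is handled by this materializer. The data is written as `df.parquet.gzip` when `pyarrow` is importable and as `df.csv` otherwise, with a `describe.csv` visualization and per-column metadata recorded. A hypothetical sketch:

```python
# Hypothetical usage sketch: the returned DataFrame is persisted by
# PandasMaterializer; the storage format depends on whether pyarrow is
# installed in the active environment.
import pandas as pd

from zenml import pipeline, step


@step
def load_frame() -> pd.DataFrame:
    return pd.DataFrame({"a": [1, 2, 3], "b": [4.0, 5.0, 6.0]})


@pipeline
def pandas_materializer_demo() -> None:
    load_frame()


if __name__ == "__main__":
    pandas_materializer_demo()
```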
zenml/integrations/prodigy/annotators/prodigy_annotator.py
CHANGED
@@ -221,7 +221,7 @@ class ProdigyAnnotator(BaseAnnotator, AuthenticationMixin):
     def get_dataset(self, **kwargs: Any) -> Any:
         """Gets the dataset metadata for the given name.
 
-        If you would like the
+        If you would like the labeled data, use `get_labeled_data` instead.
 
         Args:
             **kwargs: Additional keyword arguments to pass to the Prodigy client.
zenml/integrations/seldon/__init__.py
CHANGED
@@ -16,9 +16,8 @@
 The Seldon Core integration allows you to use the Seldon Core model serving
 platform to implement continuous model deployment.
 """
-from typing import List, Type
+from typing import List, Type, Optional
 
-from zenml.enums import StackComponentType
 from zenml.integrations.constants import SELDON
 from zenml.integrations.integration import Integration
 from zenml.stack import Flavor
@@ -33,7 +32,8 @@ class SeldonIntegration(Integration):
     REQUIREMENTS = [
         "kubernetes==18.20.0",
     ]
-
+
+    REQUIREMENTS_IGNORED_ON_UNINSTALL = ["kubernetes", "numpy"]
 
     @classmethod
     def activate(cls) -> None:
@@ -52,5 +52,20 @@ class SeldonIntegration(Integration):
 
         return [SeldonModelDeployerFlavor]
 
+    @classmethod
+    def get_requirements(cls, target_os: Optional[str] = None) -> List[str]:
+        """Method to get the requirements for the integration.
+
+        Args:
+            target_os: The target operating system to get the requirements for.
+
+        Returns:
+            A list of requirements.
+        """
+        from zenml.integrations.numpy import NumpyIntegration
+
+        return cls.REQUIREMENTS + \
+            NumpyIntegration.get_requirements(target_os=target_os)
+
 
 SeldonIntegration.check_installation()
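The `get_requirements` override added above lets an integration append another integration's pins to its own instead of duplicating them; Seldon pulls in the NumPy integration here, and whylogs does the same with Pandas further down. A rough illustration, assuming both integrations are importable and with the printed pins being indicative only:

```python
# Illustrative only: the exact pins depend on the installed ZenML version.
from zenml.integrations.numpy import NumpyIntegration
from zenml.integrations.seldon import SeldonIntegration

combined = SeldonIntegration.get_requirements()
# Seldon's own REQUIREMENTS plus everything the NumPy integration declares.
assert set(NumpyIntegration.get_requirements()).issubset(set(combined))
print(combined)
```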
zenml/integrations/skypilot_azure/__init__.py
CHANGED
@@ -31,7 +31,7 @@ class SkypilotAzureIntegration(Integration):
     """Definition of Skypilot (Azure) Integration for ZenML."""
 
     NAME = SKYPILOT_AZURE
-    REQUIREMENTS = ["skypilot
+    REQUIREMENTS = ["skypilot[azure]>=0.6.1"]
     APT_PACKAGES = ["openssh-client", "rsync"]
 
     @classmethod
zenml/integrations/tensorflow/__init__.py
CHANGED
@@ -62,11 +62,11 @@ class TensorflowIntegration(Integration):
         target_os = target_os or platform.system()
         if target_os == "Darwin" and platform.machine() == "arm64":
             requirements = [
-                "tensorflow-macos>=2.12
+                "tensorflow-macos>=2.12,<2.15",
             ]
         else:
             requirements = [
-                "tensorflow>=2.12
+                "tensorflow>=2.12,<2.15",
                 "tensorflow_io>=0.24.0",
             ]
         if sys.version_info.minor == 8:
zenml/integrations/tensorflow/materializers/tf_dataset_materializer.py
CHANGED
@@ -48,7 +48,7 @@ class TensorflowDatasetMaterializer(BaseMaterializer):
         temp_dir = tempfile.mkdtemp()
         io_utils.copy_dir(self.uri, temp_dir)
         path = os.path.join(temp_dir, DEFAULT_FILENAME)
-        dataset = tf.data.
+        dataset = tf.data.Dataset.load(path)
         # Don't delete the temporary directory here as the dataset is lazily
         # loaded and needs to read it when the object gets used
         return dataset
@@ -62,7 +62,7 @@ class TensorflowDatasetMaterializer(BaseMaterializer):
         temp_dir = tempfile.TemporaryDirectory()
         path = os.path.join(temp_dir.name, DEFAULT_FILENAME)
         try:
-            tf.data.
+            tf.data.Dataset.save(
                 dataset, path, compression=None, shard_func=None
             )
             io_utils.copy_dir(temp_dir.name, self.uri)
zenml/integrations/whylogs/__init__.py
CHANGED
@@ -13,9 +13,8 @@
 # permissions and limitations under the License.
 """Initialization of the whylogs integration."""
 
-from typing import List, Type
+from typing import List, Type, Optional
 
-from zenml.enums import StackComponentType
 from zenml.integrations.constants import WHYLOGS
 from zenml.integrations.integration import Integration
 from zenml.stack import Flavor
@@ -29,6 +28,8 @@ class WhylogsIntegration(Integration):
     NAME = WHYLOGS
     REQUIREMENTS = ["whylogs[viz]~=1.0.5", "whylogs[whylabs]~=1.0.5"]
 
+    REQUIREMENTS_IGNORED_ON_UNINSTALL = ["pandas"]
+
     @classmethod
     def activate(cls) -> None:
         """Activates the integration."""
@@ -48,5 +49,20 @@ class WhylogsIntegration(Integration):
 
         return [WhylogsDataValidatorFlavor]
 
+    @classmethod
+    def get_requirements(cls, target_os: Optional[str] = None) -> List[str]:
+        """Method to get the requirements for the integration.
+
+        Args:
+            target_os: The target operating system to get the requirements for.
+
+        Returns:
+            A list of requirements.
+        """
+        from zenml.integrations.pandas import PandasIntegration
+
+        return cls.REQUIREMENTS + \
+            PandasIntegration.get_requirements(target_os=target_os)
+
 
 WhylogsIntegration.check_installation()
zenml/logging/step_logging.py
CHANGED
@@ -13,6 +13,7 @@
 # permissions and limitations under the License.
 """ZenML logging handler."""
 
+import datetime
 import os
 import re
 import sys
@@ -312,16 +313,22 @@ class StepLogsStorage:
                             "w",
                         ) as file:
                             for message in self.buffer:
+                                timestamp = datetime.datetime.now(
+                                    datetime.timezone.utc
+                                ).strftime("%Y-%m-%d %H:%M:%S")
                                 file.write(
-                                    remove_ansi_escape_codes(message)
+                                    f"[{timestamp} UTC] {remove_ansi_escape_codes(message)}\n"
                                 )
                     else:
                         with self.artifact_store.open(
                             self.logs_uri, "a"
                         ) as file:
                             for message in self.buffer:
+                                timestamp = datetime.datetime.now(
+                                    datetime.timezone.utc
+                                ).strftime("%Y-%m-%d %H:%M:%S")
                                 file.write(
-                                    remove_ansi_escape_codes(message)
+                                    f"[{timestamp} UTC] {remove_ansi_escape_codes(message)}\n"
                                 )
 
             except (OSError, IOError) as e:
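The step-logging change above prefixes each buffered log line with a UTC timestamp before it is written to the artifact store. A standalone sketch of the resulting format (illustrative only, not the actual class):

```python
import datetime


def format_log_line(message: str) -> str:
    # Mirrors the new StepLogsStorage behaviour: UTC timestamp prefix plus a
    # trailing newline; ANSI escape codes are assumed to be stripped already.
    timestamp = datetime.datetime.now(datetime.timezone.utc).strftime(
        "%Y-%m-%d %H:%M:%S"
    )
    return f"[{timestamp} UTC] {message}\n"


print(format_log_line("Step `trainer` completed."), end="")
```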
|