oracle-ads 2.13.8__py3-none-any.whl → 2.13.9rc0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
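For context, a wheel is a standard zip archive, so a file-level comparison like the listing below can be reproduced locally with Python's standard zipfile module. The sketch that follows is illustrative only (it is not the tooling that produced this diff), and the local wheel filenames it assumes are hypothetical downloads of the two versions.

import zipfile

def wheel_contents(path: str) -> dict[str, int]:
    # Map each file inside the wheel to its uncompressed size in bytes.
    with zipfile.ZipFile(path) as whl:
        return {info.filename: info.file_size for info in whl.infolist()}

# Assumed local copies of the two published wheels (filenames are illustrative).
old = wheel_contents("oracle_ads-2.13.8-py3-none-any.whl")
new = wheel_contents("oracle_ads-2.13.9rc0-py3-none-any.whl")

# Files present only in the old wheel were removed in the release candidate, and vice versa.
for name in sorted(old.keys() - new.keys()):
    print(f"removed: {name}")
for name in sorted(new.keys() - old.keys()):
    print(f"added:   {name}")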
- {oracle_ads-2.13.8.dist-info → oracle_ads-2.13.9rc0.dist-info}/METADATA +151 -151
- oracle_ads-2.13.9rc0.dist-info/RECORD +9 -0
- {oracle_ads-2.13.8.dist-info → oracle_ads-2.13.9rc0.dist-info}/WHEEL +2 -1
- {oracle_ads-2.13.8.dist-info → oracle_ads-2.13.9rc0.dist-info}/entry_points.txt +1 -2
- oracle_ads-2.13.9rc0.dist-info/top_level.txt +1 -0
- ads/aqua/__init__.py +0 -40
- ads/aqua/app.py +0 -506
- ads/aqua/cli.py +0 -96
- ads/aqua/client/__init__.py +0 -3
- ads/aqua/client/client.py +0 -836
- ads/aqua/client/openai_client.py +0 -305
- ads/aqua/common/__init__.py +0 -5
- ads/aqua/common/decorator.py +0 -125
- ads/aqua/common/entities.py +0 -266
- ads/aqua/common/enums.py +0 -122
- ads/aqua/common/errors.py +0 -109
- ads/aqua/common/utils.py +0 -1285
- ads/aqua/config/__init__.py +0 -4
- ads/aqua/config/container_config.py +0 -248
- ads/aqua/config/evaluation/__init__.py +0 -4
- ads/aqua/config/evaluation/evaluation_service_config.py +0 -147
- ads/aqua/config/utils/__init__.py +0 -4
- ads/aqua/config/utils/serializer.py +0 -339
- ads/aqua/constants.py +0 -114
- ads/aqua/data.py +0 -14
- ads/aqua/dummy_data/icon.txt +0 -1
- ads/aqua/dummy_data/oci_model_deployments.json +0 -56
- ads/aqua/dummy_data/oci_models.json +0 -1
- ads/aqua/dummy_data/readme.md +0 -26
- ads/aqua/evaluation/__init__.py +0 -8
- ads/aqua/evaluation/constants.py +0 -53
- ads/aqua/evaluation/entities.py +0 -186
- ads/aqua/evaluation/errors.py +0 -70
- ads/aqua/evaluation/evaluation.py +0 -1814
- ads/aqua/extension/__init__.py +0 -42
- ads/aqua/extension/aqua_ws_msg_handler.py +0 -76
- ads/aqua/extension/base_handler.py +0 -90
- ads/aqua/extension/common_handler.py +0 -121
- ads/aqua/extension/common_ws_msg_handler.py +0 -36
- ads/aqua/extension/deployment_handler.py +0 -298
- ads/aqua/extension/deployment_ws_msg_handler.py +0 -54
- ads/aqua/extension/errors.py +0 -30
- ads/aqua/extension/evaluation_handler.py +0 -129
- ads/aqua/extension/evaluation_ws_msg_handler.py +0 -61
- ads/aqua/extension/finetune_handler.py +0 -96
- ads/aqua/extension/model_handler.py +0 -390
- ads/aqua/extension/models/__init__.py +0 -0
- ads/aqua/extension/models/ws_models.py +0 -145
- ads/aqua/extension/models_ws_msg_handler.py +0 -50
- ads/aqua/extension/ui_handler.py +0 -282
- ads/aqua/extension/ui_websocket_handler.py +0 -130
- ads/aqua/extension/utils.py +0 -133
- ads/aqua/finetuning/__init__.py +0 -7
- ads/aqua/finetuning/constants.py +0 -23
- ads/aqua/finetuning/entities.py +0 -181
- ads/aqua/finetuning/finetuning.py +0 -731
- ads/aqua/model/__init__.py +0 -8
- ads/aqua/model/constants.py +0 -60
- ads/aqua/model/entities.py +0 -306
- ads/aqua/model/enums.py +0 -30
- ads/aqua/model/model.py +0 -2080
- ads/aqua/modeldeployment/__init__.py +0 -8
- ads/aqua/modeldeployment/constants.py +0 -10
- ads/aqua/modeldeployment/deployment.py +0 -1324
- ads/aqua/modeldeployment/entities.py +0 -653
- ads/aqua/modeldeployment/inference.py +0 -74
- ads/aqua/modeldeployment/utils.py +0 -543
- ads/aqua/resources/gpu_shapes_index.json +0 -94
- ads/aqua/server/__init__.py +0 -4
- ads/aqua/server/__main__.py +0 -24
- ads/aqua/server/app.py +0 -47
- ads/aqua/server/aqua_spec.yml +0 -1291
- ads/aqua/training/__init__.py +0 -4
- ads/aqua/training/exceptions.py +0 -476
- ads/aqua/ui.py +0 -499
- ads/automl/__init__.py +0 -9
- ads/automl/driver.py +0 -330
- ads/automl/provider.py +0 -975
- ads/bds/__init__.py +0 -5
- ads/bds/auth.py +0 -127
- ads/bds/big_data_service.py +0 -255
- ads/catalog/__init__.py +0 -19
- ads/catalog/model.py +0 -1576
- ads/catalog/notebook.py +0 -461
- ads/catalog/project.py +0 -468
- ads/catalog/summary.py +0 -178
- ads/common/__init__.py +0 -11
- ads/common/analyzer.py +0 -65
- ads/common/artifact/.model-ignore +0 -63
- ads/common/artifact/__init__.py +0 -10
- ads/common/auth.py +0 -1122
- ads/common/card_identifier.py +0 -83
- ads/common/config.py +0 -647
- ads/common/data.py +0 -165
- ads/common/decorator/__init__.py +0 -9
- ads/common/decorator/argument_to_case.py +0 -88
- ads/common/decorator/deprecate.py +0 -69
- ads/common/decorator/require_nonempty_arg.py +0 -65
- ads/common/decorator/runtime_dependency.py +0 -178
- ads/common/decorator/threaded.py +0 -97
- ads/common/decorator/utils.py +0 -35
- ads/common/dsc_file_system.py +0 -303
- ads/common/error.py +0 -14
- ads/common/extended_enum.py +0 -81
- ads/common/function/__init__.py +0 -5
- ads/common/function/fn_util.py +0 -142
- ads/common/function/func_conf.yaml +0 -25
- ads/common/ipython.py +0 -76
- ads/common/model.py +0 -679
- ads/common/model_artifact.py +0 -1759
- ads/common/model_artifact_schema.json +0 -107
- ads/common/model_export_util.py +0 -664
- ads/common/model_metadata.py +0 -24
- ads/common/object_storage_details.py +0 -296
- ads/common/oci_client.py +0 -175
- ads/common/oci_datascience.py +0 -46
- ads/common/oci_logging.py +0 -1144
- ads/common/oci_mixin.py +0 -957
- ads/common/oci_resource.py +0 -136
- ads/common/serializer.py +0 -559
- ads/common/utils.py +0 -1852
- ads/common/word_lists.py +0 -1491
- ads/common/work_request.py +0 -189
- ads/data_labeling/__init__.py +0 -13
- ads/data_labeling/boundingbox.py +0 -253
- ads/data_labeling/constants.py +0 -47
- ads/data_labeling/data_labeling_service.py +0 -244
- ads/data_labeling/interface/__init__.py +0 -5
- ads/data_labeling/interface/loader.py +0 -16
- ads/data_labeling/interface/parser.py +0 -16
- ads/data_labeling/interface/reader.py +0 -23
- ads/data_labeling/loader/__init__.py +0 -5
- ads/data_labeling/loader/file_loader.py +0 -241
- ads/data_labeling/metadata.py +0 -110
- ads/data_labeling/mixin/__init__.py +0 -5
- ads/data_labeling/mixin/data_labeling.py +0 -232
- ads/data_labeling/ner.py +0 -129
- ads/data_labeling/parser/__init__.py +0 -5
- ads/data_labeling/parser/dls_record_parser.py +0 -388
- ads/data_labeling/parser/export_metadata_parser.py +0 -94
- ads/data_labeling/parser/export_record_parser.py +0 -473
- ads/data_labeling/reader/__init__.py +0 -5
- ads/data_labeling/reader/dataset_reader.py +0 -574
- ads/data_labeling/reader/dls_record_reader.py +0 -121
- ads/data_labeling/reader/export_record_reader.py +0 -62
- ads/data_labeling/reader/jsonl_reader.py +0 -75
- ads/data_labeling/reader/metadata_reader.py +0 -203
- ads/data_labeling/reader/record_reader.py +0 -263
- ads/data_labeling/record.py +0 -52
- ads/data_labeling/visualizer/__init__.py +0 -5
- ads/data_labeling/visualizer/image_visualizer.py +0 -525
- ads/data_labeling/visualizer/text_visualizer.py +0 -357
- ads/database/__init__.py +0 -5
- ads/database/connection.py +0 -338
- ads/dataset/__init__.py +0 -10
- ads/dataset/capabilities.md +0 -51
- ads/dataset/classification_dataset.py +0 -339
- ads/dataset/correlation.py +0 -226
- ads/dataset/correlation_plot.py +0 -563
- ads/dataset/dask_series.py +0 -173
- ads/dataset/dataframe_transformer.py +0 -110
- ads/dataset/dataset.py +0 -1979
- ads/dataset/dataset_browser.py +0 -360
- ads/dataset/dataset_with_target.py +0 -995
- ads/dataset/exception.py +0 -25
- ads/dataset/factory.py +0 -987
- ads/dataset/feature_engineering_transformer.py +0 -35
- ads/dataset/feature_selection.py +0 -107
- ads/dataset/forecasting_dataset.py +0 -26
- ads/dataset/helper.py +0 -1450
- ads/dataset/label_encoder.py +0 -99
- ads/dataset/mixin/__init__.py +0 -5
- ads/dataset/mixin/dataset_accessor.py +0 -134
- ads/dataset/pipeline.py +0 -58
- ads/dataset/plot.py +0 -710
- ads/dataset/progress.py +0 -86
- ads/dataset/recommendation.py +0 -297
- ads/dataset/recommendation_transformer.py +0 -502
- ads/dataset/regression_dataset.py +0 -14
- ads/dataset/sampled_dataset.py +0 -1050
- ads/dataset/target.py +0 -98
- ads/dataset/timeseries.py +0 -18
- ads/dbmixin/__init__.py +0 -5
- ads/dbmixin/db_pandas_accessor.py +0 -153
- ads/environment/__init__.py +0 -9
- ads/environment/ml_runtime.py +0 -66
- ads/evaluations/README.md +0 -14
- ads/evaluations/__init__.py +0 -109
- ads/evaluations/evaluation_plot.py +0 -983
- ads/evaluations/evaluator.py +0 -1334
- ads/evaluations/statistical_metrics.py +0 -543
- ads/experiments/__init__.py +0 -9
- ads/experiments/capabilities.md +0 -0
- ads/explanations/__init__.py +0 -21
- ads/explanations/base_explainer.py +0 -142
- ads/explanations/capabilities.md +0 -83
- ads/explanations/explainer.py +0 -190
- ads/explanations/mlx_global_explainer.py +0 -1050
- ads/explanations/mlx_interface.py +0 -386
- ads/explanations/mlx_local_explainer.py +0 -287
- ads/explanations/mlx_whatif_explainer.py +0 -201
- ads/feature_engineering/__init__.py +0 -20
- ads/feature_engineering/accessor/__init__.py +0 -5
- ads/feature_engineering/accessor/dataframe_accessor.py +0 -535
- ads/feature_engineering/accessor/mixin/__init__.py +0 -5
- ads/feature_engineering/accessor/mixin/correlation.py +0 -166
- ads/feature_engineering/accessor/mixin/eda_mixin.py +0 -266
- ads/feature_engineering/accessor/mixin/eda_mixin_series.py +0 -85
- ads/feature_engineering/accessor/mixin/feature_types_mixin.py +0 -211
- ads/feature_engineering/accessor/mixin/utils.py +0 -65
- ads/feature_engineering/accessor/series_accessor.py +0 -431
- ads/feature_engineering/adsimage/__init__.py +0 -5
- ads/feature_engineering/adsimage/image.py +0 -192
- ads/feature_engineering/adsimage/image_reader.py +0 -170
- ads/feature_engineering/adsimage/interface/__init__.py +0 -5
- ads/feature_engineering/adsimage/interface/reader.py +0 -19
- ads/feature_engineering/adsstring/__init__.py +0 -7
- ads/feature_engineering/adsstring/oci_language/__init__.py +0 -8
- ads/feature_engineering/adsstring/string/__init__.py +0 -8
- ads/feature_engineering/data_schema.json +0 -57
- ads/feature_engineering/dataset/__init__.py +0 -5
- ads/feature_engineering/dataset/zip_code_data.py +0 -42062
- ads/feature_engineering/exceptions.py +0 -40
- ads/feature_engineering/feature_type/__init__.py +0 -133
- ads/feature_engineering/feature_type/address.py +0 -184
- ads/feature_engineering/feature_type/adsstring/__init__.py +0 -5
- ads/feature_engineering/feature_type/adsstring/common_regex_mixin.py +0 -164
- ads/feature_engineering/feature_type/adsstring/oci_language.py +0 -93
- ads/feature_engineering/feature_type/adsstring/parsers/__init__.py +0 -5
- ads/feature_engineering/feature_type/adsstring/parsers/base.py +0 -47
- ads/feature_engineering/feature_type/adsstring/parsers/nltk_parser.py +0 -96
- ads/feature_engineering/feature_type/adsstring/parsers/spacy_parser.py +0 -221
- ads/feature_engineering/feature_type/adsstring/string.py +0 -258
- ads/feature_engineering/feature_type/base.py +0 -58
- ads/feature_engineering/feature_type/boolean.py +0 -183
- ads/feature_engineering/feature_type/category.py +0 -146
- ads/feature_engineering/feature_type/constant.py +0 -137
- ads/feature_engineering/feature_type/continuous.py +0 -151
- ads/feature_engineering/feature_type/creditcard.py +0 -314
- ads/feature_engineering/feature_type/datetime.py +0 -190
- ads/feature_engineering/feature_type/discrete.py +0 -134
- ads/feature_engineering/feature_type/document.py +0 -43
- ads/feature_engineering/feature_type/gis.py +0 -251
- ads/feature_engineering/feature_type/handler/__init__.py +0 -5
- ads/feature_engineering/feature_type/handler/feature_validator.py +0 -524
- ads/feature_engineering/feature_type/handler/feature_warning.py +0 -319
- ads/feature_engineering/feature_type/handler/warnings.py +0 -128
- ads/feature_engineering/feature_type/integer.py +0 -142
- ads/feature_engineering/feature_type/ip_address.py +0 -144
- ads/feature_engineering/feature_type/ip_address_v4.py +0 -138
- ads/feature_engineering/feature_type/ip_address_v6.py +0 -138
- ads/feature_engineering/feature_type/lat_long.py +0 -256
- ads/feature_engineering/feature_type/object.py +0 -43
- ads/feature_engineering/feature_type/ordinal.py +0 -132
- ads/feature_engineering/feature_type/phone_number.py +0 -135
- ads/feature_engineering/feature_type/string.py +0 -171
- ads/feature_engineering/feature_type/text.py +0 -93
- ads/feature_engineering/feature_type/unknown.py +0 -43
- ads/feature_engineering/feature_type/zip_code.py +0 -164
- ads/feature_engineering/feature_type_manager.py +0 -406
- ads/feature_engineering/schema.py +0 -795
- ads/feature_engineering/utils.py +0 -245
- ads/feature_store/.readthedocs.yaml +0 -19
- ads/feature_store/README.md +0 -65
- ads/feature_store/__init__.py +0 -9
- ads/feature_store/common/__init__.py +0 -0
- ads/feature_store/common/enums.py +0 -339
- ads/feature_store/common/exceptions.py +0 -18
- ads/feature_store/common/spark_session_singleton.py +0 -125
- ads/feature_store/common/utils/__init__.py +0 -0
- ads/feature_store/common/utils/base64_encoder_decoder.py +0 -72
- ads/feature_store/common/utils/feature_schema_mapper.py +0 -283
- ads/feature_store/common/utils/transformation_utils.py +0 -82
- ads/feature_store/common/utils/utility.py +0 -403
- ads/feature_store/data_validation/__init__.py +0 -0
- ads/feature_store/data_validation/great_expectation.py +0 -129
- ads/feature_store/dataset.py +0 -1230
- ads/feature_store/dataset_job.py +0 -530
- ads/feature_store/docs/Dockerfile +0 -7
- ads/feature_store/docs/Makefile +0 -44
- ads/feature_store/docs/conf.py +0 -28
- ads/feature_store/docs/requirements.txt +0 -14
- ads/feature_store/docs/source/ads.feature_store.query.rst +0 -20
- ads/feature_store/docs/source/cicd.rst +0 -137
- ads/feature_store/docs/source/conf.py +0 -86
- ads/feature_store/docs/source/data_versioning.rst +0 -33
- ads/feature_store/docs/source/dataset.rst +0 -388
- ads/feature_store/docs/source/dataset_job.rst +0 -27
- ads/feature_store/docs/source/demo.rst +0 -70
- ads/feature_store/docs/source/entity.rst +0 -78
- ads/feature_store/docs/source/feature_group.rst +0 -624
- ads/feature_store/docs/source/feature_group_job.rst +0 -29
- ads/feature_store/docs/source/feature_store.rst +0 -122
- ads/feature_store/docs/source/feature_store_class.rst +0 -123
- ads/feature_store/docs/source/feature_validation.rst +0 -66
- ads/feature_store/docs/source/figures/cicd.png +0 -0
- ads/feature_store/docs/source/figures/data_validation.png +0 -0
- ads/feature_store/docs/source/figures/data_versioning.png +0 -0
- ads/feature_store/docs/source/figures/dataset.gif +0 -0
- ads/feature_store/docs/source/figures/dataset.png +0 -0
- ads/feature_store/docs/source/figures/dataset_lineage.png +0 -0
- ads/feature_store/docs/source/figures/dataset_statistics.png +0 -0
- ads/feature_store/docs/source/figures/dataset_statistics_viz.png +0 -0
- ads/feature_store/docs/source/figures/dataset_validation_results.png +0 -0
- ads/feature_store/docs/source/figures/dataset_validation_summary.png +0 -0
- ads/feature_store/docs/source/figures/drift_monitoring.png +0 -0
- ads/feature_store/docs/source/figures/entity.png +0 -0
- ads/feature_store/docs/source/figures/feature_group.png +0 -0
- ads/feature_store/docs/source/figures/feature_group_lineage.png +0 -0
- ads/feature_store/docs/source/figures/feature_group_statistics_viz.png +0 -0
- ads/feature_store/docs/source/figures/feature_store_deployment.png +0 -0
- ads/feature_store/docs/source/figures/feature_store_overview.png +0 -0
- ads/feature_store/docs/source/figures/featuregroup.gif +0 -0
- ads/feature_store/docs/source/figures/lineage_d1.png +0 -0
- ads/feature_store/docs/source/figures/lineage_d2.png +0 -0
- ads/feature_store/docs/source/figures/lineage_fg.png +0 -0
- ads/feature_store/docs/source/figures/logo-dark-mode.png +0 -0
- ads/feature_store/docs/source/figures/logo-light-mode.png +0 -0
- ads/feature_store/docs/source/figures/overview.png +0 -0
- ads/feature_store/docs/source/figures/resource_manager.png +0 -0
- ads/feature_store/docs/source/figures/resource_manager_feature_store_stack.png +0 -0
- ads/feature_store/docs/source/figures/resource_manager_home.png +0 -0
- ads/feature_store/docs/source/figures/stats_1.png +0 -0
- ads/feature_store/docs/source/figures/stats_2.png +0 -0
- ads/feature_store/docs/source/figures/stats_d.png +0 -0
- ads/feature_store/docs/source/figures/stats_fg.png +0 -0
- ads/feature_store/docs/source/figures/transformation.png +0 -0
- ads/feature_store/docs/source/figures/transformations.gif +0 -0
- ads/feature_store/docs/source/figures/validation.png +0 -0
- ads/feature_store/docs/source/figures/validation_fg.png +0 -0
- ads/feature_store/docs/source/figures/validation_results.png +0 -0
- ads/feature_store/docs/source/figures/validation_summary.png +0 -0
- ads/feature_store/docs/source/index.rst +0 -81
- ads/feature_store/docs/source/module.rst +0 -8
- ads/feature_store/docs/source/notebook.rst +0 -94
- ads/feature_store/docs/source/overview.rst +0 -47
- ads/feature_store/docs/source/quickstart.rst +0 -176
- ads/feature_store/docs/source/release_notes.rst +0 -194
- ads/feature_store/docs/source/setup_feature_store.rst +0 -81
- ads/feature_store/docs/source/statistics.rst +0 -58
- ads/feature_store/docs/source/transformation.rst +0 -199
- ads/feature_store/docs/source/ui.rst +0 -65
- ads/feature_store/docs/source/user_guides.setup.feature_store_operator.rst +0 -66
- ads/feature_store/docs/source/user_guides.setup.helm_chart.rst +0 -192
- ads/feature_store/docs/source/user_guides.setup.terraform.rst +0 -338
- ads/feature_store/entity.py +0 -718
- ads/feature_store/execution_strategy/__init__.py +0 -0
- ads/feature_store/execution_strategy/delta_lake/__init__.py +0 -0
- ads/feature_store/execution_strategy/delta_lake/delta_lake_service.py +0 -375
- ads/feature_store/execution_strategy/engine/__init__.py +0 -0
- ads/feature_store/execution_strategy/engine/spark_engine.py +0 -316
- ads/feature_store/execution_strategy/execution_strategy.py +0 -113
- ads/feature_store/execution_strategy/execution_strategy_provider.py +0 -47
- ads/feature_store/execution_strategy/spark/__init__.py +0 -0
- ads/feature_store/execution_strategy/spark/spark_execution.py +0 -618
- ads/feature_store/feature.py +0 -192
- ads/feature_store/feature_group.py +0 -1494
- ads/feature_store/feature_group_expectation.py +0 -346
- ads/feature_store/feature_group_job.py +0 -602
- ads/feature_store/feature_lineage/__init__.py +0 -0
- ads/feature_store/feature_lineage/graphviz_service.py +0 -180
- ads/feature_store/feature_option_details.py +0 -50
- ads/feature_store/feature_statistics/__init__.py +0 -0
- ads/feature_store/feature_statistics/statistics_service.py +0 -99
- ads/feature_store/feature_store.py +0 -699
- ads/feature_store/feature_store_registrar.py +0 -518
- ads/feature_store/input_feature_detail.py +0 -149
- ads/feature_store/mixin/__init__.py +0 -4
- ads/feature_store/mixin/oci_feature_store.py +0 -145
- ads/feature_store/model_details.py +0 -73
- ads/feature_store/query/__init__.py +0 -0
- ads/feature_store/query/filter.py +0 -266
- ads/feature_store/query/generator/__init__.py +0 -0
- ads/feature_store/query/generator/query_generator.py +0 -298
- ads/feature_store/query/join.py +0 -161
- ads/feature_store/query/query.py +0 -403
- ads/feature_store/query/validator/__init__.py +0 -0
- ads/feature_store/query/validator/query_validator.py +0 -57
- ads/feature_store/response/__init__.py +0 -0
- ads/feature_store/response/response_builder.py +0 -68
- ads/feature_store/service/__init__.py +0 -0
- ads/feature_store/service/oci_dataset.py +0 -139
- ads/feature_store/service/oci_dataset_job.py +0 -199
- ads/feature_store/service/oci_entity.py +0 -125
- ads/feature_store/service/oci_feature_group.py +0 -164
- ads/feature_store/service/oci_feature_group_job.py +0 -214
- ads/feature_store/service/oci_feature_store.py +0 -182
- ads/feature_store/service/oci_lineage.py +0 -87
- ads/feature_store/service/oci_transformation.py +0 -104
- ads/feature_store/statistics/__init__.py +0 -0
- ads/feature_store/statistics/abs_feature_value.py +0 -49
- ads/feature_store/statistics/charts/__init__.py +0 -0
- ads/feature_store/statistics/charts/abstract_feature_plot.py +0 -37
- ads/feature_store/statistics/charts/box_plot.py +0 -148
- ads/feature_store/statistics/charts/frequency_distribution.py +0 -65
- ads/feature_store/statistics/charts/probability_distribution.py +0 -68
- ads/feature_store/statistics/charts/top_k_frequent_elements.py +0 -98
- ads/feature_store/statistics/feature_stat.py +0 -126
- ads/feature_store/statistics/generic_feature_value.py +0 -33
- ads/feature_store/statistics/statistics.py +0 -41
- ads/feature_store/statistics_config.py +0 -101
- ads/feature_store/templates/feature_store_template.yaml +0 -45
- ads/feature_store/transformation.py +0 -499
- ads/feature_store/validation_output.py +0 -57
- ads/hpo/__init__.py +0 -9
- ads/hpo/_imports.py +0 -91
- ads/hpo/ads_search_space.py +0 -439
- ads/hpo/distributions.py +0 -325
- ads/hpo/objective.py +0 -280
- ads/hpo/search_cv.py +0 -1657
- ads/hpo/stopping_criterion.py +0 -75
- ads/hpo/tuner_artifact.py +0 -413
- ads/hpo/utils.py +0 -91
- ads/hpo/validation.py +0 -140
- ads/hpo/visualization/__init__.py +0 -5
- ads/hpo/visualization/_contour.py +0 -23
- ads/hpo/visualization/_edf.py +0 -20
- ads/hpo/visualization/_intermediate_values.py +0 -21
- ads/hpo/visualization/_optimization_history.py +0 -25
- ads/hpo/visualization/_parallel_coordinate.py +0 -169
- ads/hpo/visualization/_param_importances.py +0 -26
- ads/jobs/__init__.py +0 -53
- ads/jobs/ads_job.py +0 -663
- ads/jobs/builders/__init__.py +0 -5
- ads/jobs/builders/base.py +0 -156
- ads/jobs/builders/infrastructure/__init__.py +0 -6
- ads/jobs/builders/infrastructure/base.py +0 -165
- ads/jobs/builders/infrastructure/dataflow.py +0 -1252
- ads/jobs/builders/infrastructure/dsc_job.py +0 -1894
- ads/jobs/builders/infrastructure/dsc_job_runtime.py +0 -1233
- ads/jobs/builders/infrastructure/utils.py +0 -65
- ads/jobs/builders/runtimes/__init__.py +0 -5
- ads/jobs/builders/runtimes/artifact.py +0 -338
- ads/jobs/builders/runtimes/base.py +0 -325
- ads/jobs/builders/runtimes/container_runtime.py +0 -242
- ads/jobs/builders/runtimes/python_runtime.py +0 -1016
- ads/jobs/builders/runtimes/pytorch_runtime.py +0 -204
- ads/jobs/cli.py +0 -104
- ads/jobs/env_var_parser.py +0 -131
- ads/jobs/extension.py +0 -160
- ads/jobs/schema/__init__.py +0 -5
- ads/jobs/schema/infrastructure_schema.json +0 -116
- ads/jobs/schema/job_schema.json +0 -42
- ads/jobs/schema/runtime_schema.json +0 -183
- ads/jobs/schema/validator.py +0 -141
- ads/jobs/serializer.py +0 -296
- ads/jobs/templates/__init__.py +0 -5
- ads/jobs/templates/container.py +0 -6
- ads/jobs/templates/driver_notebook.py +0 -177
- ads/jobs/templates/driver_oci.py +0 -500
- ads/jobs/templates/driver_python.py +0 -48
- ads/jobs/templates/driver_pytorch.py +0 -852
- ads/jobs/templates/driver_utils.py +0 -615
- ads/jobs/templates/hostname_from_env.c +0 -55
- ads/jobs/templates/oci_metrics.py +0 -181
- ads/jobs/utils.py +0 -104
- ads/llm/__init__.py +0 -28
- ads/llm/autogen/__init__.py +0 -2
- ads/llm/autogen/constants.py +0 -15
- ads/llm/autogen/reports/__init__.py +0 -2
- ads/llm/autogen/reports/base.py +0 -67
- ads/llm/autogen/reports/data.py +0 -103
- ads/llm/autogen/reports/session.py +0 -526
- ads/llm/autogen/reports/templates/chat_box.html +0 -13
- ads/llm/autogen/reports/templates/chat_box_lt.html +0 -5
- ads/llm/autogen/reports/templates/chat_box_rt.html +0 -6
- ads/llm/autogen/reports/utils.py +0 -56
- ads/llm/autogen/v02/__init__.py +0 -4
- ads/llm/autogen/v02/client.py +0 -295
- ads/llm/autogen/v02/log_handlers/__init__.py +0 -2
- ads/llm/autogen/v02/log_handlers/oci_file_handler.py +0 -83
- ads/llm/autogen/v02/loggers/__init__.py +0 -6
- ads/llm/autogen/v02/loggers/metric_logger.py +0 -320
- ads/llm/autogen/v02/loggers/session_logger.py +0 -580
- ads/llm/autogen/v02/loggers/utils.py +0 -86
- ads/llm/autogen/v02/runtime_logging.py +0 -163
- ads/llm/chain.py +0 -268
- ads/llm/chat_template.py +0 -31
- ads/llm/deploy.py +0 -63
- ads/llm/guardrails/__init__.py +0 -5
- ads/llm/guardrails/base.py +0 -442
- ads/llm/guardrails/huggingface.py +0 -44
- ads/llm/langchain/__init__.py +0 -5
- ads/llm/langchain/plugins/__init__.py +0 -5
- ads/llm/langchain/plugins/chat_models/__init__.py +0 -5
- ads/llm/langchain/plugins/chat_models/oci_data_science.py +0 -1027
- ads/llm/langchain/plugins/embeddings/__init__.py +0 -4
- ads/llm/langchain/plugins/embeddings/oci_data_science_model_deployment_endpoint.py +0 -184
- ads/llm/langchain/plugins/llms/__init__.py +0 -5
- ads/llm/langchain/plugins/llms/oci_data_science_model_deployment_endpoint.py +0 -979
- ads/llm/requirements.txt +0 -3
- ads/llm/serialize.py +0 -219
- ads/llm/serializers/__init__.py +0 -0
- ads/llm/serializers/retrieval_qa.py +0 -153
- ads/llm/serializers/runnable_parallel.py +0 -27
- ads/llm/templates/score_chain.jinja2 +0 -155
- ads/llm/templates/tool_chat_template_hermes.jinja +0 -130
- ads/llm/templates/tool_chat_template_mistral_parallel.jinja +0 -94
- ads/model/__init__.py +0 -52
- ads/model/artifact.py +0 -573
- ads/model/artifact_downloader.py +0 -254
- ads/model/artifact_uploader.py +0 -267
- ads/model/base_properties.py +0 -238
- ads/model/common/.model-ignore +0 -66
- ads/model/common/__init__.py +0 -5
- ads/model/common/utils.py +0 -142
- ads/model/datascience_model.py +0 -2635
- ads/model/deployment/__init__.py +0 -20
- ads/model/deployment/common/__init__.py +0 -5
- ads/model/deployment/common/utils.py +0 -308
- ads/model/deployment/model_deployer.py +0 -466
- ads/model/deployment/model_deployment.py +0 -1846
- ads/model/deployment/model_deployment_infrastructure.py +0 -671
- ads/model/deployment/model_deployment_properties.py +0 -493
- ads/model/deployment/model_deployment_runtime.py +0 -838
- ads/model/extractor/__init__.py +0 -5
- ads/model/extractor/automl_extractor.py +0 -74
- ads/model/extractor/embedding_onnx_extractor.py +0 -80
- ads/model/extractor/huggingface_extractor.py +0 -88
- ads/model/extractor/keras_extractor.py +0 -84
- ads/model/extractor/lightgbm_extractor.py +0 -93
- ads/model/extractor/model_info_extractor.py +0 -114
- ads/model/extractor/model_info_extractor_factory.py +0 -105
- ads/model/extractor/pytorch_extractor.py +0 -87
- ads/model/extractor/sklearn_extractor.py +0 -112
- ads/model/extractor/spark_extractor.py +0 -89
- ads/model/extractor/tensorflow_extractor.py +0 -85
- ads/model/extractor/xgboost_extractor.py +0 -94
- ads/model/framework/__init__.py +0 -5
- ads/model/framework/automl_model.py +0 -178
- ads/model/framework/embedding_onnx_model.py +0 -438
- ads/model/framework/huggingface_model.py +0 -399
- ads/model/framework/lightgbm_model.py +0 -266
- ads/model/framework/pytorch_model.py +0 -266
- ads/model/framework/sklearn_model.py +0 -250
- ads/model/framework/spark_model.py +0 -326
- ads/model/framework/tensorflow_model.py +0 -254
- ads/model/framework/xgboost_model.py +0 -258
- ads/model/generic_model.py +0 -3518
- ads/model/model_artifact_boilerplate/README.md +0 -381
- ads/model/model_artifact_boilerplate/__init__.py +0 -5
- ads/model/model_artifact_boilerplate/artifact_introspection_test/__init__.py +0 -5
- ads/model/model_artifact_boilerplate/artifact_introspection_test/model_artifact_validate.py +0 -427
- ads/model/model_artifact_boilerplate/artifact_introspection_test/requirements.txt +0 -2
- ads/model/model_artifact_boilerplate/runtime.yaml +0 -7
- ads/model/model_artifact_boilerplate/score.py +0 -61
- ads/model/model_file_description_schema.json +0 -68
- ads/model/model_introspect.py +0 -331
- ads/model/model_metadata.py +0 -1810
- ads/model/model_metadata_mixin.py +0 -460
- ads/model/model_properties.py +0 -63
- ads/model/model_version_set.py +0 -739
- ads/model/runtime/__init__.py +0 -5
- ads/model/runtime/env_info.py +0 -306
- ads/model/runtime/model_deployment_details.py +0 -37
- ads/model/runtime/model_provenance_details.py +0 -58
- ads/model/runtime/runtime_info.py +0 -81
- ads/model/runtime/schemas/inference_env_info_schema.yaml +0 -16
- ads/model/runtime/schemas/model_provenance_schema.yaml +0 -36
- ads/model/runtime/schemas/training_env_info_schema.yaml +0 -16
- ads/model/runtime/utils.py +0 -201
- ads/model/serde/__init__.py +0 -5
- ads/model/serde/common.py +0 -40
- ads/model/serde/model_input.py +0 -547
- ads/model/serde/model_serializer.py +0 -1184
- ads/model/service/__init__.py +0 -5
- ads/model/service/oci_datascience_model.py +0 -1076
- ads/model/service/oci_datascience_model_deployment.py +0 -500
- ads/model/service/oci_datascience_model_version_set.py +0 -176
- ads/model/transformer/__init__.py +0 -5
- ads/model/transformer/onnx_transformer.py +0 -324
- ads/mysqldb/__init__.py +0 -5
- ads/mysqldb/mysql_db.py +0 -227
- ads/opctl/__init__.py +0 -18
- ads/opctl/anomaly_detection.py +0 -11
- ads/opctl/backend/__init__.py +0 -5
- ads/opctl/backend/ads_dataflow.py +0 -353
- ads/opctl/backend/ads_ml_job.py +0 -710
- ads/opctl/backend/ads_ml_pipeline.py +0 -164
- ads/opctl/backend/ads_model_deployment.py +0 -209
- ads/opctl/backend/base.py +0 -146
- ads/opctl/backend/local.py +0 -1053
- ads/opctl/backend/marketplace/__init__.py +0 -9
- ads/opctl/backend/marketplace/helm_helper.py +0 -173
- ads/opctl/backend/marketplace/local_marketplace.py +0 -271
- ads/opctl/backend/marketplace/marketplace_backend_runner.py +0 -71
- ads/opctl/backend/marketplace/marketplace_operator_interface.py +0 -44
- ads/opctl/backend/marketplace/marketplace_operator_runner.py +0 -24
- ads/opctl/backend/marketplace/marketplace_utils.py +0 -212
- ads/opctl/backend/marketplace/models/__init__.py +0 -5
- ads/opctl/backend/marketplace/models/bearer_token.py +0 -94
- ads/opctl/backend/marketplace/models/marketplace_type.py +0 -70
- ads/opctl/backend/marketplace/models/ocir_details.py +0 -56
- ads/opctl/backend/marketplace/prerequisite_checker.py +0 -238
- ads/opctl/cli.py +0 -707
- ads/opctl/cmds.py +0 -869
- ads/opctl/conda/__init__.py +0 -5
- ads/opctl/conda/cli.py +0 -193
- ads/opctl/conda/cmds.py +0 -749
- ads/opctl/conda/config.yaml +0 -34
- ads/opctl/conda/manifest_template.yaml +0 -13
- ads/opctl/conda/multipart_uploader.py +0 -188
- ads/opctl/conda/pack.py +0 -89
- ads/opctl/config/__init__.py +0 -5
- ads/opctl/config/base.py +0 -57
- ads/opctl/config/diagnostics/__init__.py +0 -5
- ads/opctl/config/diagnostics/distributed/default_requirements_config.yaml +0 -62
- ads/opctl/config/merger.py +0 -255
- ads/opctl/config/resolver.py +0 -297
- ads/opctl/config/utils.py +0 -79
- ads/opctl/config/validator.py +0 -17
- ads/opctl/config/versioner.py +0 -68
- ads/opctl/config/yaml_parsers/__init__.py +0 -7
- ads/opctl/config/yaml_parsers/base.py +0 -58
- ads/opctl/config/yaml_parsers/distributed/__init__.py +0 -7
- ads/opctl/config/yaml_parsers/distributed/yaml_parser.py +0 -201
- ads/opctl/constants.py +0 -66
- ads/opctl/decorator/__init__.py +0 -5
- ads/opctl/decorator/common.py +0 -129
- ads/opctl/diagnostics/__init__.py +0 -5
- ads/opctl/diagnostics/__main__.py +0 -25
- ads/opctl/diagnostics/check_distributed_job_requirements.py +0 -212
- ads/opctl/diagnostics/check_requirements.py +0 -144
- ads/opctl/diagnostics/requirement_exception.py +0 -9
- ads/opctl/distributed/README.md +0 -109
- ads/opctl/distributed/__init__.py +0 -5
- ads/opctl/distributed/certificates.py +0 -32
- ads/opctl/distributed/cli.py +0 -207
- ads/opctl/distributed/cmds.py +0 -731
- ads/opctl/distributed/common/__init__.py +0 -5
- ads/opctl/distributed/common/abstract_cluster_provider.py +0 -449
- ads/opctl/distributed/common/abstract_framework_spec_builder.py +0 -88
- ads/opctl/distributed/common/cluster_config_helper.py +0 -103
- ads/opctl/distributed/common/cluster_provider_factory.py +0 -21
- ads/opctl/distributed/common/cluster_runner.py +0 -54
- ads/opctl/distributed/common/framework_factory.py +0 -29
- ads/opctl/docker/Dockerfile.job +0 -103
- ads/opctl/docker/Dockerfile.job.arm +0 -107
- ads/opctl/docker/Dockerfile.job.gpu +0 -175
- ads/opctl/docker/base-env.yaml +0 -13
- ads/opctl/docker/cuda.repo +0 -6
- ads/opctl/docker/operator/.dockerignore +0 -0
- ads/opctl/docker/operator/Dockerfile +0 -41
- ads/opctl/docker/operator/Dockerfile.gpu +0 -85
- ads/opctl/docker/operator/cuda.repo +0 -6
- ads/opctl/docker/operator/environment.yaml +0 -8
- ads/opctl/forecast.py +0 -11
- ads/opctl/index.yaml +0 -3
- ads/opctl/model/__init__.py +0 -5
- ads/opctl/model/cli.py +0 -65
- ads/opctl/model/cmds.py +0 -73
- ads/opctl/operator/README.md +0 -4
- ads/opctl/operator/__init__.py +0 -31
- ads/opctl/operator/cli.py +0 -344
- ads/opctl/operator/cmd.py +0 -596
- ads/opctl/operator/common/__init__.py +0 -5
- ads/opctl/operator/common/backend_factory.py +0 -460
- ads/opctl/operator/common/const.py +0 -27
- ads/opctl/operator/common/data/synthetic.csv +0 -16001
- ads/opctl/operator/common/dictionary_merger.py +0 -148
- ads/opctl/operator/common/errors.py +0 -42
- ads/opctl/operator/common/operator_config.py +0 -99
- ads/opctl/operator/common/operator_loader.py +0 -811
- ads/opctl/operator/common/operator_schema.yaml +0 -130
- ads/opctl/operator/common/operator_yaml_generator.py +0 -152
- ads/opctl/operator/common/utils.py +0 -208
- ads/opctl/operator/lowcode/__init__.py +0 -5
- ads/opctl/operator/lowcode/anomaly/MLoperator +0 -16
- ads/opctl/operator/lowcode/anomaly/README.md +0 -207
- ads/opctl/operator/lowcode/anomaly/__init__.py +0 -5
- ads/opctl/operator/lowcode/anomaly/__main__.py +0 -103
- ads/opctl/operator/lowcode/anomaly/cmd.py +0 -35
- ads/opctl/operator/lowcode/anomaly/const.py +0 -167
- ads/opctl/operator/lowcode/anomaly/environment.yaml +0 -10
- ads/opctl/operator/lowcode/anomaly/model/__init__.py +0 -5
- ads/opctl/operator/lowcode/anomaly/model/anomaly_dataset.py +0 -146
- ads/opctl/operator/lowcode/anomaly/model/anomaly_merlion.py +0 -162
- ads/opctl/operator/lowcode/anomaly/model/automlx.py +0 -99
- ads/opctl/operator/lowcode/anomaly/model/autots.py +0 -115
- ads/opctl/operator/lowcode/anomaly/model/base_model.py +0 -404
- ads/opctl/operator/lowcode/anomaly/model/factory.py +0 -110
- ads/opctl/operator/lowcode/anomaly/model/isolationforest.py +0 -78
- ads/opctl/operator/lowcode/anomaly/model/oneclasssvm.py +0 -78
- ads/opctl/operator/lowcode/anomaly/model/randomcutforest.py +0 -120
- ads/opctl/operator/lowcode/anomaly/model/tods.py +0 -119
- ads/opctl/operator/lowcode/anomaly/operator_config.py +0 -127
- ads/opctl/operator/lowcode/anomaly/schema.yaml +0 -401
- ads/opctl/operator/lowcode/anomaly/utils.py +0 -88
- ads/opctl/operator/lowcode/common/__init__.py +0 -5
- ads/opctl/operator/lowcode/common/const.py +0 -10
- ads/opctl/operator/lowcode/common/data.py +0 -116
- ads/opctl/operator/lowcode/common/errors.py +0 -47
- ads/opctl/operator/lowcode/common/transformations.py +0 -296
- ads/opctl/operator/lowcode/common/utils.py +0 -293
- ads/opctl/operator/lowcode/feature_store_marketplace/MLoperator +0 -13
- ads/opctl/operator/lowcode/feature_store_marketplace/README.md +0 -30
- ads/opctl/operator/lowcode/feature_store_marketplace/__init__.py +0 -5
- ads/opctl/operator/lowcode/feature_store_marketplace/__main__.py +0 -116
- ads/opctl/operator/lowcode/feature_store_marketplace/cmd.py +0 -85
- ads/opctl/operator/lowcode/feature_store_marketplace/const.py +0 -15
- ads/opctl/operator/lowcode/feature_store_marketplace/environment.yaml +0 -0
- ads/opctl/operator/lowcode/feature_store_marketplace/models/__init__.py +0 -4
- ads/opctl/operator/lowcode/feature_store_marketplace/models/apigw_config.py +0 -32
- ads/opctl/operator/lowcode/feature_store_marketplace/models/db_config.py +0 -43
- ads/opctl/operator/lowcode/feature_store_marketplace/models/mysql_config.py +0 -120
- ads/opctl/operator/lowcode/feature_store_marketplace/models/serializable_yaml_model.py +0 -34
- ads/opctl/operator/lowcode/feature_store_marketplace/operator_utils.py +0 -386
- ads/opctl/operator/lowcode/feature_store_marketplace/schema.yaml +0 -160
- ads/opctl/operator/lowcode/forecast/MLoperator +0 -25
- ads/opctl/operator/lowcode/forecast/README.md +0 -209
- ads/opctl/operator/lowcode/forecast/__init__.py +0 -5
- ads/opctl/operator/lowcode/forecast/__main__.py +0 -89
- ads/opctl/operator/lowcode/forecast/cmd.py +0 -40
- ads/opctl/operator/lowcode/forecast/const.py +0 -92
- ads/opctl/operator/lowcode/forecast/environment.yaml +0 -20
- ads/opctl/operator/lowcode/forecast/errors.py +0 -26
- ads/opctl/operator/lowcode/forecast/model/__init__.py +0 -5
- ads/opctl/operator/lowcode/forecast/model/arima.py +0 -279
- ads/opctl/operator/lowcode/forecast/model/automlx.py +0 -542
- ads/opctl/operator/lowcode/forecast/model/autots.py +0 -312
- ads/opctl/operator/lowcode/forecast/model/base_model.py +0 -863
- ads/opctl/operator/lowcode/forecast/model/factory.py +0 -106
- ads/opctl/operator/lowcode/forecast/model/forecast_datasets.py +0 -492
- ads/opctl/operator/lowcode/forecast/model/ml_forecast.py +0 -243
- ads/opctl/operator/lowcode/forecast/model/neuralprophet.py +0 -486
- ads/opctl/operator/lowcode/forecast/model/prophet.py +0 -445
- ads/opctl/operator/lowcode/forecast/model_evaluator.py +0 -244
- ads/opctl/operator/lowcode/forecast/operator_config.py +0 -234
- ads/opctl/operator/lowcode/forecast/schema.yaml +0 -506
- ads/opctl/operator/lowcode/forecast/utils.py +0 -413
- ads/opctl/operator/lowcode/forecast/whatifserve/__init__.py +0 -7
- ads/opctl/operator/lowcode/forecast/whatifserve/deployment_manager.py +0 -285
- ads/opctl/operator/lowcode/forecast/whatifserve/score.py +0 -246
- ads/opctl/operator/lowcode/pii/MLoperator +0 -17
- ads/opctl/operator/lowcode/pii/README.md +0 -208
- ads/opctl/operator/lowcode/pii/__init__.py +0 -5
- ads/opctl/operator/lowcode/pii/__main__.py +0 -78
- ads/opctl/operator/lowcode/pii/cmd.py +0 -39
- ads/opctl/operator/lowcode/pii/constant.py +0 -84
- ads/opctl/operator/lowcode/pii/environment.yaml +0 -17
- ads/opctl/operator/lowcode/pii/errors.py +0 -27
- ads/opctl/operator/lowcode/pii/model/__init__.py +0 -5
- ads/opctl/operator/lowcode/pii/model/factory.py +0 -82
- ads/opctl/operator/lowcode/pii/model/guardrails.py +0 -167
- ads/opctl/operator/lowcode/pii/model/pii.py +0 -145
- ads/opctl/operator/lowcode/pii/model/processor/__init__.py +0 -34
- ads/opctl/operator/lowcode/pii/model/processor/email_replacer.py +0 -34
- ads/opctl/operator/lowcode/pii/model/processor/mbi_replacer.py +0 -35
- ads/opctl/operator/lowcode/pii/model/processor/name_replacer.py +0 -225
- ads/opctl/operator/lowcode/pii/model/processor/number_replacer.py +0 -73
- ads/opctl/operator/lowcode/pii/model/processor/remover.py +0 -26
- ads/opctl/operator/lowcode/pii/model/report.py +0 -487
- ads/opctl/operator/lowcode/pii/operator_config.py +0 -95
- ads/opctl/operator/lowcode/pii/schema.yaml +0 -108
- ads/opctl/operator/lowcode/pii/utils.py +0 -43
- ads/opctl/operator/lowcode/recommender/MLoperator +0 -16
- ads/opctl/operator/lowcode/recommender/README.md +0 -206
- ads/opctl/operator/lowcode/recommender/__init__.py +0 -5
- ads/opctl/operator/lowcode/recommender/__main__.py +0 -82
- ads/opctl/operator/lowcode/recommender/cmd.py +0 -33
- ads/opctl/operator/lowcode/recommender/constant.py +0 -30
- ads/opctl/operator/lowcode/recommender/environment.yaml +0 -11
- ads/opctl/operator/lowcode/recommender/model/base_model.py +0 -212
- ads/opctl/operator/lowcode/recommender/model/factory.py +0 -56
- ads/opctl/operator/lowcode/recommender/model/recommender_dataset.py +0 -25
- ads/opctl/operator/lowcode/recommender/model/svd.py +0 -106
- ads/opctl/operator/lowcode/recommender/operator_config.py +0 -81
- ads/opctl/operator/lowcode/recommender/schema.yaml +0 -265
- ads/opctl/operator/lowcode/recommender/utils.py +0 -13
- ads/opctl/operator/runtime/__init__.py +0 -5
- ads/opctl/operator/runtime/const.py +0 -17
- ads/opctl/operator/runtime/container_runtime_schema.yaml +0 -50
- ads/opctl/operator/runtime/marketplace_runtime.py +0 -50
- ads/opctl/operator/runtime/python_marketplace_runtime_schema.yaml +0 -21
- ads/opctl/operator/runtime/python_runtime_schema.yaml +0 -21
- ads/opctl/operator/runtime/runtime.py +0 -115
- ads/opctl/schema.yaml.yml +0 -36
- ads/opctl/script.py +0 -40
- ads/opctl/spark/__init__.py +0 -5
- ads/opctl/spark/cli.py +0 -43
- ads/opctl/spark/cmds.py +0 -147
- ads/opctl/templates/diagnostic_report_template.jinja2 +0 -102
- ads/opctl/utils.py +0 -344
- ads/oracledb/__init__.py +0 -5
- ads/oracledb/oracle_db.py +0 -346
- ads/pipeline/__init__.py +0 -39
- ads/pipeline/ads_pipeline.py +0 -2279
- ads/pipeline/ads_pipeline_run.py +0 -772
- ads/pipeline/ads_pipeline_step.py +0 -605
- ads/pipeline/builders/__init__.py +0 -5
- ads/pipeline/builders/infrastructure/__init__.py +0 -5
- ads/pipeline/builders/infrastructure/custom_script.py +0 -32
- ads/pipeline/cli.py +0 -119
- ads/pipeline/extension.py +0 -291
- ads/pipeline/schema/__init__.py +0 -5
- ads/pipeline/schema/cs_step_schema.json +0 -35
- ads/pipeline/schema/ml_step_schema.json +0 -31
- ads/pipeline/schema/pipeline_schema.json +0 -71
- ads/pipeline/visualizer/__init__.py +0 -5
- ads/pipeline/visualizer/base.py +0 -570
- ads/pipeline/visualizer/graph_renderer.py +0 -272
- ads/pipeline/visualizer/text_renderer.py +0 -84
- ads/secrets/__init__.py +0 -11
- ads/secrets/adb.py +0 -386
- ads/secrets/auth_token.py +0 -86
- ads/secrets/big_data_service.py +0 -365
- ads/secrets/mysqldb.py +0 -149
- ads/secrets/oracledb.py +0 -160
- ads/secrets/secrets.py +0 -407
- ads/telemetry/__init__.py +0 -7
- ads/telemetry/base.py +0 -69
- ads/telemetry/client.py +0 -125
- ads/telemetry/telemetry.py +0 -257
- ads/templates/dataflow_pyspark.jinja2 +0 -13
- ads/templates/dataflow_sparksql.jinja2 +0 -22
- ads/templates/func.jinja2 +0 -20
- ads/templates/schemas/openapi.json +0 -1740
- ads/templates/score-pkl.jinja2 +0 -173
- ads/templates/score.jinja2 +0 -322
- ads/templates/score_embedding_onnx.jinja2 +0 -202
- ads/templates/score_generic.jinja2 +0 -165
- ads/templates/score_huggingface_pipeline.jinja2 +0 -217
- ads/templates/score_lightgbm.jinja2 +0 -185
- ads/templates/score_onnx.jinja2 +0 -407
- ads/templates/score_onnx_new.jinja2 +0 -473
- ads/templates/score_oracle_automl.jinja2 +0 -185
- ads/templates/score_pyspark.jinja2 +0 -154
- ads/templates/score_pytorch.jinja2 +0 -219
- ads/templates/score_scikit-learn.jinja2 +0 -184
- ads/templates/score_tensorflow.jinja2 +0 -184
- ads/templates/score_xgboost.jinja2 +0 -178
- ads/text_dataset/__init__.py +0 -5
- ads/text_dataset/backends.py +0 -211
- ads/text_dataset/dataset.py +0 -445
- ads/text_dataset/extractor.py +0 -207
- ads/text_dataset/options.py +0 -53
- ads/text_dataset/udfs.py +0 -22
- ads/text_dataset/utils.py +0 -49
- ads/type_discovery/__init__.py +0 -9
- ads/type_discovery/abstract_detector.py +0 -21
- ads/type_discovery/constant_detector.py +0 -41
- ads/type_discovery/continuous_detector.py +0 -54
- ads/type_discovery/credit_card_detector.py +0 -99
- ads/type_discovery/datetime_detector.py +0 -92
- ads/type_discovery/discrete_detector.py +0 -118
- ads/type_discovery/document_detector.py +0 -146
- ads/type_discovery/ip_detector.py +0 -68
- ads/type_discovery/latlon_detector.py +0 -90
- ads/type_discovery/phone_number_detector.py +0 -63
- ads/type_discovery/type_discovery_driver.py +0 -87
- ads/type_discovery/typed_feature.py +0 -594
- ads/type_discovery/unknown_detector.py +0 -41
- ads/type_discovery/zipcode_detector.py +0 -48
- ads/vault/__init__.py +0 -7
- ads/vault/vault.py +0 -237
- oracle_ads-2.13.8.dist-info/RECORD +0 -858
- {oracle_ads-2.13.8.dist-info → oracle_ads-2.13.9rc0.dist-info}/licenses/LICENSE.txt +0 -0
@@ -1,1894 +0,0 @@
-#!/usr/bin/env python
-# -*- coding: utf-8; -*-
-
-# Copyright (c) 2021, 2024 Oracle and/or its affiliates.
-# Licensed under the Universal Permissive License v 1.0 as shown at https://oss.oracle.com/licenses/upl/
-from __future__ import annotations
-
-import datetime
-import inspect
-import logging
-import os
-import re
-import time
-import traceback
-import uuid
-from io import DEFAULT_BUFFER_SIZE
-from string import Template
-from typing import Any, Dict, List, Optional, Union
-
-import fsspec
-import oci
-import oci.data_science
-import oci.util as oci_util
-from oci.data_science.models import JobInfrastructureConfigurationDetails
-from oci.exceptions import ServiceError
-import yaml
-from ads.common import utils
-from ads.common.oci_datascience import DSCNotebookSession, OCIDataScienceMixin
-from ads.common.oci_logging import OCILog
-from ads.common.oci_resource import ResourceNotFoundError
-from ads.jobs.builders.infrastructure.base import Infrastructure, RunInstance
-from ads.jobs.builders.infrastructure.dsc_job_runtime import (
-    ContainerRuntimeHandler,
-    DataScienceJobRuntimeManager,
-)
-from ads.jobs.builders.infrastructure.utils import get_value
-from ads.jobs.builders.runtimes.artifact import Artifact
-from ads.jobs.builders.runtimes.container_runtime import ContainerRuntime
-from ads.jobs.builders.runtimes.python_runtime import GitPythonRuntime
-
-from ads.common.dsc_file_system import (
-    OCIFileStorage,
-    DSCFileSystemManager,
-    OCIObjectStorage,
-)
-from ads.common.decorator.utils import class_or_instance_method
-
-logger = logging.getLogger(__name__)
-
-SLEEP_INTERVAL = 3
-WAIT_SECONDS_AFTER_FINISHED = 90
-MAXIMUM_MOUNT_COUNT = 5
-FILE_STORAGE_TYPE = "FILE_STORAGE"
-OBJECT_STORAGE_TYPE = "OBJECT_STORAGE"
-
-
-class DSCJob(OCIDataScienceMixin, oci.data_science.models.Job):
-    """Represents an OCI Data Science Job
-    This class contains all attributes of the oci.data_science.models.Job.
-    The main purpose of this class is to link the oci.data_science.models.Job model and the related client methods.
-    Mainly, linking the Job model (payload) to Create/Update/Get/List/Delete methods.
-
-    A DSCJob can be initialized by unpacking a the properties stored in a dictionary (payload):
-
-    .. code-block:: python
-
-        job_properties = {
-            "display_name": "my_job",
-            "job_infrastructure_configuration_details": {"shape_name": "VM.MY_SHAPE"}
-        }
-        job = DSCJob(**job_properties)
-
-    The properties can also be OCI REST API payload, in which the keys are in camel format.
-
-    .. code-block:: python
-
-        job_payload = {
-            "projectId": "<project_ocid>",
-            "compartmentId": "<compartment_ocid>",
-            "displayName": "<job_name>",
-            "jobConfigurationDetails": {
-                "jobType": "DEFAULT",
-                "commandLineArguments": "pos_arg1 pos_arg2 --key1 val1 --key2 val2",
-                "environmentVariables": {
-                    "KEY1": "VALUE1",
-                    "KEY2": "VALUE2",
-                    # User specifies conda env via env var
-                    "CONDA_ENV_TYPE" : "service",
-                    "CONDA_ENV_SLUG" : "mlcpuv1"
-                }
-            },
-            "jobInfrastructureConfigurationDetails": {
-                "jobInfrastructureType": "STANDALONE",
-                "shapeName": "VM.Standard.E3.Flex",
-                "jobShapeConfigDetails": {
-                    "memoryInGBs": 16,
-                    "ocpus": 1
-                },
-                "blockStorageSizeInGBs": "100",
-                "subnetId": "<subnet_ocid>"
-            }
-        }
-        job = DSCJob(**job_payload)
-    """
-
-    DEFAULT_INFRA_TYPE = (
-        JobInfrastructureConfigurationDetails.JOB_INFRASTRUCTURE_TYPE_ME_STANDALONE
-    )
-
-    CONST_DEFAULT_BLOCK_STORAGE_SIZE = 50
-
-    def __init__(self, artifact: Union[str, Artifact] = None, **kwargs) -> None:
-        """Initialize a DSCJob object.
-
-        Parameters
-        ----------
-        artifact: str or Artifact
-            Job artifact, which can be a path or an Artifact object. Defaults to None.
-        kwargs:
-            Same as kwargs in oci.data_science.models.Job.
-            Keyword arguments are passed into OCI Job model to initialize the properties.
-
-        """
-        self._artifact = artifact
-
-        super().__init__(**kwargs)
-        if not self.job_configuration_details:
-            self.job_configuration_details = {
-                "jobType": "DEFAULT",
-            }
-        if not self.job_infrastructure_configuration_details:
-            self.job_infrastructure_configuration_details = {}
-
-    @property
-    def artifact(self) -> Union[str, Artifact]:
-        """Job artifact.
-
-        Returns
-        -------
-        str or Artifact
-            When creating a job, this be a path or an Artifact object.
-            When loading the job from OCI, this will be the filename of the job artifact.
-        """
-        if self.id and self._artifact is None:
-            try:
-                res = self.client.head_job_artifact(self.id)
-                content = res.headers.get("content-disposition")
-                if content and "filename=" in content:
-                    self._artifact = content.split("filename=", 1)[-1]
-            except ServiceError:
-                self._artifact = ""
-        return self._artifact
-
-    @artifact.setter
-    def artifact(self, artifact: Union[str, Artifact]):
-        """Sets the job artifact."""
-        self._artifact = artifact
-
-    def _load_infra_from_notebook(self, nb_config):
-        """Loads the infrastructure configuration from notebook configuration."""
-        infra = self.job_infrastructure_configuration_details
-        nb_shape_config_details = oci_util.to_dict(
-            getattr(nb_config, "notebook_session_shape_config_details", None) or {}
-        )
-        if isinstance(infra, dict):
-            shape_name = infra.get("shapeName", nb_config.shape)
-
-            # Ignore notebook shape config details if shape names do not match.
-            if shape_name != nb_config.shape:
-                nb_shape_config_details = {}
-
-            infra_type = infra.get("jobInfrastructureType")
-            block_storage = infra.get(
-                "blockStorageSizeInGBs", nb_config.block_storage_size_in_gbs
-            )
-            subnet_id = infra.get(
-                "subnetId",
-                (
-                    nb_config.subnet_id
-                    if infra_type
-                    != JobInfrastructureConfigurationDetails.JOB_INFRASTRUCTURE_TYPE_ME_STANDALONE
-                    else None
-                ),
-            )
-            job_shape_config_details = infra.get("jobShapeConfigDetails", {})
-            memory_in_gbs = job_shape_config_details.get(
-                "memoryInGBs", nb_shape_config_details.get("memory_in_gbs")
-            )
-            ocpus = job_shape_config_details.get(
-                "ocpus", nb_shape_config_details.get("ocpus")
-            )
-        else:
-            shape_name = (
-                infra.shape_name
-                if getattr(infra, "shape_name", None)
-                else nb_config.shape
-            )
-            # Ignore notebook shape config details if shape names do not match.
-            if shape_name != nb_config.shape:
-                nb_shape_config_details = {}
-
-            infra_type = getattr(infra, "job_infrastructure_type", None)
-
-            block_storage = (
-                infra.block_storage_size_in_gbs
-                if getattr(infra, "block_storage_size_in_gbs", None)
-                else nb_config.block_storage_size_in_gbs
-            )
-            subnet_id = (
-                infra.subnet_id
-                if getattr(infra, "subnet_id", None)
-                else (
-                    nb_config.subnet_id
-                    if infra_type
-                    != JobInfrastructureConfigurationDetails.JOB_INFRASTRUCTURE_TYPE_ME_STANDALONE
-                    else None
-                )
-            )
-            job_shape_config_details = oci_util.to_dict(
-                getattr(infra, "job_shape_config_details", {}) or {}
-            )
-            memory_in_gbs = job_shape_config_details.get(
-                "memory_in_gbs", nb_shape_config_details.get("memory_in_gbs")
-            )
-            ocpus = job_shape_config_details.get(
-                "ocpus", nb_shape_config_details.get("ocpus")
-            )
-
-        self.job_infrastructure_configuration_details = {
-            "jobInfrastructureType": infra_type,
-            "shapeName": shape_name,
-            "blockStorageSizeInGBs": block_storage,
-        }
-        # ADS does not provide explicit API for setting infrastructure type.
-        # If subnet is configured, the type will be set to STANDALONE,
-        # otherwise ME_STANDALONE
-        if subnet_id:
-            self.job_infrastructure_configuration_details.update(
-                {
-                    "subnetId": subnet_id,
-                    "jobInfrastructureType": JobInfrastructureConfigurationDetails.JOB_INFRASTRUCTURE_TYPE_STANDALONE,
-                }
-            )
-        else:
-            self.job_infrastructure_configuration_details.update(
-                {
-                    "jobInfrastructureType": self.DEFAULT_INFRA_TYPE,
-                }
-            )
-
-        # Specify shape config details
-        if memory_in_gbs or ocpus:
-            self.job_infrastructure_configuration_details.update(
-                {
-                    "jobShapeConfigDetails": {
-                        "memoryInGBs": memory_in_gbs,
-                        "ocpus": ocpus,
-                    }
-                }
-            )
-
-    def load_properties_from_env(self) -> None:
-        """Loads default properties from the environment"""
-        if "NB_SESSION_OCID" in os.environ:
-            try:
-                nb_session = DSCNotebookSession.from_ocid(os.environ["NB_SESSION_OCID"])
-            except Exception:
-                logger.debug("Failed to load config from notebook.")
-                logger.debug(traceback.format_exc())
-                # If there is an error loading the notebook infra configurations.
-                # Ignore it by setting nb_session to None
-                # This will skip loading the default configure.
-                nb_session = None
-            if nb_session:
-                nb_config = getattr(
-                    nb_session, "notebook_session_config_details", None
-                ) or getattr(nb_session, "notebook_session_configuration_details", None)
-
-                if nb_config:
-                    self._load_infra_from_notebook(nb_config)
-                    if self.project_id is None:
-                        self.project_id = nb_session.project_id
-        super().load_properties_from_env()
-
-    def load_defaults(self) -> DSCJob:
-        self.load_properties_from_env()
-        if not self.job_infrastructure_configuration_details:
-            self.job_infrastructure_configuration_details = {}
-        # Convert the dict to JobInfrastructureConfigurationDetails object
-        if isinstance(self.job_infrastructure_configuration_details, dict):
-            # Default networking
-            if not self.job_infrastructure_configuration_details.get(
-                "jobInfrastructureType"
-            ):
-                self.job_infrastructure_configuration_details[
-                    "jobInfrastructureType"
-                ] = self.DEFAULT_INFRA_TYPE
-            self.job_infrastructure_configuration_details = self.deserialize(
-                self.job_infrastructure_configuration_details,
-                JobInfrastructureConfigurationDetails.__name__,
-            )
-
-        # Default block storage size
-        if not self.job_infrastructure_configuration_details.block_storage_size_in_gbs:
-            self.job_infrastructure_configuration_details.block_storage_size_in_gbs = (
-                self.CONST_DEFAULT_BLOCK_STORAGE_SIZE
-            )
-        return self
-
-    def _create_with_oci_api(self) -> None:
-        oci_model = self.to_oci_model(oci.data_science.models.CreateJobDetails)
-        logger.debug(oci_model)
-        res = self.client.create_job(oci_model)
-        self.update_from_oci_model(res.data)
-        if not self.artifact:
-            return
-        try:
-            if issubclass(self.artifact.__class__, Artifact):
-                with self.artifact as artifact:
-                    self.upload_artifact(artifact.path)
-            else:
-                self.upload_artifact()
-        except Exception as ex:
-            # Delete the job if upload artifact is failed.
-            self.delete()
-            raise ex
-
-    def create(self) -> DSCJob:
-        """Create the job on OCI Data Science platform
-
-        Returns
-        -------
-        DSCJob
-            The DSCJob instance (self), which allows chaining additional method.
-
-        """
-        if not self.display_name:
-            if self.artifact:
-                timestamp = datetime.datetime.utcnow().strftime("%Y-%m-%d-%H:%M.%S")
-                self.display_name = (
-                    os.path.basename(str(self.artifact)).split(".")[0] + f"-{timestamp}"
-                )
-            else:
-                # Set default display_name if not specified - randomly generated easy to remember name generated
-                self.display_name = utils.get_random_name_for_resource()
-        try:
-            self.load_defaults()
-        except Exception:
-            logger.exception("Failed to load default properties.")
-        # Check compartment ID and project ID before calling the OCI API
-        if not self.compartment_id:
-            raise ValueError("Specify compartment ID for data science job.")
-        if not self.project_id:
-            raise ValueError("Specify project ID for data science job.")
-        self._create_with_oci_api()
-        return self
-
-    def update(self) -> DSCJob:
-        """Updates the Data Science Job."""
-        raise NotImplementedError("Updating Job is not supported at the moment.")
-
-    def delete(self, force_delete: bool = False) -> DSCJob:
-        """Deletes the job and the corresponding job runs.
-
-        Parameters
-        ----------
-        force_delete : bool, optional, defaults to False
-            the deletion fails when associated job runs are in progress, but if force_delete to true, then
-            the job run will be canceled, then it will be deleted. In this case, delete job has to wait till
-            job has been canceled.
-
-        Returns
-        -------
-        DSCJob
-            The DSCJob instance (self), which allows chaining additional method.
-
-        """
-        runs = self.run_list()
-        for run in runs:
-            if force_delete and run.lifecycle_state in [
-                DataScienceJobRun.LIFECYCLE_STATE_ACCEPTED,
-                DataScienceJobRun.LIFECYCLE_STATE_IN_PROGRESS,
-                DataScienceJobRun.LIFECYCLE_STATE_NEEDS_ATTENTION,
-            ]:
-                run.cancel(wait_for_completion=True)
-            run.delete()
-        self.client.delete_job(self.id)
-        return self
-
-    def upload_artifact(self, artifact_path: str = None) -> DSCJob:
-        """Uploads the job artifact to OCI
-
-        Parameters
-        ----------
-        artifact_path : str, optional
-            Local path to the job artifact file to be uploaded, by default None.
-            If artifact_path is None, the path in self.artifact will be used.
-
-        Returns
-        -------
-        DSCJob
-            The DSCJob instance (self), which allows chaining additional method.
-
-        """
-        if not artifact_path:
-            artifact_path = self.artifact
-        with fsspec.open(artifact_path, "rb") as f:
-            self.client.create_job_artifact(
-                self.id,
-                f,
-                content_disposition=f"attachment; filename={os.path.basename(artifact_path)}",
-            )
-        return self
-
-    def download_artifact(self, artifact_path: str) -> DSCJob:
-        """Downloads the artifact from OCI
-
-        Parameters
-        ----------
-        artifact_path : str
-            Local path to store the job artifact.
-
-        Returns
-        -------
-        DSCJob
-            The DSCJob instance (self), which allows chaining additional method.
-
-        """
-        res = self.client.get_job_artifact_content(self.id)
-        with open(artifact_path, "wb") as f:
-            for chunk in res.data.iter_content(chunk_size=DEFAULT_BUFFER_SIZE * 16):
-                f.write(chunk)
-        return self
-
-    def run_list(self, **kwargs) -> list[DataScienceJobRun]:
-        """Lists the runs of this job.
-
-        Parameters
-        ----------
-        **kwargs :
Keyword arguments to te passed into the OCI list_job_runs() for filtering the job runs.
|
442
|
-
|
443
|
-
Returns
|
444
|
-
-------
|
445
|
-
list
|
446
|
-
A list of DSCJobRun objects
|
447
|
-
|
448
|
-
"""
|
449
|
-
items = oci.pagination.list_call_get_all_results(
|
450
|
-
self.client.list_job_runs, self.compartment_id, job_id=self.id, **kwargs
|
451
|
-
).data
|
452
|
-
return [DataScienceJobRun(**self.auth).from_oci_model(item) for item in items]
|
453
|
-
|
454
|
-
def run(self, **kwargs) -> DataScienceJobRun:
|
455
|
-
"""Runs the job
|
456
|
-
|
457
|
-
Parameters
|
458
|
-
----------
|
459
|
-
**kwargs :
|
460
|
-
Keyword arguments for initializing a Data Science Job Run.
|
461
|
-
The keys can be any keys in supported by OCI JobConfigurationDetails, OcirContainerJobEnvironmentConfigurationDetails and JobRun, including:
|
462
|
-
* hyperparameter_values: dict(str, str)
|
463
|
-
* environment_variables: dict(str, str)
|
464
|
-
* command_line_arguments: str
|
465
|
-
* maximum_runtime_in_minutes: int
|
466
|
-
* display_name: str
|
467
|
-
* freeform_tags: dict(str, str)
|
468
|
-
* defined_tags: dict(str, dict(str, object))
|
469
|
-
* image: str
|
470
|
-
* cmd: list[str]
|
471
|
-
* entrypoint: list[str]
|
472
|
-
* image_digest: str
|
473
|
-
* image_signature_id: str
|
474
|
-
|
475
|
-
If display_name is not specified, it will be generated as "<JOB_NAME>-run-<TIMESTAMP>".
|
476
|
-
|
477
|
-
Returns
|
478
|
-
-------
|
479
|
-
DSCJobRun
|
480
|
-
An instance of DSCJobRun, which can be used to monitor the job run.
|
481
|
-
|
482
|
-
"""
|
483
|
-
if not self.id:
|
484
|
-
self.create()
|
485
|
-
|
486
|
-
config_swagger_types = (
|
487
|
-
oci.data_science.models.DefaultJobConfigurationDetails().swagger_types.keys()
|
488
|
-
)
|
489
|
-
env_config_swagger_types = {}
|
490
|
-
if hasattr(
|
491
|
-
oci.data_science.models, "OcirContainerJobEnvironmentConfigurationDetails"
|
492
|
-
):
|
493
|
-
env_config_swagger_types = (
|
494
|
-
oci.data_science.models.OcirContainerJobEnvironmentConfigurationDetails().swagger_types.keys()
|
495
|
-
)
|
496
|
-
config_kwargs = {}
|
497
|
-
env_config_kwargs = {}
|
498
|
-
keys = list(kwargs.keys())
|
499
|
-
for key in keys:
|
500
|
-
if key in config_swagger_types:
|
501
|
-
config_kwargs[key] = kwargs.pop(key)
|
502
|
-
elif key in env_config_swagger_types:
|
503
|
-
value = kwargs.pop(key)
|
504
|
-
if key in [
|
505
|
-
ContainerRuntime.CONST_CMD,
|
506
|
-
ContainerRuntime.CONST_ENTRYPOINT,
|
507
|
-
] and isinstance(value, str):
|
508
|
-
value = ContainerRuntimeHandler.split_args(value)
|
509
|
-
env_config_kwargs[key] = value
|
510
|
-
|
511
|
-
# remove timestamp from the job name (added in default names, when display_name not specified by user)
|
512
|
-
if self.display_name:
|
513
|
-
try:
|
514
|
-
datetime.datetime.strptime(self.display_name[-19:], "%Y-%m-%d-%H:%M.%S")
|
515
|
-
self.display_name = self.display_name[:-20]
|
516
|
-
except ValueError:
|
517
|
-
pass
|
518
|
-
|
519
|
-
job_attrs = dict(
|
520
|
-
project_id=self.project_id,
|
521
|
-
display_name=self.display_name
|
522
|
-
+ "-run-"
|
523
|
-
+ datetime.datetime.now().strftime("%Y-%m-%d-%H:%M.%S"),
|
524
|
-
job_id=self.id,
|
525
|
-
compartment_id=self.compartment_id,
|
526
|
-
)
|
527
|
-
|
528
|
-
for key, val in job_attrs.items():
|
529
|
-
if not kwargs.get(key):
|
530
|
-
kwargs[key] = val
|
531
|
-
|
532
|
-
if config_kwargs:
|
533
|
-
config_kwargs["jobType"] = "DEFAULT"
|
534
|
-
config_override = kwargs.get("job_configuration_override_details", {})
|
535
|
-
config_override.update(config_kwargs)
|
536
|
-
kwargs["job_configuration_override_details"] = config_override
|
537
|
-
|
538
|
-
if env_config_kwargs:
|
539
|
-
env_config_kwargs["jobEnvironmentType"] = "OCIR_CONTAINER"
|
540
|
-
env_config_override = kwargs.get(
|
541
|
-
"job_environment_configuration_override_details", {}
|
542
|
-
)
|
543
|
-
env_config_override.update(env_config_kwargs)
|
544
|
-
kwargs["job_environment_configuration_override_details"] = (
|
545
|
-
env_config_override
|
546
|
-
)
|
547
|
-
|
548
|
-
wait = kwargs.pop("wait", False)
|
549
|
-
run = DataScienceJobRun(**kwargs, **self.auth).create()
|
550
|
-
if wait:
|
551
|
-
return run.watch()
|
552
|
-
return run
|
553
|
-
|
554
|
-
|
555
|
-
class DataScienceJobRun(
|
556
|
-
OCIDataScienceMixin, oci.data_science.models.JobRun, RunInstance
|
557
|
-
):
|
558
|
-
"""Represents a Data Science Job run"""
|
559
|
-
|
560
|
-
_DETAILS_LINK = (
|
561
|
-
"https://console.{region}.oraclecloud.com/data-science/job-runs/{id}"
|
562
|
-
)
|
563
|
-
|
564
|
-
TERMINAL_STATES = [
|
565
|
-
oci.data_science.models.JobRun.LIFECYCLE_STATE_SUCCEEDED,
|
566
|
-
oci.data_science.models.JobRun.LIFECYCLE_STATE_FAILED,
|
567
|
-
oci.data_science.models.JobRun.LIFECYCLE_STATE_CANCELED,
|
568
|
-
oci.data_science.models.JobRun.LIFECYCLE_STATE_DELETED,
|
569
|
-
]
|
570
|
-
|
571
|
-
def create(self) -> DataScienceJobRun:
|
572
|
-
"""Creates a job run"""
|
573
|
-
self.load_properties_from_env()
|
574
|
-
res = self.client.create_job_run(
|
575
|
-
self.to_oci_model(oci.data_science.models.CreateJobRunDetails)
|
576
|
-
)
|
577
|
-
self.update_from_oci_model(res.data)
|
578
|
-
return self
|
579
|
-
|
580
|
-
@property
|
581
|
-
def status(self) -> str:
|
582
|
-
"""Lifecycle status
|
583
|
-
|
584
|
-
Returns
|
585
|
-
-------
|
586
|
-
str
|
587
|
-
Status in a string.
|
588
|
-
"""
|
589
|
-
return self.lifecycle_state
|
590
|
-
|
591
|
-
@property
|
592
|
-
def log_id(self) -> str:
|
593
|
-
"""The log ID from OCI logging service containing the logs from the job run."""
|
594
|
-
if not self.log_details:
|
595
|
-
return None
|
596
|
-
return self.log_details.log_id
|
597
|
-
|
598
|
-
@property
|
599
|
-
def log_group_id(self) -> str:
|
600
|
-
"""The log group ID from OCI logging service containing the logs from the job run."""
|
601
|
-
if not self.log_details:
|
602
|
-
return None
|
603
|
-
return self.log_details.log_group_id
|
604
|
-
|
605
|
-
@property
|
606
|
-
def logging(self) -> OCILog:
|
607
|
-
"""The OCILog object containing the logs from the job run"""
|
608
|
-
if not self.log_id:
|
609
|
-
raise ValueError("Log OCID is not specified for this job run.")
|
610
|
-
# Specifying log group ID when initializing OCILog can reduce the number of API calls.
|
611
|
-
auth = self.auth
|
612
|
-
if "client_kwargs" in auth and isinstance(auth["client_kwargs"], dict):
|
613
|
-
auth["client_kwargs"].pop("service_endpoint", None)
|
614
|
-
return OCILog(
|
615
|
-
id=self.log_id, log_group_id=self.log_details.log_group_id, **auth
|
616
|
-
)
|
617
|
-
|
618
|
-
@property
|
619
|
-
def exit_code(self):
|
620
|
-
"""The exit code of the job run from the lifecycle details.
|
621
|
-
Note that,
|
622
|
-
None will be returned if the job run is not finished or failed without exit code.
|
623
|
-
0 will be returned if job run succeeded.
|
624
|
-
"""
|
625
|
-
if self.lifecycle_state == self.LIFECYCLE_STATE_SUCCEEDED:
|
626
|
-
return 0
|
627
|
-
if not self.lifecycle_details:
|
628
|
-
return None
|
629
|
-
match = re.search(r"exit code (\d+)", self.lifecycle_details)
|
630
|
-
if not match:
|
631
|
-
return None
|
632
|
-
try:
|
633
|
-
return int(match.group(1))
|
634
|
-
except Exception:
|
635
|
-
return None
|
636
|
-
|
637
|
-
@staticmethod
|
638
|
-
def _format_log(message: str, date_time: datetime.datetime) -> dict:
|
639
|
-
"""Formats a message as log record with datetime.
|
640
|
-
This is used to add additional logs to show job run status change.
|
641
|
-
|
642
|
-
Parameters
|
643
|
-
----------
|
644
|
-
message : str
|
645
|
-
Log message.
|
646
|
-
date_time : datetime or str
|
647
|
-
Timestamp for the message
|
648
|
-
|
649
|
-
Returns
|
650
|
-
-------
|
651
|
-
dict
|
652
|
-
log record as a dictionary, including id, time and message as keys.
|
653
|
-
"""
|
654
|
-
if isinstance(date_time, datetime.datetime):
|
655
|
-
date_time = date_time.strftime("%Y-%m-%dT%H:%M:%S.000Z")
|
656
|
-
return {
|
657
|
-
"id": str(uuid.uuid4()),
|
658
|
-
"message": message,
|
659
|
-
"time": date_time,
|
660
|
-
}
|
661
|
-
|
662
|
-
def logs(self, limit: int = None) -> list:
|
663
|
-
"""Gets the logs of the job run.
|
664
|
-
|
665
|
-
Parameters
|
666
|
-
----------
|
667
|
-
limit : int, optional
|
668
|
-
Limit the number of logs to be returned.
|
669
|
-
Defaults to None. All logs will be returned.
|
670
|
-
|
671
|
-
Returns
|
672
|
-
-------
|
673
|
-
list
|
674
|
-
A list of log records. Each log record is a dictionary with the following keys: id, time, message.
|
675
|
-
"""
|
676
|
-
if self.time_accepted:
|
677
|
-
log_messages = self.logging.tail(
|
678
|
-
source=self.id, limit=limit, time_start=self.time_accepted
|
679
|
-
)
|
680
|
-
else:
|
681
|
-
log_messages = []
|
682
|
-
if self.time_started:
|
683
|
-
log_messages.insert(
|
684
|
-
0, self._format_log("Job Run STARTED", self.time_started)
|
685
|
-
)
|
686
|
-
if self.time_accepted:
|
687
|
-
log_messages.insert(
|
688
|
-
0, self._format_log("Job Run ACCEPTED", self.time_accepted)
|
689
|
-
)
|
690
|
-
if self.time_finished:
|
691
|
-
log_messages.append(
|
692
|
-
self._format_log("Job Run FINISHED", self.time_finished)
|
693
|
-
)
|
694
|
-
return log_messages
|
695
|
-
|
696
|
-
def _job_run_status_text(self) -> str:
|
697
|
-
details = f", {self.lifecycle_details}" if self.lifecycle_details else ""
|
698
|
-
return f"Job Run {self.lifecycle_state}" + details
|
699
|
-
|
700
|
-
def _check_and_print_status(self, prev_status) -> str:
|
701
|
-
status = self._job_run_status_text()
|
702
|
-
if status != prev_status:
|
703
|
-
if self.lifecycle_state in self.TERMINAL_STATES and self.time_finished:
|
704
|
-
timestamp = self.time_finished.strftime("%Y-%m-%d %H:%M:%S")
|
705
|
-
else:
|
706
|
-
timestamp = datetime.datetime.utcnow().strftime("%Y-%m-%d %H:%M:%S")
|
707
|
-
print(f"{timestamp} - {status}")
|
708
|
-
return status
|
709
|
-
|
710
|
-
def wait(self, interval: float = SLEEP_INTERVAL):
|
711
|
-
"""Waits for the job run until if finishes.
|
712
|
-
|
713
|
-
Parameters
|
714
|
-
----------
|
715
|
-
interval : float
|
716
|
-
Time interval in seconds between each request to update the logs.
|
717
|
-
Defaults to 3 (seconds).
|
718
|
-
|
719
|
-
"""
|
720
|
-
self.sync()
|
721
|
-
while self.status not in self.TERMINAL_STATES:
|
722
|
-
time.sleep(interval)
|
723
|
-
self.sync()
|
724
|
-
return self
|
725
|
-
|
726
|
-
def watch(
|
727
|
-
self,
|
728
|
-
interval: float = SLEEP_INTERVAL,
|
729
|
-
wait: float = WAIT_SECONDS_AFTER_FINISHED,
|
730
|
-
) -> DataScienceJobRun:
|
731
|
-
"""Watches the job run until it finishes.
|
732
|
-
Before the job start running, this method will output the job run status.
|
733
|
-
Once the job start running,
|
734
|
-
the logs will be streamed until the job is success, failed or cancelled.
|
735
|
-
|
736
|
-
Parameters
|
737
|
-
----------
|
738
|
-
interval : float
|
739
|
-
Time interval in seconds between each request to update the logs.
|
740
|
-
Defaults to 3 (seconds).
|
741
|
-
wait : float
|
742
|
-
Time in seconds to keep updating the logs after the job run finished.
|
743
|
-
It may take some time for logs to appear in OCI logging service
|
744
|
-
after the job run is finished.
|
745
|
-
Defaults to 90 (seconds).
|
746
|
-
|
747
|
-
"""
|
748
|
-
|
749
|
-
def stop_condition():
|
750
|
-
"""Stops the log streaming once the job is in a terminal state."""
|
751
|
-
self.sync()
|
752
|
-
if self.lifecycle_state not in self.TERMINAL_STATES:
|
753
|
-
return False
|
754
|
-
# Stop if time_finished is not available.
|
755
|
-
if not self.time_finished:
|
756
|
-
return True
|
757
|
-
# Stop only if time_finished is over 2 minute ago.
|
758
|
-
# This is for the time delay between job run stopped and the logs appear in oci logging.
|
759
|
-
if (
|
760
|
-
datetime.datetime.now(self.time_finished.tzinfo)
|
761
|
-
- datetime.timedelta(seconds=wait)
|
762
|
-
> self.time_finished
|
763
|
-
):
|
764
|
-
return True
|
765
|
-
return False
|
766
|
-
|
767
|
-
if not self.log_id and not self.log_group_id:
|
768
|
-
print(
|
769
|
-
"Logging is not configured for the job. Watch() will only show job status."
|
770
|
-
)
|
771
|
-
|
772
|
-
print(f"Job OCID: {self.job.id}")
|
773
|
-
print(f"Job Run OCID: {self.id}")
|
774
|
-
|
775
|
-
status = ""
|
776
|
-
while not stop_condition():
|
777
|
-
status = self._check_and_print_status(status)
|
778
|
-
# Break and stream logs if job has log ID and started.
|
779
|
-
# Otherwise, keep watching the status until job terminates.
|
780
|
-
if self.time_started and self.log_id:
|
781
|
-
break
|
782
|
-
time.sleep(interval)
|
783
|
-
|
784
|
-
if self.log_id and self.time_accepted:
|
785
|
-
count = self.logging.stream(
|
786
|
-
source=self.id,
|
787
|
-
interval=interval,
|
788
|
-
stop_condition=stop_condition,
|
789
|
-
time_start=self.time_accepted,
|
790
|
-
)
|
791
|
-
if not count:
|
792
|
-
print(
|
793
|
-
"No logs in the last 14 days. Please set time_start to see older logs."
|
794
|
-
)
|
795
|
-
|
796
|
-
self._check_and_print_status(status)
|
797
|
-
|
798
|
-
return self
|
799
|
-
|
800
|
-
def cancel(self, wait_for_completion: bool = True) -> DataScienceJobRun:
|
801
|
-
"""Cancels a job run
|
802
|
-
|
803
|
-
Parameters
|
804
|
-
----------
|
805
|
-
wait_for_completion: bool
|
806
|
-
Whether to wait for job run to be cancelled before proceeding.
|
807
|
-
Defaults to True.
|
808
|
-
|
809
|
-
Returns
|
810
|
-
-------
|
811
|
-
self
|
812
|
-
The job run instance.
|
813
|
-
"""
|
814
|
-
self.client.cancel_job_run(self.id)
|
815
|
-
if wait_for_completion:
|
816
|
-
while (
|
817
|
-
self.lifecycle_state
|
818
|
-
!= oci.data_science.models.JobRun.LIFECYCLE_STATE_CANCELED
|
819
|
-
):
|
820
|
-
self.sync()
|
821
|
-
time.sleep(SLEEP_INTERVAL)
|
822
|
-
return self
|
823
|
-
|
824
|
-
def __repr__(self) -> str:
|
825
|
-
"""Displays the object as YAML."""
|
826
|
-
return self.to_yaml()
|
827
|
-
|
828
|
-
def to_yaml(self) -> str:
|
829
|
-
"""Serializes the object into YAML string.
|
830
|
-
|
831
|
-
Returns
|
832
|
-
-------
|
833
|
-
str
|
834
|
-
YAML stored in a string.
|
835
|
-
"""
|
836
|
-
# Here the job YAML is used as the base for the job run
|
837
|
-
job_dict = self.job.to_dict()
|
838
|
-
|
839
|
-
# Update infrastructure from job run
|
840
|
-
run_dict = self.to_dict()
|
841
|
-
infra_specs = [
|
842
|
-
run_dict,
|
843
|
-
run_dict.get("jobInfrastructureConfigurationDetails", {}),
|
844
|
-
run_dict.get("logDetails", {}),
|
845
|
-
]
|
846
|
-
for infra_spec in infra_specs:
|
847
|
-
for key in infra_spec:
|
848
|
-
if key in job_dict["spec"]["infrastructure"]["spec"]:
|
849
|
-
job_dict["spec"]["infrastructure"]["spec"][key] = infra_spec[key]
|
850
|
-
|
851
|
-
# Update runtime from job run
|
852
|
-
from ads.jobs import Job
|
853
|
-
|
854
|
-
job = Job(**self.auth).from_dict(job_dict)
|
855
|
-
envs = job.runtime.envs
|
856
|
-
run_config_override = run_dict.get("jobConfigurationOverrideDetails", {})
|
857
|
-
envs.update(run_config_override.get("environmentVariables", {}))
|
858
|
-
job.runtime.with_environment_variable(**envs)
|
859
|
-
if run_config_override.get("commandLineArguments"):
|
860
|
-
job.runtime.set_spec(
|
861
|
-
"args",
|
862
|
-
run_config_override.get("commandLineArguments"),
|
863
|
-
)
|
864
|
-
|
865
|
-
# Update kind, id and name
|
866
|
-
run_dict = job.to_dict()
|
867
|
-
run_dict["kind"] = "jobRun"
|
868
|
-
run_dict["spec"]["id"] = self.id
|
869
|
-
run_dict["spec"]["name"] = self.display_name
|
870
|
-
return yaml.safe_dump(run_dict)
|
871
|
-
|
872
|
-
@property
|
873
|
-
def job(self):
|
874
|
-
"""The job instance of this run.
|
875
|
-
|
876
|
-
Returns
|
877
|
-
-------
|
878
|
-
Job
|
879
|
-
An ADS Job instance
|
880
|
-
"""
|
881
|
-
from ads.jobs import Job
|
882
|
-
|
883
|
-
return Job(**self.auth).from_datascience_job(self.job_id)
|
884
|
-
|
885
|
-
def download(self, to_dir):
|
886
|
-
"""Downloads files from job run output URI to local.
|
887
|
-
|
888
|
-
Parameters
|
889
|
-
----------
|
890
|
-
to_dir : str
|
891
|
-
Local directory to which the files will be downloaded to.
|
892
|
-
|
893
|
-
Returns
|
894
|
-
-------
|
895
|
-
DataScienceJobRun
|
896
|
-
The job run instance (self)
|
897
|
-
"""
|
898
|
-
self.job.download(to_dir)
|
899
|
-
return self
|
900
|
-
|
901
|
-
def delete(self, force_delete: bool = False):
|
902
|
-
if force_delete and self.status in [
|
903
|
-
DataScienceJobRun.LIFECYCLE_STATE_ACCEPTED,
|
904
|
-
DataScienceJobRun.LIFECYCLE_STATE_IN_PROGRESS,
|
905
|
-
DataScienceJobRun.LIFECYCLE_STATE_NEEDS_ATTENTION,
|
906
|
-
]:
|
907
|
-
self.cancel(wait_for_completion=True)
|
908
|
-
super().delete()
|
909
|
-
return self
|
910
|
-
|
911
|
-
|
912
|
-
# This is for backward compatibility
|
913
|
-
DSCJobRun = DataScienceJobRun
|
914
|
-
|
915
|
-
|
916
|
-
class DataScienceJob(Infrastructure):
|
917
|
-
"""Represents the OCI Data Science Job infrastructure.
|
918
|
-
|
919
|
-
To configure the infrastructure for a Data Science Job::
|
920
|
-
|
921
|
-
infrastructure = (
|
922
|
-
DataScienceJob()
|
923
|
-
# Configure logging for getting the job run outputs.
|
924
|
-
.with_log_group_id("<log_group_ocid>")
|
925
|
-
# Log resource will be auto-generated if log ID is not specified.
|
926
|
-
.with_log_id("<log_ocid>")
|
927
|
-
# If you are in an OCI data science notebook session,
|
928
|
-
# the following configurations are not required.
|
929
|
-
# Configurations from the notebook session will be used as defaults.
|
930
|
-
.with_compartment_id("<compartment_ocid>")
|
931
|
-
.with_project_id("<project_ocid>")
|
932
|
-
.with_subnet_id("<subnet_ocid>")
|
933
|
-
.with_shape_name("VM.Standard.E3.Flex")
|
934
|
-
# Shape config details are applicable only for the flexible shapes.
|
935
|
-
.with_shape_config_details(memory_in_gbs=16, ocpus=1)
|
936
|
-
# Minimum/Default block storage size is 50 (GB).
|
937
|
-
.with_block_storage_size(50)
|
938
|
-
# A list of file systems to be mounted
|
939
|
-
.with_storage_mount(
|
940
|
-
{
|
941
|
-
"src" : "<mount_target_ip_address>:<export_path>",
|
942
|
-
"dest" : "<destination_directory_name>"
|
943
|
-
}
|
944
|
-
)
|
945
|
-
# Tags
|
946
|
-
.with_freeform_tag(my_tag="my_value")
|
947
|
-
.with_defined_tag(**{"Operations": {"CostCenter": "42"}})
|
948
|
-
)
|
949
|
-
|
950
|
-
"""
|
951
|
-
|
952
|
-
CONST_PROJECT_ID = "projectId"
|
953
|
-
CONST_COMPARTMENT_ID = "compartmentId"
|
954
|
-
CONST_DISPLAY_NAME = "displayName"
|
955
|
-
CONST_JOB_TYPE = "jobType"
|
956
|
-
CONST_JOB_INFRA = "jobInfrastructureType"
|
957
|
-
CONST_SHAPE_NAME = "shapeName"
|
958
|
-
CONST_BLOCK_STORAGE = "blockStorageSize"
|
959
|
-
CONST_SUBNET_ID = "subnetId"
|
960
|
-
CONST_SHAPE_CONFIG_DETAILS = "shapeConfigDetails"
|
961
|
-
CONST_MEMORY_IN_GBS = "memoryInGBs"
|
962
|
-
CONST_OCPUS = "ocpus"
|
963
|
-
CONST_LOG_ID = "logId"
|
964
|
-
CONST_LOG_GROUP_ID = "logGroupId"
|
965
|
-
CONST_STORAGE_MOUNT = "storageMount"
|
966
|
-
CONST_FREEFORM_TAGS = "freeformTags"
|
967
|
-
CONST_DEFINED_TAGS = "definedTags"
|
968
|
-
|
969
|
-
attribute_map = {
|
970
|
-
CONST_PROJECT_ID: "project_id",
|
971
|
-
CONST_COMPARTMENT_ID: "compartment_id",
|
972
|
-
CONST_DISPLAY_NAME: "display_name",
|
973
|
-
CONST_JOB_TYPE: "job_type",
|
974
|
-
CONST_JOB_INFRA: "job_infrastructure_type",
|
975
|
-
CONST_SHAPE_NAME: "shape_name",
|
976
|
-
CONST_BLOCK_STORAGE: "block_storage_size",
|
977
|
-
CONST_SUBNET_ID: "subnet_id",
|
978
|
-
CONST_SHAPE_CONFIG_DETAILS: "shape_config_details",
|
979
|
-
CONST_LOG_ID: "log_id",
|
980
|
-
CONST_LOG_GROUP_ID: "log_group_id",
|
981
|
-
CONST_STORAGE_MOUNT: "storage_mount",
|
982
|
-
CONST_FREEFORM_TAGS: "freeform_tags",
|
983
|
-
CONST_DEFINED_TAGS: "defined_tags",
|
984
|
-
}
|
985
|
-
|
986
|
-
shape_config_details_attribute_map = {
|
987
|
-
CONST_MEMORY_IN_GBS: "memory_in_gbs",
|
988
|
-
CONST_OCPUS: "ocpus",
|
989
|
-
}
|
990
|
-
|
991
|
-
payload_attribute_map = {
|
992
|
-
CONST_PROJECT_ID: "project_id",
|
993
|
-
CONST_COMPARTMENT_ID: "compartment_id",
|
994
|
-
CONST_DISPLAY_NAME: "display_name",
|
995
|
-
CONST_JOB_TYPE: "job_configuration_details.job_type",
|
996
|
-
CONST_JOB_INFRA: "job_infrastructure_configuration_details.job_infrastructure_type",
|
997
|
-
CONST_SHAPE_NAME: "job_infrastructure_configuration_details.shape_name",
|
998
|
-
CONST_BLOCK_STORAGE: "job_infrastructure_configuration_details.block_storage_size_in_gbs",
|
999
|
-
CONST_SUBNET_ID: "job_infrastructure_configuration_details.subnet_id",
|
1000
|
-
CONST_SHAPE_CONFIG_DETAILS: "job_infrastructure_configuration_details.job_shape_config_details",
|
1001
|
-
CONST_LOG_ID: "job_log_configuration_details.log_id",
|
1002
|
-
CONST_LOG_GROUP_ID: "job_log_configuration_details.log_group_id",
|
1003
|
-
}
|
1004
|
-
|
1005
|
-
snake_to_camel_map = {
|
1006
|
-
v.split(".", maxsplit=1)[-1]: k for k, v in payload_attribute_map.items()
|
1007
|
-
}
|
1008
|
-
|
1009
|
-
storage_mount_type_dict = {
|
1010
|
-
FILE_STORAGE_TYPE: OCIFileStorage,
|
1011
|
-
OBJECT_STORAGE_TYPE: OCIObjectStorage,
|
1012
|
-
}
|
1013
|
-
|
1014
|
-
auth = {}
|
1015
|
-
|
1016
|
-
@staticmethod
|
1017
|
-
def standardize_spec(spec):
|
1018
|
-
if not spec:
|
1019
|
-
return {}
|
1020
|
-
|
1021
|
-
attribute_map = {
|
1022
|
-
**DataScienceJob.attribute_map,
|
1023
|
-
**DataScienceJob.shape_config_details_attribute_map,
|
1024
|
-
}
|
1025
|
-
snake_to_camel_map = {v: k for k, v in attribute_map.items()}
|
1026
|
-
snake_to_camel_map = {
|
1027
|
-
**{v: k for k, v in attribute_map.items()},
|
1028
|
-
**DataScienceJob.snake_to_camel_map,
|
1029
|
-
}
|
1030
|
-
|
1031
|
-
for key in list(spec.keys()):
|
1032
|
-
if key not in attribute_map and key.lower() in snake_to_camel_map:
|
1033
|
-
value = spec.pop(key)
|
1034
|
-
if isinstance(value, dict):
|
1035
|
-
spec[snake_to_camel_map[key.lower()]] = (
|
1036
|
-
DataScienceJob.standardize_spec(value)
|
1037
|
-
)
|
1038
|
-
else:
|
1039
|
-
spec[snake_to_camel_map[key.lower()]] = value
|
1040
|
-
return spec
|
1041
|
-
|
1042
|
-
def __init__(self, spec: Dict = None, **kwargs) -> None:
|
1043
|
-
"""Initializes a data science job infrastructure
|
1044
|
-
|
1045
|
-
Parameters
|
1046
|
-
----------
|
1047
|
-
spec : dict, optional
|
1048
|
-
Object specification, by default None
|
1049
|
-
kwargs: dict
|
1050
|
-
Specification as keyword arguments.
|
1051
|
-
If spec contains the same key as the one in kwargs, the value from kwargs will be used.
|
1052
|
-
"""
|
1053
|
-
# Saves a copy of the auth object from the class to the instance.
|
1054
|
-
# Future changes to the class level Job.auth will not affect the auth of existing instances.
|
1055
|
-
self.auth = self.auth.copy()
|
1056
|
-
for key in ["config", "signer", "client_kwargs"]:
|
1057
|
-
if kwargs.get(key):
|
1058
|
-
self.auth[key] = kwargs.pop(key)
|
1059
|
-
|
1060
|
-
self.standardize_spec(spec)
|
1061
|
-
self.standardize_spec(kwargs)
|
1062
|
-
super().__init__(spec=spec, **kwargs)
|
1063
|
-
if not self.job_type:
|
1064
|
-
self.with_job_type("DEFAULT")
|
1065
|
-
self.dsc_job = DSCJob(**self.auth)
|
1066
|
-
self.runtime = None
|
1067
|
-
self._name = None
|
1068
|
-
|
1069
|
-
@property
|
1070
|
-
def name(self) -> str:
|
1071
|
-
"""Display name of the job"""
|
1072
|
-
if self.dsc_job:
|
1073
|
-
self._name = self.dsc_job.display_name
|
1074
|
-
return self._name
|
1075
|
-
|
1076
|
-
@name.setter
|
1077
|
-
def name(self, value: str):
|
1078
|
-
"""Sets the display name of the job
|
1079
|
-
|
1080
|
-
Parameters
|
1081
|
-
----------
|
1082
|
-
value : str
|
1083
|
-
The display name of the job
|
1084
|
-
"""
|
1085
|
-
self._name = value
|
1086
|
-
if self.dsc_job:
|
1087
|
-
self.dsc_job.display_name = value
|
1088
|
-
|
1089
|
-
@property
|
1090
|
-
def job_id(self) -> Optional[str]:
|
1091
|
-
"""The OCID of the job"""
|
1092
|
-
if self.dsc_job:
|
1093
|
-
return self.dsc_job.id
|
1094
|
-
return None
|
1095
|
-
|
1096
|
-
@property
|
1097
|
-
def status(self) -> Optional[str]:
|
1098
|
-
"""Status of the job.
|
1099
|
-
|
1100
|
-
Returns
|
1101
|
-
-------
|
1102
|
-
str
|
1103
|
-
Status of the job.
|
1104
|
-
"""
|
1105
|
-
if self.dsc_job:
|
1106
|
-
return self.dsc_job.lifecycle_state
|
1107
|
-
return None
|
1108
|
-
|
1109
|
-
def with_project_id(self, project_id: str) -> DataScienceJob:
|
1110
|
-
"""Sets the project OCID
|
1111
|
-
|
1112
|
-
Parameters
|
1113
|
-
----------
|
1114
|
-
project_id : str
|
1115
|
-
The project OCID
|
1116
|
-
|
1117
|
-
Returns
|
1118
|
-
-------
|
1119
|
-
DataScienceJob
|
1120
|
-
The DataScienceJob instance (self)
|
1121
|
-
|
1122
|
-
"""
|
1123
|
-
return self.set_spec(self.CONST_PROJECT_ID, project_id)
|
1124
|
-
|
1125
|
-
@property
|
1126
|
-
def project_id(self) -> Optional[str]:
|
1127
|
-
"""Project OCID"""
|
1128
|
-
return self.get_spec(self.CONST_PROJECT_ID)
|
1129
|
-
|
1130
|
-
def with_compartment_id(self, compartment_id: str) -> DataScienceJob:
|
1131
|
-
"""Sets the compartment OCID
|
1132
|
-
|
1133
|
-
Parameters
|
1134
|
-
----------
|
1135
|
-
compartment_id : str
|
1136
|
-
The compartment OCID
|
1137
|
-
|
1138
|
-
Returns
|
1139
|
-
-------
|
1140
|
-
DataScienceJob
|
1141
|
-
The DataScienceJob instance (self)
|
1142
|
-
|
1143
|
-
"""
|
1144
|
-
return self.set_spec(self.CONST_COMPARTMENT_ID, compartment_id)
|
1145
|
-
|
1146
|
-
@property
|
1147
|
-
def compartment_id(self) -> Optional[str]:
|
1148
|
-
"""The compartment OCID"""
|
1149
|
-
return self.get_spec(self.CONST_COMPARTMENT_ID)
|
1150
|
-
|
1151
|
-
def with_job_type(self, job_type: str) -> DataScienceJob:
|
1152
|
-
"""Sets the job type
|
1153
|
-
|
1154
|
-
Parameters
|
1155
|
-
----------
|
1156
|
-
job_type : str
|
1157
|
-
Job type as string
|
1158
|
-
|
1159
|
-
Returns
|
1160
|
-
-------
|
1161
|
-
DataScienceJob
|
1162
|
-
The DataScienceJob instance (self)
|
1163
|
-
|
1164
|
-
"""
|
1165
|
-
return self.set_spec(self.CONST_JOB_TYPE, job_type)
|
1166
|
-
|
1167
|
-
@property
|
1168
|
-
def job_type(self) -> Optional[str]:
|
1169
|
-
"""Job type"""
|
1170
|
-
return self.get_spec(self.CONST_JOB_TYPE)
|
1171
|
-
|
1172
|
-
def with_job_infrastructure_type(self, infrastructure_type: str) -> DataScienceJob:
|
1173
|
-
"""Sets the job infrastructure type
|
1174
|
-
|
1175
|
-
Parameters
|
1176
|
-
----------
|
1177
|
-
infrastructure_type : str
|
1178
|
-
Job infrastructure type as string
|
1179
|
-
|
1180
|
-
Returns
|
1181
|
-
-------
|
1182
|
-
DataScienceJob
|
1183
|
-
The DataScienceJob instance (self)
|
1184
|
-
|
1185
|
-
"""
|
1186
|
-
return self.set_spec(self.CONST_JOB_INFRA, infrastructure_type)
|
1187
|
-
|
1188
|
-
@property
|
1189
|
-
def job_infrastructure_type(self) -> Optional[str]:
|
1190
|
-
"""Job infrastructure type"""
|
1191
|
-
return self.get_spec(self.CONST_JOB_INFRA)
|
1192
|
-
|
1193
|
-
def with_shape_name(self, shape_name: str) -> DataScienceJob:
|
1194
|
-
"""Sets the shape name for running the job
|
1195
|
-
|
1196
|
-
Parameters
|
1197
|
-
----------
|
1198
|
-
shape_name : str
|
1199
|
-
Shape name
|
1200
|
-
|
1201
|
-
Returns
|
1202
|
-
-------
|
1203
|
-
DataScienceJob
|
1204
|
-
The DataScienceJob instance (self)
|
1205
|
-
|
1206
|
-
"""
|
1207
|
-
return self.set_spec(self.CONST_SHAPE_NAME, shape_name)
|
1208
|
-
|
1209
|
-
@property
|
1210
|
-
def shape_name(self) -> Optional[str]:
|
1211
|
-
"""Shape name"""
|
1212
|
-
return self.get_spec(self.CONST_SHAPE_NAME)
|
1213
|
-
|
1214
|
-
def with_block_storage_size(self, size_in_gb: int) -> DataScienceJob:
|
1215
|
-
"""Sets the block storage size in GB
|
1216
|
-
|
1217
|
-
Parameters
|
1218
|
-
----------
|
1219
|
-
size_in_gb : int
|
1220
|
-
Block storage size in GB
|
1221
|
-
|
1222
|
-
Returns
|
1223
|
-
-------
|
1224
|
-
DataScienceJob
|
1225
|
-
The DataScienceJob instance (self)
|
1226
|
-
|
1227
|
-
"""
|
1228
|
-
return self.set_spec(self.CONST_BLOCK_STORAGE, size_in_gb)
|
1229
|
-
|
1230
|
-
@property
|
1231
|
-
def block_storage_size(self) -> int:
|
1232
|
-
"""Block storage size for the job"""
|
1233
|
-
return self.get_spec(self.CONST_BLOCK_STORAGE)
|
1234
|
-
|
1235
|
-
def with_subnet_id(self, subnet_id: str) -> DataScienceJob:
|
1236
|
-
"""Sets the subnet ID
|
1237
|
-
|
1238
|
-
Parameters
|
1239
|
-
----------
|
1240
|
-
subnet_id : str
|
1241
|
-
Subnet ID
|
1242
|
-
|
1243
|
-
Returns
|
1244
|
-
-------
|
1245
|
-
DataScienceJob
|
1246
|
-
The DataScienceJob instance (self)
|
1247
|
-
|
1248
|
-
"""
|
1249
|
-
return self.set_spec(self.CONST_SUBNET_ID, subnet_id)
|
1250
|
-
|
1251
|
-
@property
|
1252
|
-
def subnet_id(self) -> str:
|
1253
|
-
"""Subnet ID"""
|
1254
|
-
return self.get_spec(self.CONST_SUBNET_ID)
|
1255
|
-
|
1256
|
-
def with_shape_config_details(
|
1257
|
-
self, memory_in_gbs: float, ocpus: float, **kwargs: Dict[str, Any]
|
1258
|
-
) -> DataScienceJob:
|
1259
|
-
"""Sets the details for the job run shape configuration.
|
1260
|
-
Specify only when a flex shape is selected.
|
1261
|
-
For example `VM.Standard.E3.Flex` allows the memory_in_gbs and cpu count to be specified.
|
1262
|
-
|
1263
|
-
Parameters
|
1264
|
-
----------
|
1265
|
-
memory_in_gbs: float
|
1266
|
-
The size of the memory in GBs.
|
1267
|
-
ocpus: float
|
1268
|
-
The OCPUs count.
|
1269
|
-
kwargs
|
1270
|
-
Additional keyword arguments.
|
1271
|
-
|
1272
|
-
Returns
|
1273
|
-
-------
|
1274
|
-
DataScienceJob
|
1275
|
-
The DataScienceJob instance (self)
|
1276
|
-
"""
|
1277
|
-
return self.set_spec(
|
1278
|
-
self.CONST_SHAPE_CONFIG_DETAILS,
|
1279
|
-
{
|
1280
|
-
self.CONST_OCPUS: ocpus,
|
1281
|
-
self.CONST_MEMORY_IN_GBS: memory_in_gbs,
|
1282
|
-
**kwargs,
|
1283
|
-
},
|
1284
|
-
)
|
1285
|
-
|
1286
|
-
@property
|
1287
|
-
def shape_config_details(self) -> Dict:
|
1288
|
-
"""The details for the job run shape configuration."""
|
1289
|
-
return self.get_spec(self.CONST_SHAPE_CONFIG_DETAILS)
|
1290
|
-
|
1291
|
-
def with_log_id(self, log_id: str) -> DataScienceJob:
|
1292
|
-
"""Sets the log OCID for the data science job.
|
1293
|
-
If log ID is specified, setting the log group ID (with_log_group_id()) is not strictly needed.
|
1294
|
-
ADS will look up the log group ID automatically.
|
1295
|
-
However, this may require additional permission,
|
1296
|
-
and the look up may not be available for newly created log group.
|
1297
|
-
Specifying both log ID (with_log_id()) and log group ID (with_log_group_id())
|
1298
|
-
can avoid such lookup and speed up the job creation.
|
1299
|
-
|
1300
|
-
Parameters
|
1301
|
-
----------
|
1302
|
-
log_id : str
|
1303
|
-
Log resource OCID.
|
1304
|
-
|
1305
|
-
Returns
|
1306
|
-
-------
|
1307
|
-
DataScienceJob
|
1308
|
-
The DataScienceJob instance (self)
|
1309
|
-
"""
|
1310
|
-
return self.set_spec(self.CONST_LOG_ID, log_id)
|
1311
|
-
|
1312
|
-
@property
|
1313
|
-
def log_id(self) -> str:
|
1314
|
-
"""Log OCID for the data science job.
|
1315
|
-
|
1316
|
-
Returns
|
1317
|
-
-------
|
1318
|
-
str
|
1319
|
-
Log OCID
|
1320
|
-
"""
|
1321
|
-
return self.get_spec(self.CONST_LOG_ID)
|
1322
|
-
|
1323
|
-
def with_log_group_id(self, log_group_id: str) -> DataScienceJob:
|
1324
|
-
"""Sets the log group OCID for the data science job.
|
1325
|
-
If log group ID is specified but log ID is not,
|
1326
|
-
a new log resource will be created automatically for each job run to store the logs.
|
1327
|
-
|
1328
|
-
Parameters
|
1329
|
-
----------
|
1330
|
-
log_group_id : str
|
1331
|
-
Log Group OCID
|
1332
|
-
|
1333
|
-
Returns
|
1334
|
-
-------
|
1335
|
-
DataScienceJob
|
1336
|
-
The DataScienceJob instance (self)
|
1337
|
-
"""
|
1338
|
-
return self.set_spec(self.CONST_LOG_GROUP_ID, log_group_id)
|
1339
|
-
|
1340
|
-
@property
|
1341
|
-
def log_group_id(self) -> str:
|
1342
|
-
"""Log group OCID of the data science job
|
1343
|
-
|
1344
|
-
Returns
|
1345
|
-
-------
|
1346
|
-
str
|
1347
|
-
Log group OCID
|
1348
|
-
"""
|
1349
|
-
return self.get_spec(self.CONST_LOG_GROUP_ID)
|
1350
|
-
|
1351
|
-
def with_storage_mount(self, *storage_mount: List[dict]) -> DataScienceJob:
|
1352
|
-
"""Sets the file systems to be mounted for the data science job.
|
1353
|
-
A maximum number of 5 file systems are allowed to be mounted for a single data science job.
|
1354
|
-
|
1355
|
-
Parameters
|
1356
|
-
----------
|
1357
|
-
storage_mount : List[dict]
|
1358
|
-
A list of file systems to be mounted.
|
1359
|
-
|
1360
|
-
Returns
|
1361
|
-
-------
|
1362
|
-
DataScienceJob
|
1363
|
-
The DataScienceJob instance (self)
|
1364
|
-
"""
|
1365
|
-
storage_mount_list = []
|
1366
|
-
for item in storage_mount:
|
1367
|
-
if not isinstance(item, dict):
|
1368
|
-
raise ValueError(
|
1369
|
-
"Parameter `storage_mount` should be a list of dictionaries."
|
1370
|
-
)
|
1371
|
-
storage_mount_list.append(item)
|
1372
|
-
if len(storage_mount_list) > MAXIMUM_MOUNT_COUNT:
|
1373
|
-
raise ValueError(
|
1374
|
-
f"A maximum number of {MAXIMUM_MOUNT_COUNT} file systems are allowed to be mounted at this time for a job."
|
1375
|
-
)
|
1376
|
-
return self.set_spec(self.CONST_STORAGE_MOUNT, storage_mount_list)
|
1377
|
-
|
1378
|
-
@property
|
1379
|
-
def storage_mount(self) -> List[dict]:
|
1380
|
-
"""Files systems that have been mounted for the data science job
|
1381
|
-
|
1382
|
-
Returns
|
1383
|
-
-------
|
1384
|
-
list
|
1385
|
-
A list of file systems that have been mounted
|
1386
|
-
"""
|
1387
|
-
return self.get_spec(self.CONST_STORAGE_MOUNT, [])
|
1388
|
-
|
1389
|
-
def with_freeform_tag(self, **kwargs) -> DataScienceJob:
|
1390
|
-
"""Sets freeform tags
|
1391
|
-
|
1392
|
-
Returns
|
1393
|
-
-------
|
1394
|
-
DataScienceJob
|
1395
|
-
The DataScienceJob instance (self)
|
1396
|
-
"""
|
1397
|
-
return self.set_spec(self.CONST_FREEFORM_TAGS, kwargs)
|
1398
|
-
|
1399
|
-
def with_defined_tag(self, **kwargs) -> DataScienceJob:
|
1400
|
-
"""Sets defined tags
|
1401
|
-
|
1402
|
-
Returns
|
1403
|
-
-------
|
1404
|
-
DataScienceJob
|
1405
|
-
The DataScienceJob instance (self)
|
1406
|
-
"""
|
1407
|
-
return self.set_spec(self.CONST_DEFINED_TAGS, kwargs)
|
1408
|
-
|
1409
|
-
@property
|
1410
|
-
def freeform_tags(self) -> dict:
|
1411
|
-
"""Freeform tags"""
|
1412
|
-
return self.get_spec(self.CONST_FREEFORM_TAGS, {})
|
1413
|
-
|
1414
|
-
@property
|
1415
|
-
def defined_tags(self) -> dict:
|
1416
|
-
"""Defined tags"""
|
1417
|
-
return self.get_spec(self.CONST_DEFINED_TAGS, {})
|
1418
|
-
|
1419
|
-
def _prepare_log_config(self) -> dict:
|
1420
|
-
if not self.log_group_id and not self.log_id:
|
1421
|
-
return None
|
1422
|
-
# Look up log group ID if only the log ID is specified
|
1423
|
-
if self.log_id and not self.log_group_id:
|
1424
|
-
try:
|
1425
|
-
log_obj = OCILog.from_ocid(self.log_id)
|
1426
|
-
except ResourceNotFoundError as exc:
|
1427
|
-
raise ResourceNotFoundError(
|
1428
|
-
f"Unable to determine log group ID for Log ({self.log_id})."
|
1429
|
-
" The log resource may not exist or You may not have the required permission."
|
1430
|
-
" Try to avoid this by specifying the log group ID."
|
1431
|
-
) from exc
|
1432
|
-
self.with_log_group_id(log_obj.log_group_id)
|
1433
|
-
|
1434
|
-
if self.log_group_id and not self.log_id:
|
1435
|
-
enable_auto_log_creation = True
|
1436
|
-
else:
|
1437
|
-
enable_auto_log_creation = False
|
1438
|
-
|
1439
|
-
log_config = {
|
1440
|
-
"enable_logging": True,
|
1441
|
-
"enable_auto_log_creation": enable_auto_log_creation,
|
1442
|
-
}
|
1443
|
-
if self.log_id:
|
1444
|
-
log_config["log_id"] = self.log_id
|
1445
|
-
|
1446
|
-
if self.log_group_id:
|
1447
|
-
log_config["log_group_id"] = self.log_group_id
|
1448
|
-
return log_config
|
1449
|
-
|
1450
|
-
def _update_from_dsc_model(
|
1451
|
-
self, dsc_job: oci.data_science.models.Job, overwrite: bool = True
|
1452
|
-
) -> DataScienceJob:
|
1453
|
-
"""Update the properties from an OCI data science job model.
|
1454
|
-
|
1455
|
-
Parameters
|
1456
|
-
----------
|
1457
|
-
dsc_job: oci.data_science.models.Job
|
1458
|
-
An OCI data science job model.
|
1459
|
-
|
1460
|
-
overwrite: bool
|
1461
|
-
Whether to overwrite the existing values.
|
1462
|
-
If this is set to False, only the empty/None properties will be updated.
|
1463
|
-
|
1464
|
-
Returns
|
1465
|
-
-------
|
1466
|
-
DataScienceJob
|
1467
|
-
The DataScienceJob instance (self)
|
1468
|
-
"""
|
1469
|
-
sub_level = {
|
1470
|
-
self.CONST_SHAPE_CONFIG_DETAILS: self.shape_config_details_attribute_map
|
1471
|
-
}
|
1472
|
-
self.dsc_job = dsc_job
|
1473
|
-
|
1474
|
-
for infra_attr, dsc_attr in self.payload_attribute_map.items():
|
1475
|
-
value = get_value(dsc_job, dsc_attr)
|
1476
|
-
if not value:
|
1477
|
-
continue
|
1478
|
-
if infra_attr not in sub_level:
|
1479
|
-
if overwrite or not self._spec.get(infra_attr):
|
1480
|
-
self._spec[infra_attr] = value
|
1481
|
-
else:
|
1482
|
-
sub_spec = self._spec.get(infra_attr, {})
|
1483
|
-
self._spec[infra_attr] = {}
|
1484
|
-
for sub_infra_attr, sub_dsc_attr in sub_level[infra_attr].items():
|
1485
|
-
sub_value = get_value(value, sub_dsc_attr)
|
1486
|
-
if not sub_value:
|
1487
|
-
continue
|
1488
|
-
if overwrite or not sub_spec.get(sub_infra_attr):
|
1489
|
-
sub_spec[sub_infra_attr] = sub_value
|
1490
|
-
if sub_spec:
|
1491
|
-
self._spec[infra_attr] = sub_spec
|
1492
|
-
|
1493
|
-
self._update_storage_mount_from_dsc_model(dsc_job, overwrite)
|
1494
|
-
return self
|
1495
|
-
|
1496
|
-
def _update_storage_mount_from_dsc_model(
|
1497
|
-
self, dsc_job: oci.data_science.models.Job, overwrite: bool = True
|
1498
|
-
) -> DataScienceJob:
|
1499
|
-
"""Update the mount storage properties from an OCI data science job model.
|
1500
|
-
|
1501
|
-
Parameters
|
1502
|
-
----------
|
1503
|
-
dsc_job: oci.data_science.models.Job
|
1504
|
-
An OCI data science job model.
|
1505
|
-
|
1506
|
-
overwrite: bool
|
1507
|
-
Whether to overwrite the existing values.
|
1508
|
-
If this is set to False, only the empty/None properties will be updated.
|
1509
|
-
|
1510
|
-
Returns
|
1511
|
-
-------
|
1512
|
-
DataScienceJob
|
1513
|
-
The DataScienceJob instance (self)
|
1514
|
-
"""
|
1515
|
-
storage_mount_list = get_value(
|
1516
|
-
dsc_job, "job_storage_mount_configuration_details_list"
|
1517
|
-
)
|
1518
|
-
if storage_mount_list:
|
1519
|
-
storage_mount = [
|
1520
|
-
self.storage_mount_type_dict[
|
1521
|
-
file_system.storage_type
|
1522
|
-
].update_from_dsc_model(file_system)
|
1523
|
-
for file_system in storage_mount_list
|
1524
|
-
if file_system.storage_type in self.storage_mount_type_dict
|
1525
|
-
]
|
1526
|
-
if overwrite or not self.get_spec(self.CONST_STORAGE_MOUNT):
|
1527
|
-
self.set_spec(self.CONST_STORAGE_MOUNT, storage_mount)
|
1528
|
-
return self
|
1529
|
-
|
1530
|
-
def _update_job_infra(self, dsc_job: DSCJob) -> DataScienceJob:
|
1531
|
-
"""Updates the job infrastructure from a DSCJob object.
|
1532
|
-
|
1533
|
-
Parameters
|
1534
|
-
----------
|
1535
|
-
dsc_job : DSCJob
|
1536
|
-
A DSCJob instance.
|
1537
|
-
|
1538
|
-
Returns
|
1539
|
-
-------
|
1540
|
-
DataScienceJob
|
1541
|
-
The DataScienceJob instance (self)
|
1542
|
-
|
1543
|
-
"""
|
1544
|
-
attr_map = {
|
1545
|
-
self.CONST_JOB_INFRA: "jobInfrastructureType",
|
1546
|
-
self.CONST_SHAPE_NAME: "shapeName",
|
1547
|
-
self.CONST_SUBNET_ID: "subnetId",
|
1548
|
-
self.CONST_BLOCK_STORAGE: "blockStorageSizeInGBs",
|
1549
|
-
self.CONST_SHAPE_CONFIG_DETAILS: "jobShapeConfigDetails",
|
1550
|
-
}
|
1551
|
-
|
1552
|
-
if not dsc_job.job_infrastructure_configuration_details:
|
1553
|
-
dsc_job.job_infrastructure_configuration_details = {}
|
1554
|
-
|
1555
|
-
for snake_attr, camel_attr in attr_map.items():
|
1556
|
-
value = self.get_spec(snake_attr)
|
1557
|
-
if value:
|
1558
|
-
dsc_job.job_infrastructure_configuration_details[camel_attr] = value
|
1559
|
-
|
1560
|
-
if not dsc_job.job_infrastructure_configuration_details.get(
|
1561
|
-
"shapeName", ""
|
1562
|
-
).endswith("Flex") and dsc_job.job_infrastructure_configuration_details.get(
|
1563
|
-
"jobShapeConfigDetails"
|
1564
|
-
):
|
1565
|
-
raise ValueError(
|
1566
|
-
"Shape config is not required for non flex shape from user end."
|
1567
|
-
)
|
1568
|
-
|
1569
|
-
if dsc_job.job_infrastructure_configuration_details.get("subnetId"):
|
1570
|
-
dsc_job.job_infrastructure_configuration_details[
|
1571
|
-
"jobInfrastructureType"
|
1572
|
-
] = JobInfrastructureConfigurationDetails.JOB_INFRASTRUCTURE_TYPE_STANDALONE
|
1573
|
-
|
1574
|
-
if self.storage_mount:
|
1575
|
-
if not hasattr(oci.data_science.models, "StorageMountConfigurationDetails"):
|
1576
|
-
raise EnvironmentError(
|
1577
|
-
"Storage mount hasn't been supported in the current OCI SDK installed."
|
1578
|
-
)
|
1579
|
-
dsc_job.job_storage_mount_configuration_details_list = [
|
1580
|
-
DSCFileSystemManager.initialize(file_system)
|
1581
|
-
for file_system in self.storage_mount
|
1582
|
-
]
|
1583
|
-
return self
|
1584
|
-
|
1585
|
-
def build(self) -> DataScienceJob:
|
1586
|
-
self.dsc_job.load_defaults()
|
1587
|
-
|
1588
|
-
try:
|
1589
|
-
self.dsc_job.load_defaults()
|
1590
|
-
except Exception:
|
1591
|
-
logger.exception("Failed to load default properties.")
|
1592
|
-
|
1593
|
-
self._update_from_dsc_model(self.dsc_job, overwrite=False)
|
1594
|
-
return self
|
1595
|
-
|
1596
|
-
def init(self, **kwargs) -> DataScienceJob:
|
1597
|
-
"""Initializes a starter specification for the DataScienceJob.
|
1598
|
-
|
1599
|
-
Returns
|
1600
|
-
-------
|
1601
|
-
DataScienceJob
|
1602
|
-
The DataScienceJob instance (self)
|
1603
|
-
"""
|
1604
|
-
return (
|
1605
|
-
self.build()
|
1606
|
-
.with_compartment_id(self.compartment_id or "{Provide a compartment OCID}")
|
1607
|
-
.with_project_id(self.project_id or "{Provide a project OCID}")
|
1608
|
-
.with_subnet_id(
|
1609
|
-
self.subnet_id
|
1610
|
-
or "{Provide a subnet OCID or remove this field if you use a default networking}"
|
1611
|
-
)
|
1612
|
-
)
|
1613
|
-
|
1614
|
-
def create(self, runtime, **kwargs) -> DataScienceJob:
|
1615
|
-
"""Creates a job with runtime.
|
1616
|
-
|
1617
|
-
Parameters
|
1618
|
-
----------
|
1619
|
-
runtime : Runtime
|
1620
|
-
An ADS job runtime.
|
1621
|
-
|
1622
|
-
Returns
|
1623
|
-
-------
|
1624
|
-
DataScienceJob
|
1625
|
-
The DataScienceJob instance (self)
|
1626
|
-
|
1627
|
-
"""
|
1628
|
-
if not runtime:
|
1629
|
-
raise ValueError("Set a valid runtime.")
|
1630
|
-
payload = DataScienceJobRuntimeManager(self).translate(runtime)
|
1631
|
-
# Add infra properties to payload
|
1632
|
-
for attr in ["project_id", "compartment_id"]:
|
1633
|
-
if getattr(self, attr):
|
1634
|
-
payload[attr] = getattr(self, attr)
|
1635
|
-
|
1636
|
-
if self.name:
|
1637
|
-
display_name = Template(self.name).safe_substitute(runtime.envs)
|
1638
|
-
elif isinstance(runtime, GitPythonRuntime) or isinstance(
|
1639
|
-
runtime, ContainerRuntime
|
1640
|
-
):
|
1641
|
-
display_name = utils.get_random_name_for_resource()
|
1642
|
-
else:
|
1643
|
-
display_name = None
|
1644
|
-
|
1645
|
-
payload["display_name"] = display_name
|
1646
|
-
payload["job_log_configuration_details"] = self._prepare_log_config()
|
1647
|
-
if not payload.get("freeform_tags"):
|
1648
|
-
payload["freeform_tags"] = self.freeform_tags
|
1649
|
-
if not payload.get("defined_tags"):
|
1650
|
-
payload["defined_tags"] = self.defined_tags
|
1651
|
-
|
1652
|
-
self.dsc_job = DSCJob(**payload, **self.auth)
|
1653
|
-
# Set Job infra to user values after DSCJob initialized the defaults
|
1654
|
-
self._update_job_infra(self.dsc_job)
|
1655
|
-
self.dsc_job.create()
|
1656
|
-
# Update the model from infra after job creation.
|
1657
|
-
self._update_from_dsc_model(self.dsc_job)
|
1658
|
-
return self
|
1659
|
-
|
1660
|
-
def run(
|
1661
|
-
self,
|
1662
|
-
name=None,
|
1663
|
-
args=None,
|
1664
|
-
env_var=None,
|
1665
|
-
freeform_tags=None,
|
1666
|
-
defined_tags=None,
|
1667
|
-
wait=False,
|
1668
|
-
**kwargs,
|
1669
|
-
) -> DataScienceJobRun:
|
1670
|
-
"""Runs a job on OCI Data Science job
|
1671
|
-
|
1672
|
-
Parameters
|
1673
|
-
----------
|
1674
|
-
name : str, optional
|
1675
|
-
The name of the job run, by default None.
|
1676
|
-
args : str, optional
|
1677
|
-
Command line arguments for the job run, by default None.
|
1678
|
-
env_var : dict, optional
|
1679
|
-
Environment variable for the job run, by default None
|
1680
|
-
freeform_tags : dict, optional
|
1681
|
-
Freeform tags for the job run, by default None
|
1682
|
-
defined_tags : dict, optional
|
1683
|
-
Defined tags for the job run, by default None
|
1684
|
-
wait : bool, optional
|
1685
|
-
Indicate if this method should wait for the run to finish before it returns, by default False.
|
1686
|
-
kwargs
|
1687
|
-
additional keyword arguments
|
1688
|
-
|
1689
|
-
Returns
|
1690
|
-
-------
|
1691
|
-
DataScienceJobRun
|
1692
|
-
A Data Science Job Run instance.
|
1693
|
-
|
1694
|
-
"""
|
1695
|
-
# Runtime in the infrastructure will be None if the job is not created.
|
1696
|
-
if not self.runtime:
|
1697
|
-
raise RuntimeError(
|
1698
|
-
"Job is not created. Call create() to create the job first."
|
1699
|
-
)
|
1700
|
-
|
1701
|
-
if not freeform_tags:
|
1702
|
-
freeform_tags = {}
|
1703
|
-
runtime_freeform_tags = self.runtime.freeform_tags
|
1704
|
-
if runtime_freeform_tags:
|
1705
|
-
freeform_tags.update(runtime_freeform_tags)
|
1706
|
-
|
1707
|
-
if not defined_tags:
|
1708
|
-
defined_tags = {}
|
1709
|
-
runtime_defined_tags = self.runtime.defined_tags
|
1710
|
-
if runtime_defined_tags:
|
1711
|
-
defined_tags.update(runtime_defined_tags)
|
1712
|
-
|
1713
|
-
if name:
|
1714
|
-
envs = self.runtime.envs
|
1715
|
-
if env_var:
|
1716
|
-
envs.update(env_var)
|
1717
|
-
name = Template(name).safe_substitute(envs)
|
1718
|
-
|
1719
|
-
kwargs = dict(
|
1720
|
-
display_name=name,
|
1721
|
-
command_line_arguments=args,
|
1722
|
-
environment_variables=env_var,
|
1723
|
-
freeform_tags=freeform_tags,
|
1724
|
-
defined_tags=defined_tags,
|
1725
|
-
wait=wait,
|
1726
|
-
**kwargs,
|
1727
|
-
)
|
1728
|
-
# A Runtime class may define customized run() method.
|
1729
|
-
# Use the customized method if the run() method is defined by the runtime.
|
1730
|
-
# Otherwise, use the default run() method defined in this class.
|
1731
|
-
if hasattr(self.runtime, "run"):
|
1732
|
-
return self.runtime.run(self.dsc_job, **kwargs)
|
1733
|
-
return self.dsc_job.run(**kwargs)
|
1734
|
-
|
1735
|
-
def delete(self) -> None:
|
1736
|
-
"""Deletes a job"""
|
1737
|
-
self.dsc_job.delete()
|
1738
|
-
|
1739
|
-
def run_list(self, **kwargs) -> List[DataScienceJobRun]:
|
1740
|
-
"""Gets a list of job runs.
|
1741
|
-
|
1742
|
-
Parameters
|
1743
|
-
----------
|
1744
|
-
**kwargs :
|
1745
|
-
Keyword arguments for filtering the job runs.
|
1746
|
-
These arguments will be passed to OCI API.
|
1747
|
-
|
1748
|
-
|
1749
|
-
Returns
|
1750
|
-
-------
|
1751
|
-
List[DSCJobRun]:
|
1752
|
-
A list of job runs.
|
1753
|
-
|
1754
|
-
"""
|
1755
|
-
return self.dsc_job.run_list(**kwargs)
|
1756
|
-
|
1757
|
-
@classmethod
|
1758
|
-
def from_dsc_job(cls, dsc_job: DSCJob) -> DataScienceJob:
|
1759
|
-
"""Initialize a DataScienceJob instance from a DSCJob
|
1760
|
-
|
1761
|
-
Parameters
|
1762
|
-
----------
|
1763
|
-
dsc_job : DSCJob
|
1764
|
-
An instance of DSCJob
|
1765
|
-
|
1766
|
-
Returns
|
1767
|
-
-------
|
1768
|
-
DataScienceJob
|
1769
|
-
An instance of DataScienceJob
|
1770
|
-
|
1771
|
-
"""
|
1772
|
-
instance = cls()
|
1773
|
-
instance._update_from_dsc_model(dsc_job)
|
1774
|
-
instance.runtime = DataScienceJobRuntimeManager(instance).extract(dsc_job)
|
1775
|
-
return instance
|
1776
|
-
|
1777
|
-
@class_or_instance_method
|
1778
|
-
def from_id(cls, job_id: str) -> DataScienceJob:
|
1779
|
-
"""Gets an existing job using Job OCID
|
1780
|
-
|
1781
|
-
Parameters
|
1782
|
-
----------
|
1783
|
-
job_id : str
|
1784
|
-
Job OCID
|
1785
|
-
|
1786
|
-
|
1787
|
-
Returns
|
1788
|
-
-------
|
1789
|
-
DataScienceJob
|
1790
|
-
An instance of DataScienceJob
|
1791
|
-
|
1792
|
-
"""
|
1793
|
-
return cls.from_dsc_job(DSCJob(**cls.auth).from_ocid(job_id))
|
1794
|
-
|
1795
|
-
@class_or_instance_method
|
1796
|
-
def from_dict(cls, obj_dict: dict):
|
1797
|
-
"""Initialize the object from a Python dictionary"""
|
1798
|
-
if inspect.isclass(cls):
|
1799
|
-
job_cls = cls
|
1800
|
-
else:
|
1801
|
-
job_cls = cls.__class__
|
1802
|
-
return job_cls(spec=obj_dict.get("spec"), **cls.auth)
|
1803
|
-
|
1804
|
-
@class_or_instance_method
|
1805
|
-
def list_jobs(cls, compartment_id: str = None, **kwargs) -> List[DataScienceJob]:
|
1806
|
-
"""Lists all jobs in a compartment.
|
1807
|
-
|
1808
|
-
Parameters
|
1809
|
-
----------
|
1810
|
-
compartment_id : str, optional
|
1811
|
-
The compartment ID for running the jobs, by default None.
|
1812
|
-
This is optional in a OCI Data Science notebook session.
|
1813
|
-
If this is not specified, the compartment ID of the notebook session will be used.
|
1814
|
-
**kwargs :
|
1815
|
-
Keyword arguments to be passed into OCI list_jobs API for filtering the jobs.
|
1816
|
-
|
1817
|
-
Returns
|
1818
|
-
-------
|
1819
|
-
List[DataScienceJob]
|
1820
|
-
A list of DataScienceJob object.
|
1821
|
-
|
1822
|
-
"""
|
1823
|
-
return [
|
1824
|
-
cls.from_dsc_job(job)
|
1825
|
-
for job in DSCJob(**cls.auth).list_resource(compartment_id, **kwargs)
|
1826
|
-
]
|
1827
|
-
|
1828
|
-
@class_or_instance_method
|
1829
|
-
def instance_shapes(cls, compartment_id: str = None, **kwargs) -> list:
|
1830
|
-
"""Lists the supported shapes for running jobs in a compartment.
|
1831
|
-
|
1832
|
-
Parameters
|
1833
|
-
----------
|
1834
|
-
compartment_id : str, optional
|
1835
|
-
The compartment ID for running the jobs, by default None.
|
1836
|
-
This is optional in a OCI Data Science notebook session.
|
1837
|
-
If this is not specified, the compartment ID of the notebook session will be used.
|
1838
|
-
|
1839
|
-
Returns
|
1840
|
-
-------
|
1841
|
-
list
|
1842
|
-
A list of oci.data_science.models.JobShapeSummary objects
|
1843
|
-
containing the information of the supported shapes.
|
1844
|
-
|
1845
|
-
Examples
|
1846
|
-
--------
|
1847
|
-
To get a list of shape names::
|
1848
|
-
|
1849
|
-
shapes = DataScienceJob.fast_launch_shapes(
|
1850
|
-
compartment_id=os.environ["PROJECT_COMPARTMENT_OCID"]
|
1851
|
-
)
|
1852
|
-
shape_names = [shape.name for shape in shapes]
|
1853
|
-
|
1854
|
-
"""
|
1855
|
-
shapes = oci.pagination.list_call_get_all_results(
|
1856
|
-
DSCJob(**cls.auth).init_client().list_job_shapes,
|
1857
|
-
DSCJob.check_compartment_id(compartment_id),
|
1858
|
-
**kwargs,
|
1859
|
-
).data
|
1860
|
-
return shapes
|
1861
|
-
|
1862
|
-
@class_or_instance_method
|
1863
|
-
def fast_launch_shapes(cls, compartment_id: str = None, **kwargs) -> list:
|
1864
|
-
"""Lists the supported fast launch shapes for running jobs in a compartment.
|
1865
|
-
|
1866
|
-
Parameters
|
1867
|
-
----------
|
1868
|
-
compartment_id : str, optional
|
1869
|
-
The compartment ID for running the jobs, by default None.
|
1870
|
-
This is optional in a OCI Data Science notebook session.
|
1871
|
-
If this is not specified, the compartment ID of the notebook session will be used.
|
1872
|
-
|
1873
|
-
Returns
|
1874
|
-
-------
|
1875
|
-
list
|
1876
|
-
A list of oci.data_science.models.FastLaunchJobConfigSummary objects
|
1877
|
-
containing the information of the supported shapes.
|
1878
|
-
|
1879
|
-
Examples
|
1880
|
-
--------
|
1881
|
-
To get a list of shape names::
|
1882
|
-
|
1883
|
-
shapes = DataScienceJob.fast_launch_shapes(
|
1884
|
-
compartment_id=os.environ["PROJECT_COMPARTMENT_OCID"]
|
1885
|
-
)
|
1886
|
-
shape_names = [shape.shape_name for shape in shapes]
|
1887
|
-
|
1888
|
-
"""
|
1889
|
-
shapes = oci.pagination.list_call_get_all_results(
|
1890
|
-
DSCJob(**cls.auth).init_client().list_fast_launch_job_configs,
|
1891
|
-
DSCJob.check_compartment_id(compartment_id),
|
1892
|
-
**kwargs,
|
1893
|
-
).data
|
1894
|
-
return shapes
|