mlrun 1.8.0rc61__py3-none-any.whl → 1.9.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- mlrun/__main__.py +13 -3
- mlrun/common/constants.py +7 -0
- mlrun/config.py +4 -4
- mlrun/datastore/datastore_profile.py +3 -3
- mlrun/db/httpdb.py +4 -2
- mlrun/frameworks/tf_keras/__init__.py +4 -4
- mlrun/frameworks/tf_keras/callbacks/logging_callback.py +23 -20
- mlrun/frameworks/tf_keras/model_handler.py +69 -9
- mlrun/frameworks/tf_keras/utils.py +12 -1
- mlrun/launcher/client.py +1 -1
- mlrun/model_monitoring/db/tsdb/tdengine/tdengine_connection.py +118 -50
- mlrun/model_monitoring/db/tsdb/tdengine/tdengine_connector.py +12 -13
- mlrun/projects/project.py +4 -1
- mlrun/runtimes/base.py +1 -1
- mlrun/runtimes/utils.py +24 -7
- mlrun/serving/v2_serving.py +9 -8
- mlrun/utils/helpers.py +72 -22
- mlrun/utils/notifications/notification/slack.py +5 -1
- mlrun/utils/notifications/notification_pusher.py +2 -1
- mlrun/utils/version/version.json +2 -2
- {mlrun-1.8.0rc61.dist-info → mlrun-1.9.0.dist-info}/METADATA +9 -8
- {mlrun-1.8.0rc61.dist-info → mlrun-1.9.0.dist-info}/RECORD +26 -26
- {mlrun-1.8.0rc61.dist-info → mlrun-1.9.0.dist-info}/WHEEL +1 -1
- {mlrun-1.8.0rc61.dist-info → mlrun-1.9.0.dist-info}/entry_points.txt +0 -0
- {mlrun-1.8.0rc61.dist-info → mlrun-1.9.0.dist-info}/licenses/LICENSE +0 -0
- {mlrun-1.8.0rc61.dist-info → mlrun-1.9.0.dist-info}/top_level.txt +0 -0
mlrun/__main__.py
CHANGED
@@ -13,6 +13,8 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
+import functools
+import importlib.metadata
 import json
 import pathlib
 import socket
@@ -25,12 +27,14 @@ from pprint import pprint
 import click
 import dotenv
 import pandas as pd
+import semver
 import yaml
 from tabulate import tabulate
 
 import mlrun
 import mlrun.common.constants as mlrun_constants
 import mlrun.common.schemas
+import mlrun.platforms
 import mlrun.utils.helpers
 from mlrun.common.helpers import parse_versioned_object_uri
 from mlrun.runtimes.mounts import auto_mount as auto_mount_modifier
@@ -63,16 +67,22 @@ from .utils.version import Version
 pd.set_option("mode.chained_assignment", None)
 
 
-def validate_base_argument(ctx, param, value):
+def validate_base_argument(ctx: click.Context, param: click.Parameter, value: str):
+    # click 8.2 expects the context to be passed to make_metavar
+    if semver.VersionInfo.parse(
+        importlib.metadata.version("click")
+    ) < semver.VersionInfo.parse("8.2.0"):
+        metavar_func = functools.partial(param.make_metavar)
+    else:
+        metavar_func = functools.partial(param.make_metavar, ctx)
     if value and value.startswith("-"):
         raise click.BadParameter(
             f"{param.human_readable_name} ({value}) cannot start with '-', ensure the command options are typed "
             f"correctly. Preferably use '--' to separate options and arguments "
-            f"e.g. 'mlrun run --option1 --option2 -- {
+            f"e.g. 'mlrun run --option1 --option2 -- {metavar_func()} [--arg1|arg1] [--arg2|arg2]'",
            ctx=ctx,
            param=param,
        )
-
     return value
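For readers outside the diff: the change above guards against click 8.2's new `make_metavar(ctx)` signature. A minimal standalone sketch of the same version gate, assuming only that `click` and `semver` are installed (the helper name below is illustrative, not part of mlrun):

import functools
import importlib.metadata

import click
import semver


def metavar_for(param: click.Parameter, ctx: click.Context) -> str:
    # click >= 8.2.0 requires the context to be passed to make_metavar;
    # older releases accept no argument, so branch on the installed version.
    installed = semver.VersionInfo.parse(importlib.metadata.version("click"))
    if installed < semver.VersionInfo.parse("8.2.0"):
        metavar_func = functools.partial(param.make_metavar)
    else:
        metavar_func = functools.partial(param.make_metavar, ctx)
    return metavar_func()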
mlrun/common/constants.py
CHANGED
@@ -90,6 +90,13 @@ class MLRunInternalLabels:
         if not key.startswith("__") and isinstance(value, str)
     ]
 
+    @staticmethod
+    def default_run_labels_to_enrich():
+        return [
+            MLRunInternalLabels.owner,
+            MLRunInternalLabels.v3io_user,
+        ]
+
 
 class DeployStatusTextKind(mlrun.common.types.StrEnum):
     logs = "logs"
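The new `default_run_labels_to_enrich` helper gives callers a single source for the labels enriched by default (owner and v3io_user); `mlrun/launcher/client.py` later in this diff passes an explicit subset instead. A hypothetical standalone mirror of that pattern, with assumed label values and enrichment sources (mlrun's real `enrich_run_labels` in `mlrun/runtimes/utils.py` may differ):

import getpass
import os
from typing import Optional


class MLRunInternalLabels:
    # Placeholder label values for the sketch only.
    owner = "owner"
    v3io_user = "v3io_user"

    @staticmethod
    def default_run_labels_to_enrich() -> list:
        return [MLRunInternalLabels.owner, MLRunInternalLabels.v3io_user]


def enrich_run_labels(labels: dict, labels_to_enrich: Optional[list] = None) -> dict:
    # Assumed enrichment sources; only fill labels that are still missing.
    sources = {
        MLRunInternalLabels.owner: os.environ.get("V3IO_USERNAME") or getpass.getuser(),
        MLRunInternalLabels.v3io_user: os.environ.get("V3IO_USERNAME"),
    }
    for label in labels_to_enrich or MLRunInternalLabels.default_run_labels_to_enrich():
        value = sources.get(label)
        if value and label not in labels:
            labels[label] = value
    return labels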
mlrun/config.py
CHANGED
@@ -64,7 +64,7 @@ default_config = {
     # url to nuclio dashboard api (can be with user & token, e.g. https://username:password@dashboard-url.com)
     "nuclio_dashboard_url": "",
     "nuclio_version": "",
-    "default_nuclio_runtime": "python:3.
+    "default_nuclio_runtime": "python:3.11",
     "nest_asyncio_enabled": "",  # enable import of nest_asyncio for corner cases with old jupyter, set "1"
     "ui_url": "",  # remote/external mlrun UI url (for hyperlinks) (This is deprecated in favor of the ui block)
     "remote_host": "",
@@ -79,7 +79,7 @@ default_config = {
     # comma separated list of images that are in the specified images_registry, and therefore will be enriched with this
     # registry when used. default to mlrun/* which means any image which is of the mlrun repository (mlrun/mlrun,
     # mlrun/ml-base, etc...)
-    "images_to_enrich_registry": "^mlrun
+    "images_to_enrich_registry": "^mlrun/*,^python:3.(9|11)$",
     "kfp_url": "",
     "kfp_ttl": "14400",  # KFP ttl in sec, after that completed PODs will be deleted
     "kfp_image": "mlrun/mlrun-kfp",  # image to use for KFP runner
@@ -286,7 +286,7 @@ default_config = {
         "remote": "mlrun/mlrun",
         "dask": "mlrun/ml-base",
         "mpijob": "mlrun/mlrun",
-        "application": "python
+        "application": "python",
     },
     # see enrich_function_preemption_spec for more info,
     # and mlrun.common.schemas.function.PreemptionModes for available options
@@ -482,7 +482,7 @@ default_config = {
     "project_owners_cache_ttl": "30 seconds",
     # access key to be used when the leader is iguazio and polling is done from it
     "iguazio_access_key": "",
-    "iguazio_list_projects_default_page_size":
+    "iguazio_list_projects_default_page_size": 200,
     "iguazio_client_job_cache_ttl": "20 minutes",
     "nuclio_project_deletion_verification_timeout": "300 seconds",
     "nuclio_project_deletion_verification_interval": "5 seconds",
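The `images_to_enrich_registry` default is a comma-separated list of regular expressions; an image that matches any of them gets the configured registry prefixed. A rough illustration of how that matching behaves (mlrun's actual enrichment code lives elsewhere and is more involved):

import re


def matches_enrich_patterns(image: str, images_to_enrich_registry: str) -> bool:
    # Split the comma-separated regex list and anchor-match each pattern.
    patterns = [p for p in images_to_enrich_registry.split(",") if p]
    return any(re.match(pattern, image) for pattern in patterns)


default = "^mlrun/*,^python:3.(9|11)$"
assert matches_enrich_patterns("mlrun/mlrun:1.9.0", default)
assert matches_enrich_patterns("python:3.11", default)
assert not matches_enrich_patterns("python:3.10", default)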
mlrun/datastore/datastore_profile.py
CHANGED
@@ -255,7 +255,7 @@ class DatastoreProfileS3(DatastoreProfile):
     def check_bucket(cls, v):
         if not v:
             warnings.warn(
-                "The 'bucket' attribute will be mandatory starting from version 1.
+                "The 'bucket' attribute will be mandatory starting from version 1.10",
                 FutureWarning,
                 stacklevel=2,
             )
@@ -360,7 +360,7 @@ class DatastoreProfileGCS(DatastoreProfile):
     def check_bucket(cls, v):
         if not v:
             warnings.warn(
-                "The 'bucket' attribute will be mandatory starting from version 1.
+                "The 'bucket' attribute will be mandatory starting from version 1.10",
                 FutureWarning,
                 stacklevel=2,
             )
@@ -417,7 +417,7 @@ class DatastoreProfileAzureBlob(DatastoreProfile):
     def check_container(cls, v):
         if not v:
             warnings.warn(
-                "The 'container' attribute will be mandatory starting from version 1.
+                "The 'container' attribute will be mandatory starting from version 1.10",
                 FutureWarning,
                 stacklevel=2,
             )
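All three profile validators above follow the same deprecation pattern: accept the empty value for now, but emit a FutureWarning announcing that the attribute becomes mandatory in 1.10. A standalone sketch of the pattern and how a test could assert on it (the plain function here is illustrative, not the pydantic validator itself):

import warnings


def check_bucket(v: str) -> str:
    # Warn now, keep accepting an empty value until it becomes mandatory.
    if not v:
        warnings.warn(
            "The 'bucket' attribute will be mandatory starting from version 1.10",
            FutureWarning,
            stacklevel=2,
        )
    return v


with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter("always")
    check_bucket("")
    assert any(issubclass(w.category, FutureWarning) for w in caught)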
mlrun/db/httpdb.py
CHANGED
@@ -21,7 +21,7 @@ import typing
 import warnings
 from copy import deepcopy
 from datetime import datetime, timedelta
-from os import path, remove
+from os import environ, path, remove
 from typing import Literal, Optional, Union
 from urllib.parse import urlparse
 
@@ -129,7 +129,9 @@ class HTTPRunDB(RunDBInterface):
         self._wait_for_background_task_terminal_state_retry_interval = 3
         self._wait_for_project_deletion_interval = 3
         self.client_version = version.Version().get()["version"]
-        self.python_version =
+        self.python_version = environ.get("MLRUN_PYTHON_VERSION") or str(
+            version.Version().get_python_version()
+        )
 
         self._enrich_and_validate(url)
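The client now lets an MLRUN_PYTHON_VERSION environment variable override the Python version it reports to the server, falling back to the detected interpreter version. A minimal equivalent of that fallback, using platform.python_version() in place of mlrun's internal version.Version().get_python_version():

import platform
from os import environ


def resolve_client_python_version() -> str:
    # Prefer the explicit override, otherwise report the running interpreter.
    return environ.get("MLRUN_PYTHON_VERSION") or platform.python_version()


print(resolve_client_python_version())  # e.g. "3.11.9"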
mlrun/frameworks/tf_keras/__init__.py
CHANGED
@@ -14,7 +14,7 @@
 
 from typing import Any, Optional, Union
 
-
+import tensorflow as tf
 
 import mlrun
 import mlrun.common.constants as mlrun_constants
@@ -27,11 +27,11 @@ from .utils import TFKerasTypes, TFKerasUtils
 
 
 def apply_mlrun(
-    model: keras.Model = None,
+    model: tf.keras.Model = None,
     model_name: Optional[str] = None,
     tag: str = "",
     model_path: Optional[str] = None,
-    model_format: str =
+    model_format: Optional[str] = None,
     save_traces: bool = False,
     modules_map: Optional[Union[dict[str, Union[None, str, list[str]]], str]] = None,
     custom_objects_map: Optional[Union[dict[str, Union[str, list[str]]], str]] = None,
@@ -54,7 +54,7 @@ def apply_mlrun(
     :param model_path:          The model's store object path. Mandatory for evaluation (to know which model to
                                 update). If model is not provided, it will be loaded from this path.
     :param model_format:        The format to use for saving and loading the model. Should be passed as a
-                                member of the class 'ModelFormats'.
+                                member of the class 'ModelFormats'.
     :param save_traces:         Whether or not to use functions saving (only available for the 'SavedModel'
                                 format) for loading the model later without the custom objects dictionary. Only
                                 from tensorflow version >= 2.4.0. Using this setting will increase the model
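model_format now defaults to None, which defers the choice of serialization format to the handler (a ".keras" file on Keras 3, SavedModel on Keras 2; see the model handler changes below). A hedged usage sketch, assuming a TensorFlow/Keras environment with mlrun installed; the model and names are illustrative:

import tensorflow as tf

from mlrun.frameworks.tf_keras import apply_mlrun

# Toy model just for illustration; builds on both Keras 2 and Keras 3.
model = tf.keras.Sequential([tf.keras.Input(shape=(4,)), tf.keras.layers.Dense(1)])
model.compile(optimizer="adam", loss="mse")

# Leaving model_format unset lets the handler pick a format that matches the
# installed Keras version instead of hard-coding SavedModel.
apply_mlrun(model=model, model_name="demo-model")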
mlrun/frameworks/tf_keras/callbacks/logging_callback.py
CHANGED
@@ -16,14 +16,14 @@ from typing import Callable, Optional, Union
 
 import numpy as np
 import tensorflow as tf
-from tensorflow import
+from tensorflow import keras
 from tensorflow.python.keras.callbacks import Callback
 
 import mlrun
 
 from ..._common import LoggingMode
 from ..._dl_common.loggers import Logger
-from ..utils import TFKerasTypes
+from ..utils import TFKerasTypes, is_keras_3
 
 
 class LoggingCallback(Callback):
@@ -70,7 +70,7 @@ class LoggingCallback(Callback):
                                             {
                                                 "epochs": 7
                                             }
-        :param auto_log:                Whether
+        :param auto_log:                Whether to enable auto logging, trying to track common static and dynamic
                                         hyperparameters.
         """
         super().__init__()
@@ -385,18 +385,24 @@ class LoggingCallback(Callback):
             self._logger.log_context_parameters()
 
         # Add learning rate:
-
-
-
-
-
-
-
+        learning_rate_keys = [
+            "learning_rate",
+            "lr",
+        ]  # "lr" is for backward compatibility in older keras versions.
+        if all(
+            learning_rate_key not in self._dynamic_hyperparameters_keys
+            for learning_rate_key in learning_rate_keys
+        ) and hasattr(self.model, "optimizer"):
+            for learning_rate_key in learning_rate_keys:
+                learning_rate_key_chain = ["optimizer", learning_rate_key]
+                try:
+                    self._get_hyperparameter(key_chain=learning_rate_key_chain)
+                except (KeyError, IndexError, AttributeError, ValueError):
+                    continue
                 self._dynamic_hyperparameters_keys[learning_rate_key] = (
                     learning_rate_key_chain
                 )
-
-            pass
+                break
 
     def _get_hyperparameter(
         self,
@@ -427,7 +433,7 @@ class LoggingCallback(Callback):
                     value = value[key]
                 else:
                     value = getattr(value, key)
-        except KeyError or IndexError as KeyChainError:
+        except KeyError or IndexError or AttributeError as KeyChainError:
             raise KeyChainError(
                 f"Error during getting a hyperparameter value with the key chain {key_chain}. "
                 f"The {value.__class__} in it does not have the following key/index from the key provided: "
@@ -435,7 +441,9 @@ class LoggingCallback(Callback):
             )
 
         # Parse the value:
-        if isinstance(value, Tensor) or
+        if isinstance(value, (tf.Tensor, tf.Variable)) or (
+            is_keras_3() and isinstance(value, (keras.KerasTensor, keras.Variable))
+        ):
             if int(tf.size(value)) == 1:
                 value = float(value)
             else:
@@ -451,12 +459,7 @@ class LoggingCallback(Callback):
                 f"The parameter with the following key chain: {key_chain} is a numpy.ndarray with {value.size} "
                 f"elements. numpy arrays are trackable only if they have 1 element."
             )
-        elif not (
-            isinstance(value, float)
-            or isinstance(value, int)
-            or isinstance(value, str)
-            or isinstance(value, bool)
-        ):
+        elif not (isinstance(value, (float, int, str, bool))):
             raise mlrun.errors.MLRunInvalidArgumentError(
                 f"The parameter with the following key chain: {key_chain} is of type '{type(value)}'. The only "
                 f"trackable types are: float, int, str and bool."
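The rewritten block probes both "learning_rate" and the legacy "lr" optimizer attribute, since Keras 3 dropped the old alias. A standalone sketch of the same fallback lookup (not the callback's actual code):

from typing import Optional

import tensorflow as tf


def find_learning_rate_key(model: tf.keras.Model) -> Optional[str]:
    # Try the newer attribute name first, then the legacy "lr" alias
    # used by older Keras versions; return None if neither exists.
    optimizer = getattr(model, "optimizer", None)
    for key in ("learning_rate", "lr"):
        if optimizer is not None and hasattr(optimizer, key):
            return key
    return None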
mlrun/frameworks/tf_keras/model_handler.py
CHANGED
@@ -29,7 +29,7 @@ from mlrun.features import Feature
 from .._common import without_mlrun_interface
 from .._dl_common import DLModelHandler
 from .mlrun_interface import TFKerasMLRunInterface
-from .utils import TFKerasUtils
+from .utils import TFKerasUtils, is_keras_3
 
 
 class TFKerasModelHandler(DLModelHandler):
@@ -40,8 +40,8 @@ class TFKerasModelHandler(DLModelHandler):
     # Framework name:
     FRAMEWORK_NAME = "tensorflow.keras"
 
-    # Declare a type of
-    IOSample = Union[tf.Tensor, tf.TensorSpec, np.ndarray]
+    # Declare a type of input sample (only from keras v3 there is a KerasTensor type):
+    IOSample = Union[tf.Tensor, tf.TensorSpec, "keras.KerasTensor", np.ndarray]
 
     class ModelFormats:
         """
@@ -49,9 +49,19 @@ class TFKerasModelHandler(DLModelHandler):
         """
 
         SAVED_MODEL = "SavedModel"
+        KERAS = "keras"
         H5 = "h5"
         JSON_ARCHITECTURE_H5_WEIGHTS = "json_h5"
 
+        @classmethod
+        def default(cls) -> str:
+            """
+            Get the default model format to use for saving and loading the model based on the keras version.
+
+            :return: The default model format to use.
+            """
+            return cls.KERAS if is_keras_3() else cls.SAVED_MODEL
+
     class _LabelKeys:
         """
         Required labels keys to log with the model.
@@ -65,7 +75,7 @@ class TFKerasModelHandler(DLModelHandler):
         model: keras.Model = None,
         model_path: Optional[str] = None,
         model_name: Optional[str] = None,
-        model_format: str =
+        model_format: Optional[str] = None,
         context: mlrun.MLClientCtx = None,
         modules_map: Optional[
             Union[dict[str, Union[None, str, list[str]]], str]
@@ -98,7 +108,7 @@ class TFKerasModelHandler(DLModelHandler):
                                     * If given a loaded model object and the model name is None, the name will be
                                       set to the model's object name / class.
         :param model_format:        The format to use for saving and loading the model. Should be passed as a
-                                    member of the class 'ModelFormats'.
+                                    member of the class 'ModelFormats'.
         :param context:             MLRun context to work with for logging the model.
         :param modules_map:         A dictionary of all the modules required for loading the model. Each key
                                     is a path to a module and its value is the object name to import from it. All
@@ -144,8 +154,11 @@ class TFKerasModelHandler(DLModelHandler):
                               * 'save_traces' parameter was miss-used.
         """
         # Validate given format:
+        if not model_format:
+            model_format = TFKerasModelHandler.ModelFormats.default()
         if model_format not in [
             TFKerasModelHandler.ModelFormats.SAVED_MODEL,
+            TFKerasModelHandler.ModelFormats.KERAS,
             TFKerasModelHandler.ModelFormats.H5,
             TFKerasModelHandler.ModelFormats.JSON_ARCHITECTURE_H5_WEIGHTS,
         ]:
@@ -153,6 +166,22 @@ class TFKerasModelHandler(DLModelHandler):
                 f"Unrecognized model format: '{model_format}'. Please use one of the class members of "
                 "'TFKerasModelHandler.ModelFormats'"
             )
+        if not is_keras_3():
+            if model_format == TFKerasModelHandler.ModelFormats.KERAS:
+                raise mlrun.errors.MLRunInvalidArgumentError(
+                    "The 'keras' model format is only supported in Keras 3.0.0 and above. "
+                    f"Current version is {keras.__version__}."
+                )
+        else:
+            if (
+                model_format == TFKerasModelHandler.ModelFormats.SAVED_MODEL
+                or model_format
+                == TFKerasModelHandler.ModelFormats.JSON_ARCHITECTURE_H5_WEIGHTS
+            ):
+                raise mlrun.errors.MLRunInvalidArgumentError(
+                    f"The '{model_format}' model format is not supported in Keras 3.0.0 and above. "
+                    f"Current version is {keras.__version__}."
+                )
 
         # Validate 'save_traces':
         if save_traces:
@@ -239,11 +268,19 @@ class TFKerasModelHandler(DLModelHandler):
             self._model_file = f"{self._model_name}.h5"
             self._model.save(self._model_file)
 
+        # ModelFormats.keras - Save as a keras file:
+        elif self._model_format == self.ModelFormats.KERAS:
+            self._model_file = f"{self._model_name}.keras"
+            self._model.save(self._model_file)
+
         # ModelFormats.SAVED_MODEL - Save as a SavedModel directory and zip its file:
         elif self._model_format == TFKerasModelHandler.ModelFormats.SAVED_MODEL:
             # Save it in a SavedModel format directory:
+            # Note: Using keras>=3.0.0 can save in this format via `model.export` but then it won't be able to load it
+            # back, only for inference. So, we use the `save` method instead for keras 2 and validate the user won't use
+            # keras 3 and this model format.
             if self._save_traces is True:
-                # Save traces can only be used in versions >= 2.4, so only if
+                # Save traces can only be used in versions >= 2.4, so only if it's true, we use it in the call:
                 self._model.save(self._model_name, save_traces=self._save_traces)
             else:
                 self._model.save(self._model_name)
@@ -303,6 +340,12 @@ class TFKerasModelHandler(DLModelHandler):
                 self._model_file, custom_objects=self._custom_objects
             )
 
+        # ModelFormats.KERAS - Load from a keras file:
+        elif self._model_format == TFKerasModelHandler.ModelFormats.KERAS:
+            self._model = keras.models.load_model(
+                self._model_file, custom_objects=self._custom_objects
+            )
+
         # ModelFormats.SAVED_MODEL - Load from a SavedModel directory:
         elif self._model_format == TFKerasModelHandler.ModelFormats.SAVED_MODEL:
             self._model = keras.models.load_model(
@@ -434,7 +477,10 @@ class TFKerasModelHandler(DLModelHandler):
         )
 
         # Read the inputs:
-        input_signature = [
+        input_signature = [
+            getattr(input_layer, "type_spec", input_layer)
+            for input_layer in self._model.inputs
+        ]
 
         # Set the inputs:
         self.set_inputs(from_sample=input_signature)
@@ -453,7 +499,8 @@ class TFKerasModelHandler(DLModelHandler):
 
         # Read the outputs:
         output_signature = [
-            output_layer
+            getattr(output_layer, "type_spec", output_layer)
+            for output_layer in self._model.outputs
         ]
 
         # Set the outputs:
@@ -509,6 +556,17 @@ class TFKerasModelHandler(DLModelHandler):
                 f"'{self._model_path}'"
             )
 
+        # ModelFormats.KERAS - Get the keras model file:
+        elif self._model_format == TFKerasModelHandler.ModelFormats.KERAS:
+            self._model_file = os.path.join(
+                self._model_path, f"{self._model_name}.keras"
+            )
+            if not os.path.exists(self._model_file):
+                raise mlrun.errors.MLRunNotFoundError(
+                    f"The model file '{self._model_name}.keras' was not found within the given 'model_path': "
+                    f"'{self._model_path}'"
+                )
+
         # ModelFormats.SAVED_MODEL - Get the zip file and extract it, or simply locate the directory:
         elif self._model_format == TFKerasModelHandler.ModelFormats.SAVED_MODEL:
             self._model_file = os.path.join(self._model_path, f"{self._model_name}.zip")
@@ -559,7 +617,9 @@ class TFKerasModelHandler(DLModelHandler):
         # Supported types:
         if isinstance(sample, np.ndarray):
             return super()._read_sample(sample=sample)
-        elif isinstance(sample, tf.TensorSpec)
+        elif isinstance(sample, tf.TensorSpec) or (
+            is_keras_3() and isinstance(sample, keras.KerasTensor)
+        ):
             return Feature(
                 name=sample.name,
                 value_type=TFKerasUtils.convert_tf_dtype_to_value_type(
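Taken together, the handler changes add a first-class ".keras" format: it becomes the default under Keras 3, SavedModel and json_h5 are rejected there, and the save/load/locate paths each gained a ".keras" branch. A hedged construction sketch (assumes TensorFlow/Keras and mlrun are installed; the model and name are illustrative, and other handler methods follow the DLModelHandler API not shown here):

import tensorflow as tf

from mlrun.frameworks.tf_keras.model_handler import TFKerasModelHandler

model = tf.keras.Sequential([tf.keras.Input(shape=(4,)), tf.keras.layers.Dense(1)])

# With model_format=None the handler resolves the format itself:
# ModelFormats.KERAS (".keras" file) on Keras 3, SavedModel on Keras 2.
handler = TFKerasModelHandler(model=model, model_name="demo-model")
print(TFKerasModelHandler.ModelFormats.default())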
mlrun/frameworks/tf_keras/utils.py
CHANGED
@@ -11,8 +11,8 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-#
 import tensorflow as tf
+from packaging import version
 from tensorflow import keras
 
 import mlrun
@@ -117,3 +117,14 @@ class TFKerasUtils(DLUtils):
         raise mlrun.errors.MLRunInvalidArgumentError(
             f"MLRun value type is not supporting the given tensorflow data type: '{tf_dtype}'."
         )
+
+
+def is_keras_3() -> bool:
+    """
+    Check if the current Keras version is 3.x.
+
+    :return: True if Keras version is 3.x, False otherwise.
+    """
+    return hasattr(keras, "__version__") and version.parse(
+        keras.__version__
+    ) >= version.parse("3.0.0")
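The version check relies on packaging.version comparing keras.__version__ against 3.0.0; a quick interactive verification of the same comparison:

from packaging import version
from tensorflow import keras

print(keras.__version__)  # e.g. "3.3.3" under Keras 3, "2.15.0" under Keras 2
print(version.parse(keras.__version__) >= version.parse("3.0.0"))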
mlrun/launcher/client.py
CHANGED
@@ -72,7 +72,7 @@ class ClientBaseLauncher(launcher.BaseLauncher, abc.ABC):
         ):
             run.metadata.labels[mlrun_constants.MLRunInternalLabels.kind] = runtime.kind
             mlrun.runtimes.utils.enrich_run_labels(
-                run.metadata.labels, [
+                run.metadata.labels, [mlrun_constants.MLRunInternalLabels.owner]
             )
             if run.spec.output_path:
                 run.spec.output_path = run.spec.output_path.replace(