ml-management 0.5.1rc0__tar.gz → 0.5.1rc1__tar.gz

Sign up to get free protection for your applications and to get access to all the features.
Files changed (108) hide show
  1. ml-management-0.5.1rc1/ML_management/dataset_loader/dataset_loader_pattern.py +34 -0
  2. {ml-management-0.5.1rc0 → ml-management-0.5.1rc1}/ML_management/executor/base_executor.py +1 -91
  3. {ml-management-0.5.1rc0 → ml-management-0.5.1rc1}/ML_management/mlmanagement/jsonschema_inference.py +8 -8
  4. {ml-management-0.5.1rc0 → ml-management-0.5.1rc1}/ML_management/mlmanagement/load_api.py +36 -74
  5. {ml-management-0.5.1rc0 → ml-management-0.5.1rc1}/ML_management/mlmanagement/log_api.py +3 -20
  6. {ml-management-0.5.1rc0 → ml-management-0.5.1rc1}/ML_management/model/patterns/gradient_model.py +4 -2
  7. ml-management-0.5.1rc1/ML_management/model/patterns/model_pattern.py +53 -0
  8. {ml-management-0.5.1rc0 → ml-management-0.5.1rc1}/ML_management/model/patterns/model_with_metrics.py +6 -2
  9. {ml-management-0.5.1rc0 → ml-management-0.5.1rc1}/ML_management/model/patterns/preprocessor.py +5 -2
  10. {ml-management-0.5.1rc0 → ml-management-0.5.1rc1}/ML_management/model/patterns/transformer.py +2 -2
  11. {ml-management-0.5.1rc0 → ml-management-0.5.1rc1}/ML_management/sdk/model.py +1 -0
  12. {ml-management-0.5.1rc0 → ml-management-0.5.1rc1}/ML_management/sdk/schema.py +3 -6
  13. {ml-management-0.5.1rc0 → ml-management-0.5.1rc1}/PKG-INFO +1 -5
  14. ml-management-0.5.1rc1/VERSION +1 -0
  15. {ml-management-0.5.1rc0 → ml-management-0.5.1rc1}/ml_management.egg-info/PKG-INFO +1 -5
  16. {ml-management-0.5.1rc0 → ml-management-0.5.1rc1}/ml_management.egg-info/SOURCES.txt +0 -2
  17. ml-management-0.5.1rc0/ML_management/dataset_loader/dataset_loader_pattern.py +0 -111
  18. ml-management-0.5.1rc0/ML_management/loader/loader.py +0 -68
  19. ml-management-0.5.1rc0/ML_management/model/patterns/model_pattern.py +0 -161
  20. ml-management-0.5.1rc0/ML_management/uploader_data/__init__.py +0 -0
  21. ml-management-0.5.1rc0/VERSION +0 -1
  22. {ml-management-0.5.1rc0 → ml-management-0.5.1rc1}/MANIFEST.in +0 -0
  23. {ml-management-0.5.1rc0 → ml-management-0.5.1rc1}/ML_management/__init__.py +0 -0
  24. {ml-management-0.5.1rc0 → ml-management-0.5.1rc1}/ML_management/collectors/__init__.py +0 -0
  25. {ml-management-0.5.1rc0 → ml-management-0.5.1rc1}/ML_management/collectors/collector_pattern.py +0 -0
  26. {ml-management-0.5.1rc0 → ml-management-0.5.1rc1}/ML_management/collectors/collector_pattern_to_methods_map.py +0 -0
  27. {ml-management-0.5.1rc0 → ml-management-0.5.1rc1}/ML_management/collectors/collectors.py +0 -0
  28. {ml-management-0.5.1rc0 → ml-management-0.5.1rc1}/ML_management/collectors/dummy/__init__.py +0 -0
  29. {ml-management-0.5.1rc0 → ml-management-0.5.1rc1}/ML_management/collectors/dummy/dummy_collector.py +0 -0
  30. {ml-management-0.5.1rc0 → ml-management-0.5.1rc1}/ML_management/collectors/s3/__init__.py +0 -0
  31. {ml-management-0.5.1rc0 → ml-management-0.5.1rc1}/ML_management/collectors/s3/s3collector.py +0 -0
  32. {ml-management-0.5.1rc0 → ml-management-0.5.1rc1}/ML_management/collectors/topic_markers/__init__.py +0 -0
  33. {ml-management-0.5.1rc0 → ml-management-0.5.1rc1}/ML_management/collectors/topic_markers/api_schema.py +0 -0
  34. {ml-management-0.5.1rc0 → ml-management-0.5.1rc1}/ML_management/collectors/topic_markers/topic_markers_collector.py +0 -0
  35. {ml-management-0.5.1rc0 → ml-management-0.5.1rc1}/ML_management/dataset_loader/__init__.py +0 -0
  36. {ml-management-0.5.1rc0 → ml-management-0.5.1rc1}/ML_management/dataset_loader/base_splits_dataset_loader.py +0 -0
  37. {ml-management-0.5.1rc0 → ml-management-0.5.1rc1}/ML_management/dataset_loader/dataset_loader_pattern_to_methods_map.py +0 -0
  38. {ml-management-0.5.1rc0 → ml-management-0.5.1rc1}/ML_management/dataset_loader/poisoned_images_dataset_loader.py +0 -0
  39. {ml-management-0.5.1rc0 → ml-management-0.5.1rc1}/ML_management/dataset_loader/templates/__init__.py +0 -0
  40. {ml-management-0.5.1rc0 → ml-management-0.5.1rc1}/ML_management/dataset_loader/templates/dummy_dataset_loader/__init__.py +0 -0
  41. {ml-management-0.5.1rc0 → ml-management-0.5.1rc1}/ML_management/dataset_loader/templates/dummy_dataset_loader/conda.yaml +0 -0
  42. {ml-management-0.5.1rc0 → ml-management-0.5.1rc1}/ML_management/dataset_loader/templates/dummy_dataset_loader/dummy_dataset.py +0 -0
  43. {ml-management-0.5.1rc0 → ml-management-0.5.1rc1}/ML_management/dataset_loader/templates/upload.py +0 -0
  44. {ml-management-0.5.1rc0 → ml-management-0.5.1rc1}/ML_management/executor/__init__.py +0 -0
  45. {ml-management-0.5.1rc0 → ml-management-0.5.1rc1}/ML_management/executor/executor_pattern.py +0 -0
  46. {ml-management-0.5.1rc0 → ml-management-0.5.1rc1}/ML_management/executor/executor_pattern_to_methods_map.py +0 -0
  47. {ml-management-0.5.1rc0 → ml-management-0.5.1rc1}/ML_management/executor/no_model_executor_pattern.py +0 -0
  48. {ml-management-0.5.1rc0 → ml-management-0.5.1rc1}/ML_management/executor/patterns.py +0 -0
  49. {ml-management-0.5.1rc0 → ml-management-0.5.1rc1}/ML_management/executor/templates/__init__.py +0 -0
  50. {ml-management-0.5.1rc0 → ml-management-0.5.1rc1}/ML_management/executor/templates/eval/__init__.py +0 -0
  51. {ml-management-0.5.1rc0 → ml-management-0.5.1rc1}/ML_management/executor/templates/eval/conda.yaml +0 -0
  52. {ml-management-0.5.1rc0 → ml-management-0.5.1rc1}/ML_management/executor/templates/eval/eval_executor.py +0 -0
  53. {ml-management-0.5.1rc0 → ml-management-0.5.1rc1}/ML_management/executor/templates/finetune/__init__.py +0 -0
  54. {ml-management-0.5.1rc0 → ml-management-0.5.1rc1}/ML_management/executor/templates/finetune/conda.yaml +0 -0
  55. {ml-management-0.5.1rc0 → ml-management-0.5.1rc1}/ML_management/executor/templates/finetune/finetune_executor.py +0 -0
  56. {ml-management-0.5.1rc0 → ml-management-0.5.1rc1}/ML_management/executor/templates/train/__init__.py +0 -0
  57. {ml-management-0.5.1rc0 → ml-management-0.5.1rc1}/ML_management/executor/templates/train/conda.yaml +0 -0
  58. {ml-management-0.5.1rc0 → ml-management-0.5.1rc1}/ML_management/executor/templates/train/train_executor.py +0 -0
  59. {ml-management-0.5.1rc0 → ml-management-0.5.1rc1}/ML_management/executor/templates/upload.py +0 -0
  60. {ml-management-0.5.1rc0 → ml-management-0.5.1rc1}/ML_management/executor/upload_model_mode.py +0 -0
  61. {ml-management-0.5.1rc0 → ml-management-0.5.1rc1}/ML_management/mlmanagement/__init__.py +0 -0
  62. {ml-management-0.5.1rc0 → ml-management-0.5.1rc1}/ML_management/mlmanagement/backend_api.py +0 -0
  63. {ml-management-0.5.1rc0 → ml-management-0.5.1rc1}/ML_management/mlmanagement/base_exceptions.py +0 -0
  64. {ml-management-0.5.1rc0 → ml-management-0.5.1rc1}/ML_management/mlmanagement/jsonschema_exceptions.py +0 -0
  65. {ml-management-0.5.1rc0 → ml-management-0.5.1rc1}/ML_management/mlmanagement/mlmanager.py +0 -0
  66. {ml-management-0.5.1rc0 → ml-management-0.5.1rc1}/ML_management/mlmanagement/model_type.py +0 -0
  67. {ml-management-0.5.1rc0 → ml-management-0.5.1rc1}/ML_management/mlmanagement/module_finder.py +0 -0
  68. {ml-management-0.5.1rc0 → ml-management-0.5.1rc1}/ML_management/mlmanagement/server_mlmanager_exceptions.py +0 -0
  69. {ml-management-0.5.1rc0 → ml-management-0.5.1rc1}/ML_management/mlmanagement/session.py +0 -0
  70. {ml-management-0.5.1rc0 → ml-management-0.5.1rc1}/ML_management/mlmanagement/singleton_pattern.py +0 -0
  71. {ml-management-0.5.1rc0 → ml-management-0.5.1rc1}/ML_management/mlmanagement/utils.py +0 -0
  72. {ml-management-0.5.1rc0 → ml-management-0.5.1rc1}/ML_management/mlmanagement/variables.py +0 -0
  73. {ml-management-0.5.1rc0 → ml-management-0.5.1rc1}/ML_management/mlmanagement/visibility_options.py +0 -0
  74. {ml-management-0.5.1rc0 → ml-management-0.5.1rc1}/ML_management/model/__init__.py +0 -0
  75. {ml-management-0.5.1rc0 → ml-management-0.5.1rc1}/ML_management/model/model_type_to_methods_map.py +0 -0
  76. {ml-management-0.5.1rc0/ML_management/loader → ml-management-0.5.1rc1/ML_management/model/patterns}/__init__.py +0 -0
  77. {ml-management-0.5.1rc0 → ml-management-0.5.1rc1}/ML_management/model/patterns/evaluatable_model.py +0 -0
  78. {ml-management-0.5.1rc0 → ml-management-0.5.1rc1}/ML_management/model/patterns/model_with_losses.py +0 -0
  79. {ml-management-0.5.1rc0 → ml-management-0.5.1rc1}/ML_management/model/patterns/retrainable_model.py +0 -0
  80. {ml-management-0.5.1rc0 → ml-management-0.5.1rc1}/ML_management/model/patterns/rich_python_model.py +0 -0
  81. {ml-management-0.5.1rc0 → ml-management-0.5.1rc1}/ML_management/model/patterns/target_layer.py +0 -0
  82. {ml-management-0.5.1rc0 → ml-management-0.5.1rc1}/ML_management/model/patterns/torch_model.py +0 -0
  83. {ml-management-0.5.1rc0 → ml-management-0.5.1rc1}/ML_management/model/patterns/trainable_model.py +0 -0
  84. {ml-management-0.5.1rc0/ML_management/model/patterns → ml-management-0.5.1rc1/ML_management/registry}/__init__.py +0 -0
  85. {ml-management-0.5.1rc0 → ml-management-0.5.1rc1}/ML_management/registry/exceptions.py +0 -0
  86. {ml-management-0.5.1rc0 → ml-management-0.5.1rc1}/ML_management/s3/__init__.py +0 -0
  87. {ml-management-0.5.1rc0 → ml-management-0.5.1rc1}/ML_management/s3/manager.py +0 -0
  88. {ml-management-0.5.1rc0 → ml-management-0.5.1rc1}/ML_management/s3/utils.py +0 -0
  89. {ml-management-0.5.1rc0 → ml-management-0.5.1rc1}/ML_management/sdk/__init__.py +0 -0
  90. {ml-management-0.5.1rc0 → ml-management-0.5.1rc1}/ML_management/sdk/dataset_loader.py +0 -0
  91. {ml-management-0.5.1rc0 → ml-management-0.5.1rc1}/ML_management/sdk/executor.py +0 -0
  92. {ml-management-0.5.1rc0 → ml-management-0.5.1rc1}/ML_management/sdk/experiment.py +0 -0
  93. {ml-management-0.5.1rc0 → ml-management-0.5.1rc1}/ML_management/sdk/job.py +0 -0
  94. {ml-management-0.5.1rc0 → ml-management-0.5.1rc1}/ML_management/sdk/parameters.py +0 -0
  95. {ml-management-0.5.1rc0 → ml-management-0.5.1rc1}/ML_management/sdk/sdk.py +0 -0
  96. {ml-management-0.5.1rc0/ML_management/registry → ml-management-0.5.1rc1/ML_management/test_sdk}/__init__.py +0 -0
  97. {ml-management-0.5.1rc0 → ml-management-0.5.1rc1}/ML_management/test_sdk/test_sdk.py +0 -0
  98. {ml-management-0.5.1rc0/ML_management/test_sdk → ml-management-0.5.1rc1/ML_management/tests}/__init__.py +0 -0
  99. {ml-management-0.5.1rc0 → ml-management-0.5.1rc1}/ML_management/tests/test_jsonschema_inference.py +0 -0
  100. {ml-management-0.5.1rc0/ML_management/tests → ml-management-0.5.1rc1/ML_management/uploader_data}/__init__.py +0 -0
  101. {ml-management-0.5.1rc0 → ml-management-0.5.1rc1}/ML_management/uploader_data/s3_uploader.py +0 -0
  102. {ml-management-0.5.1rc0 → ml-management-0.5.1rc1}/ML_management/uploader_data/utils.py +0 -0
  103. {ml-management-0.5.1rc0 → ml-management-0.5.1rc1}/README.md +0 -0
  104. {ml-management-0.5.1rc0 → ml-management-0.5.1rc1}/ml_management.egg-info/dependency_links.txt +0 -0
  105. {ml-management-0.5.1rc0 → ml-management-0.5.1rc1}/ml_management.egg-info/requires.txt +6 -6
  106. {ml-management-0.5.1rc0 → ml-management-0.5.1rc1}/ml_management.egg-info/top_level.txt +0 -0
  107. {ml-management-0.5.1rc0 → ml-management-0.5.1rc1}/setup.cfg +0 -0
  108. {ml-management-0.5.1rc0 → ml-management-0.5.1rc1}/setup.py +0 -0
@@ -0,0 +1,34 @@
1
+ """Dataset loader template for custom dataset loader."""
2
+ from abc import ABC, abstractmethod
3
+
4
+ from ML_management.model.patterns.rich_python_model import RichPythonModel
5
+
6
+
7
+ class DatasetLoaderPattern(RichPythonModel, ABC):
8
+ """Define dataset loader.
9
+
10
+ Attributes
11
+ ----------
12
+ artifacts : str
13
+ Local path to artifacts.
14
+ That parameters will be set automatically in job before the 'get_dataset' func would be executed.
15
+ data_path: str
16
+ A path to data to be loaded.
17
+
18
+ """
19
+
20
+ def __init__(self):
21
+ """Init dataset loader class."""
22
+ self.artifacts: str
23
+ self.data_path = None
24
+
25
+ @abstractmethod
26
+ def get_dataset(self, **dataset_params):
27
+ """
28
+ Return dataset.
29
+
30
+ To get data_path use self.data_path parameter, which also will be set in the job.
31
+ 'dataset_params' are dataset_loader parameters. One has to define it as ordinary kwargs
32
+ with type annotation.
33
+ """
34
+ raise NotImplementedError
@@ -1,10 +1,8 @@
1
1
  """Executor template for custom executor."""
2
2
 
3
3
  from abc import ABC, abstractmethod
4
- from typing import Any, Dict, List, Optional, Union
4
+ from typing import Any, Dict, Union
5
5
 
6
- import ML_management.mlmanagement.log_api
7
- import ML_management.mlmanagement.mlmanager
8
6
  from ML_management.dataset_loader import DatasetLoaderPattern
9
7
  from ML_management.dataset_loader.dataset_loader_pattern_to_methods_map import DatasetLoaderMethodName
10
8
  from ML_management.executor.patterns import (
@@ -14,9 +12,6 @@ from ML_management.executor.patterns import (
14
12
  OneDatasetLoaderPattern,
15
13
  OneModelPattern,
16
14
  )
17
- from ML_management.mlmanagement import variables
18
- from ML_management.mlmanagement.module_finder import ModuleFinder
19
- from ML_management.mlmanagement.visibility_options import VisibilityOptions
20
15
  from ML_management.model.model_type_to_methods_map import ModelMethodName
21
16
  from ML_management.model.patterns.model_pattern import Model
22
17
  from ML_management.model.patterns.rich_python_model import RichPythonModel
@@ -140,91 +135,6 @@ class BaseExecutor(RichPythonModel, ABC):
140
135
  """
141
136
  raise NotImplementedError
142
137
 
143
- def upload_executor(
144
- self,
145
- executor_name: str,
146
- pip_requirements=None,
147
- description: Optional[str] = None,
148
- extra_pip_requirements=None,
149
- conda_env=None,
150
- artifacts: Optional[dict] = None,
151
- visibility: VisibilityOptions = VisibilityOptions.PRIVATE,
152
- extra_modules_names: Optional[List[str]] = None,
153
- used_modules_names: Optional[List[str]] = None,
154
- linter_check: bool = True,
155
- start_build: bool = False,
156
- ) -> None:
157
- """
158
- Upload wrapper to MLmanagement server.
159
-
160
- :param pip_requirements: {{ pip_requirements }}
161
-
162
- :param extra_pip_requirements: {{ extra_pip_requirements }}
163
- `pip_requirements` and 'extra_pip_requirements' must be either a string path to a pip requirements file on the
164
- local filesystem or an iterable of pip requirement strings.
165
-
166
- :param conda_env: {{ conda_env }}
167
- 'conda_env' must be a dict specifying the conda environment for this model.
168
-
169
- :param artifacts: A dictionary containing ``<name, artifact_uri>`` entries. Remote artifact URIs
170
- are resolved to absolute filesystem paths, producing a dictionary of
171
- ``<name, absolute_path>`` entries. ``python_model`` can reference these
172
- resolved entries as the ``artifacts`` property of the ``context`` parameter
173
- in :func:`PythonModel.load_context() <mlflow.pyfunc.PythonModel.load_context>`
174
- and :func:`PythonModel.predict() <mlflow.pyfunc.PythonModel.predict>`.
175
- For example, consider the following ``artifacts`` dictionary::
176
-
177
- {
178
- "my_file": "s3://my-bucket/path/to/my/file"
179
- }
180
-
181
- In this case, the ``"my_file"`` artifact is downloaded from S3. The
182
- ``python_model`` can then refer to ``"my_file"`` as an absolute filesystem
183
- path via ``context.artifacts["my_file"]``.
184
-
185
- If ``None``, no artifacts are added to the executor.
186
-
187
- :param visibility: either a private or public executor.
188
-
189
- :param extra_modules_names: names of modules that should be pickled by value
190
- in addition to auto-detected modules.
191
-
192
- :param used_modules_names: modules that should be pickled by value, disables the auto-detection of modules.
193
-
194
- :param linter_check: if True, check code of the executor by linter.
195
-
196
- :param start_build: if set to True, start job to create docker image.
197
- This parameter may be True only if there is no models for executor.
198
- """
199
- old_experiment_name = variables.active_experiment
200
- if (self.desired_model_methods or self.upload_model_modes) and start_build:
201
- raise RuntimeError("Parameter start_build=True is acceptable only if there is no models for executor.")
202
- ML_management.mlmanagement.mlmanager.set_experiment(
203
- variables.EXPERIMENT_NAME_FOR_EXECUTOR, visibility=VisibilityOptions.PUBLIC
204
- )
205
- try:
206
- with ML_management.mlmanagement.mlmanager.start_run(nested=True):
207
- ML_management.mlmanagement.log_api._log_model(
208
- artifact_path="",
209
- description=description,
210
- artifacts=artifacts,
211
- python_model=self,
212
- registered_model_name=executor_name,
213
- pip_requirements=pip_requirements,
214
- extra_pip_requirements=extra_pip_requirements,
215
- conda_env=conda_env,
216
- visibility=visibility,
217
- extra_modules_names=extra_modules_names,
218
- used_modules_names=used_modules_names,
219
- root_module_name=ModuleFinder.get_my_caller_module_name(),
220
- linter_check=linter_check,
221
- start_build=start_build,
222
- )
223
- except Exception as err:
224
- raise err
225
- finally:
226
- variables.active_experiment = old_experiment_name
227
-
228
138
  @property
229
139
  def model(self) -> Model:
230
140
  """Property returning a single model.
@@ -1,8 +1,8 @@
1
1
  """Module for automatic jsonschema inference."""
2
+ import typing
2
3
  import warnings
3
4
  from copy import copy
4
5
  from inspect import formatannotation, getfullargspec
5
- from typing import Generic, TypeVar
6
6
 
7
7
  from jsonschema import SchemaError
8
8
  from jsonschema.validators import Draft4Validator
@@ -28,14 +28,10 @@ type_map = {
28
28
  }
29
29
 
30
30
 
31
- T = TypeVar("T")
32
-
33
-
34
- class SkipJsonSchema(Generic[T]):
31
+ class SkipJsonSchema:
35
32
  """Annotation wrapper for system parameters to skip them in JSON schema."""
36
33
 
37
- def __class_getitem__(cls, _type):
38
- return cls()
34
+ pass
39
35
 
40
36
 
41
37
  def __get_or_raise(type_name, from_optional: bool = False):
@@ -168,7 +164,11 @@ def infer_jsonschema(func, get_object_func):
168
164
  raise NoAnnotationError(arg=arg)
169
165
 
170
166
  # skip system parameters
171
- if isinstance(spec.annotations[arg], SkipJsonSchema):
167
+ if (
168
+ typing.get_origin(spec.annotations[arg]) is typing.Annotated
169
+ and spec.annotations[arg].__metadata__
170
+ and spec.annotations[arg].__metadata__[0] is SkipJsonSchema
171
+ ):
172
172
  continue
173
173
 
174
174
  schema["properties"][arg] = __get_json_schema_from_annotation(spec.annotations[arg])
@@ -7,29 +7,19 @@ import tarfile
7
7
  import tempfile
8
8
  import threading
9
9
  import traceback
10
+ from pathlib import Path
10
11
  from typing import Optional
11
12
 
13
+ import yaml
14
+
12
15
  from ML_management.mlmanagement.base_exceptions import MLMClientError
13
16
  from ML_management.mlmanagement.log_api import _raise_error
14
17
  from ML_management.mlmanagement.model_type import ModelType
15
18
  from ML_management.mlmanagement.session import AuthSession
16
19
  from ML_management.mlmanagement.variables import get_log_service_url
17
- from mlflow.models import Model
18
- from mlflow.pyfunc import (
19
- CODE,
20
- DATA,
21
- FLAVOR_NAME,
22
- MAIN,
23
- MLMODEL_FILE_NAME,
24
- PY_VERSION,
25
- RESOURCE_DOES_NOT_EXIST,
26
- MlflowException,
27
- PyFuncModel,
28
- _add_code_from_conf_to_system_path,
29
- _download_artifact_from_uri,
30
- _warn_dependency_requirement_mismatches,
31
- _warn_potentially_incompatible_py_version_if_necessary,
32
- )
20
+ from mlflow.pyfunc.model import CONFIG_KEY_ARTIFACTS
21
+
22
+ MLCONFIG = "MLConfig.yaml"
33
23
 
34
24
 
35
25
  def download_artifacts_by_name_version(
@@ -57,7 +47,6 @@ def _load_model_type(
57
47
  name: str,
58
48
  version: Optional[int],
59
49
  model_type: ModelType,
60
- unwrap: bool = True,
61
50
  install_requirements: bool = False,
62
51
  dst_path: Optional[str] = None,
63
52
  kwargs_for_init=None,
@@ -68,12 +57,7 @@ def _load_model_type(
68
57
  )
69
58
  if install_requirements:
70
59
  _set_model_version_requirements(local_path)
71
- model_load_dict = {"model_path": local_path, "kwargs_for_init": kwargs_for_init}
72
- loaded_model = _load_model_mflow(model_data=model_load_dict, suppress_warnings=True)
73
- if unwrap:
74
- artifacts_path = loaded_model._model_impl.context._artifacts
75
- loaded_model = loaded_model.unwrap_python_model()
76
- loaded_model.artifacts = artifacts_path
60
+ loaded_model = _load_model_src(local_path, kwargs_for_init)
77
61
  return loaded_model
78
62
 
79
63
 
@@ -81,7 +65,6 @@ def load_dataset(
81
65
  name: str,
82
66
  version: Optional[int] = None,
83
67
  install_requirements: bool = False,
84
- unwrap: bool = True,
85
68
  dst_path: Optional[str] = None,
86
69
  kwargs_for_init: Optional[dict] = None,
87
70
  ):
@@ -95,8 +78,6 @@ def load_dataset(
95
78
  Version of the dataset. Default: None, "latest" version is used.
96
79
  install_requirements: bool = False
97
80
  Whether to install dataset requirements. Default: False.
98
- unwrap: bool = True
99
- Whether to unwrap dataset. Default: True.
100
81
  dst_path: Optional[str]: None
101
82
  Destination path. Default: None.
102
83
  kwargs_for_init: Optional[dict]: None
@@ -106,16 +87,17 @@ def load_dataset(
106
87
  DatasetLoaderPattern
107
88
  The object of the dataset to use.
108
89
  """
109
- return _load_model_type(
110
- name, version, ModelType.DATASET_LOADER, unwrap, install_requirements, dst_path, kwargs_for_init
111
- )
90
+ return _load_model_type(name, version, ModelType.DATASET_LOADER, install_requirements, dst_path, kwargs_for_init)
112
91
 
113
92
 
114
93
  def _set_model_version_requirements(local_path) -> None:
115
94
  """Installing requirements of the model locally."""
116
95
  with open(os.path.join(local_path, "requirements.txt")) as req:
117
96
  requirements = list(
118
- filter(lambda x: "ml-management" not in x.lower() and "mlflow" not in x.lower(), req.read().split("\n"))
97
+ filter(
98
+ lambda x: "ml-management" not in x.lower() and "mlflow" not in x.lower() and len(x),
99
+ req.read().split("\n"),
100
+ )
119
101
  )
120
102
  try:
121
103
  if requirements:
@@ -130,7 +112,6 @@ def load_model(
130
112
  name: str,
131
113
  version: Optional[int] = None,
132
114
  install_requirements: bool = False,
133
- unwrap: bool = True,
134
115
  dst_path: Optional[str] = None,
135
116
  kwargs_for_init=None,
136
117
  ):
@@ -144,8 +125,6 @@ def load_model(
144
125
  Version of the model. Default: None, "latest" version is used.
145
126
  install_requirements: bool = False
146
127
  Whether to install model requirements. Default: False.
147
- unwrap: bool = True
148
- Whether to unwrap model. Default: True.
149
128
  dst_path: Optional[str]: None
150
129
  Destination path. Default: None.
151
130
  kwargs_for_init: Optional[dict]: None
@@ -155,14 +134,13 @@ def load_model(
155
134
  Model
156
135
  The object of the model to use.
157
136
  """
158
- return _load_model_type(name, version, ModelType.MODEL, unwrap, install_requirements, dst_path, kwargs_for_init)
137
+ return _load_model_type(name, version, ModelType.MODEL, install_requirements, dst_path, kwargs_for_init)
159
138
 
160
139
 
161
140
  def load_executor(
162
141
  name: str,
163
142
  version: Optional[int] = None,
164
143
  install_requirements: bool = False,
165
- unwrap: bool = True,
166
144
  dst_path: Optional[str] = None,
167
145
  ):
168
146
  """Download all model's files for loading model locally.
@@ -175,8 +153,6 @@ def load_executor(
175
153
  Version of the executor. Default: None, "latest" version is used.
176
154
  install_requirements: bool = False
177
155
  Whether to install executor requirements. Default: False.
178
- unwrap: bool = True
179
- Whether to unwrap executor. Default: True.
180
156
  dst_path: Optional[str]: None
181
157
  Destination path. Default: None.
182
158
  Returns
@@ -184,7 +160,7 @@ def load_executor(
184
160
  BaseExecutor
185
161
  The object of the executor to use.
186
162
  """
187
- return _load_model_type(name, version, ModelType.EXECUTOR, unwrap, install_requirements, dst_path)
163
+ return _load_model_type(name, version, ModelType.EXECUTOR, install_requirements, dst_path)
188
164
 
189
165
 
190
166
  def _untar_folder(buff, to_folder):
@@ -230,39 +206,25 @@ def _request_download_artifacts(url, params: dict, dst_path: Optional[str] = Non
230
206
  return local_path
231
207
 
232
208
 
233
- def _load_model_mflow(
234
- model_data: dict,
235
- suppress_warnings: bool = False,
236
- dst_path: Optional[str] = None,
237
- ) -> PyFuncModel:
238
- """Load model function from mlflow with custom arguments for our loader of a model."""
239
- # Mflow checks type of arguments so we need to use json as str.
240
- model_args = model_data
241
- model_uri = model_args["model_path"]
242
- local_path = _download_artifact_from_uri(artifact_uri=model_uri, output_path=dst_path)
243
-
244
- if not suppress_warnings:
245
- _warn_dependency_requirement_mismatches(local_path)
246
-
247
- model_meta = Model.load(os.path.join(local_path, MLMODEL_FILE_NAME))
248
-
249
- conf = model_meta.flavors.get(FLAVOR_NAME)
250
- if conf is None:
251
- raise MlflowException(
252
- f'Model does not have the "{FLAVOR_NAME}" flavor',
253
- RESOURCE_DOES_NOT_EXIST,
254
- )
255
- model_py_version = conf.get(PY_VERSION)
256
- if not suppress_warnings:
257
- _warn_potentially_incompatible_py_version_if_necessary(model_py_version=model_py_version)
258
-
259
- _add_code_from_conf_to_system_path(local_path, conf, code_key=CODE)
260
- data_path = os.path.join(local_path, conf[DATA]) if (DATA in conf) else local_path
261
- model_args["model_path"] = data_path
262
- if importlib.import_module(conf[MAIN]).__name__ == "ML_management.loader.loader":
263
- model_data = model_args
264
- else:
265
- model_data = data_path
266
- model_impl = importlib.import_module(conf[MAIN])._load_pyfunc(model_data)
267
- predict_fn = conf.get("predict_fn", "predict")
268
- return PyFuncModel(model_meta=model_meta, model_impl=model_impl, predict_fn=predict_fn)
209
+ def _load_model_src(local_path: str, kwargs_for_init: Optional[dict]):
210
+ if not kwargs_for_init:
211
+ kwargs_for_init = {}
212
+ config_path = os.path.join(local_path, MLCONFIG)
213
+ with open(config_path) as file:
214
+ conf = yaml.safe_load(file)
215
+
216
+ load_model_path = os.path.join(local_path, conf["load_model_path"])
217
+
218
+ from ML_management.mlmanagement.utils import INIT_FUNCTION_NAME # circular import
219
+
220
+ parts = Path(load_model_path).parts
221
+ if str(Path(*parts[:2])) not in sys.path:
222
+ sys.path.append(str(Path(*parts[:2])))
223
+ python_model = getattr(importlib.import_module(".".join(parts[2:])), INIT_FUNCTION_NAME)(**kwargs_for_init)
224
+ artifacts = Path(load_model_path) / CONFIG_KEY_ARTIFACTS
225
+ if not artifacts.exists():
226
+ artifacts.mkdir()
227
+
228
+ python_model.artifacts = str(artifacts)
229
+
230
+ return python_model
@@ -20,7 +20,6 @@ import yaml
20
20
  from scipy.sparse import csc_matrix, csr_matrix
21
21
 
22
22
  import mlflow
23
- from ML_management.loader.loader import CONFIG_KEY_ARTIFACTS
24
23
  from ML_management.mlmanagement import variables
25
24
  from ML_management.mlmanagement.base_exceptions import * # noqa: F403
26
25
  from ML_management.mlmanagement.base_exceptions import MLMClientError, MLMServerError, PylintError
@@ -48,6 +47,8 @@ from ML_management.mlmanagement.visibility_options import VisibilityOptions
48
47
  from ML_management.registry.exceptions import * # noqa: F403
49
48
  from mlflow.pyfunc import DATA
50
49
 
50
+ CONFIG_KEY_ARTIFACTS = "artifacts"
51
+
51
52
 
52
53
  def _log_model(
53
54
  artifact_path,
@@ -90,6 +91,7 @@ def _log_model(
90
91
  Validation regexp: "(([A-Za-z0-9][A-Za-z0-9_]*)?[A-Za-z0-9])+"
91
92
  You cannot specify the parameters: loader_module, data_path and the parameters: python_model, artifacts together.
92
93
  """
94
+ raise DeprecationWarning("The function '_log_model' is deprecated.")
93
95
  from ML_management.executor.upload_model_mode import UploadModelMode # circular import
94
96
 
95
97
  if extra_modules_names and used_modules_names:
@@ -185,18 +187,7 @@ def _log_object_src(
185
187
  description: str,
186
188
  model_type: ModelType = ModelType.MODEL,
187
189
  model_version_tags: Optional[Dict[str, str]] = None,
188
- code_path=None,
189
- conda_env=None,
190
- python_model=None,
191
190
  registered_model_name: str = "default_name",
192
- signature: mlflow.models.signature.ModelSignature = None,
193
- input_example: Union[
194
- pandas.core.frame.DataFrame, numpy.ndarray, dict, list, csr_matrix, csc_matrix, str, bytes
195
- ] = None,
196
- await_registration_for: int = 300,
197
- pip_requirements=None,
198
- extra_pip_requirements=None,
199
- metadata=None,
200
191
  source_model_name=None,
201
192
  source_model_version=None,
202
193
  source_executor_name=None,
@@ -257,16 +248,8 @@ def _log_object_src(
257
248
  "model_path": model_path,
258
249
  "description": description,
259
250
  "model_version_tags": model_version_tags,
260
- "code_path": code_path,
261
- "conda_env": conda_env,
262
251
  "python_model": python_model,
263
252
  "registered_model_name": registered_model_name,
264
- "signature": signature,
265
- "input_example": input_example,
266
- "await_registration_for": await_registration_for,
267
- "pip_requirements": pip_requirements,
268
- "extra_pip_requirements": extra_pip_requirements,
269
- "metadata": metadata,
270
253
  "source_model_name": source_model_name,
271
254
  "source_model_version": source_model_version,
272
255
  "source_executor_name": source_executor_name,
@@ -1,6 +1,6 @@
1
1
  """Class inherits from base class of Model with a specific method for obtaining gradients."""
2
2
  from abc import ABC, abstractmethod
3
- from typing import Callable
3
+ from typing import Annotated, Callable
4
4
 
5
5
  import numpy as np
6
6
 
@@ -12,6 +12,8 @@ class GradientModel(model_pattern.Model, ABC):
12
12
  """Implementation of gradient model."""
13
13
 
14
14
  @abstractmethod
15
- def get_grad(self, loss_fn: SkipJsonSchema[Callable], input_batch: SkipJsonSchema[np.ndarray]) -> np.ndarray:
15
+ def get_grad(
16
+ self, loss_fn: Annotated[Callable, SkipJsonSchema], input_batch: Annotated[np.ndarray, SkipJsonSchema]
17
+ ) -> np.ndarray:
16
18
  """Define get_grad function."""
17
19
  raise NotImplementedError
@@ -0,0 +1,53 @@
1
+ """Define Abstract class for Model with necessary methods and methods to implement."""
2
+ import inspect
3
+ import os
4
+ from abc import ABC, abstractmethod
5
+ from pathlib import Path
6
+
7
+ from ML_management.mlmanagement.log_api import CONFIG_KEY_ARTIFACTS
8
+ from ML_management.model.patterns.rich_python_model import RichPythonModel
9
+
10
+
11
+ class Model(RichPythonModel, ABC):
12
+ """Abstract class for model that Job will use."""
13
+
14
+ def __new__(cls, *args, **kwargs): # noqa: ARG003
15
+ """Get object of Model class.
16
+
17
+ Attributes
18
+ ----------
19
+ self.artifacts: str
20
+ Local path to artifacts.
21
+
22
+ self.dataset: DatasetLoader object
23
+ Instance of user's dataset.
24
+ """
25
+ self = super().__new__(cls)
26
+ # For now it was impossible to set self.artifacts before init func of model,
27
+ # so it was impossible to use it inside init.
28
+ # Because inside the job container code we have a fixed folder and file structure,
29
+ # we can predetermine artifacts path.
30
+ # It has to be beside file __init__.py with get_object function.
31
+ self.artifacts = str(
32
+ Path(os.path.dirname(inspect.getframeinfo(inspect.currentframe().f_back)[0])) / CONFIG_KEY_ARTIFACTS
33
+ )
34
+ self.dataset = None
35
+
36
+ return self
37
+
38
+ @abstractmethod
39
+ def predict_function(self, **kwargs):
40
+ """Every model should make predictions."""
41
+ raise NotImplementedError
42
+
43
+ def to_device(self, device: str) -> None:
44
+ """
45
+ Define model migration to specific device.
46
+
47
+ Devices are marked with following notation:
48
+
49
+ cpu - CPU instance
50
+
51
+ cuda:<number: int> - GPU instance
52
+ """
53
+ pass
@@ -1,10 +1,12 @@
1
1
  """Class inherits from base class of Model with a specific method for updating and computing metrics."""
2
2
  from abc import ABC, abstractmethod
3
- from typing import Dict
3
+ from typing import Annotated, Any, Dict
4
4
 
5
5
  from ML_management.mlmanagement.jsonschema_inference import SkipJsonSchema
6
6
  from ML_management.model.patterns.model_pattern import Model
7
7
 
8
+ Tensor = Any
9
+
8
10
 
9
11
  class ModelWithMetrics(Model, ABC):
10
12
  """Implementation of model with specific methods for reseting, updating and computing metrics."""
@@ -18,7 +20,9 @@ class ModelWithMetrics(Model, ABC):
18
20
  raise NotImplementedError
19
21
 
20
22
@abstractmethod
def update_metrics(
    self, outputs_batch: Annotated[Tensor, SkipJsonSchema], targets: Annotated[Tensor, SkipJsonSchema], **kwargs
) -> None:
    """Define function to update internal variables with provided (outputs_batch, targets).

    :param outputs_batch: model outputs for one batch (framework tensor; ``Tensor`` is an
        ``Any`` alias, and the parameter is excluded from JSON-schema inference).
    :param targets: ground-truth targets corresponding to ``outputs_batch``.

    Abstract: subclasses accumulate their metric state here.
    """
    raise NotImplementedError
24
28
 
@@ -1,17 +1,20 @@
1
1
  """Abstract base class for preprocessing methods."""
2
2
  from abc import ABC, abstractmethod
3
+ from typing import Annotated, Any
3
4
 
4
5
  from ML_management.mlmanagement.jsonschema_inference import SkipJsonSchema
5
6
  from ML_management.model.patterns.model_pattern import Model
6
7
 
8
+ Tensor = Any
9
+
7
10
 
8
11
  class Preprocessor(Model, ABC):
9
12
  """Abstract class for model that performs preprocessing."""
10
13
 
11
14
@abstractmethod
def preprocess(self, input_batch: Annotated[Tensor, SkipJsonSchema], **kwargs) -> Tensor:
    """Perform data preprocessing.

    :param input_batch: batch of raw inputs (framework tensor; ``Tensor`` is an ``Any``
        alias, and the parameter is excluded from JSON-schema inference).
    :return: the preprocessed batch.

    Abstract: subclasses must override this method.
    """
    raise NotImplementedError

def predict_function(self, input_batch: Annotated[Tensor, SkipJsonSchema]):
    """Fulfil the Model contract: for a preprocessor, "predicting" is just preprocessing."""
    return self.preprocess(input_batch)
@@ -1,6 +1,6 @@
1
1
  """Abstract base class for model transformation methods."""
2
2
  from abc import ABC, abstractmethod
3
- from typing import Callable
3
+ from typing import Annotated, Callable
4
4
 
5
5
  from ML_management.mlmanagement.jsonschema_inference import SkipJsonSchema
6
6
  from ML_management.model.patterns.model_pattern import Model
@@ -10,7 +10,7 @@ class Transformer(Model, ABC):
10
10
  """Abstract class for model that performs transformations."""
11
11
 
12
12
  @abstractmethod
13
- def transform(self, model_fn: SkipJsonSchema[Callable], **kwargs) -> Callable:
13
+ def transform(self, model_fn: Annotated[Callable, SkipJsonSchema], **kwargs) -> Callable:
14
14
  """Perform model transformation.
15
15
 
16
16
  :param model_fn: takes a batch of input tensors and produces a final prediction tensor.
@@ -171,6 +171,7 @@ def get_model_version(name: str, version: Optional[int] = None) -> ModelVersionI
171
171
  base_query = op.model_version_from_name_version(model_version=_model_version)
172
172
  base_query.name()
173
173
  base_query.tags()
174
+ base_query.source_path()
174
175
  base_query.run.run_id()
175
176
  base_query.version()
176
177
  base_query.build_job.status()
@@ -34,9 +34,10 @@ class JobStatus(sgqlc.types.Enum):
34
34
  * `PLANNED`None
35
35
  * `RUNNING`None
36
36
  * `SUCCESSFUL`None
37
+ * `UNKNOWN`None
37
38
  '''
38
39
  __schema__ = schema
39
- __choices__ = ('CANCELED', 'FAILED', 'PLANNED', 'RUNNING', 'SUCCESSFUL')
40
+ __choices__ = ('CANCELED', 'FAILED', 'PLANNED', 'RUNNING', 'SUCCESSFUL', 'UNKNOWN')
40
41
 
41
42
 
42
43
  class JobsSortBy(sgqlc.types.Enum):
@@ -222,15 +223,11 @@ class ModelServingInput(sgqlc.types.Input):
222
223
 
223
224
class ModelVersionChoice(sgqlc.types.Input):
    # sgqlc-generated GraphQL input type; mirrors the server-side ModelVersionChoice input.
    # NOTE(review): generated from the schema — regenerate rather than hand-edit.
    __schema__ = schema
    __field_names__ = ('choice_criteria', 'name', 'version')

    # Required: criteria string selecting how the version is chosen
    # (semantics defined by the server schema — verify there).
    choice_criteria = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name='choiceCriteria')

    # Required: registered model name.
    name = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name='name')

    # Optional: explicit version number; behavior when omitted is server-defined — verify.
    version = sgqlc.types.Field(Int, graphql_name='version')
235
232
 
236
233
 
@@ -1,17 +1,13 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: ml-management
3
- Version: 0.5.1rc0
3
+ Version: 0.5.1rc1
4
4
  Summary: Python implementation of model pattern, dataset
5
- Home-page: UNKNOWN
6
5
  Author: ISPRAS MODIS
7
6
  Author-email: modis@ispras.ru
8
7
  Maintainer: Maxim Ryndin
9
- License: UNKNOWN
10
- Platform: UNKNOWN
11
8
  Requires-Python: >=3.8
12
9
  Description-Content-Type: text/markdown
13
10
 
14
11
  # mlmanagement
15
12
 
16
13
  implementation of model pattern, dataset
17
-
@@ -0,0 +1 @@
1
+ 0.5.1rc1