mlrun 1.7.0rc4__py3-none-any.whl → 1.7.2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of mlrun might be problematic. See the package's advisory details for more information.

Files changed (235)
  1. mlrun/__init__.py +11 -1
  2. mlrun/__main__.py +39 -121
  3. mlrun/{datastore/helpers.py → alerts/__init__.py} +2 -5
  4. mlrun/alerts/alert.py +248 -0
  5. mlrun/api/schemas/__init__.py +4 -3
  6. mlrun/artifacts/__init__.py +8 -3
  7. mlrun/artifacts/base.py +39 -254
  8. mlrun/artifacts/dataset.py +9 -190
  9. mlrun/artifacts/manager.py +73 -46
  10. mlrun/artifacts/model.py +30 -158
  11. mlrun/artifacts/plots.py +23 -380
  12. mlrun/common/constants.py +73 -1
  13. mlrun/common/db/sql_session.py +3 -2
  14. mlrun/common/formatters/__init__.py +21 -0
  15. mlrun/common/formatters/artifact.py +46 -0
  16. mlrun/common/formatters/base.py +113 -0
  17. mlrun/common/formatters/feature_set.py +44 -0
  18. mlrun/common/formatters/function.py +46 -0
  19. mlrun/common/formatters/pipeline.py +53 -0
  20. mlrun/common/formatters/project.py +51 -0
  21. mlrun/common/formatters/run.py +29 -0
  22. mlrun/common/helpers.py +11 -1
  23. mlrun/{runtimes → common/runtimes}/constants.py +32 -4
  24. mlrun/common/schemas/__init__.py +31 -4
  25. mlrun/common/schemas/alert.py +202 -0
  26. mlrun/common/schemas/api_gateway.py +196 -0
  27. mlrun/common/schemas/artifact.py +28 -1
  28. mlrun/common/schemas/auth.py +13 -2
  29. mlrun/common/schemas/client_spec.py +2 -1
  30. mlrun/common/schemas/common.py +7 -4
  31. mlrun/common/schemas/constants.py +3 -0
  32. mlrun/common/schemas/feature_store.py +58 -28
  33. mlrun/common/schemas/frontend_spec.py +8 -0
  34. mlrun/common/schemas/function.py +11 -0
  35. mlrun/common/schemas/hub.py +7 -9
  36. mlrun/common/schemas/model_monitoring/__init__.py +21 -4
  37. mlrun/common/schemas/model_monitoring/constants.py +136 -42
  38. mlrun/common/schemas/model_monitoring/grafana.py +9 -5
  39. mlrun/common/schemas/model_monitoring/model_endpoints.py +89 -41
  40. mlrun/common/schemas/notification.py +69 -12
  41. mlrun/{runtimes/mpijob/v1alpha1.py → common/schemas/pagination.py} +10 -13
  42. mlrun/common/schemas/pipeline.py +7 -0
  43. mlrun/common/schemas/project.py +67 -16
  44. mlrun/common/schemas/runs.py +17 -0
  45. mlrun/common/schemas/schedule.py +1 -1
  46. mlrun/common/schemas/workflow.py +10 -2
  47. mlrun/common/types.py +14 -1
  48. mlrun/config.py +233 -58
  49. mlrun/data_types/data_types.py +11 -1
  50. mlrun/data_types/spark.py +5 -4
  51. mlrun/data_types/to_pandas.py +75 -34
  52. mlrun/datastore/__init__.py +8 -10
  53. mlrun/datastore/alibaba_oss.py +131 -0
  54. mlrun/datastore/azure_blob.py +131 -43
  55. mlrun/datastore/base.py +107 -47
  56. mlrun/datastore/datastore.py +17 -7
  57. mlrun/datastore/datastore_profile.py +91 -7
  58. mlrun/datastore/dbfs_store.py +3 -7
  59. mlrun/datastore/filestore.py +1 -3
  60. mlrun/datastore/google_cloud_storage.py +92 -32
  61. mlrun/datastore/hdfs.py +5 -0
  62. mlrun/datastore/inmem.py +6 -3
  63. mlrun/datastore/redis.py +3 -2
  64. mlrun/datastore/s3.py +30 -12
  65. mlrun/datastore/snowflake_utils.py +45 -0
  66. mlrun/datastore/sources.py +274 -59
  67. mlrun/datastore/spark_utils.py +30 -0
  68. mlrun/datastore/store_resources.py +9 -7
  69. mlrun/datastore/storeytargets.py +151 -0
  70. mlrun/datastore/targets.py +387 -119
  71. mlrun/datastore/utils.py +68 -5
  72. mlrun/datastore/v3io.py +28 -50
  73. mlrun/db/auth_utils.py +152 -0
  74. mlrun/db/base.py +245 -20
  75. mlrun/db/factory.py +1 -4
  76. mlrun/db/httpdb.py +909 -231
  77. mlrun/db/nopdb.py +279 -14
  78. mlrun/errors.py +35 -5
  79. mlrun/execution.py +111 -38
  80. mlrun/feature_store/__init__.py +0 -2
  81. mlrun/feature_store/api.py +46 -53
  82. mlrun/feature_store/common.py +6 -11
  83. mlrun/feature_store/feature_set.py +48 -23
  84. mlrun/feature_store/feature_vector.py +13 -2
  85. mlrun/feature_store/ingestion.py +7 -6
  86. mlrun/feature_store/retrieval/base.py +9 -4
  87. mlrun/feature_store/retrieval/dask_merger.py +2 -0
  88. mlrun/feature_store/retrieval/job.py +13 -4
  89. mlrun/feature_store/retrieval/local_merger.py +2 -0
  90. mlrun/feature_store/retrieval/spark_merger.py +24 -32
  91. mlrun/feature_store/steps.py +38 -19
  92. mlrun/features.py +6 -14
  93. mlrun/frameworks/_common/plan.py +3 -3
  94. mlrun/frameworks/_dl_common/loggers/tensorboard_logger.py +7 -12
  95. mlrun/frameworks/_ml_common/plan.py +1 -1
  96. mlrun/frameworks/auto_mlrun/auto_mlrun.py +2 -2
  97. mlrun/frameworks/lgbm/__init__.py +1 -1
  98. mlrun/frameworks/lgbm/callbacks/callback.py +2 -4
  99. mlrun/frameworks/lgbm/model_handler.py +1 -1
  100. mlrun/frameworks/parallel_coordinates.py +4 -4
  101. mlrun/frameworks/pytorch/__init__.py +2 -2
  102. mlrun/frameworks/sklearn/__init__.py +1 -1
  103. mlrun/frameworks/sklearn/mlrun_interface.py +13 -3
  104. mlrun/frameworks/tf_keras/__init__.py +5 -2
  105. mlrun/frameworks/tf_keras/callbacks/logging_callback.py +1 -1
  106. mlrun/frameworks/tf_keras/mlrun_interface.py +2 -2
  107. mlrun/frameworks/xgboost/__init__.py +1 -1
  108. mlrun/k8s_utils.py +57 -12
  109. mlrun/launcher/__init__.py +1 -1
  110. mlrun/launcher/base.py +6 -5
  111. mlrun/launcher/client.py +13 -11
  112. mlrun/launcher/factory.py +1 -1
  113. mlrun/launcher/local.py +15 -5
  114. mlrun/launcher/remote.py +10 -3
  115. mlrun/lists.py +6 -2
  116. mlrun/model.py +297 -48
  117. mlrun/model_monitoring/__init__.py +1 -1
  118. mlrun/model_monitoring/api.py +152 -357
  119. mlrun/model_monitoring/applications/__init__.py +10 -0
  120. mlrun/model_monitoring/applications/_application_steps.py +190 -0
  121. mlrun/model_monitoring/applications/base.py +108 -0
  122. mlrun/model_monitoring/applications/context.py +341 -0
  123. mlrun/model_monitoring/{evidently_application.py → applications/evidently_base.py} +27 -22
  124. mlrun/model_monitoring/applications/histogram_data_drift.py +227 -91
  125. mlrun/model_monitoring/applications/results.py +99 -0
  126. mlrun/model_monitoring/controller.py +130 -303
  127. mlrun/model_monitoring/{stores/models/sqlite.py → db/__init__.py} +5 -10
  128. mlrun/model_monitoring/db/stores/__init__.py +136 -0
  129. mlrun/model_monitoring/db/stores/base/__init__.py +15 -0
  130. mlrun/model_monitoring/db/stores/base/store.py +213 -0
  131. mlrun/model_monitoring/db/stores/sqldb/__init__.py +13 -0
  132. mlrun/model_monitoring/db/stores/sqldb/models/__init__.py +71 -0
  133. mlrun/model_monitoring/db/stores/sqldb/models/base.py +190 -0
  134. mlrun/model_monitoring/db/stores/sqldb/models/mysql.py +103 -0
  135. mlrun/model_monitoring/{stores/models/mysql.py → db/stores/sqldb/models/sqlite.py} +19 -13
  136. mlrun/model_monitoring/db/stores/sqldb/sql_store.py +659 -0
  137. mlrun/model_monitoring/db/stores/v3io_kv/__init__.py +13 -0
  138. mlrun/model_monitoring/db/stores/v3io_kv/kv_store.py +726 -0
  139. mlrun/model_monitoring/db/tsdb/__init__.py +105 -0
  140. mlrun/model_monitoring/db/tsdb/base.py +448 -0
  141. mlrun/model_monitoring/db/tsdb/helpers.py +30 -0
  142. mlrun/model_monitoring/db/tsdb/tdengine/__init__.py +15 -0
  143. mlrun/model_monitoring/db/tsdb/tdengine/schemas.py +298 -0
  144. mlrun/model_monitoring/db/tsdb/tdengine/stream_graph_steps.py +42 -0
  145. mlrun/model_monitoring/db/tsdb/tdengine/tdengine_connector.py +522 -0
  146. mlrun/model_monitoring/db/tsdb/v3io/__init__.py +15 -0
  147. mlrun/model_monitoring/db/tsdb/v3io/stream_graph_steps.py +158 -0
  148. mlrun/model_monitoring/db/tsdb/v3io/v3io_connector.py +849 -0
  149. mlrun/model_monitoring/features_drift_table.py +34 -22
  150. mlrun/model_monitoring/helpers.py +177 -39
  151. mlrun/model_monitoring/model_endpoint.py +3 -2
  152. mlrun/model_monitoring/stream_processing.py +165 -398
  153. mlrun/model_monitoring/tracking_policy.py +7 -1
  154. mlrun/model_monitoring/writer.py +161 -125
  155. mlrun/package/packagers/default_packager.py +2 -2
  156. mlrun/package/packagers_manager.py +1 -0
  157. mlrun/package/utils/_formatter.py +2 -2
  158. mlrun/platforms/__init__.py +11 -10
  159. mlrun/platforms/iguazio.py +67 -228
  160. mlrun/projects/__init__.py +6 -1
  161. mlrun/projects/operations.py +47 -20
  162. mlrun/projects/pipelines.py +396 -249
  163. mlrun/projects/project.py +1176 -406
  164. mlrun/render.py +28 -22
  165. mlrun/run.py +208 -181
  166. mlrun/runtimes/__init__.py +76 -11
  167. mlrun/runtimes/base.py +54 -24
  168. mlrun/runtimes/daskjob.py +9 -2
  169. mlrun/runtimes/databricks_job/databricks_runtime.py +1 -0
  170. mlrun/runtimes/databricks_job/databricks_wrapper.py +1 -1
  171. mlrun/runtimes/funcdoc.py +1 -29
  172. mlrun/runtimes/kubejob.py +34 -128
  173. mlrun/runtimes/local.py +39 -10
  174. mlrun/runtimes/mpijob/__init__.py +0 -20
  175. mlrun/runtimes/mpijob/abstract.py +8 -8
  176. mlrun/runtimes/mpijob/v1.py +1 -1
  177. mlrun/runtimes/nuclio/__init__.py +1 -0
  178. mlrun/runtimes/nuclio/api_gateway.py +769 -0
  179. mlrun/runtimes/nuclio/application/__init__.py +15 -0
  180. mlrun/runtimes/nuclio/application/application.py +758 -0
  181. mlrun/runtimes/nuclio/application/reverse_proxy.go +95 -0
  182. mlrun/runtimes/nuclio/function.py +188 -68
  183. mlrun/runtimes/nuclio/serving.py +57 -60
  184. mlrun/runtimes/pod.py +191 -58
  185. mlrun/runtimes/remotesparkjob.py +11 -8
  186. mlrun/runtimes/sparkjob/spark3job.py +17 -18
  187. mlrun/runtimes/utils.py +40 -73
  188. mlrun/secrets.py +6 -2
  189. mlrun/serving/__init__.py +8 -1
  190. mlrun/serving/remote.py +2 -3
  191. mlrun/serving/routers.py +89 -64
  192. mlrun/serving/server.py +54 -26
  193. mlrun/serving/states.py +187 -56
  194. mlrun/serving/utils.py +19 -11
  195. mlrun/serving/v2_serving.py +136 -63
  196. mlrun/track/tracker.py +2 -1
  197. mlrun/track/trackers/mlflow_tracker.py +5 -0
  198. mlrun/utils/async_http.py +26 -6
  199. mlrun/utils/db.py +18 -0
  200. mlrun/utils/helpers.py +375 -105
  201. mlrun/utils/http.py +2 -2
  202. mlrun/utils/logger.py +75 -9
  203. mlrun/utils/notifications/notification/__init__.py +14 -10
  204. mlrun/utils/notifications/notification/base.py +48 -0
  205. mlrun/utils/notifications/notification/console.py +2 -0
  206. mlrun/utils/notifications/notification/git.py +24 -1
  207. mlrun/utils/notifications/notification/ipython.py +2 -0
  208. mlrun/utils/notifications/notification/slack.py +96 -21
  209. mlrun/utils/notifications/notification/webhook.py +63 -2
  210. mlrun/utils/notifications/notification_pusher.py +146 -16
  211. mlrun/utils/regex.py +9 -0
  212. mlrun/utils/retryer.py +3 -2
  213. mlrun/utils/v3io_clients.py +2 -3
  214. mlrun/utils/version/version.json +2 -2
  215. mlrun-1.7.2.dist-info/METADATA +390 -0
  216. mlrun-1.7.2.dist-info/RECORD +351 -0
  217. {mlrun-1.7.0rc4.dist-info → mlrun-1.7.2.dist-info}/WHEEL +1 -1
  218. mlrun/feature_store/retrieval/conversion.py +0 -271
  219. mlrun/kfpops.py +0 -868
  220. mlrun/model_monitoring/application.py +0 -310
  221. mlrun/model_monitoring/batch.py +0 -974
  222. mlrun/model_monitoring/controller_handler.py +0 -37
  223. mlrun/model_monitoring/prometheus.py +0 -216
  224. mlrun/model_monitoring/stores/__init__.py +0 -111
  225. mlrun/model_monitoring/stores/kv_model_endpoint_store.py +0 -574
  226. mlrun/model_monitoring/stores/model_endpoint_store.py +0 -145
  227. mlrun/model_monitoring/stores/models/__init__.py +0 -27
  228. mlrun/model_monitoring/stores/models/base.py +0 -84
  229. mlrun/model_monitoring/stores/sql_model_endpoint_store.py +0 -382
  230. mlrun/platforms/other.py +0 -305
  231. mlrun-1.7.0rc4.dist-info/METADATA +0 -269
  232. mlrun-1.7.0rc4.dist-info/RECORD +0 -321
  233. {mlrun-1.7.0rc4.dist-info → mlrun-1.7.2.dist-info}/LICENSE +0 -0
  234. {mlrun-1.7.0rc4.dist-info → mlrun-1.7.2.dist-info}/entry_points.txt +0 -0
  235. {mlrun-1.7.0rc4.dist-info → mlrun-1.7.2.dist-info}/top_level.txt +0 -0
mlrun/artifacts/base.py CHANGED
@@ -20,7 +20,6 @@ import warnings
20
20
  import zipfile
21
21
 
22
22
  import yaml
23
- from deprecated import deprecated
24
23
 
25
24
  import mlrun
26
25
  import mlrun.artifacts
@@ -88,9 +87,10 @@ class ArtifactSpec(ModelObj):
88
87
  "db_key",
89
88
  "extra_data",
90
89
  "unpackaging_instructions",
90
+ "producer",
91
91
  ]
92
92
 
93
- _extra_fields = ["annotations", "producer", "sources", "license", "encoding"]
93
+ _extra_fields = ["annotations", "sources", "license", "encoding"]
94
94
  _exclude_fields_from_uid_hash = [
95
95
  # if the artifact is first created, it will not have a db_key,
96
96
  # exclude it so further updates of the artifacts will have the same hash
@@ -191,12 +191,30 @@ class Artifact(ModelObj):
191
191
  format=None,
192
192
  size=None,
193
193
  target_path=None,
194
- # All params up until here are legacy params for compatibility with legacy artifacts.
195
194
  project=None,
195
+ src_path: str = None,
196
+ # All params up until here are legacy params for compatibility with legacy artifacts.
197
+ # TODO: remove them in 1.9.0.
196
198
  metadata: ArtifactMetadata = None,
197
199
  spec: ArtifactSpec = None,
198
- src_path: str = None,
199
200
  ):
201
+ if (
202
+ key
203
+ or body
204
+ or viewer
205
+ or is_inline
206
+ or format
207
+ or size
208
+ or target_path
209
+ or project
210
+ or src_path
211
+ ):
212
+ warnings.warn(
213
+ "Artifact constructor parameters are deprecated and will be removed in 1.9.0. "
214
+ "Use the metadata and spec parameters instead.",
215
+ DeprecationWarning,
216
+ )
217
+
200
218
  self._metadata = None
201
219
  self.metadata = metadata
202
220
  self._spec = None
@@ -614,6 +632,7 @@ class DirArtifactSpec(ArtifactSpec):
614
632
  "src_path",
615
633
  "target_path",
616
634
  "db_key",
635
+ "producer",
617
636
  ]
618
637
 
619
638
 
@@ -698,11 +717,18 @@ class LinkArtifact(Artifact):
698
717
  link_iteration=None,
699
718
  link_key=None,
700
719
  link_tree=None,
701
- # All params up until here are legacy params for compatibility with legacy artifacts.
702
720
  project=None,
721
+ # All params up until here are legacy params for compatibility with legacy artifacts.
722
+ # TODO: remove them in 1.9.0.
703
723
  metadata: ArtifactMetadata = None,
704
724
  spec: LinkArtifactSpec = None,
705
725
  ):
726
+ if key or target_path or link_iteration or link_key or link_tree or project:
727
+ warnings.warn(
728
+ "Artifact constructor parameters are deprecated and will be removed in 1.9.0. "
729
+ "Use the metadata and spec parameters instead.",
730
+ DeprecationWarning,
731
+ )
706
732
  super().__init__(
707
733
  key, target_path=target_path, project=project, metadata=metadata, spec=spec
708
734
  )
@@ -719,238 +745,6 @@ class LinkArtifact(Artifact):
719
745
  self._spec = self._verify_dict(spec, "spec", LinkArtifactSpec)
720
746
 
721
747
 
722
- # TODO: remove in 1.7.0
723
- @deprecated(
724
- version="1.3.0",
725
- reason="'LegacyArtifact' will be removed in 1.7.0, use 'Artifact' instead",
726
- category=FutureWarning,
727
- )
728
- class LegacyArtifact(ModelObj):
729
- _dict_fields = [
730
- "key",
731
- "kind",
732
- "iter",
733
- "tree",
734
- "src_path",
735
- "target_path",
736
- "hash",
737
- "description",
738
- "viewer",
739
- "inline",
740
- "format",
741
- "size",
742
- "db_key",
743
- "extra_data",
744
- "tag",
745
- ]
746
- kind = ""
747
- _store_prefix = StorePrefix.Artifact
748
-
749
- def __init__(
750
- self,
751
- key=None,
752
- body=None,
753
- viewer=None,
754
- is_inline=False,
755
- format=None,
756
- size=None,
757
- target_path=None,
758
- ):
759
- self.key = key
760
- self.project = ""
761
- self.db_key = None
762
- self.size = size
763
- self.iter = None
764
- self.tree = None
765
- self.updated = None
766
- self.target_path = target_path
767
- self.src_path = None
768
- self._body = body
769
- self.format = format
770
- self.description = None
771
- self.viewer = viewer
772
- self.encoding = None
773
- self.labels = {}
774
- self.annotations = None
775
- self.sources = []
776
- self.producer = None
777
- self.hash = None
778
- self._inline = is_inline
779
- self.license = ""
780
- self.extra_data = {}
781
- self.tag = None # temp store of the tag
782
-
783
- def before_log(self):
784
- for key, item in self.extra_data.items():
785
- if hasattr(item, "target_path"):
786
- self.extra_data[key] = item.target_path
787
-
788
- def is_inline(self):
789
- return self._inline
790
-
791
- @property
792
- def is_dir(self):
793
- """this is a directory"""
794
- return False
795
-
796
- @property
797
- def inline(self):
798
- """inline data (body)"""
799
- if self._inline:
800
- return self.get_body()
801
- return None
802
-
803
- @inline.setter
804
- def inline(self, body):
805
- self._body = body
806
- if body:
807
- self._inline = True
808
-
809
- @property
810
- def uri(self):
811
- """return artifact uri (store://..)"""
812
- return self.get_store_url()
813
-
814
- def to_dataitem(self):
815
- """return a DataItem object (if available) representing the artifact content"""
816
- uri = self.get_store_url()
817
- if uri:
818
- return mlrun.get_dataitem(uri)
819
-
820
- def get_body(self):
821
- """get the artifact body when inline"""
822
- return self._body
823
-
824
- def get_target_path(self):
825
- """get the absolute target path for the artifact"""
826
- return self.target_path
827
-
828
- def get_store_url(self, with_tag=True, project=None):
829
- """get the artifact uri (store://..) with optional parameters"""
830
- tag = self.tree if with_tag else None
831
- uri = generate_artifact_uri(
832
- project or self.project, self.db_key, tag, self.iter
833
- )
834
- return mlrun.datastore.get_store_uri(self._store_prefix, uri)
835
-
836
- def base_dict(self):
837
- """return short dict form of the artifact"""
838
- return super().to_dict()
839
-
840
- def to_dict(self, fields: list = None, exclude: list = None, strip: bool = False):
841
- """return long dict form of the artifact"""
842
- return super().to_dict(
843
- self._dict_fields
844
- + ["updated", "labels", "annotations", "producer", "sources", "project"],
845
- strip=strip,
846
- )
847
-
848
- @classmethod
849
- def from_dict(cls, struct=None, fields=None):
850
- fields = fields or cls._dict_fields + [
851
- "updated",
852
- "labels",
853
- "annotations",
854
- "producer",
855
- "sources",
856
- "project",
857
- ]
858
- return super().from_dict(struct, fields=fields)
859
-
860
- def upload(self):
861
- """internal, upload to target store"""
862
- src_path = self.src_path
863
- body = self.get_body()
864
- if body:
865
- self._upload_body(body)
866
- else:
867
- if src_path and os.path.isfile(src_path):
868
- self._upload_file(src_path)
869
-
870
- def _upload_body(self, body, target=None):
871
- if mlrun.mlconf.artifacts.calculate_hash:
872
- self.hash = calculate_blob_hash(body)
873
- self.size = len(body)
874
- mlrun.datastore.store_manager.object(url=target or self.target_path).put(body)
875
-
876
- def _upload_file(self, src, target=None):
877
- if mlrun.mlconf.artifacts.calculate_hash:
878
- self.hash = calculate_local_file_hash(src)
879
- self.size = os.stat(src).st_size
880
- mlrun.datastore.store_manager.object(url=target or self.target_path).upload(src)
881
-
882
- def artifact_kind(self):
883
- return self.kind
884
-
885
- def generate_target_path(self, artifact_path, producer):
886
- return generate_target_path(self, artifact_path, producer)
887
-
888
-
889
- # TODO: remove in 1.7.0
890
- @deprecated(
891
- version="1.3.0",
892
- reason="'LegacyDirArtifact' will be removed in 1.7.0, use 'DirArtifact' instead",
893
- category=FutureWarning,
894
- )
895
- class LegacyDirArtifact(LegacyArtifact):
896
- _dict_fields = [
897
- "key",
898
- "kind",
899
- "iter",
900
- "tree",
901
- "src_path",
902
- "target_path",
903
- "description",
904
- "db_key",
905
- ]
906
- kind = "dir"
907
-
908
- @property
909
- def is_dir(self):
910
- return True
911
-
912
- def upload(self):
913
- if not self.src_path:
914
- raise ValueError("local/source path not specified")
915
-
916
- files = os.listdir(self.src_path)
917
- for f in files:
918
- file_path = os.path.join(self.src_path, f)
919
- if not os.path.isfile(file_path):
920
- raise ValueError(f"file {file_path} not found, cant upload")
921
- target = os.path.join(self.target_path, f)
922
- mlrun.datastore.store_manager.object(url=target).upload(file_path)
923
-
924
-
925
- # TODO: remove in 1.7.0
926
- @deprecated(
927
- version="1.3.0",
928
- reason="'LegacyLinkArtifact' will be removed in 1.7.0, use 'LinkArtifact' instead",
929
- category=FutureWarning,
930
- )
931
- class LegacyLinkArtifact(LegacyArtifact):
932
- _dict_fields = LegacyArtifact._dict_fields + [
933
- "link_iteration",
934
- "link_key",
935
- "link_tree",
936
- ]
937
- kind = "link"
938
-
939
- def __init__(
940
- self,
941
- key=None,
942
- target_path="",
943
- link_iteration=None,
944
- link_key=None,
945
- link_tree=None,
946
- ):
947
- super().__init__(key)
948
- self.target_path = target_path
949
- self.link_iteration = link_iteration
950
- self.link_key = link_key
951
- self.link_tree = link_tree
952
-
953
-
954
748
  def calculate_blob_hash(data):
955
749
  if isinstance(data, str):
956
750
  data = data.encode()
@@ -1056,25 +850,16 @@ def generate_target_path(item: Artifact, artifact_path, producer):
1056
850
  return f"{artifact_path}{item.key}{suffix}"
1057
851
 
1058
852
 
853
+ # TODO: left to support data migration from legacy artifacts to new artifacts. Remove in 1.8.0.
1059
854
  def convert_legacy_artifact_to_new_format(
1060
- legacy_artifact: typing.Union[LegacyArtifact, dict],
855
+ legacy_artifact: dict,
1061
856
  ) -> Artifact:
1062
857
  """Converts a legacy artifact to a new format.
1063
-
1064
858
  :param legacy_artifact: The legacy artifact to convert.
1065
859
  :return: The converted artifact.
1066
860
  """
1067
- if isinstance(legacy_artifact, LegacyArtifact):
1068
- legacy_artifact_dict = legacy_artifact.to_dict()
1069
- elif isinstance(legacy_artifact, dict):
1070
- legacy_artifact_dict = legacy_artifact
1071
- else:
1072
- raise TypeError(
1073
- f"Unsupported type '{type(legacy_artifact)}' for legacy artifact"
1074
- )
1075
-
1076
- artifact_key = legacy_artifact_dict.get("key", "")
1077
- artifact_tag = legacy_artifact_dict.get("tag", "")
861
+ artifact_key = legacy_artifact.get("key", "")
862
+ artifact_tag = legacy_artifact.get("tag", "")
1078
863
  if artifact_tag:
1079
864
  artifact_key = f"{artifact_key}:{artifact_tag}"
1080
865
  # TODO: remove in 1.8.0
@@ -1085,12 +870,12 @@ def convert_legacy_artifact_to_new_format(
1085
870
  )
1086
871
 
1087
872
  artifact = mlrun.artifacts.artifact_types.get(
1088
- legacy_artifact_dict.get("kind", "artifact"), mlrun.artifacts.Artifact
873
+ legacy_artifact.get("kind", "artifact"), mlrun.artifacts.Artifact
1089
874
  )()
1090
875
 
1091
- artifact.metadata = artifact.metadata.from_dict(legacy_artifact_dict)
1092
- artifact.spec = artifact.spec.from_dict(legacy_artifact_dict)
1093
- artifact.status = artifact.status.from_dict(legacy_artifact_dict)
876
+ artifact.metadata = artifact.metadata.from_dict(legacy_artifact)
877
+ artifact.spec = artifact.spec.from_dict(legacy_artifact)
878
+ artifact.status = artifact.status.from_dict(legacy_artifact)
1094
879
 
1095
880
  return artifact
1096
881
 
@@ -13,12 +13,12 @@
13
13
  # limitations under the License.
14
14
  import os
15
15
  import pathlib
16
+ import warnings
16
17
  from io import StringIO
17
18
  from typing import Optional
18
19
 
19
20
  import numpy as np
20
21
  import pandas as pd
21
- from deprecated import deprecated
22
22
  from pandas.io.json import build_table_schema
23
23
 
24
24
  import mlrun
@@ -27,7 +27,7 @@ import mlrun.datastore
27
27
  import mlrun.utils.helpers
28
28
  from mlrun.config import config as mlconf
29
29
 
30
- from .base import Artifact, ArtifactSpec, LegacyArtifact, StorePrefix
30
+ from .base import Artifact, ArtifactSpec, StorePrefix
31
31
 
32
32
  default_preview_rows_length = 20
33
33
  max_preview_columns = mlconf.artifacts.datasets.max_preview_columns
@@ -161,6 +161,13 @@ class DatasetArtifact(Artifact):
161
161
  label_column: str = None,
162
162
  **kwargs,
163
163
  ):
164
+ if key or format or target_path:
165
+ warnings.warn(
166
+ "Artifact constructor parameters are deprecated and will be removed in 1.9.0. "
167
+ "Use the metadata and spec parameters instead.",
168
+ DeprecationWarning,
169
+ )
170
+
164
171
  format = (format or "").lower()
165
172
  super().__init__(key, None, format=format, target_path=target_path)
166
173
  if format and format not in self.SUPPORTED_FORMATS:
@@ -360,194 +367,6 @@ class DatasetArtifact(Artifact):
360
367
  self.status.stats = stats
361
368
 
362
369
 
363
- # TODO: remove in 1.7.0
364
- @deprecated(
365
- version="1.3.0",
366
- reason="'LegacyTableArtifact' will be removed in 1.7.0, use 'TableArtifact' instead",
367
- category=FutureWarning,
368
- )
369
- class LegacyTableArtifact(LegacyArtifact):
370
- _dict_fields = LegacyArtifact._dict_fields + ["schema", "header"]
371
- kind = "table"
372
-
373
- def __init__(
374
- self,
375
- key=None,
376
- body=None,
377
- df=None,
378
- viewer=None,
379
- visible=False,
380
- inline=False,
381
- format=None,
382
- header=None,
383
- schema=None,
384
- ):
385
- if key:
386
- key_suffix = pathlib.Path(key).suffix
387
- if not format and key_suffix:
388
- format = key_suffix[1:]
389
- super().__init__(key, body, viewer=viewer, is_inline=inline, format=format)
390
-
391
- if df is not None:
392
- self._is_df = True
393
- self.header = df.reset_index(drop=True).columns.values.tolist()
394
- self.format = "csv" # todo other formats
395
- # if visible and not key_suffix:
396
- # key += '.csv'
397
- self._body = df
398
- else:
399
- self._is_df = False
400
- self.header = header
401
-
402
- self.schema = schema
403
- if not viewer:
404
- viewer = "table" if visible else None
405
- self.viewer = viewer
406
-
407
- def get_body(self):
408
- if not self._is_df:
409
- return self._body
410
- csv_buffer = StringIO()
411
- self._body.to_csv(
412
- csv_buffer,
413
- encoding="utf-8",
414
- **mlrun.utils.line_terminator_kwargs(),
415
- )
416
- return csv_buffer.getvalue()
417
-
418
-
419
- # TODO: remove in 1.7.0
420
- @deprecated(
421
- version="1.3.0",
422
- reason="'LegacyDatasetArtifact' will be removed in 1.7.0, use 'DatasetArtifact' instead",
423
- category=FutureWarning,
424
- )
425
- class LegacyDatasetArtifact(LegacyArtifact):
426
- # List of all the supported saving formats of a DataFrame:
427
- SUPPORTED_FORMATS = ["csv", "parquet", "pq", "tsdb", "kv"]
428
-
429
- _dict_fields = LegacyArtifact._dict_fields + [
430
- "schema",
431
- "header",
432
- "length",
433
- "preview",
434
- "stats",
435
- "extra_data",
436
- "column_metadata",
437
- ]
438
- kind = "dataset"
439
-
440
- def __init__(
441
- self,
442
- key: str = None,
443
- df=None,
444
- preview: int = None,
445
- format: str = "", # TODO: should be changed to 'fmt'.
446
- stats: bool = None,
447
- target_path: str = None,
448
- extra_data: dict = None,
449
- column_metadata: dict = None,
450
- ignore_preview_limits: bool = False,
451
- **kwargs,
452
- ):
453
- format = (format or "").lower()
454
- super().__init__(key, None, format=format, target_path=target_path)
455
- if format and format not in self.SUPPORTED_FORMATS:
456
- raise ValueError(
457
- f"unsupported format {format} use one of {'|'.join(self.SUPPORTED_FORMATS)}"
458
- )
459
-
460
- if format == "pq":
461
- format = "parquet"
462
- self.format = format
463
- self.stats = None
464
- self.extra_data = extra_data or {}
465
- self.column_metadata = column_metadata or {}
466
-
467
- if df is not None:
468
- if hasattr(df, "dask"):
469
- # If df is a Dask DataFrame, and it's small in-memory, convert to Pandas
470
- if (df.memory_usage(deep=True).sum().compute() / 1e9) < max_ddf_size:
471
- df = df.compute()
472
- self.update_preview_fields_from_df(
473
- self, df, stats, preview, ignore_preview_limits
474
- )
475
-
476
- self._df = df
477
- self._kw = kwargs
478
-
479
- def upload(self):
480
- suffix = pathlib.Path(self.target_path).suffix
481
- format = self.format
482
- if not format:
483
- if suffix and suffix in [".csv", ".parquet", ".pq"]:
484
- format = "csv" if suffix == ".csv" else "parquet"
485
- else:
486
- format = "parquet"
487
- if not suffix and not self.target_path.startswith("memory://"):
488
- self.target_path = self.target_path + "." + format
489
-
490
- self.size, self.hash = upload_dataframe(
491
- self._df,
492
- self.target_path,
493
- format=format,
494
- src_path=self.src_path,
495
- **self._kw,
496
- )
497
-
498
- @property
499
- def df(self) -> pd.DataFrame:
500
- """
501
- Get the dataset in this artifact.
502
-
503
- :return: The dataset as a DataFrame.
504
- """
505
- return self._df
506
-
507
- @staticmethod
508
- def is_format_supported(fmt: str) -> bool:
509
- """
510
- Check whether the given dataset format is supported by the DatasetArtifact.
511
-
512
- :param fmt: The format string to check.
513
-
514
- :return: True if the format is supported and False if not.
515
- """
516
- return fmt in DatasetArtifact.SUPPORTED_FORMATS
517
-
518
- @staticmethod
519
- def update_preview_fields_from_df(
520
- artifact, df, stats=None, preview_rows_length=None, ignore_preview_limits=False
521
- ):
522
- preview_rows_length = preview_rows_length or default_preview_rows_length
523
- if hasattr(df, "dask"):
524
- artifact.length = df.shape[0].compute()
525
- preview_df = df.sample(frac=ddf_sample_pct).compute()
526
- else:
527
- artifact.length = df.shape[0]
528
- preview_df = df
529
-
530
- if artifact.length > preview_rows_length and not ignore_preview_limits:
531
- preview_df = df.head(preview_rows_length)
532
-
533
- preview_df = preview_df.reset_index()
534
- if len(preview_df.columns) > max_preview_columns and not ignore_preview_limits:
535
- preview_df = preview_df.iloc[:, :max_preview_columns]
536
- artifact.header = preview_df.columns.values.tolist()
537
- artifact.preview = preview_df.values.tolist()
538
- # Table schema parsing doesn't require a column named "index"
539
- # to align its output with previously generated header and preview data
540
- if "index" in preview_df.columns:
541
- preview_df.drop("index", axis=1, inplace=True)
542
- artifact.schema = build_table_schema(preview_df)
543
- if (
544
- stats
545
- or (artifact.length < max_csv and len(df.columns) < max_preview_columns)
546
- or ignore_preview_limits
547
- ):
548
- artifact.stats = get_df_stats(df)
549
-
550
-
551
370
  def get_df_stats(df):
552
371
  if hasattr(df, "dask"):
553
372
  df = df.sample(frac=ddf_sample_pct).compute()