mlrun 1.7.0rc25__py3-none-any.whl → 1.7.0rc29__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of mlrun might be problematic.

Files changed (67)
  1. mlrun/__main__.py +7 -7
  2. mlrun/alerts/alert.py +13 -1
  3. mlrun/artifacts/manager.py +5 -0
  4. mlrun/common/constants.py +2 -2
  5. mlrun/common/formatters/base.py +9 -9
  6. mlrun/common/schemas/alert.py +4 -8
  7. mlrun/common/schemas/api_gateway.py +7 -0
  8. mlrun/common/schemas/constants.py +3 -0
  9. mlrun/common/schemas/model_monitoring/__init__.py +1 -0
  10. mlrun/common/schemas/model_monitoring/constants.py +27 -12
  11. mlrun/common/schemas/model_monitoring/model_endpoints.py +0 -12
  12. mlrun/common/schemas/schedule.py +1 -1
  13. mlrun/config.py +16 -9
  14. mlrun/datastore/azure_blob.py +2 -1
  15. mlrun/datastore/base.py +1 -5
  16. mlrun/datastore/datastore.py +3 -3
  17. mlrun/datastore/google_cloud_storage.py +6 -2
  18. mlrun/datastore/inmem.py +1 -1
  19. mlrun/datastore/snowflake_utils.py +3 -1
  20. mlrun/datastore/sources.py +26 -11
  21. mlrun/datastore/store_resources.py +2 -0
  22. mlrun/datastore/targets.py +60 -25
  23. mlrun/db/base.py +11 -0
  24. mlrun/db/httpdb.py +47 -33
  25. mlrun/db/nopdb.py +11 -1
  26. mlrun/errors.py +4 -0
  27. mlrun/execution.py +18 -10
  28. mlrun/feature_store/retrieval/spark_merger.py +2 -1
  29. mlrun/launcher/local.py +2 -2
  30. mlrun/model.py +30 -0
  31. mlrun/model_monitoring/api.py +6 -52
  32. mlrun/model_monitoring/applications/histogram_data_drift.py +4 -1
  33. mlrun/model_monitoring/db/stores/__init__.py +21 -9
  34. mlrun/model_monitoring/db/stores/base/store.py +39 -1
  35. mlrun/model_monitoring/db/stores/sqldb/models/base.py +9 -7
  36. mlrun/model_monitoring/db/stores/sqldb/models/mysql.py +4 -2
  37. mlrun/model_monitoring/db/stores/sqldb/sql_store.py +34 -79
  38. mlrun/model_monitoring/db/stores/v3io_kv/kv_store.py +19 -27
  39. mlrun/model_monitoring/db/tsdb/__init__.py +19 -14
  40. mlrun/model_monitoring/db/tsdb/v3io/v3io_connector.py +4 -2
  41. mlrun/model_monitoring/helpers.py +9 -5
  42. mlrun/model_monitoring/writer.py +1 -5
  43. mlrun/projects/operations.py +1 -0
  44. mlrun/projects/project.py +76 -76
  45. mlrun/render.py +10 -5
  46. mlrun/run.py +2 -2
  47. mlrun/runtimes/daskjob.py +7 -1
  48. mlrun/runtimes/local.py +24 -7
  49. mlrun/runtimes/nuclio/function.py +20 -0
  50. mlrun/runtimes/pod.py +5 -29
  51. mlrun/serving/routers.py +75 -59
  52. mlrun/serving/server.py +1 -0
  53. mlrun/serving/v2_serving.py +8 -1
  54. mlrun/utils/helpers.py +46 -2
  55. mlrun/utils/logger.py +36 -2
  56. mlrun/utils/notifications/notification/base.py +4 -0
  57. mlrun/utils/notifications/notification/git.py +21 -0
  58. mlrun/utils/notifications/notification/slack.py +8 -0
  59. mlrun/utils/notifications/notification/webhook.py +41 -1
  60. mlrun/utils/notifications/notification_pusher.py +2 -2
  61. mlrun/utils/version/version.json +2 -2
  62. {mlrun-1.7.0rc25.dist-info → mlrun-1.7.0rc29.dist-info}/METADATA +11 -6
  63. {mlrun-1.7.0rc25.dist-info → mlrun-1.7.0rc29.dist-info}/RECORD +67 -67
  64. {mlrun-1.7.0rc25.dist-info → mlrun-1.7.0rc29.dist-info}/WHEEL +1 -1
  65. {mlrun-1.7.0rc25.dist-info → mlrun-1.7.0rc29.dist-info}/LICENSE +0 -0
  66. {mlrun-1.7.0rc25.dist-info → mlrun-1.7.0rc29.dist-info}/entry_points.txt +0 -0
  67. {mlrun-1.7.0rc25.dist-info → mlrun-1.7.0rc29.dist-info}/top_level.txt +0 -0
mlrun/model_monitoring/db/stores/v3io_kv/kv_store.py CHANGED
@@ -256,7 +256,6 @@ class KVStoreBase(StoreBase):
  self.project,
  function,
  model,
- labels,
  top_level,
  ),
  raise_for_status=v3io.dataplane.RaiseForStatus.never,
@@ -269,7 +268,6 @@ class KVStoreBase(StoreBase):
  exc=mlrun.errors.err_to_str(exc),
  )
  return endpoint_list
-
  # Create a list of model endpoints unique ids
  if uids is None:
  uids = []
@@ -282,10 +280,16 @@ class KVStoreBase(StoreBase):

  # Add each relevant model endpoint to the model endpoints list
  for endpoint_id in uids:
- endpoint = self.get_model_endpoint(
+ endpoint_dict = self.get_model_endpoint(
  endpoint_id=endpoint_id,
  )
- endpoint_list.append(endpoint)
+
+ if labels and not self._validate_labels(
+ endpoint_dict=endpoint_dict, labels=labels
+ ):
+ continue
+
+ endpoint_list.append(endpoint_dict)

  return endpoint_list

@@ -509,20 +513,16 @@ class KVStoreBase(StoreBase):
  project: str,
  function: str = None,
  model: str = None,
- labels: list[str] = None,
  top_level: bool = False,
  ) -> str:
  """
  Convert the provided filters into a valid filter expression. The expected filter expression includes different
  conditions, divided by ' AND '.

- :param project: The name of the project.
- :param model: The name of the model to filter by.
- :param function: The name of the function to filter by.
- :param labels: A list of labels to filter by. Label filters work by either filtering a specific value of
- a label (i.e. list("key=value")) or by looking for the existence of a given
- key (i.e. "key").
- :param top_level: If True will return only routers and endpoint that are NOT children of any router.
+ :param project: The name of the project.
+ :param model: The name of the model to filter by.
+ :param function: The name of the function to filter by.
+ :param top_level: If True will return only routers and endpoint that are NOT children of any router.

  :return: A valid filter expression as a string.

@@ -533,25 +533,17 @@ class KVStoreBase(StoreBase):
  raise mlrun.errors.MLRunInvalidArgumentError("project can't be empty")

  # Add project filter
- filter_expression = [f"project=='{project}'"]
+ filter_expression = [f"{mm_schemas.EventFieldType.PROJECT}=='{project}'"]

  # Add function and model filters
  if function:
- filter_expression.append(f"function=='{function}'")
+ function_uri = f"{project}/{function}" if function else None
+ filter_expression.append(
+ f"{mm_schemas.EventFieldType.FUNCTION_URI}=='{function_uri}'"
+ )
  if model:
- filter_expression.append(f"model=='{model}'")
-
- # Add labels filters
- if labels:
- for label in labels:
- if not label.startswith("_"):
- label = f"_{label}"
-
- if "=" in label:
- lbl, value = list(map(lambda x: x.strip(), label.split("=")))
- filter_expression.append(f"{lbl}=='{value}'")
- else:
- filter_expression.append(f"exists({label})")
+ model = model if ":" in model else f"{model}:latest"
+ filter_expression.append(f"{mm_schemas.EventFieldType.MODEL}=='{model}'")

  # Apply top_level filter (remove endpoints that considered a child of a router)
  if top_level:
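
For orientation, a minimal sketch (not mlrun code) of the expression the rewritten helper now builds, assuming the EventFieldType constants resolve to the plain field names project, function_uri and model:

    # Hypothetical inputs; the ' AND ' separator is stated in the docstring above.
    project, function, model = "my-project", "serving", "churn-model"
    parts = [f"project=='{project}'"]
    if function:
        parts.append(f"function_uri=='{project}/{function}'")
    if model:
        model = model if ":" in model else f"{model}:latest"
        parts.append(f"model=='{model}'")
    print(" AND ".join(parts))
    # project=='my-project' AND function_uri=='my-project/serving' AND model=='churn-model:latest'

Label filtering is no longer folded into this expression; as the list_model_endpoints hunk above shows, labels are now checked per endpoint via _validate_labels after the query returns.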
mlrun/model_monitoring/db/tsdb/__init__.py CHANGED
@@ -57,41 +57,46 @@ class ObjectTSDBFactory(enum.Enum):
  :param value: Provided enum (invalid) value.
  """
  valid_values = list(cls.__members__.keys())
- raise mlrun.errors.MLRunInvalidArgumentError(
+ raise mlrun.errors.MLRunInvalidMMStoreType(
  f"{value} is not a valid tsdb, please choose a valid value: %{valid_values}."
  )


  def get_tsdb_connector(
  project: str,
- tsdb_connector_type: str = "",
  secret_provider: typing.Optional[typing.Callable[[str], str]] = None,
+ tsdb_connection_string: typing.Optional[str] = None,
  **kwargs,
  ) -> TSDBConnector:
  """
  Get TSDB connector object.
- :param project: The name of the project.
- :param tsdb_connector_type: The type of the TSDB connector. See mlrun.model_monitoring.db.tsdb.ObjectTSDBFactory
- for available options.
- :param secret_provider: An optional secret provider to get the connection string secret.
+ :param project: The name of the project.
+ :param secret_provider: An optional secret provider to get the connection string secret.
+ :param tsdb_connection_string: An optional explicit connection string to the TSDB.

  :return: `TSDBConnector` object. The main goal of this object is to handle different operations on the
  TSDB connector such as updating drift metrics or write application record result.
+ :raise: `MLRunInvalidMMStoreType` if the user didn't provide TSDB connection
+ or the provided TSDB connection is invalid.
  """

- tsdb_connection_string = mlrun.model_monitoring.helpers.get_tsdb_connection_string(
- secret_provider=secret_provider
+ tsdb_connection_string = (
+ tsdb_connection_string
+ or mlrun.model_monitoring.helpers.get_tsdb_connection_string(
+ secret_provider=secret_provider
+ )
  )

  if tsdb_connection_string and tsdb_connection_string.startswith("taosws"):
  tsdb_connector_type = mlrun.common.schemas.model_monitoring.TSDBTarget.TDEngine
  kwargs["connection_string"] = tsdb_connection_string
-
- # Set the default TSDB connector type if no connection has been set
- tsdb_connector_type = (
- tsdb_connector_type
- or mlrun.mlconf.model_endpoint_monitoring.tsdb_connector_type
- )
+ elif tsdb_connection_string and tsdb_connection_string == "v3io":
+ tsdb_connector_type = mlrun.common.schemas.model_monitoring.TSDBTarget.V3IO_TSDB
+ else:
+ raise mlrun.errors.MLRunInvalidMMStoreType(
+ "You must provide a valid tsdb store connection by using "
+ "set_model_monitoring_credentials API."
+ )

  # Get connector type value from ObjectTSDBFactory enum class
  tsdb_connector_factory = ObjectTSDBFactory(tsdb_connector_type)
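
A hedged usage sketch of the new get_tsdb_connector signature; the connection strings are placeholders and the import path follows the file this hunk belongs to:

    from mlrun.model_monitoring.db.tsdb import get_tsdb_connector

    # The literal string "v3io" selects the V3IO TSDB connector.
    connector = get_tsdb_connector(project="my-project", tsdb_connection_string="v3io")

    # A connection string starting with "taosws" selects the TDEngine connector.
    # connector = get_tsdb_connector(
    #     project="my-project",
    #     tsdb_connection_string="taosws://user:password@tdengine-host:6041",
    # )

Any other value (or no value resolvable from the secret provider) now raises MLRunInvalidMMStoreType, pointing users at set_model_monitoring_credentials.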
mlrun/model_monitoring/db/tsdb/v3io/v3io_connector.py CHANGED
@@ -38,7 +38,10 @@ def _is_no_schema_error(exc: v3io_frames.ReadError) -> bool:
  In case of a nonexistent TSDB table - a `v3io_frames.ReadError` error is raised.
  Check if the error message contains the relevant string to verify the cause.
  """
- return "No TSDB schema file found" in str(exc)
+ msg = str(exc)
+ # https://github.com/v3io/v3io-tsdb/blob/v0.14.1/pkg/tsdb/v3iotsdb.go#L205
+ # https://github.com/v3io/v3io-tsdb/blob/v0.14.1/pkg/partmgr/partmgr.go#L238
+ return "No TSDB schema file found" in msg or "Failed to read schema at path" in msg


  class V3IOTSDBConnector(TSDBConnector):
@@ -596,7 +599,6 @@ class V3IOTSDBConnector(TSDBConnector):
  end=end,
  columns=[mm_schemas.EventFieldType.LATENCY],
  filter_query=f"endpoint_id=='{endpoint_id}'",
- interval=aggregation_window,
  agg_funcs=agg_funcs,
  sliding_window_step=aggregation_window,
  )
mlrun/model_monitoring/helpers.py CHANGED
@@ -59,13 +59,17 @@ def get_stream_path(

  stream_uri = mlrun.get_secret_or_env(
  mlrun.common.schemas.model_monitoring.ProjectSecretKeys.STREAM_PATH
- ) or mlrun.mlconf.get_model_monitoring_file_target_path(
- project=project,
- kind=mlrun.common.schemas.model_monitoring.FileTargetKind.STREAM,
- target="online",
- function_name=function_name,
  )

+ if not stream_uri or stream_uri == "v3io":
+ # TODO : remove the first part of this condition in 1.9.0
+ stream_uri = mlrun.mlconf.get_model_monitoring_file_target_path(
+ project=project,
+ kind=mlrun.common.schemas.model_monitoring.FileTargetKind.STREAM,
+ target="online",
+ function_name=function_name,
+ )
+
  if isinstance(stream_uri, list): # ML-6043 - user side gets only the new stream uri
  stream_uri = stream_uri[1] # get new stream path, under projects
  return mlrun.common.model_monitoring.helpers.parse_monitoring_stream_path(
mlrun/model_monitoring/writer.py CHANGED
@@ -153,11 +153,7 @@ class ModelMonitoringWriter(StepToDict):
  result_kind: int, result_status: int
  ) -> alert_objects.EventKind:
  """Generate the required Event Kind format for the alerting system"""
- if result_kind == ResultKindApp.custom.value:
- # Custom kind is represented as an anomaly detection
- event_kind = "mm_app_anomaly"
- else:
- event_kind = ResultKindApp(value=result_kind).name
+ event_kind = ResultKindApp(value=result_kind).name

  if result_status == ResultStatusApp.detected.value:
  event_kind = f"{event_kind}_detected"
mlrun/projects/operations.py CHANGED
@@ -330,6 +330,7 @@ def build_function(
  commands=commands,
  secret=secret_name,
  requirements=requirements,
+ requirements_file=requirements_file,
  overwrite=overwrite_build_params,
  extra_args=extra_args,
  )
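
The one-line fix above forwards requirements_file through the project-level build call. A hedged usage sketch (the function name and file path are examples):

    import mlrun

    project = mlrun.get_or_create_project("my-project", context="./")
    project.build_function(
        "trainer",                             # a function already set on the project
        requirements_file="requirements.txt",  # now passed on to the image build
    )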
mlrun/projects/project.py CHANGED
@@ -714,7 +714,8 @@ def _project_instance_from_struct(struct, name, allow_cross_project):
  name_from_struct = struct.get("metadata", {}).get("name", "")
  if name and name_from_struct and name_from_struct != name:
  error_message = (
- f"project name mismatch, {name_from_struct} != {name}, please do one of the following:\n"
+ f"Project name mismatch, {name_from_struct} != {name}, project is loaded from {name_from_struct} "
+ f"project yaml. To prevent/allow this, you can take one of the following actions:\n"
  "1. Set the `allow_cross_project=True` when loading the project.\n"
  f"2. Delete the existing project yaml, or ensure its name is equal to {name}.\n"
  "3. Use different project context dir."
@@ -722,14 +723,14 @@ def _project_instance_from_struct(struct, name, allow_cross_project):

  if allow_cross_project is None:
  # TODO: Remove this warning in version 1.9.0 and also fix cli to support allow_cross_project
- logger.warn(
- "Project name is different than specified on its project yaml."
- "You should fix it until version 1.9.0",
- description=error_message,
+ warnings.warn(
+ f"Project {name=} is different than specified on the context's project yaml. "
+ "This behavior is deprecated and will not be supported in version 1.9.0."
  )
+ logger.warn(error_message)
  elif allow_cross_project:
- logger.warn(
- "Project name is different than specified on its project yaml. Overriding.",
+ logger.debug(
+ "Project name is different than specified on the context's project yaml. Overriding.",
  existing_name=name_from_struct,
  overriding_name=name,
  )
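
As the reworded message explains, the mismatch can be allowed explicitly when loading the project. A hedged sketch:

    import mlrun

    project = mlrun.load_project(
        context="./my-context",        # directory whose project yaml carries another name
        name="new-project-name",
        allow_cross_project=True,      # override the yaml name instead of failing
    )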
@@ -1007,8 +1008,13 @@ class ProjectSpec(ModelObj):
  key = artifact.key
  artifact = artifact.to_dict()
  else: # artifact is a dict
- # imported artifacts don't have metadata,spec,status fields
- key_field = "key" if _is_imported_artifact(artifact) else "metadata.key"
+ # imported/legacy artifacts don't have metadata,spec,status fields
+ key_field = (
+ "key"
+ if _is_imported_artifact(artifact)
+ or mlrun.utils.is_legacy_artifact(artifact)
+ else "metadata.key"
+ )
  key = mlrun.utils.get_in(artifact, key_field, "")
  if not key:
  raise ValueError(f'artifacts "{key_field}" must be specified')
@@ -2127,6 +2133,7 @@ class MlrunProject(ModelObj):
  deploy_histogram_data_drift_app: bool = True,
  wait_for_deployment: bool = False,
  rebuild_images: bool = False,
+ fetch_credentials_from_sys_config: bool = False,
  ) -> None:
  """
  Deploy model monitoring application controller, writer and stream functions.
@@ -2136,17 +2143,18 @@ class MlrunProject(ModelObj):
  The stream function goal is to monitor the log of the data stream. It is triggered when a new log entry
  is detected. It processes the new events into statistics that are then written to statistics databases.

- :param default_controller_image: Deprecated.
- :param base_period: The time period in minutes in which the model monitoring controller
- function is triggered. By default, the base period is 10 minutes.
- :param image: The image of the model monitoring controller, writer, monitoring
- stream & histogram data drift functions, which are real time nuclio
- functions. By default, the image is mlrun/mlrun.
- :param deploy_histogram_data_drift_app: If true, deploy the default histogram-based data drift application.
- :param wait_for_deployment: If true, return only after the deployment is done on the backend.
- Otherwise, deploy the model monitoring infrastructure on the
- background, including the histogram data drift app if selected.
- :param rebuild_images: If true, force rebuild of model monitoring infrastructure images.
+ :param default_controller_image: Deprecated.
+ :param base_period: The time period in minutes in which the model monitoring controller
+ function is triggered. By default, the base period is 10 minutes.
+ :param image: The image of the model monitoring controller, writer, monitoring
+ stream & histogram data drift functions, which are real time nuclio
+ functions. By default, the image is mlrun/mlrun.
+ :param deploy_histogram_data_drift_app: If true, deploy the default histogram-based data drift application.
+ :param wait_for_deployment: If true, return only after the deployment is done on the backend.
+ Otherwise, deploy the model monitoring infrastructure on the
+ background, including the histogram data drift app if selected.
+ :param rebuild_images: If true, force rebuild of model monitoring infrastructure images.
+ :param fetch_credentials_from_sys_config: If true, fetch the credentials from the system configuration.
  """
  if default_controller_image != "mlrun/mlrun":
  # TODO: Remove this in 1.9.0
@@ -2163,6 +2171,7 @@ class MlrunProject(ModelObj):
  base_period=base_period,
  deploy_histogram_data_drift_app=deploy_histogram_data_drift_app,
  rebuild_images=rebuild_images,
+ fetch_credentials_from_sys_config=fetch_credentials_from_sys_config,
  )

  if wait_for_deployment:
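
A hedged example of calling the extended API; the argument values are illustrative:

    project.enable_model_monitoring(
        base_period=10,                          # controller trigger period, in minutes
        image="mlrun/mlrun",
        deploy_histogram_data_drift_app=True,
        wait_for_deployment=True,
        fetch_credentials_from_sys_config=True,  # new in this release
    )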
@@ -2337,7 +2346,8 @@ class MlrunProject(ModelObj):
  Default: job
  :param image: Docker image to be used, can also be specified in the function object/yaml
  :param handler: Default function handler to invoke (can only be set with .py/.ipynb files)
- :param with_repo: Add (clone) the current repo to the build source
+ :param with_repo: Add (clone) the current repo to the build source - use when the function code is in
+ the project repo (project.spec.source).
  :param tag: Function version tag to set (none for current or 'latest')
  Specifying a tag as a parameter will update the project's tagged function
  (myfunc:v1) and the untagged function (myfunc)
@@ -2484,25 +2494,17 @@ class MlrunProject(ModelObj):
  self.spec.remove_function(name)

  def remove_model_monitoring_function(self, name: Union[str, list[str]]):
- """remove the specified model-monitoring-app function/s from the project spec
+ """delete the specified model-monitoring-app function/s

  :param name: name of the model-monitoring-function/s (under the project)
  """
- names = name if isinstance(name, list) else [name]
- for func_name in names:
- function = self.get_function(key=func_name)
- if (
- function.metadata.labels.get(mm_constants.ModelMonitoringAppLabel.KEY)
- == mm_constants.ModelMonitoringAppLabel.VAL
- ):
- self.remove_function(name=func_name)
- logger.info(
- f"{func_name} function has been removed from {self.name} project"
- )
- else:
- raise logger.warn(
- f"There is no model monitoring function with {func_name} name"
- )
+ # TODO: Remove this in 1.9.0
+ warnings.warn(
+ "'remove_model_monitoring_function' is deprecated and will be removed in 1.9.0. "
+ "Please use `delete_model_monitoring_function` instead.",
+ FutureWarning,
+ )
+ self.delete_model_monitoring_function(name)

  def delete_model_monitoring_function(self, name: Union[str, list[str]]):
  """delete the specified model-monitoring-app function/s
3204
3206
  stream_path: Optional[str] = None,
3205
3207
  tsdb_connection: Optional[str] = None,
3206
3208
  ):
3207
- """Set the credentials that will be used by the project's model monitoring
3209
+ """
3210
+ Set the credentials that will be used by the project's model monitoring
3208
3211
  infrastructure functions. Important to note that you have to set the credentials before deploying any
3209
3212
  model monitoring or serving function.
3210
3213
 
3211
- :param access_key: Model Monitoring access key for managing user permissions
3212
- :param endpoint_store_connection: Endpoint store connection string
3213
- :param stream_path: Path to the model monitoring stream
3214
- :param tsdb_connection: Connection string to the time series database
3214
+ :param access_key: Model Monitoring access key for managing user permissions.
3215
+ :param endpoint_store_connection: Endpoint store connection string. By default, None.
3216
+ Options:
3217
+ 1. None, will be set from the system configuration.
3218
+ 2. v3io - for v3io endpoint store,
3219
+ pass `v3io` and the system will generate the exact path.
3220
+ 3. MySQL/SQLite - for SQL endpoint store, please provide full
3221
+ connection string, for example
3222
+ mysql+pymysql://<username>:<password>@<host>:<port>/<db_name>
3223
+ :param stream_path: Path to the model monitoring stream. By default, None.
3224
+ Options:
3225
+ 1. None, will be set from the system configuration.
3226
+ 2. v3io - for v3io stream,
3227
+ pass `v3io` and the system will generate the exact path.
3228
+ 3. Kafka - for Kafka stream, please provide full connection string without
3229
+ custom topic, for example kafka://<some_kafka_broker>:<port>.
3230
+ :param tsdb_connection: Connection string to the time series database. By default, None.
3231
+ Options:
3232
+ 1. None, will be set from the system configuration.
3233
+ 2. v3io - for v3io stream,
3234
+ pass `v3io` and the system will generate the exact path.
3235
+ 3. TDEngine - for TDEngine tsdb, please provide full websocket connection URL,
3236
+ for example taosws://<username>:<password>@<host>:<port>.
3215
3237
  """
3216
-
3217
- secrets_dict = {}
3218
- if access_key:
3219
- secrets_dict[
3220
- mlrun.common.schemas.model_monitoring.ProjectSecretKeys.ACCESS_KEY
3221
- ] = access_key
3222
-
3223
- if endpoint_store_connection:
3224
- secrets_dict[
3225
- mlrun.common.schemas.model_monitoring.ProjectSecretKeys.ENDPOINT_STORE_CONNECTION
3226
- ] = endpoint_store_connection
3227
-
3228
- if stream_path:
3229
- if stream_path.startswith("kafka://") and "?topic" in stream_path:
3230
- raise mlrun.errors.MLRunInvalidArgumentError(
3231
- "Custom kafka topic is not allowed"
3232
- )
3233
- secrets_dict[
3234
- mlrun.common.schemas.model_monitoring.ProjectSecretKeys.STREAM_PATH
3235
- ] = stream_path
3236
-
3237
- if tsdb_connection:
3238
- if not tsdb_connection.startswith("taosws://"):
3239
- raise mlrun.errors.MLRunInvalidArgumentError(
3240
- "Currently only TDEngine websocket connection is supported for non-v3io TSDB,"
3241
- "please provide a full URL (e.g. taosws://user:password@host:port)"
3242
- )
3243
- secrets_dict[
3244
- mlrun.common.schemas.model_monitoring.ProjectSecretKeys.TSDB_CONNECTION
3245
- ] = tsdb_connection
3246
-
3247
- self.set_secrets(
3248
- secrets=secrets_dict,
3249
- provider=mlrun.common.schemas.SecretProviderName.kubernetes,
3238
+ db = mlrun.db.get_run_db(secrets=self._secrets)
3239
+ db.set_model_monitoring_credentials(
3240
+ project=self.name,
3241
+ credentials={
3242
+ "access_key": access_key,
3243
+ "endpoint_store_connection": endpoint_store_connection,
3244
+ "stream_path": stream_path,
3245
+ "tsdb_connection": tsdb_connection,
3246
+ },
3250
3247
  )
3251
3248
 
3252
3249
  def run_function(
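
A hedged example matching the options listed in the new docstring; all connection strings are placeholders. Note that the credentials are now sent to the API server via set_model_monitoring_credentials on the run DB rather than written as project secrets on the client side:

    project.set_model_monitoring_credentials(
        endpoint_store_connection="v3io",                    # or mysql+pymysql://user:pass@host:3306/db
        stream_path="v3io",                                  # or kafka://<broker>:<port>
        tsdb_connection="taosws://user:password@host:6041",  # or "v3io"
    )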
@@ -3663,6 +3660,7 @@ class MlrunProject(ModelObj):
  kind: str = None,
  category: typing.Union[str, mlrun.common.schemas.ArtifactCategories] = None,
  tree: str = None,
+ limit: int = None,
  ) -> mlrun.lists.ArtifactList:
  """List artifacts filtered by various parameters.

@@ -3692,6 +3690,7 @@ class MlrunProject(ModelObj):
  :param kind: Return artifacts of the requested kind.
  :param category: Return artifacts of the requested category.
  :param tree: Return artifacts of the requested tree.
+ :param limit: Maximum number of artifacts to return.
  """
  db = mlrun.db.get_run_db(secrets=self._secrets)
  return db.list_artifacts(
@@ -3706,6 +3705,7 @@ class MlrunProject(ModelObj):
  kind=kind,
  category=category,
  tree=tree,
+ limit=limit,
  )

  def list_models(
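
A hedged example of the new limit parameter:

    # Cap the number of returned artifacts; other filters are unchanged.
    latest_models = project.list_artifacts(kind="model", limit=10)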
mlrun/render.py CHANGED
@@ -283,9 +283,14 @@ function copyToClipboard(fld) {
  }
  function expandPanel(el) {
  const panelName = "#" + el.getAttribute('paneName');
- console.log(el.title);

- document.querySelector(panelName + "-title").innerHTML = el.title
+ // Get the base URL of the current notebook
+ var baseUrl = window.location.origin;
+
+ // Construct the full URL
+ var fullUrl = new URL(el.title, baseUrl).href;
+
+ document.querySelector(panelName + "-title").innerHTML = fullUrl
  iframe = document.querySelector(panelName + "-body");

  const tblcss = `<style> body { font-family: Arial, Helvetica, sans-serif;}
@@ -299,7 +304,7 @@ function expandPanel(el) {
  }

  function reqListener () {
- if (el.title.endsWith(".csv")) {
+ if (fullUrl.endsWith(".csv")) {
  iframe.setAttribute("srcdoc", tblcss + csvToHtmlTable(this.responseText));
  } else {
  iframe.setAttribute("srcdoc", this.responseText);
@@ -309,11 +314,11 @@ function expandPanel(el) {

  const oReq = new XMLHttpRequest();
  oReq.addEventListener("load", reqListener);
- oReq.open("GET", el.title);
+ oReq.open("GET", fullUrl);
  oReq.send();


- //iframe.src = el.title;
+ //iframe.src = fullUrl;
  const resultPane = document.querySelector(panelName + "-pane");
  if (resultPane.classList.contains("hidden")) {
  resultPane.classList.remove("hidden");
mlrun/run.py CHANGED
@@ -63,11 +63,11 @@ from .runtimes.funcdoc import update_function_entry_points
  from .runtimes.nuclio.application import ApplicationRuntime
  from .runtimes.utils import add_code_metadata, global_context
  from .utils import (
+ RunKeys,
  extend_hub_uri_if_needed,
  get_in,
  logger,
  retry_until_successful,
- run_keys,
  update_in,
  )

@@ -280,7 +280,7 @@ def get_or_create_ctx(
  artifact_path = mlrun.utils.helpers.template_artifact_path(
  mlconf.artifact_path, project or mlconf.default_project
  )
- update_in(newspec, ["spec", run_keys.output_path], artifact_path)
+ update_in(newspec, ["spec", RunKeys.output_path], artifact_path)

  newspec.setdefault("metadata", {})
  update_in(newspec, "metadata.name", name, replace=False)
mlrun/runtimes/daskjob.py CHANGED
@@ -548,7 +548,13 @@ class DaskCluster(KubejobRuntime):
  "specified handler (string) without command "
  "(py file path), specify command or use handler pointer"
  )
- handler = load_module(self.spec.command, handler, context=context)
+ # Do not embed the module in system as it is not persistent with the dask cluster
+ handler = load_module(
+ self.spec.command,
+ handler,
+ context=context,
+ embed_in_sys=False,
+ )
  client = self.client
  setattr(context, "dask_client", client)
  sout, serr = exec_from_params(handler, runobj, context)
mlrun/runtimes/local.py CHANGED
@@ -58,7 +58,9 @@ class ParallelRunner:

  return TrackerManager()

- def _get_handler(self, handler, context):
+ def _get_handler(
+ self, handler: str, context: MLClientCtx, embed_in_sys: bool = True
+ ):
  return handler

  def _get_dask_client(self, options):
@@ -86,7 +88,7 @@ class ParallelRunner:
  handler = runobj.spec.handler
  self._force_handler(handler)
  set_paths(self.spec.pythonpath)
- handler = self._get_handler(handler, execution)
+ handler = self._get_handler(handler, execution, embed_in_sys=False)

  client, function_name = self._get_dask_client(generator.options)
  parallel_runs = generator.options.parallel_runs or 4
@@ -224,12 +226,14 @@ class LocalRuntime(BaseRuntime, ParallelRunner):
  def is_deployed(self):
  return True

- def _get_handler(self, handler, context):
+ def _get_handler(
+ self, handler: str, context: MLClientCtx, embed_in_sys: bool = True
+ ):
  command = self.spec.command
  if not command and self.spec.build.functionSourceCode:
  # if the code is embedded in the function object extract or find it
  command, _ = mlrun.run.load_func_code(self)
- return load_module(command, handler, context)
+ return load_module(command, handler, context, embed_in_sys=embed_in_sys)

  def _pre_run(self, runobj: RunObject, execution: MLClientCtx):
  workdir = self.spec.workdir
@@ -372,8 +376,20 @@ class LocalRuntime(BaseRuntime, ParallelRunner):
  return run_obj_dict


- def load_module(file_name, handler, context):
- """Load module from file name"""
+ def load_module(
+ file_name: str,
+ handler: str,
+ context: MLClientCtx,
+ embed_in_sys: bool = True,
+ ):
+ """
+ Load module from filename
+ :param file_name: The module path to load
+ :param handler: The callable to load
+ :param context: Execution context
+ :param embed_in_sys: Embed the file-named module in sys.modules. This is not persistent with remote
+ environments and therefore can effect pickling.
+ """
  module = None
  if file_name:
  path = Path(file_name)
@@ -384,7 +400,8 @@ def load_module(file_name, handler, context):
  if spec is None:
  raise RunError(f"Cannot import from {file_name!r}")
  module = imputil.module_from_spec(spec)
- sys.modules[mod_name] = module
+ if embed_in_sys:
+ sys.modules[mod_name] = module
  spec.loader.exec_module(module)

  class_args = {}
mlrun/runtimes/nuclio/function.py CHANGED
@@ -1327,3 +1327,23 @@ def get_nuclio_deploy_status(
  else:
  text = "\n".join(outputs) if outputs else ""
  return state, address, name, last_log_timestamp, text, function_status
+
+
+ def enrich_nuclio_function_from_headers(
+ func: RemoteRuntime,
+ headers: dict,
+ ):
+ func.status.state = headers.get("x-mlrun-function-status", "")
+ func.status.address = headers.get("x-mlrun-address", "")
+ func.status.nuclio_name = headers.get("x-mlrun-name", "")
+ func.status.internal_invocation_urls = (
+ headers.get("x-mlrun-internal-invocation-urls", "").split(",")
+ if headers.get("x-mlrun-internal-invocation-urls")
+ else []
+ )
+ func.status.external_invocation_urls = (
+ headers.get("x-mlrun-external-invocation-urls", "").split(",")
+ if headers.get("x-mlrun-external-invocation-urls")
+ else []
+ )
+ func.status.container_image = headers.get("x-mlrun-container-image", "")
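
A hedged sketch of how the new helper maps deploy-response headers onto a function's status; the header values are made up and func is assumed to be a RemoteRuntime obtained elsewhere:

    headers = {
        "x-mlrun-function-status": "ready",
        "x-mlrun-address": "my-func.example.com",
        "x-mlrun-name": "my-project-my-func",
        "x-mlrun-internal-invocation-urls": "nuclio-my-func.default-tenant.svc:8080",
        "x-mlrun-external-invocation-urls": "my-func.example.com",
        "x-mlrun-container-image": "registry:80/nuclio/processor-my-func:latest",
    }
    enrich_nuclio_function_from_headers(func, headers)
    print(func.status.state, func.status.external_invocation_urls)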