mlrun 1.8.0rc26-py3-none-any.whl → 1.8.0rc28-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of mlrun might be problematic. See the registry's advisory page for more details.

Files changed (39)
  1. mlrun/__main__.py +3 -2
  2. mlrun/artifacts/document.py +9 -6
  3. mlrun/artifacts/model.py +19 -4
  4. mlrun/common/model_monitoring/helpers.py +2 -2
  5. mlrun/common/schemas/model_monitoring/constants.py +0 -1
  6. mlrun/common/schemas/serving.py +22 -0
  7. mlrun/config.py +22 -9
  8. mlrun/datastore/base.py +0 -7
  9. mlrun/datastore/s3.py +9 -2
  10. mlrun/db/base.py +2 -1
  11. mlrun/db/httpdb.py +17 -10
  12. mlrun/db/nopdb.py +2 -1
  13. mlrun/execution.py +15 -4
  14. mlrun/lists.py +4 -1
  15. mlrun/model.py +2 -0
  16. mlrun/model_monitoring/applications/_application_steps.py +1 -0
  17. mlrun/model_monitoring/applications/base.py +132 -21
  18. mlrun/model_monitoring/applications/context.py +2 -3
  19. mlrun/model_monitoring/controller.py +117 -57
  20. mlrun/model_monitoring/db/_schedules.py +8 -0
  21. mlrun/model_monitoring/db/tsdb/__init__.py +12 -5
  22. mlrun/model_monitoring/stream_processing.py +3 -2
  23. mlrun/projects/project.py +44 -7
  24. mlrun/runtimes/base.py +1 -1
  25. mlrun/runtimes/generators.py +1 -1
  26. mlrun/runtimes/nuclio/function.py +37 -0
  27. mlrun/runtimes/nuclio/serving.py +3 -0
  28. mlrun/runtimes/pod.py +1 -3
  29. mlrun/serving/routers.py +62 -17
  30. mlrun/serving/server.py +11 -0
  31. mlrun/serving/states.py +0 -4
  32. mlrun/serving/v2_serving.py +45 -10
  33. mlrun/utils/version/version.json +2 -2
  34. {mlrun-1.8.0rc26.dist-info → mlrun-1.8.0rc28.dist-info}/METADATA +4 -2
  35. {mlrun-1.8.0rc26.dist-info → mlrun-1.8.0rc28.dist-info}/RECORD +39 -38
  36. {mlrun-1.8.0rc26.dist-info → mlrun-1.8.0rc28.dist-info}/LICENSE +0 -0
  37. {mlrun-1.8.0rc26.dist-info → mlrun-1.8.0rc28.dist-info}/WHEEL +0 -0
  38. {mlrun-1.8.0rc26.dist-info → mlrun-1.8.0rc28.dist-info}/entry_points.txt +0 -0
  39. {mlrun-1.8.0rc26.dist-info → mlrun-1.8.0rc28.dist-info}/top_level.txt +0 -0
mlrun/__main__.py CHANGED
@@ -772,10 +772,11 @@ def get(kind, name, selector, namespace, uid, project, tag, db, extra_args):
772
772
 
773
773
  runs = run_db.list_runs(uid=uid, project=project, labels=selector)
774
774
  df = runs.to_df()[
775
- ["name", "uid", "iter", "start", "state", "parameters", "results"]
775
+ ["name", "uid", "iter", "start", "end", "state", "parameters", "results"]
776
776
  ]
777
777
  # df['uid'] = df['uid'].apply(lambda x: f'..{x[-6:]}')
778
- df["start"] = df["start"].apply(time_str)
778
+ for time_column in ["start", "end"]:
779
+ df[time_column] = df[time_column].apply(time_str)
779
780
  df["parameters"] = df["parameters"].apply(dict_to_str)
780
781
  df["results"] = df["results"].apply(dict_to_str)
781
782
  print(tabulate(df, headers="keys"))
@@ -34,7 +34,9 @@ class DocumentLoaderSpec(ModelObj):
34
34
 
35
35
  This class is responsible for loading documents from a given source path using a specified loader class.
36
36
  The loader class is dynamically imported and instantiated with the provided arguments. The loaded documents
37
- can be optionally uploaded as artifacts.
37
+ can be optionally uploaded as artifacts. Note that only loader classes that return single results
38
+ (e.g., TextLoader, UnstructuredHTMLLoader, WebBaseLoader(scalar)) are supported - loaders returning multiple
39
+ results like DirectoryLoader or WebBaseLoader(list) are not compatible.
38
40
 
39
41
  Attributes:
40
42
  loader_class_name (str): The name of the loader class to use for loading documents.
@@ -61,7 +63,7 @@ class DocumentLoaderSpec(ModelObj):
61
63
  kwargs (Optional[dict]): Additional keyword arguments to pass to the loader class.
62
64
  download_object (bool, optional): If True, the file will be downloaded before launching
63
65
  the loader. If False, the loader accepts a link that should not be downloaded.
64
- Defaults to False.
66
+ Defaults to True.
65
67
  Example:
66
68
  >>> # Create a loader specification for PDF documents
67
69
  >>> loader_spec = DocumentLoaderSpec(
@@ -97,7 +99,7 @@ class MLRunLoader:
97
99
  Args:
98
100
  artifact_key (str, optional): The key for the artifact to be logged. Special characters and symbols
99
101
  not valid in artifact names will be encoded as their hexadecimal representation. The '%%' pattern
100
- in the key will be replaced by the hex-encoded version of the source path. Defaults to "doc%%".
102
+ in the key will be replaced by the hex-encoded version of the source path. Defaults to "%%".
101
103
  local_path (str): The source path of the document to be loaded.
102
104
  loader_spec (DocumentLoaderSpec): Specification for the document loader.
103
105
  producer (Optional[Union[MlrunProject, str, MLClientCtx]], optional): The producer of the document.
@@ -129,7 +131,7 @@ class MLRunLoader:
129
131
  >>> loader = MLRunLoader(
130
132
  ... source_path="/path/to/document.txt",
131
133
  ... loader_spec=loader_spec,
132
- ... artifact_key="doc%%", # %% will be replaced with encoded path
134
+ ... artifact_key="%%", # %% will be replaced with encoded path
133
135
  ... producer=project,
134
136
  ... )
135
137
  >>> documents = loader.load()
@@ -141,7 +143,7 @@ class MLRunLoader:
141
143
  ... loader_cls=MLRunLoader,
142
144
  ... loader_kwargs={
143
145
  ... "loader_spec": loader_spec,
144
- ... "artifact_key": "doc%%",
146
+ ... "artifact_key": "%%",
145
147
  ... "producer": project,
146
148
  ... "upload": True,
147
149
  ... },
@@ -154,7 +156,7 @@ class MLRunLoader:
154
156
  cls,
155
157
  source_path: str,
156
158
  loader_spec: "DocumentLoaderSpec",
157
- artifact_key="doc%%",
159
+ artifact_key="%%",
158
160
  producer: Optional[Union["MlrunProject", str, "MLClientCtx"]] = None, # noqa: F821
159
161
  upload: bool = False,
160
162
  tag: str = "",
@@ -271,6 +273,7 @@ class DocumentArtifact(Artifact):
271
273
  result.append("_")
272
274
 
273
275
  resolved_path = "".join(result)
276
+ resolved_path = resolved_path.lstrip("_")
274
277
  return resolved_path
275
278
 
276
279
  class DocumentArtifactSpec(ArtifactSpec):
mlrun/artifacts/model.py CHANGED
@@ -30,6 +30,7 @@ from ..utils import StorePrefix, is_relative_path
30
30
  from .base import Artifact, ArtifactSpec, upload_extra_data
31
31
 
32
32
  model_spec_filename = "model_spec.yaml"
33
+ MODEL_OPTIONAL_SUFFIXES = [".tar.gz", ".pkl", ".bin", ".pickle"]
33
34
 
34
35
 
35
36
  class ModelArtifactSpec(ArtifactSpec):
@@ -426,7 +427,17 @@ def get_model(model_dir, suffix=""):
426
427
  model_file = ""
427
428
  model_spec = None
428
429
  extra_dataitems = {}
429
- suffix = suffix or ".pkl"
430
+ default_suffix = ".pkl"
431
+
432
+ alternative_suffix = next(
433
+ (
434
+ optional_suffix
435
+ for optional_suffix in MODEL_OPTIONAL_SUFFIXES
436
+ if model_dir.lower().endswith(optional_suffix)
437
+ ),
438
+ None,
439
+ )
440
+
430
441
  if hasattr(model_dir, "artifact_url"):
431
442
  model_dir = model_dir.artifact_url
432
443
 
@@ -440,15 +451,19 @@ def get_model(model_dir, suffix=""):
440
451
  target, model_spec.model_target_file or model_spec.model_file
441
452
  )
442
453
  extra_dataitems = _get_extra(target, model_spec.extra_data)
443
-
454
+ suffix = suffix or default_suffix
444
455
  elif model_dir.lower().endswith(".yaml"):
445
456
  model_spec = _load_model_spec(model_dir)
446
457
  model_file = _get_file_path(model_dir, model_spec.model_file)
447
458
  extra_dataitems = _get_extra(model_dir, model_spec.extra_data)
448
-
449
- elif model_dir.endswith(suffix):
459
+ suffix = suffix or default_suffix
460
+ elif suffix and model_dir.endswith(suffix):
461
+ model_file = model_dir
462
+ elif not suffix and alternative_suffix:
463
+ suffix = alternative_suffix
450
464
  model_file = model_dir
451
465
  else:
466
+ suffix = suffix or default_suffix
452
467
  dirobj = mlrun.datastore.store_manager.object(url=model_dir)
453
468
  model_dir_list = dirobj.listdir()
454
469
  if model_spec_filename in model_dir_list:
@@ -47,9 +47,9 @@ def parse_monitoring_stream_path(
47
47
  function_name is None
48
48
  or function_name == mm_constants.MonitoringFunctionNames.STREAM
49
49
  ):
50
- stream_uri += f"?topic=monitoring_stream_{project}"
50
+ stream_uri += f"?topic=monitoring_stream_{project}_v1"
51
51
  else:
52
- stream_uri += f"?topic=monitoring_stream_{project}_{function_name}"
52
+ stream_uri += f"?topic=monitoring_stream_{project}_{function_name}_v1"
53
53
 
54
54
  return stream_uri
55
55
 
@@ -146,7 +146,6 @@ class EventFieldType:
146
146
 
147
147
  class FeatureSetFeatures(MonitoringStrEnum):
148
148
  LATENCY = EventFieldType.LATENCY
149
- ERROR_COUNT = EventFieldType.ERROR_COUNT
150
149
  METRICS = EventFieldType.METRICS
151
150
 
152
151
  @classmethod
@@ -0,0 +1,22 @@
1
+ # Copyright 2025 Iguazio
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # http://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
14
+
15
+ from pydantic.v1 import BaseModel
16
+
17
+ from .background_task import BackgroundTaskList
18
+
19
+
20
+ class DeployResponse(BaseModel):
21
+ data: dict
22
+ background_tasks: BackgroundTaskList
mlrun/config.py CHANGED
@@ -232,6 +232,7 @@ default_config = {
232
232
  "abort_grace_period": "10",
233
233
  "delete_project": "900",
234
234
  "delete_function": "900",
235
+ "model_endpoint_creation": "600",
235
236
  },
236
237
  "runtimes": {"dask": "600"},
237
238
  "push_notifications": "60",
@@ -870,6 +871,14 @@ class Config:
870
871
  return self.__class__(val)
871
872
  return val
872
873
 
874
+ def __deepcopy__(self, memo):
875
+ cls = self.__class__
876
+ # create a new Config without calling __init__ (avoid recursion)
877
+ result = cls.__new__(cls)
878
+ # manually deep-copy _cfg
879
+ object.__setattr__(result, "_cfg", copy.deepcopy(self._cfg, memo))
880
+ return result
881
+
873
882
  def __setattr__(self, attr, value):
874
883
  # in order for the dbpath setter to work
875
884
  if attr == "dbpath":
@@ -1304,7 +1313,7 @@ class Config:
1304
1313
  project=project,
1305
1314
  kind=kind
1306
1315
  if function_name is None
1307
- else f"{kind}-{function_name.lower()}",
1316
+ else f"{kind}-{function_name.lower()}-v1",
1308
1317
  )
1309
1318
  elif (
1310
1319
  kind == "stream"
@@ -1313,19 +1322,23 @@ class Config:
1313
1322
  ):
1314
1323
  return mlrun.mlconf.model_endpoint_monitoring.store_prefixes.user_space.format(
1315
1324
  project=project,
1316
- kind=kind,
1325
+ kind=f"{kind}-v1",
1317
1326
  )
1318
- else:
1319
- if (
1320
- function_name
1321
- == mlrun.common.schemas.model_monitoring.constants.MonitoringFunctionNames.APPLICATION_CONTROLLER
1322
- ):
1323
- kind = function_name
1327
+ elif (
1328
+ function_name
1329
+ == mlrun.common.schemas.model_monitoring.constants.MonitoringFunctionNames.APPLICATION_CONTROLLER
1330
+ and kind == "stream"
1331
+ ):
1324
1332
  return mlrun.mlconf.model_endpoint_monitoring.store_prefixes.default.format(
1325
1333
  project=project,
1326
- kind=kind,
1334
+ kind=f"{kind}-{function_name.lower()}-v1",
1327
1335
  )
1328
1336
 
1337
+ return mlrun.mlconf.model_endpoint_monitoring.store_prefixes.default.format(
1338
+ project=project,
1339
+ kind=kind,
1340
+ )
1341
+
1329
1342
  # Get the current offline path from the configuration
1330
1343
  file_path = mlrun.mlconf.model_endpoint_monitoring.offline_storage_path.format(
1331
1344
  project=project, kind=kind
mlrun/datastore/base.py CHANGED
@@ -677,13 +677,6 @@ class DataItem:
677
677
  return f"'{self.url}'"
678
678
 
679
679
 
680
- def get_range(size, offset):
681
- byterange = f"bytes={offset}-"
682
- if size:
683
- byterange += str(offset + size)
684
- return byterange
685
-
686
-
687
680
  def basic_auth_header(user, password):
688
681
  username = user.encode("latin1")
689
682
  password = password.encode("latin1")
mlrun/datastore/s3.py CHANGED
@@ -21,7 +21,7 @@ from fsspec.registry import get_filesystem_class
21
21
 
22
22
  import mlrun.errors
23
23
 
24
- from .base import DataStore, FileStats, get_range, make_datastore_schema_sanitizer
24
+ from .base import DataStore, FileStats, make_datastore_schema_sanitizer
25
25
 
26
26
 
27
27
  class S3Store(DataStore):
@@ -108,6 +108,13 @@ class S3Store(DataStore):
108
108
  "choose-signer.s3.*", disable_signing
109
109
  )
110
110
 
111
+ @staticmethod
112
+ def get_range(size, offset):
113
+ byterange = f"bytes={offset}-"
114
+ if size:
115
+ byterange += str(offset + size - 1)
116
+ return byterange
117
+
111
118
  def get_spark_options(self):
112
119
  res = {}
113
120
  st = self.get_storage_options()
@@ -185,7 +192,7 @@ class S3Store(DataStore):
185
192
  bucket, key = self.get_bucket_and_key(key)
186
193
  obj = self.s3.Object(bucket, key)
187
194
  if size or offset:
188
- return obj.get(Range=get_range(size, offset))["Body"].read()
195
+ return obj.get(Range=S3Store.get_range(size, offset))["Body"].read()
189
196
  return obj.get()["Body"].read()
190
197
 
191
198
  def put(self, key, data, append=False):
mlrun/db/base.py CHANGED
@@ -108,6 +108,8 @@ class RunDBInterface(ABC):
108
108
  start_time_to: Optional[datetime.datetime] = None,
109
109
  last_update_time_from: Optional[datetime.datetime] = None,
110
110
  last_update_time_to: Optional[datetime.datetime] = None,
111
+ end_time_from: Optional[datetime.datetime] = None,
112
+ end_time_to: Optional[datetime.datetime] = None,
111
113
  partition_by: Union[mlrun.common.schemas.RunPartitionByField, str] = None,
112
114
  rows_per_partition: int = 1,
113
115
  partition_sort_by: Union[mlrun.common.schemas.SortField, str] = None,
@@ -1075,7 +1077,6 @@ class RunDBInterface(ABC):
1075
1077
  base_period: int = 10,
1076
1078
  image: str = "mlrun/mlrun",
1077
1079
  deploy_histogram_data_drift_app: bool = True,
1078
- rebuild_images: bool = False,
1079
1080
  fetch_credentials_from_sys_config: bool = False,
1080
1081
  ) -> None:
1081
1082
  pass
mlrun/db/httpdb.py CHANGED
@@ -905,6 +905,8 @@ class HTTPRunDB(RunDBInterface):
905
905
  start_time_to: Optional[datetime] = None,
906
906
  last_update_time_from: Optional[datetime] = None,
907
907
  last_update_time_to: Optional[datetime] = None,
908
+ end_time_from: Optional[datetime] = None,
909
+ end_time_to: Optional[datetime] = None,
908
910
  partition_by: Optional[
909
911
  Union[mlrun.common.schemas.RunPartitionByField, str]
910
912
  ] = None,
@@ -951,6 +953,8 @@ class HTTPRunDB(RunDBInterface):
951
953
  :param last_update_time_from: Filter by run last update time in ``(last_update_time_from,
952
954
  last_update_time_to)``.
953
955
  :param last_update_time_to: Filter by run last update time in ``(last_update_time_from, last_update_time_to)``.
956
+ :param end_time_from: Filter by run end time in ``[end_time_from, end_time_to]``.
957
+ :param end_time_to: Filter by run end time in ``[end_time_from, end_time_to]``.
954
958
  :param partition_by: Field to group results by. When `partition_by` is specified, the `partition_sort_by`
955
959
  parameter must be provided as well.
956
960
  :param rows_per_partition: How many top rows (per sorting defined by `partition_sort_by` and `partition_order`)
@@ -976,6 +980,8 @@ class HTTPRunDB(RunDBInterface):
976
980
  start_time_to=start_time_to,
977
981
  last_update_time_from=last_update_time_from,
978
982
  last_update_time_to=last_update_time_to,
983
+ end_time_from=end_time_from,
984
+ end_time_to=end_time_to,
979
985
  partition_by=partition_by,
980
986
  rows_per_partition=rows_per_partition,
981
987
  partition_sort_by=partition_sort_by,
@@ -2368,9 +2374,9 @@ class HTTPRunDB(RunDBInterface):
2368
2374
  def retry_pipeline(
2369
2375
  self,
2370
2376
  run_id: str,
2377
+ project: str,
2371
2378
  namespace: Optional[str] = None,
2372
2379
  timeout: int = 30,
2373
- project: Optional[str] = None,
2374
2380
  ):
2375
2381
  """
2376
2382
  Retry a specific pipeline run using its run ID. This function sends an API request
@@ -2380,8 +2386,7 @@ class HTTPRunDB(RunDBInterface):
2380
2386
  :param run_id: The unique ID of the pipeline run to retry.
2381
2387
  :param namespace: Kubernetes namespace where the pipeline is running. Optional.
2382
2388
  :param timeout: Timeout (in seconds) for the API call. Defaults to 30 seconds.
2383
- :param project: Name of the MLRun project associated with the pipeline. Can be
2384
- ``*`` to query across all projects. Optional.
2389
+ :param project: Name of the MLRun project associated with the pipeline.
2385
2390
 
2386
2391
  :raises ValueError: Raised if the API response is not successful or contains an
2387
2392
  error.
@@ -2392,14 +2397,13 @@ class HTTPRunDB(RunDBInterface):
2392
2397
  params = {}
2393
2398
  if namespace:
2394
2399
  params["namespace"] = namespace
2395
- project_path = project if project else "*"
2396
2400
 
2397
2401
  resp_text = ""
2398
2402
  resp_code = None
2399
2403
  try:
2400
2404
  resp = self.api_call(
2401
2405
  "POST",
2402
- f"projects/{project_path}/pipelines/{run_id}/retry",
2406
+ f"projects/{project}/pipelines/{run_id}/retry",
2403
2407
  params=params,
2404
2408
  timeout=timeout,
2405
2409
  )
@@ -2414,7 +2418,7 @@ class HTTPRunDB(RunDBInterface):
2414
2418
  logger.error(
2415
2419
  "Retry pipeline API call encountered an error.",
2416
2420
  run_id=run_id,
2417
- project=project_path,
2421
+ project=project,
2418
2422
  namespace=namespace,
2419
2423
  response_code=resp_code,
2420
2424
  response_text=resp_text,
@@ -2429,7 +2433,7 @@ class HTTPRunDB(RunDBInterface):
2429
2433
  logger.info(
2430
2434
  "Successfully retried pipeline run",
2431
2435
  run_id=run_id,
2432
- project=project_path,
2436
+ project=project,
2433
2437
  namespace=namespace,
2434
2438
  )
2435
2439
  return resp.json()
@@ -3967,7 +3971,6 @@ class HTTPRunDB(RunDBInterface):
3967
3971
  base_period: int = 10,
3968
3972
  image: str = "mlrun/mlrun",
3969
3973
  deploy_histogram_data_drift_app: bool = True,
3970
- rebuild_images: bool = False,
3971
3974
  fetch_credentials_from_sys_config: bool = False,
3972
3975
  ) -> None:
3973
3976
  """
@@ -3985,7 +3988,6 @@ class HTTPRunDB(RunDBInterface):
3985
3988
  stream functions, which are real time nuclio functions.
3986
3989
  By default, the image is mlrun/mlrun.
3987
3990
  :param deploy_histogram_data_drift_app: If true, deploy the default histogram-based data drift application.
3988
- :param rebuild_images: If true, force rebuild of model monitoring infrastructure images.
3989
3991
  :param fetch_credentials_from_sys_config: If true, fetch the credentials from the system configuration.
3990
3992
 
3991
3993
  """
@@ -3996,7 +3998,6 @@ class HTTPRunDB(RunDBInterface):
3996
3998
  "base_period": base_period,
3997
3999
  "image": image,
3998
4000
  "deploy_histogram_data_drift_app": deploy_histogram_data_drift_app,
3999
- "rebuild_images": rebuild_images,
4000
4001
  "fetch_credentials_from_sys_config": fetch_credentials_from_sys_config,
4001
4002
  },
4002
4003
  )
@@ -5226,6 +5227,8 @@ class HTTPRunDB(RunDBInterface):
5226
5227
  start_time_to: Optional[datetime] = None,
5227
5228
  last_update_time_from: Optional[datetime] = None,
5228
5229
  last_update_time_to: Optional[datetime] = None,
5230
+ end_time_from: Optional[datetime] = None,
5231
+ end_time_to: Optional[datetime] = None,
5229
5232
  partition_by: Optional[
5230
5233
  Union[mlrun.common.schemas.RunPartitionByField, str]
5231
5234
  ] = None,
@@ -5277,6 +5280,8 @@ class HTTPRunDB(RunDBInterface):
5277
5280
  and not start_time_to
5278
5281
  and not last_update_time_from
5279
5282
  and not last_update_time_to
5283
+ and not end_time_from
5284
+ and not end_time_to
5280
5285
  and not partition_by
5281
5286
  and not partition_sort_by
5282
5287
  and not iter
@@ -5301,6 +5306,8 @@ class HTTPRunDB(RunDBInterface):
5301
5306
  "start_time_to": datetime_to_iso(start_time_to),
5302
5307
  "last_update_time_from": datetime_to_iso(last_update_time_from),
5303
5308
  "last_update_time_to": datetime_to_iso(last_update_time_to),
5309
+ "end_time_from": datetime_to_iso(end_time_from),
5310
+ "end_time_to": datetime_to_iso(end_time_to),
5304
5311
  "with-notifications": with_notifications,
5305
5312
  "page": page,
5306
5313
  "page-size": page_size,
mlrun/db/nopdb.py CHANGED
@@ -138,6 +138,8 @@ class NopDB(RunDBInterface):
138
138
  start_time_to: Optional[datetime.datetime] = None,
139
139
  last_update_time_from: Optional[datetime.datetime] = None,
140
140
  last_update_time_to: Optional[datetime.datetime] = None,
141
+ end_time_from: Optional[datetime.datetime] = None,
142
+ end_time_to: Optional[datetime.datetime] = None,
141
143
  partition_by: Union[mlrun.common.schemas.RunPartitionByField, str] = None,
142
144
  rows_per_partition: int = 1,
143
145
  partition_sort_by: Union[mlrun.common.schemas.SortField, str] = None,
@@ -855,7 +857,6 @@ class NopDB(RunDBInterface):
855
857
  base_period: int = 10,
856
858
  image: str = "mlrun/mlrun",
857
859
  deploy_histogram_data_drift_app: bool = True,
858
- rebuild_images: bool = False,
859
860
  fetch_credentials_from_sys_config: bool = False,
860
861
  ) -> None:
861
862
  pass
mlrun/execution.py CHANGED
@@ -914,7 +914,8 @@ class MLClientCtx:
914
914
  kwargs={"extract_images": True}
915
915
  )
916
916
  :param upload: Whether to upload the artifact
917
- :param labels: Key-value labels
917
+ :param labels: Key-value labels. A 'source' label is automatically added using either
918
+ local_path or target_path to facilitate easier document searching.
918
919
  :param target_path: Path to the local file
919
920
  :param db_key: The key to use in the artifact DB table, by default its run name + '_' + key
920
921
  db_key=False will not register it in the artifacts table
@@ -932,22 +933,32 @@ class MLClientCtx:
932
933
  ... ),
933
934
  ... )
934
935
  """
936
+ original_source = local_path or target_path
935
937
 
936
- if not key and not local_path and not target_path:
938
+ if not key and not original_source:
937
939
  raise ValueError(
938
940
  "Must provide either 'key' parameter or 'local_path'/'target_path' to derive the key from"
939
941
  )
940
942
  if not key:
941
- key = DocumentArtifact.key_from_source(local_path or target_path)
943
+ key = DocumentArtifact.key_from_source(original_source)
942
944
 
943
945
  doc_artifact = DocumentArtifact(
944
946
  key=key,
945
- original_source=local_path or target_path,
947
+ original_source=original_source,
946
948
  document_loader_spec=document_loader_spec,
947
949
  collections=kwargs.pop("collections", None),
948
950
  **kwargs,
949
951
  )
950
952
 
953
+ # limit label to a max of 255 characters (for db reasons)
954
+ max_length = 255
955
+ labels = labels or {}
956
+ labels["source"] = (
957
+ original_source[: max_length - 3] + "..."
958
+ if len(original_source) > max_length
959
+ else original_source
960
+ )
961
+
951
962
  item = self._artifacts_manager.log_artifact(
952
963
  self,
953
964
  doc_artifact,
mlrun/lists.py CHANGED
@@ -29,6 +29,7 @@ list_header = [
29
29
  "uid",
30
30
  "iter",
31
31
  "start",
32
+ "end",
32
33
  "state",
33
34
  "kind",
34
35
  "name",
@@ -58,6 +59,7 @@ class RunList(list):
58
59
  get_in(run, "metadata.uid", ""),
59
60
  get_in(run, "metadata.iteration", ""),
60
61
  get_in(run, "status.start_time", ""),
62
+ get_in(run, "status.end_time", ""),
61
63
  get_in(run, "status.state", ""),
62
64
  get_in(run, "step_kind", get_in(run, "kind", "")),
63
65
  get_in(run, "metadata.name", ""),
@@ -103,7 +105,8 @@ class RunList(list):
103
105
  return self._df
104
106
  rows = self.to_rows(extend_iterations=extend_iterations)
105
107
  df = pd.DataFrame(rows[1:], columns=rows[0]) # .set_index('iter')
106
- df["start"] = pd.to_datetime(df["start"])
108
+ for time_column in ["start", "end"]:
109
+ df[time_column] = pd.to_datetime(df[time_column])
107
110
 
108
111
  if flat:
109
112
  df = flatten(df, "labels")
mlrun/model.py CHANGED
@@ -1284,6 +1284,7 @@ class RunStatus(ModelObj):
1284
1284
  results=None,
1285
1285
  artifacts=None,
1286
1286
  start_time=None,
1287
+ end_time=None,
1287
1288
  last_update=None,
1288
1289
  iterations=None,
1289
1290
  ui_url=None,
@@ -1299,6 +1300,7 @@ class RunStatus(ModelObj):
1299
1300
  self.results = results
1300
1301
  self._artifacts = artifacts
1301
1302
  self.start_time = start_time
1303
+ self.end_time = end_time
1302
1304
  self.last_update = last_update
1303
1305
  self.iterations = iterations
1304
1306
  self.ui_url = ui_url
@@ -126,6 +126,7 @@ class _PrepareMonitoringEvent(StepToDict):
126
126
  :param application_name: Application name.
127
127
  """
128
128
  self.graph_context = context
129
+ _ = self.graph_context.project_obj # Ensure project exists
129
130
  self.application_name = application_name
130
131
  self.model_endpoints: dict[str, mlrun.common.schemas.ModelEndpoint] = {}
131
132