mlrun 1.10.0rc3__py3-none-any.whl → 1.10.0rc5__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of mlrun might be problematic.

Files changed (44)
  1. mlrun/artifacts/__init__.py +1 -0
  2. mlrun/artifacts/base.py +14 -2
  3. mlrun/artifacts/helpers.py +40 -0
  4. mlrun/artifacts/llm_prompt.py +165 -0
  5. mlrun/artifacts/manager.py +13 -1
  6. mlrun/artifacts/model.py +91 -11
  7. mlrun/common/formatters/artifact.py +1 -0
  8. mlrun/common/runtimes/constants.py +0 -14
  9. mlrun/common/schemas/artifact.py +12 -12
  10. mlrun/common/schemas/pipeline.py +0 -16
  11. mlrun/common/schemas/project.py +0 -17
  12. mlrun/common/schemas/runs.py +0 -17
  13. mlrun/config.py +5 -2
  14. mlrun/datastore/base.py +2 -2
  15. mlrun/datastore/datastore.py +1 -1
  16. mlrun/datastore/datastore_profile.py +1 -9
  17. mlrun/datastore/redis.py +2 -3
  18. mlrun/datastore/sources.py +0 -9
  19. mlrun/datastore/storeytargets.py +2 -5
  20. mlrun/datastore/targets.py +6 -56
  21. mlrun/datastore/utils.py +1 -11
  22. mlrun/db/base.py +1 -0
  23. mlrun/db/httpdb.py +6 -0
  24. mlrun/db/nopdb.py +1 -0
  25. mlrun/execution.py +87 -1
  26. mlrun/model.py +0 -5
  27. mlrun/model_monitoring/applications/base.py +9 -5
  28. mlrun/projects/project.py +241 -4
  29. mlrun/run.py +0 -18
  30. mlrun/runtimes/daskjob.py +8 -1
  31. mlrun/runtimes/remotesparkjob.py +6 -0
  32. mlrun/runtimes/sparkjob/spark3job.py +6 -0
  33. mlrun/serving/states.py +67 -3
  34. mlrun/serving/v2_serving.py +1 -1
  35. mlrun/utils/helpers.py +60 -8
  36. mlrun/utils/notifications/notification/slack.py +5 -1
  37. mlrun/utils/notifications/notification_pusher.py +2 -1
  38. mlrun/utils/version/version.json +2 -2
  39. {mlrun-1.10.0rc3.dist-info → mlrun-1.10.0rc5.dist-info}/METADATA +5 -5
  40. {mlrun-1.10.0rc3.dist-info → mlrun-1.10.0rc5.dist-info}/RECORD +44 -42
  41. {mlrun-1.10.0rc3.dist-info → mlrun-1.10.0rc5.dist-info}/WHEEL +1 -1
  42. {mlrun-1.10.0rc3.dist-info → mlrun-1.10.0rc5.dist-info}/entry_points.txt +0 -0
  43. {mlrun-1.10.0rc3.dist-info → mlrun-1.10.0rc5.dist-info}/licenses/LICENSE +0 -0
  44. {mlrun-1.10.0rc3.dist-info → mlrun-1.10.0rc5.dist-info}/top_level.txt +0 -0
mlrun/common/schemas/project.py CHANGED
@@ -16,7 +16,6 @@ import datetime
 import typing
 
 import pydantic.v1
-from deprecated import deprecated
 
 import mlrun.common.types
 
@@ -24,22 +23,6 @@ from .common import ImageBuilder
 from .object import ObjectKind, ObjectStatus
 
 
-@deprecated(
-    version="1.7.0",
-    reason="mlrun.common.schemas.ProjectsFormat is deprecated and will be removed in 1.10.0. "
-    "Use mlrun.common.formatters.ProjectFormat instead.",
-    category=FutureWarning,
-)
-class ProjectsFormat(mlrun.common.types.StrEnum):
-    full = "full"
-    name_only = "name_only"
-    # minimal format removes large fields from the response (e.g. functions, workflows, artifacts)
-    # and is used for faster response times (in the UI)
-    minimal = "minimal"
-    # internal - allowed only in follower mode, only for the leader for upgrade purposes
-    leader = "leader"
-
-
 class ProjectMetadata(pydantic.v1.BaseModel):
     name: str
     created: typing.Optional[datetime.datetime] = None
mlrun/common/schemas/runs.py CHANGED
@@ -15,26 +15,9 @@
 import typing
 
 import pydantic.v1
-from deprecated import deprecated
-
-import mlrun.common.types
 
 
 class RunIdentifier(pydantic.v1.BaseModel):
     kind: typing.Literal["run"] = "run"
     uid: typing.Optional[str]
     iter: typing.Optional[int]
-
-
-@deprecated(
-    version="1.7.0",
-    reason="mlrun.common.schemas.RunsFormat is deprecated and will be removed in 1.10.0. "
-    "Use mlrun.common.formatters.RunFormat instead.",
-    category=FutureWarning,
-)
-class RunsFormat(mlrun.common.types.StrEnum):
-    # No enrichment, data is pulled as-is from the database.
-    standard = "standard"
-
-    # Performs run enrichment, including the run's artifacts. Only available for the `get` run API.
-    full = "full"
mlrun/config.py CHANGED
@@ -234,7 +234,10 @@ default_config = {
                 "model_endpoint_creation": "600",
                 "model_endpoint_tsdb_leftovers": "900",
             },
-            "runtimes": {"dask": "600"},
+            "runtimes": {
+                "dask": "600",
+                "dask_cluster_start": "300",
+            },
             "push_notifications": "60",
         },
     },
@@ -482,7 +485,7 @@ default_config = {
         "project_owners_cache_ttl": "30 seconds",
         # access key to be used when the leader is iguazio and polling is done from it
         "iguazio_access_key": "",
-        "iguazio_list_projects_default_page_size": 500,
+        "iguazio_list_projects_default_page_size": 200,
         "iguazio_client_job_cache_ttl": "20 minutes",
         "nuclio_project_deletion_verification_timeout": "300 seconds",
         "nuclio_project_deletion_verification_interval": "5 seconds",
mlrun/datastore/base.py CHANGED
@@ -104,10 +104,10 @@ class DataStore:
         """Whether the data store supports isdir"""
         return True
 
-    def _get_secret_or_env(self, key, default=None, prefix=None):
+    def _get_secret_or_env(self, key, default=None):
         # Project-secrets are mounted as env variables whose name can be retrieved from SecretsStore
         return mlrun.get_secret_or_env(
-            key, secret_provider=self._get_secret, default=default, prefix=prefix
+            key, secret_provider=self._get_secret, default=default
         )
 
     def get_storage_options(self):
mlrun/datastore/datastore.py CHANGED
@@ -54,7 +54,7 @@ def parse_url(url):
     return schema, endpoint, parsed_url
 
 
-def schema_to_store(schema):
+def schema_to_store(schema) -> DataStore.__subclasses__():
     # import store classes inside to enable making their dependencies optional (package extras)
 
     if not schema or schema in get_local_file_schema():
mlrun/datastore/datastore_profile.py CHANGED
@@ -141,19 +141,11 @@ class ConfigProfile(DatastoreProfile):
 class DatastoreProfileKafkaTarget(DatastoreProfile):
     type: str = pydantic.v1.Field("kafka_target")
     _private_attributes = "kwargs_private"
-    brokers: typing.Optional[str] = None
+    brokers: str
     topic: str
     kwargs_public: typing.Optional[dict]
     kwargs_private: typing.Optional[dict]
 
-    def __init__(self, **kwargs):
-        super().__init__(**kwargs)
-
-        if not self.brokers:
-            raise mlrun.errors.MLRunInvalidArgumentError(
-                "DatastoreProfileKafkaTarget requires the 'brokers' field to be set"
-            )
-
     def get_topic(self) -> typing.Optional[str]:
         return self.topic
 
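Note (not part of the diff): with brokers now a required pydantic field, omitting it fails at model validation instead of inside a custom __init__. A minimal usage sketch, assuming the client-side profile registration helper available in mlrun:

    from mlrun.datastore.datastore_profile import (
        DatastoreProfileKafkaTarget,
        register_temporary_client_datastore_profile,
    )

    profile = DatastoreProfileKafkaTarget(
        name="my-kafka",           # profile name, later referenced as ds://my-kafka
        brokers="localhost:9092",  # now mandatory; pydantic raises if missing
        topic="my_topic",
    )
    register_temporary_client_datastore_profile(profile)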
mlrun/datastore/redis.py CHANGED
@@ -48,9 +48,8 @@ class RedisStore(DataStore):
             raise mlrun.errors.MLRunInvalidArgumentError(
                 "Provide Redis username and password only via secrets"
             )
-        credentials_prefix = self._get_secret_or_env("CREDENTIALS_PREFIX")
-        user = self._get_secret_or_env("REDIS_USER", "", credentials_prefix)
-        password = self._get_secret_or_env("REDIS_PASSWORD", "", credentials_prefix)
+        user = self._get_secret_or_env("REDIS_USER", "")
+        password = self._get_secret_or_env("REDIS_PASSWORD", "")
         host = parsed_endpoint.hostname
         port = parsed_endpoint.port if parsed_endpoint.port else redis_default_port
         schema = parsed_endpoint.scheme
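Note (not part of the diff): after this change the store reads credentials from the plain REDIS_USER / REDIS_PASSWORD keys only; the CREDENTIALS_PREFIX indirection is gone. A sketch of supplying them via environment variables (project secrets mounted as env vars work the same way):

    import os

    # picked up by mlrun.get_secret_or_env inside RedisStore
    os.environ["REDIS_USER"] = "default"
    os.environ["REDIS_PASSWORD"] = "s3cr3t"  # placeholder value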
mlrun/datastore/sources.py CHANGED
@@ -768,7 +768,6 @@ class SnowflakeSource(BaseSourceDriver):
     :parameter url: URL of the snowflake cluster
     :parameter user: snowflake user
     :parameter database: snowflake database
-    :parameter schema: snowflake schema - deprecated, use db_schema
     :parameter db_schema: snowflake schema
     :parameter warehouse: snowflake warehouse
     """
@@ -790,18 +789,10 @@ class SnowflakeSource(BaseSourceDriver):
         url: Optional[str] = None,
         user: Optional[str] = None,
         database: Optional[str] = None,
-        schema: Optional[str] = None,
         db_schema: Optional[str] = None,
         warehouse: Optional[str] = None,
         **kwargs,
     ):
-        # TODO: Remove in 1.10.0
-        if schema:
-            warnings.warn(
-                "schema is deprecated in 1.7.0, and will be removed in 1.10.0, please use db_schema"
-            )
-        db_schema = db_schema or schema  # TODO: Remove in 1.10.0
-
         attributes = attributes or {}
         if url:
             attributes["url"] = url
mlrun/datastore/storeytargets.py CHANGED
@@ -152,9 +152,7 @@ class KafkaStoreyTarget(storey.KafkaTarget):
                 parsed.path.strip("/") if parsed.path else datastore_profile.get_topic()
             )
         else:
-            brokers = attributes.pop(
-                "brokers", attributes.pop("bootstrap_servers", None)
-            )
+            brokers = attributes.pop("brokers", None)
             topic, brokers = parse_kafka_url(path, brokers)
 
         if not topic:
@@ -175,8 +173,7 @@ class RedisNoSqlStoreyTarget(storey.NoSqlTarget):
     def __init__(self, *args, **kwargs):
         path = kwargs.pop("path")
         endpoint, uri = mlrun.datastore.targets.RedisNoSqlTarget.get_server_endpoint(
-            path,
-            kwargs.pop("credentials_prefix", None),
+            path
         )
         kwargs["path"] = endpoint + "/" + uri
         super().__init__(*args, **kwargs)
mlrun/datastore/targets.py CHANGED
@@ -17,7 +17,6 @@ import os
 import random
 import sys
 import time
-import warnings
 from collections import Counter
 from copy import copy
 from typing import Any, Optional, Union
@@ -409,7 +408,6 @@ class BaseStoreTarget(DataTargetBase):
         flush_after_seconds: Optional[int] = None,
         storage_options: Optional[dict[str, str]] = None,
         schema: Optional[dict[str, Any]] = None,
-        credentials_prefix=None,
     ):
         super().__init__(
             self.kind,
@@ -424,7 +422,6 @@ class BaseStoreTarget(DataTargetBase):
             max_events,
             flush_after_seconds,
             schema=schema,
-            credentials_prefix=credentials_prefix,
         )
 
         self.name = name or self.kind
@@ -440,13 +437,6 @@ class BaseStoreTarget(DataTargetBase):
         self.flush_after_seconds = flush_after_seconds
         self.storage_options = storage_options
         self.schema = schema or {}
-        self.credentials_prefix = credentials_prefix
-        if credentials_prefix:
-            warnings.warn(
-                "The 'credentials_prefix' parameter is deprecated in 1.7.0 and will be removed in "
-                "1.10.0. Please use datastore profiles instead.",
-                FutureWarning,
-            )
 
         self._target = None
         self._resource = None
@@ -457,18 +447,11 @@ class BaseStoreTarget(DataTargetBase):
             key,
             secret_provider=self._secrets,
             default=default_value,
-            prefix=self.credentials_prefix,
         )
 
     def _get_store_and_path(self):
-        credentials_prefix_secrets = (
-            {"CREDENTIALS_PREFIX": self.credentials_prefix}
-            if self.credentials_prefix
-            else None
-        )
         store, resolved_store_path, url = mlrun.store_manager.get_or_create_store(
-            self.get_target_path(),
-            credentials_prefix_secrets,
+            self.get_target_path()
         )
         return store, resolved_store_path, url
 
@@ -621,7 +604,6 @@ class BaseStoreTarget(DataTargetBase):
         driver.path = spec.path
         driver.attributes = spec.attributes
         driver.schema = spec.schema
-        driver.credentials_prefix = spec.credentials_prefix
 
         if hasattr(spec, "columns"):
             driver.columns = spec.columns
@@ -638,7 +620,6 @@ class BaseStoreTarget(DataTargetBase):
         driver.max_events = spec.max_events
         driver.flush_after_seconds = spec.flush_after_seconds
         driver.storage_options = spec.storage_options
-        driver.credentials_prefix = spec.credentials_prefix
 
         driver._resource = resource
         driver.run_id = spec.run_id
@@ -720,7 +701,6 @@ class BaseStoreTarget(DataTargetBase):
         target.key_bucketing_number = self.key_bucketing_number
         target.partition_cols = self.partition_cols
         target.time_partitioning_granularity = self.time_partitioning_granularity
-        target.credentials_prefix = self.credentials_prefix
 
         self._resource.status.update_target(target)
         return target
@@ -1213,7 +1193,6 @@ class SnowflakeTarget(BaseStoreTarget):
         flush_after_seconds: Optional[int] = None,
         storage_options: Optional[dict[str, str]] = None,
         schema: Optional[dict[str, Any]] = None,
-        credentials_prefix=None,
         url: Optional[str] = None,
         user: Optional[str] = None,
         db_schema: Optional[str] = None,
@@ -1249,7 +1228,6 @@ class SnowflakeTarget(BaseStoreTarget):
             flush_after_seconds=flush_after_seconds,
             storage_options=storage_options,
             schema=schema,
-            credentials_prefix=credentials_prefix,
         )
 
     def get_spark_options(self, key_column=None, timestamp_key=None, overwrite=True):
@@ -1488,7 +1466,7 @@ class RedisNoSqlTarget(NoSqlBaseTarget):
     writer_step_name = "RedisNoSqlTarget"
 
     @staticmethod
-    def get_server_endpoint(path, credentials_prefix=None):
+    def get_server_endpoint(path):
         endpoint, uri = parse_path(path)
         endpoint = endpoint or mlrun.mlconf.redis.url
         if endpoint.startswith("ds://"):
@@ -1506,15 +1484,8 @@ class RedisNoSqlTarget(NoSqlBaseTarget):
             raise mlrun.errors.MLRunInvalidArgumentError(
                 "Provide Redis username and password only via secrets"
             )
-        credentials_prefix = credentials_prefix or mlrun.get_secret_or_env(
-            key="CREDENTIALS_PREFIX"
-        )
-        user = mlrun.get_secret_or_env(
-            "REDIS_USER", default="", prefix=credentials_prefix
-        )
-        password = mlrun.get_secret_or_env(
-            "REDIS_PASSWORD", default="", prefix=credentials_prefix
-        )
+        user = mlrun.get_secret_or_env("REDIS_USER", default="")
+        password = mlrun.get_secret_or_env("REDIS_PASSWORD", default="")
         host = parsed_endpoint.hostname
         port = parsed_endpoint.port if parsed_endpoint.port else "6379"
         scheme = parsed_endpoint.scheme
@@ -1528,9 +1499,7 @@ class RedisNoSqlTarget(NoSqlBaseTarget):
         from storey import Table
         from storey.redis_driver import RedisDriver
 
-        endpoint, uri = self.get_server_endpoint(
-            self.get_target_path(), self.credentials_prefix
-        )
+        endpoint, uri = self.get_server_endpoint(self.get_target_path())
 
         return Table(
             uri,
@@ -1539,9 +1508,7 @@ class RedisNoSqlTarget(NoSqlBaseTarget):
         )
 
     def get_spark_options(self, key_column=None, timestamp_key=None, overwrite=True):
-        endpoint, uri = self.get_server_endpoint(
-            self.get_target_path(), self.credentials_prefix
-        )
+        endpoint, uri = self.get_server_endpoint(self.get_target_path())
         parsed_endpoint = urlparse(endpoint)
         store, path_in_store, path = self._get_store_and_path()
         return {
@@ -1592,7 +1559,6 @@ class RedisNoSqlTarget(NoSqlBaseTarget):
             class_name="mlrun.datastore.storeytargets.RedisNoSqlStoreyTarget",
             columns=column_list,
             table=table,
-            credentials_prefix=self.credentials_prefix,
             **self.attributes,
         )
 
@@ -1648,7 +1614,6 @@ class KafkaTarget(BaseStoreTarget):
     :param path: topic name e.g. "my_topic"
     :param after_step: optional, after what step in the graph to add the target
     :param columns: optional, which columns from data to write
-    :param bootstrap_servers: Deprecated. Use the brokers parameter instead
     :param producer_options: additional configurations for kafka producer
     :param brokers: kafka broker as represented by a host:port pair, or a list of kafka brokers, e.g.
                     "localhost:9092", or ["kafka-broker-1:9092", "kafka-broker-2:9092"]
@@ -1664,27 +1629,12 @@ class KafkaTarget(BaseStoreTarget):
     def __init__(
         self,
         *args,
-        bootstrap_servers=None,
         producer_options=None,
         brokers=None,
         **kwargs,
     ):
         attrs = {}
 
-        # TODO: Remove this in 1.10.0
-        if bootstrap_servers:
-            if brokers:
-                raise mlrun.errors.MLRunInvalidArgumentError(
-                    "KafkaTarget cannot be created with both the 'brokers' parameter and the deprecated "
-                    "'bootstrap_servers' parameter. Please use 'brokers' only."
-                )
-            warnings.warn(
-                "'bootstrap_servers' parameter is deprecated in 1.7.0 and will be removed in 1.10.0, "
-                "use 'brokers' instead.",
-                FutureWarning,
-            )
-            brokers = bootstrap_servers
-
         if brokers:
             attrs["brokers"] = brokers
         if producer_options is not None:
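Note (not part of the diff): with bootstrap_servers removed, brokers is the only way to point a KafkaTarget at a cluster. A minimal sketch based on the docstring above:

    from mlrun.datastore.targets import KafkaTarget

    target = KafkaTarget(
        path="my_topic",  # topic name
        brokers=["kafka-broker-1:9092", "kafka-broker-2:9092"],
        producer_options={"acks": "all"},  # optional kafka producer settings
    )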
mlrun/datastore/utils.py CHANGED
@@ -16,7 +16,6 @@ import math
 import tarfile
 import tempfile
 import typing
-import warnings
 from urllib.parse import parse_qs, urlparse
 
 import pandas as pd
@@ -171,16 +170,7 @@ def _generate_sql_query_with_time_filter(
 def get_kafka_brokers_from_dict(options: dict, pop=False) -> typing.Optional[str]:
     get_or_pop = options.pop if pop else options.get
     kafka_brokers = get_or_pop("kafka_brokers", None)
-    if kafka_brokers:
-        return kafka_brokers
-    kafka_bootstrap_servers = get_or_pop("kafka_bootstrap_servers", None)
-    if kafka_bootstrap_servers:
-        warnings.warn(
-            "The 'kafka_bootstrap_servers' parameter is deprecated in 1.7.0 and will be removed in "
-            "1.10.0. Please pass the 'kafka_brokers' parameter instead.",
-            FutureWarning,
-        )
-        return kafka_bootstrap_servers
+    return kafka_brokers
 
 
 def transform_list_filters_to_tuple(additional_filters):
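Note (not part of the diff): the helper now honors only the "kafka_brokers" key; a legacy "kafka_bootstrap_servers" entry is silently ignored rather than warned about. A short behavior sketch:

    from mlrun.datastore.utils import get_kafka_brokers_from_dict

    get_kafka_brokers_from_dict({"kafka_brokers": "localhost:9092"})         # "localhost:9092"
    get_kafka_brokers_from_dict({"kafka_bootstrap_servers": "legacy:9092"})  # None as of rc5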
mlrun/db/base.py CHANGED
@@ -185,6 +185,7 @@ class RunDBInterface(ABC):
         kind: Optional[str] = None,
         category: Union[str, mlrun.common.schemas.ArtifactCategories] = None,
         tree: Optional[str] = None,
+        parent: Optional[str] = None,
         format_: mlrun.common.formatters.ArtifactFormat = mlrun.common.formatters.ArtifactFormat.full,
         limit: Optional[int] = None,
         partition_by: Optional[
mlrun/db/httpdb.py CHANGED
@@ -1225,6 +1225,7 @@ class HTTPRunDB(RunDBInterface):
         category: Union[str, mlrun.common.schemas.ArtifactCategories] = None,
         tree: Optional[str] = None,
         producer_uri: Optional[str] = None,
+        parent: Optional[str] = None,
         format_: Optional[
             mlrun.common.formatters.ArtifactFormat
         ] = mlrun.common.formatters.ArtifactFormat.full,
@@ -1253,6 +1254,8 @@ class HTTPRunDB(RunDBInterface):
                 "results", tag="*", project="iris", labels=["uploaded", "type=binary"]
             )
 
+        :param parent: The URI or <parent_name>:<parent_tag> string of the parent artifact.
+            Used to filter and return only artifacts that are direct children of the specified parent.
         :param name: Name of artifacts to retrieve. Name with '~' prefix is used as a like query, and is not
             case-sensitive. This means that querying for ``~name`` may return artifacts named
             ``my_Name_1`` or ``surname``.
@@ -1309,6 +1312,7 @@ class HTTPRunDB(RunDBInterface):
             partition_sort_by=partition_sort_by,
             partition_order=partition_order,
             return_all=not limit,
+            parent=parent,
         )
         return artifacts
 
@@ -5080,6 +5084,7 @@ class HTTPRunDB(RunDBInterface):
         category: Union[str, mlrun.common.schemas.ArtifactCategories] = None,
         tree: Optional[str] = None,
         producer_uri: Optional[str] = None,
+        parent: Optional[str] = None,
         format_: Optional[
             mlrun.common.formatters.ArtifactFormat
         ] = mlrun.common.formatters.ArtifactFormat.full,
@@ -5128,6 +5133,7 @@ class HTTPRunDB(RunDBInterface):
             "page": page,
             "page-size": page_size,
             "page-token": page_token,
+            "parent": parent,
         }
 
         if partition_by:
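Note (not part of the diff): a sketch of the new parent filter, assuming a hypothetical model artifact named my-model tagged latest; per the docstring above, parent accepts either a full artifact URI or a <parent_name>:<parent_tag> string:

    import mlrun

    db = mlrun.get_run_db()
    # return only artifacts that are direct children of my-model:latest
    children = db.list_artifacts(project="my-project", parent="my-model:latest")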
mlrun/db/nopdb.py CHANGED
@@ -208,6 +208,7 @@ class NopDB(RunDBInterface):
         kind: Optional[str] = None,
         category: Union[str, mlrun.common.schemas.ArtifactCategories] = None,
         tree: Optional[str] = None,
+        parent: Optional[str] = None,
         format_: mlrun.common.formatters.ArtifactFormat = mlrun.common.formatters.ArtifactFormat.full,
         limit: Optional[int] = None,
         partition_by: Optional[
mlrun/execution.py CHANGED
@@ -31,6 +31,7 @@ from mlrun.artifacts import (
     DatasetArtifact,
     DocumentArtifact,
     DocumentLoaderSpec,
+    LLMPromptArtifact,
     ModelArtifact,
 )
 from mlrun.datastore.store_resources import get_store_resource
@@ -808,6 +809,8 @@ class MLClientCtx:
         label_column: Optional[Union[str, list]] = None,
         extra_data=None,
         db_key=None,
+        model_url: Optional[str] = None,
+        default_config=None,
         **kwargs,
     ) -> ModelArtifact:
         """Log a model artifact and optionally upload it to datastore
@@ -850,6 +853,9 @@ class MLClientCtx:
                             value can be absolute path | relative path (to model dir) | bytes | artifact object
         :param db_key: The key to use in the artifact DB table, by default its run name + '_' + key
                        db_key=False will not register it in the artifacts table
+        :param model_url: Remote model url.
+        :param default_config: Default configuration for client building.
+                               Saved as a sub-dictionary under the parameter.
 
         :returns: Model artifact object
         """
@@ -858,7 +864,6 @@ class MLClientCtx:
             raise MLRunInvalidArgumentError(
                 "Cannot specify inputs and training set together"
             )
-
         model = ModelArtifact(
             key,
             body,
@@ -873,6 +878,8 @@ class MLClientCtx:
             feature_vector=feature_vector,
             feature_weights=feature_weights,
             extra_data=extra_data,
+            model_url=model_url,
+            default_config=default_config,
             **kwargs,
         )
         if training_set is not None:
@@ -893,6 +900,85 @@ class MLClientCtx:
         self._update_run()
         return item
 
+    def log_llm_prompt(
+        self,
+        key,
+        prompt_string: Optional[str] = None,
+        prompt_path: Optional[str] = None,
+        prompt_legend: Optional[dict] = None,
+        model_artifact: Union[ModelArtifact, str] = None,
+        model_configuration: Optional[dict] = None,
+        description: Optional[str] = None,
+        target_path: Optional[str] = None,
+        artifact_path: Optional[str] = None,
+        tag: Optional[str] = None,
+        labels: Optional[Union[list[str], str]] = None,
+        upload: Optional[bool] = None,
+        **kwargs,
+    ) -> LLMPromptArtifact:
+        """Log an LLM prompt artifact and optionally upload it to the artifact store.
+
+        This function allows you to log a prompt artifact for large language model (LLM) usage. Prompts can be
+        defined as a string or by referencing a file path. Optionally, you can link the prompt to a parent model
+        artifact and provide metadata like a prompt legend (e.g., input variable mapping) and generation
+        configuration.
+
+        Examples::
+
+            # Log an inline prompt
+            context.log_llm_prompt(
+                key="qa-prompt",
+                prompt_string="Q: {question}",
+                model_artifact=model,
+                prompt_legend={"question": "user_input"},
+                model_configuration={"temperature": 0.7, "max_tokens": 128},
+                tag="latest",
+            )
+
+        :param key: Unique name of the artifact.
+        :param prompt_string: Raw prompt text as a string. Cannot be used with `prompt_path`.
+        :param prompt_path: Path to a file containing the prompt content. Cannot be used with `prompt_string`.
+        :param prompt_legend: A dictionary where each key is a placeholder in the prompt (e.g., ``{user_name}``)
+            and the value is a description or explanation of what that placeholder represents.
+            Useful for documenting and clarifying dynamic parts of the prompt.
+        :param model_artifact: Reference to the parent model (either `ModelArtifact` or model URI string).
+        :param model_configuration: Dictionary of generation parameters (e.g., temperature, max_tokens).
+        :param description: Optional description of the prompt.
+        :param target_path: Path to write the artifact locally.
+        :param artifact_path: Path in the artifact store (defaults to project artifact path).
+        :param tag: Tag/version to assign to the prompt artifact.
+        :param labels: Labels to tag the artifact (e.g., list or dict of key-value pairs).
+        :param upload: Whether to upload the artifact to the store (defaults to True).
+        :param kwargs: Additional fields to pass to the `LLMPromptArtifact` constructor.
+
+        :returns: The logged `LLMPromptArtifact` object.
+        """
+
+        llm_prompt = LLMPromptArtifact(
+            key=key,
+            project=self.project or "",
+            prompt_string=prompt_string,
+            prompt_path=prompt_path,
+            prompt_legend=prompt_legend,
+            model_artifact=model_artifact,
+            model_configuration=model_configuration,
+            target_path=target_path,
+            description=description,
+            **kwargs,
+        )
+
+        item = cast(
+            LLMPromptArtifact,
+            self.log_artifact(
+                llm_prompt,
+                artifact_path=artifact_path,
+                tag=tag,
+                upload=upload,
+                labels=labels,
+            ),
+        )
+        self._update_run()
+        return item
+
     def log_document(
         self,
         key: str = "",
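Note (not part of the diff): beyond the inline example in the docstring, a prompt can also be logged from a file via prompt_path. A sketch with a hypothetical local file:

    # prompt.txt contains: "Summarize the following text: {text}"
    item = context.log_llm_prompt(
        key="summarize-prompt",
        prompt_path="prompt.txt",
        prompt_legend={"text": "the document body to summarize"},
        model_configuration={"temperature": 0.2},
        tag="v1",
    )
    print(item.uri)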
mlrun/model.py CHANGED
@@ -2155,7 +2155,6 @@ class DataSource(ModelObj):
         "max_age",
         "start_time",
         "end_time",
-        "credentials_prefix",
     ]
     kind = None
 
@@ -2218,7 +2217,6 @@ class DataTargetBase(ModelObj):
         "storage_options",
         "run_id",
         "schema",
-        "credentials_prefix",
     ]
 
     @classmethod
@@ -2253,7 +2251,6 @@ class DataTargetBase(ModelObj):
         flush_after_seconds: Optional[int] = None,
         storage_options: Optional[dict[str, str]] = None,
         schema: Optional[dict[str, Any]] = None,
-        credentials_prefix=None,
     ):
         self.name = name
         self.kind: str = kind
@@ -2270,7 +2267,6 @@ class DataTargetBase(ModelObj):
         self.storage_options = storage_options
         self.run_id = None
         self.schema = schema
-        self.credentials_prefix = credentials_prefix
 
 
 class FeatureSetProducer(ModelObj):
@@ -2303,7 +2299,6 @@ class DataTarget(DataTargetBase):
         "key_bucketing_number",
         "partition_cols",
         "time_partitioning_granularity",
-        "credentials_prefix",
     ]
 
     def __init__(
mlrun/model_monitoring/applications/base.py CHANGED
@@ -409,8 +409,8 @@ class ModelMonitoringApplicationBase(MonitoringApplicationToDict, ABC):
         tag: Optional[str] = None,
         run_local: bool = True,
         auto_build: bool = True,
-        sample_data: Optional[pd.DataFrame] = None,
-        reference_data: Optional[pd.DataFrame] = None,
+        sample_data: Optional[Union[pd.DataFrame, str]] = None,
+        reference_data: Optional[Union[pd.DataFrame, str]] = None,
         image: Optional[str] = None,
         with_repo: Optional[bool] = False,
         class_handler: Optional[str] = None,
@@ -434,9 +434,11 @@ class ModelMonitoringApplicationBase(MonitoringApplicationToDict, ABC):
         :param tag: Tag for the function.
         :param run_local: Whether to run the function locally or remotely.
         :param auto_build: Whether to auto build the function.
-        :param sample_data: Pandas data-frame as the current dataset.
+        :param sample_data: Pandas data-frame or :py:class:`~mlrun.artifacts.dataset.DatasetArtifact` URI as
+            the current dataset.
             When set, it replaces the data read from the model endpoint's offline source.
-        :param reference_data: Pandas data-frame of the reference dataset.
+        :param reference_data: Pandas data-frame or :py:class:`~mlrun.artifacts.dataset.DatasetArtifact` URI as
+            the reference dataset.
             When set, its statistics override the model endpoint's feature statistics.
         :param image: Docker image to run the job on (when running remotely).
         :param with_repo: Whether to clone the current repo to the build source.
@@ -515,7 +517,9 @@ class ModelMonitoringApplicationBase(MonitoringApplicationToDict, ABC):
             (sample_data, "sample_data"),
             (reference_data, "reference_data"),
         ]:
-            if data is not None:
+            if isinstance(data, str):
+                inputs[identifier] = data
+            elif data is not None:
                 key = f"{job.metadata.name}_{identifier}"
                 inputs[identifier] = project.log_dataset(
                     key,
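Note (not part of the diff): sample_data and reference_data now also accept dataset URI strings, which are passed straight through as run inputs instead of being re-logged as new dataset artifacts. A sketch, assuming this hunk belongs to the class's evaluate helper and using hypothetical store URIs:

    # MyMonitoringApp subclasses ModelMonitoringApplicationBase
    MyMonitoringApp.evaluate(
        sample_data="store://datasets/my-project/current-window:latest",
        reference_data="store://datasets/my-project/training-set:latest",
    )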