mlrun 1.10.0rc31__py3-none-any.whl → 1.10.0rc32__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

This release has been flagged as potentially problematic.


This version of mlrun might be problematic; see the registry's advisory page for more details.

@@ -29,7 +29,7 @@ class LLMPromptArtifactSpec(ArtifactSpec):
29
29
  _dict_fields = ArtifactSpec._dict_fields + [
30
30
  "prompt_template",
31
31
  "prompt_legend",
32
- "model_configuration",
32
+ "invocation_config",
33
33
  "description",
34
34
  ]
35
35
  PROMPT_TEMPLATE_KEYS = ("content", "role")
@@ -70,9 +70,9 @@ class LLMPromptArtifactSpec(ArtifactSpec):
70
70
  self.prompt_legend = prompt_legend
71
71
  if invocation_config is not None and not isinstance(invocation_config, dict):
72
72
  raise mlrun.errors.MLRunInvalidArgumentError(
73
- "LLMPromptArtifact model_configuration must be a dictionary or None"
73
+ "LLMPromptArtifact invocation_config must be a dictionary or None"
74
74
  )
75
- self.model_configuration = invocation_config or {}
75
+ self.invocation_config = invocation_config or {}
76
76
  self.description = description
77
77
  self._model_artifact = (
78
78
  model_artifact
mlrun/execution.py CHANGED
@@ -958,7 +958,7 @@ class MLClientCtx:
958
958
  },
959
959
  },
960
960
  model_artifact=model,
961
- model_configuration={"temperature": 0.5, "max_tokens": 200},
961
+ invocation_config={"temperature": 0.5, "max_tokens": 200},
962
962
  description="Prompt for handling customer support queries",
963
963
  tag="support-v1",
964
964
  labels={"domain": "support"},
@@ -975,7 +975,7 @@ class MLClientCtx:
975
975
  }
976
976
  },
977
977
  model_artifact=model,
978
- model_configuration={"temperature": 0.7, "max_tokens": 256},
978
+ invocation_config={"temperature": 0.7, "max_tokens": 256},
979
979
  description="Q&A prompt template with user-provided question",
980
980
  tag="v2",
981
981
  labels={"task": "qa", "stage": "experiment"},
mlrun/projects/project.py CHANGED
@@ -1932,7 +1932,7 @@ class MlrunProject(ModelObj):
1932
1932
  },
1933
1933
  },
1934
1934
  model_artifact=model,
1935
- model_configuration={"temperature": 0.5, "max_tokens": 200},
1935
+ invocation_config={"temperature": 0.5, "max_tokens": 200},
1936
1936
  description="Prompt for handling customer support queries",
1937
1937
  tag="support-v1",
1938
1938
  labels={"domain": "support"},
@@ -1949,7 +1949,7 @@ class MlrunProject(ModelObj):
1949
1949
  }
1950
1950
  },
1951
1951
  model_artifact=model,
1952
- model_configuration={"temperature": 0.7, "max_tokens": 256},
1952
+ invocation_config={"temperature": 0.7, "max_tokens": 256},
1953
1953
  description="Q&A prompt template with user-provided question",
1954
1954
  tag="v2",
1955
1955
  labels={"task": "qa", "stage": "experiment"},
@@ -16,6 +16,7 @@ import asyncio
16
16
  import copy
17
17
  import json
18
18
  import typing
19
+ import warnings
19
20
  from datetime import datetime
20
21
  from time import sleep
21
22
 
@@ -1329,8 +1330,10 @@ class RemoteRuntime(KubeResource):
1329
1330
  :return: returns function's url
1330
1331
  """
1331
1332
  if auth_info:
1332
- logger.warning(
1333
- "Deprecated parameter 'auth_info' was provided, but will be ignored. Will be removed in 1.12.0."
1333
+ warnings.warn(
1334
+ "'auth_info' is deprecated in 1.10.0 and will be removed in 1.12.0.",
1335
+ # TODO: Remove this in 1.12.0
1336
+ FutureWarning,
1334
1337
  )
1335
1338
  return self._resolve_invocation_url("", force_external_address)
1336
1339
 
mlrun/serving/states.py CHANGED
@@ -1349,7 +1349,7 @@ class LLModel(Model):
1349
1349
  "Invoking model provider",
1350
1350
  model_name=self.name,
1351
1351
  messages=messages,
1352
- model_configuration=invocation_config,
1352
+ invocation_config=invocation_config,
1353
1353
  )
1354
1354
  response_with_stats = self.model_provider.invoke(
1355
1355
  messages=messages,
@@ -1378,7 +1378,7 @@ class LLModel(Model):
1378
1378
  self,
1379
1379
  body: Any,
1380
1380
  messages: Optional[list[dict]] = None,
1381
- model_configuration: Optional[dict] = None,
1381
+ invocation_config: Optional[dict] = None,
1382
1382
  **kwargs,
1383
1383
  ) -> Any:
1384
1384
  llm_prompt_artifact = kwargs.get("llm_prompt_artifact")
@@ -1389,12 +1389,12 @@ class LLModel(Model):
1389
1389
  "Async invoking model provider",
1390
1390
  model_name=self.name,
1391
1391
  messages=messages,
1392
- model_configuration=model_configuration,
1392
+ invocation_config=invocation_config,
1393
1393
  )
1394
1394
  response_with_stats = await self.model_provider.async_invoke(
1395
1395
  messages=messages,
1396
1396
  invoke_response_format=InvokeResponseFormat.USAGE,
1397
- **(model_configuration or {}),
1397
+ **(invocation_config or {}),
1398
1398
  )
1399
1399
  set_data_by_path(
1400
1400
  path=self._result_path, data=body, value=response_with_stats
@@ -1416,7 +1416,7 @@ class LLModel(Model):
1416
1416
 
1417
1417
  def run(self, body: Any, path: str, origin_name: Optional[str] = None) -> Any:
1418
1418
  llm_prompt_artifact = self._get_invocation_artifact(origin_name)
1419
- messages, model_configuration = self.enrich_prompt(
1419
+ messages, invocation_config = self.enrich_prompt(
1420
1420
  body, origin_name, llm_prompt_artifact
1421
1421
  )
1422
1422
  logger.info(
@@ -1428,7 +1428,7 @@ class LLModel(Model):
1428
1428
  return self.predict(
1429
1429
  body,
1430
1430
  messages=messages,
1431
- invocation_config=model_configuration,
1431
+ invocation_config=invocation_config,
1432
1432
  llm_prompt_artifact=llm_prompt_artifact,
1433
1433
  )
1434
1434
 
@@ -1436,7 +1436,7 @@ class LLModel(Model):
1436
1436
  self, body: Any, path: str, origin_name: Optional[str] = None
1437
1437
  ) -> Any:
1438
1438
  llm_prompt_artifact = self._get_invocation_artifact(origin_name)
1439
- messages, model_configuration = self.enrich_prompt(
1439
+ messages, invocation_config = self.enrich_prompt(
1440
1440
  body, origin_name, llm_prompt_artifact
1441
1441
  )
1442
1442
  logger.info(
@@ -1448,7 +1448,7 @@ class LLModel(Model):
1448
1448
  return await self.predict_async(
1449
1449
  body,
1450
1450
  messages=messages,
1451
- model_configuration=model_configuration,
1451
+ invocation_config=invocation_config,
1452
1452
  llm_prompt_artifact=llm_prompt_artifact,
1453
1453
  )
1454
1454
 
@@ -1472,11 +1472,11 @@ class LLModel(Model):
1472
1472
  artifact_type=type(llm_prompt_artifact).__name__,
1473
1473
  llm_prompt_artifact=llm_prompt_artifact,
1474
1474
  )
1475
- prompt_legend, prompt_template, model_configuration = {}, [], {}
1475
+ prompt_legend, prompt_template, invocation_config = {}, [], {}
1476
1476
  else:
1477
1477
  prompt_legend = llm_prompt_artifact.spec.prompt_legend
1478
1478
  prompt_template = deepcopy(llm_prompt_artifact.read_prompt())
1479
- model_configuration = llm_prompt_artifact.spec.model_configuration
1479
+ invocation_config = llm_prompt_artifact.spec.invocation_config
1480
1480
  input_data = copy(get_data_from_path(self._input_path, body))
1481
1481
  if isinstance(input_data, dict) and prompt_template:
1482
1482
  kwargs = (
@@ -1512,7 +1512,7 @@ class LLModel(Model):
1512
1512
  model_name=self.name,
1513
1513
  input_data_type=type(input_data).__name__,
1514
1514
  )
1515
- return prompt_template, model_configuration
1515
+ return prompt_template, invocation_config
1516
1516
 
1517
1517
  def _get_invocation_artifact(
1518
1518
  self, origin_name: Optional[str] = None
@@ -1,4 +1,4 @@
1
1
  {
2
- "git_commit": "7714a5aa9f89102e497a5746d9711ea97a7d20c8",
3
- "version": "1.10.0-rc31"
2
+ "git_commit": "3aef2d331a6fdb0ab3c8584cc05f50ae58053cb9",
3
+ "version": "1.10.0-rc32"
4
4
  }
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: mlrun
3
- Version: 1.10.0rc31
3
+ Version: 1.10.0rc32
4
4
  Summary: Tracking and config of machine learning runs
5
5
  Home-page: https://github.com/mlrun/mlrun
6
6
  Author: Yaron Haviv
@@ -44,7 +44,7 @@ Requires-Dist: semver~=3.0
44
44
  Requires-Dist: dependency-injector~=4.41
45
45
  Requires-Dist: fsspec<=2025.7.0,>=2025.5.1
46
46
  Requires-Dist: v3iofs~=0.1.17
47
- Requires-Dist: storey~=1.10.14
47
+ Requires-Dist: storey~=1.10.16
48
48
  Requires-Dist: inflection~=0.5.0
49
49
  Requires-Dist: python-dotenv~=1.0
50
50
  Requires-Dist: setuptools>=75.2
@@ -2,7 +2,7 @@ mlrun/__init__.py,sha256=acM2jRv7RCvBROwucuC01Rf_HdvV3xUPtJlQtX_01MY,8076
2
2
  mlrun/__main__.py,sha256=wQNaxW7QsqFBtWffnPkw-497fnpsrQzUnscBQQAP_UM,48364
3
3
  mlrun/config.py,sha256=edvnwbZ2xlHwuRxy32SqzJyJE517zsWoduGYLO0zgGs,73433
4
4
  mlrun/errors.py,sha256=bAk0t_qmCxQSPNK0TugOAfA5R6f0G6OYvEvXUWSJ_5U,9062
5
- mlrun/execution.py,sha256=0NuuvXR2o3iAw1HiB3DZxOAy9xzSCw45AwASwTUK3I0,58806
5
+ mlrun/execution.py,sha256=Ozu8SjO-nQ6l5vHwqrTQjmP6koMpUqNQpp6qn6jvhVE,58802
6
6
  mlrun/features.py,sha256=jMEXo6NB36A6iaxNEJWzdtYwUmglYD90OIKTIEeWhE8,15841
7
7
  mlrun/k8s_utils.py,sha256=zIacVyvsXrXVO-DdxAoGQOGEDWOGJEFJzYPhPVnn3z8,24548
8
8
  mlrun/lists.py,sha256=OlaV2QIFUzmenad9kxNJ3k4whlDyxI3zFbGwr6vpC5Y,8561
@@ -17,7 +17,7 @@ mlrun/artifacts/base.py,sha256=6x_2KPMNOciiNNUsiKgJ-b6ejxAHm_Ro22xODLoTc44,28559
17
17
  mlrun/artifacts/dataset.py,sha256=bhb5Kfbs8P28yjnpN76th5lLEUl5nAqD4VqVzHEVPrM,16421
18
18
  mlrun/artifacts/document.py,sha256=p5HsWdmIIJ0NahS7y3EEQN2tfHtUrUmUG-8BEEyi_Jc,17373
19
19
  mlrun/artifacts/helpers.py,sha256=ejTEC9vkI2w5FHn5Gopw3VEIxuni0bazWUnR6BBWZfU,1662
20
- mlrun/artifacts/llm_prompt.py,sha256=zGTOeXOwjXkJaHZY99lU_TUSDOfyLfLCqCTN1vgJpQc,9846
20
+ mlrun/artifacts/llm_prompt.py,sha256=pshXzYXPDBAe6C0vecn9MyRNyPdxrah3c80oZUKkYWA,9840
21
21
  mlrun/artifacts/manager.py,sha256=_cDNCS7wwmFIsucJ2uOgHxZQECmIGb8Wye64b6oLgKU,16642
22
22
  mlrun/artifacts/model.py,sha256=9yU9NZlxxY_ifSyXOgMnPi_RMDmawY9A-rLi-_VJs4c,25662
23
23
  mlrun/artifacts/plots.py,sha256=wmaxVXiAPSCyn3M7pIlcBu9pP3O8lrq0Ewx6iHRDF9s,4238
@@ -281,7 +281,7 @@ mlrun/platforms/iguazio.py,sha256=32_o95Ntx9z3ciowt2NcnX7tAiLBwX3VB0mbTQ-KrIQ,13
281
281
  mlrun/projects/__init__.py,sha256=hdCOA6_fp8X4qGGGT7Bj7sPbkM1PayWuaVZL0DkpuZw,1240
282
282
  mlrun/projects/operations.py,sha256=dax9HGvs3S7FzZ2Hok1ixFoToIZI2mkUo0EhNUtsHGk,21020
283
283
  mlrun/projects/pipelines.py,sha256=ZOfuIEHOXfuc4qAkuWvbWhCjP6kqpLkv-yBBaY9RXhg,52219
284
- mlrun/projects/project.py,sha256=E4kX49_D2ZqytqBJMsVH6HBan9D1Z1_UDQPTJgRlJIM,256822
284
+ mlrun/projects/project.py,sha256=BwUCZTQr4YPr_s2dNh0-IKZo93ipp8sR-PJGslSkuz0,256818
285
285
  mlrun/runtimes/__init__.py,sha256=8cqrYKy1a0_87XG7V_p96untQ4t8RocadM4LVEEN1JM,9029
286
286
  mlrun/runtimes/base.py,sha256=pagMAvF0nEElptqLnBiGx9fpFenEq052B80GaLzR8Y8,38895
287
287
  mlrun/runtimes/daskjob.py,sha256=IN6gKKrmCIjWooj5FgFm-pAb2i7ra1ERRzClfu_rYGI,20102
@@ -303,7 +303,7 @@ mlrun/runtimes/mpijob/abstract.py,sha256=QjAG4OZ6JEQ58w5-qYNd6hUGwvaW8ynLtlr9jNf
303
303
  mlrun/runtimes/mpijob/v1.py,sha256=zSlRkiWHz4B3yht66sVf4mlfDs8YT9EnP9DfBLn5VNs,3372
304
304
  mlrun/runtimes/nuclio/__init__.py,sha256=osOVMN9paIOuUoOTizmkxMb_OXRP-SlPwXHJSSYK_wk,834
305
305
  mlrun/runtimes/nuclio/api_gateway.py,sha256=vH9ClKVP4Mb24rvA67xPuAvAhX-gAv6vVtjVxyplhdc,26969
306
- mlrun/runtimes/nuclio/function.py,sha256=ohO9NokWBn9dN7iPjD4Ws4MR2BJGqLBhxCi_yNCoyi8,55628
306
+ mlrun/runtimes/nuclio/function.py,sha256=VjJtfteEX2I8gYCwbBdqWwIK6ZOCVOu8lQGlX4i3nwU,55693
307
307
  mlrun/runtimes/nuclio/nuclio.py,sha256=sLK8KdGO1LbftlL3HqPZlFOFTAAuxJACZCVl1c0Ha6E,2942
308
308
  mlrun/runtimes/nuclio/serving.py,sha256=NF0f7a6KV8GIb4QBUKiJa_L_5oqCsG7UHPs8Uo3K_Eo,36330
309
309
  mlrun/runtimes/nuclio/application/__init__.py,sha256=rRs5vasy_G9IyoTpYIjYDafGoL6ifFBKgBtsXn31Atw,614
@@ -317,7 +317,7 @@ mlrun/serving/remote.py,sha256=p29CBtKwbW_l8BzmNg3Uy__0eMf7_OubTMzga_S3EOA,22089
317
317
  mlrun/serving/routers.py,sha256=pu5jlSLI4Ml68YP_FMFDhhwPfLcT6lRu5yL5QDgXPHQ,52889
318
318
  mlrun/serving/server.py,sha256=WvAQtkNhAcd2vGuMR04OdxfynMNWvtz6LpKEYPhK3z0,40959
319
319
  mlrun/serving/serving_wrapper.py,sha256=UL9hhWCfMPcTJO_XrkvNaFvck1U1E7oS8trTZyak0cA,835
320
- mlrun/serving/states.py,sha256=eT3dzYiEzVfDSLae_14m-c5vxHlEF9op4kxQtbZCASA,139004
320
+ mlrun/serving/states.py,sha256=Q2Q7o0eJCvnonXd2-sfiv7zhCiyC6xthfW25nzf61KM,138976
321
321
  mlrun/serving/system_steps.py,sha256=ZvGkUqiiYOrUlsDnsvzf9u9554mzyFwlKVrybqB7xao,20200
322
322
  mlrun/serving/utils.py,sha256=Zbfqm8TKNcTE8zRBezVBzpvR2WKeKeIRN7otNIaiYEc,4170
323
323
  mlrun/serving/v1_serving.py,sha256=c6J_MtpE-Tqu00-6r4eJOCO6rUasHDal9W2eBIcrl50,11853
@@ -351,11 +351,11 @@ mlrun/utils/notifications/notification/mail.py,sha256=ZyJ3eqd8simxffQmXzqd3bgbAq
351
351
  mlrun/utils/notifications/notification/slack.py,sha256=wSu_7W0EnGLBNwIgWCYEeTP8j9SPAMPDBnfUcPnVZYA,7299
352
352
  mlrun/utils/notifications/notification/webhook.py,sha256=FM5-LQAKAVJKp37MRzR3SsejalcnpM6r_9Oe7znxZEA,5313
353
353
  mlrun/utils/version/__init__.py,sha256=YnzE6tlf24uOQ8y7Z7l96QLAI6-QEii7-77g8ynmzy0,613
354
- mlrun/utils/version/version.json,sha256=GMYi0YSvQVE5LLpEES_qHCE6wE-GSUypn8YD_oQiU_k,90
354
+ mlrun/utils/version/version.json,sha256=j4SaTft98gfBfeSZ8hkeGUNgaedfuQHGalP9AUUmW6Y,90
355
355
  mlrun/utils/version/version.py,sha256=M2hVhRrgkN3SxacZHs3ZqaOsqAA7B6a22ne324IQ1HE,1877
356
- mlrun-1.10.0rc31.dist-info/licenses/LICENSE,sha256=zTiv1CxWNkOk1q8eJS1G_8oD4gWpWLwWxj_Agcsi8Os,11337
357
- mlrun-1.10.0rc31.dist-info/METADATA,sha256=gzxw9bbyGyHnvZlLHeIKmogYbBbL1yVnq6i8nSRadn4,26104
358
- mlrun-1.10.0rc31.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
359
- mlrun-1.10.0rc31.dist-info/entry_points.txt,sha256=1Owd16eAclD5pfRCoJpYC2ZJSyGNTtUr0nCELMioMmU,46
360
- mlrun-1.10.0rc31.dist-info/top_level.txt,sha256=NObLzw3maSF9wVrgSeYBv-fgnHkAJ1kEkh12DLdd5KM,6
361
- mlrun-1.10.0rc31.dist-info/RECORD,,
356
+ mlrun-1.10.0rc32.dist-info/licenses/LICENSE,sha256=zTiv1CxWNkOk1q8eJS1G_8oD4gWpWLwWxj_Agcsi8Os,11337
357
+ mlrun-1.10.0rc32.dist-info/METADATA,sha256=R2DqTgp-gYojqVO_b1I1zXQdReggmicO8dxP26a6M0w,26104
358
+ mlrun-1.10.0rc32.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
359
+ mlrun-1.10.0rc32.dist-info/entry_points.txt,sha256=1Owd16eAclD5pfRCoJpYC2ZJSyGNTtUr0nCELMioMmU,46
360
+ mlrun-1.10.0rc32.dist-info/top_level.txt,sha256=NObLzw3maSF9wVrgSeYBv-fgnHkAJ1kEkh12DLdd5KM,6
361
+ mlrun-1.10.0rc32.dist-info/RECORD,,