mlrun 1.10.0rc18__py3-none-any.whl → 1.11.0rc16__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of mlrun might be problematic.
- mlrun/__init__.py +24 -3
- mlrun/__main__.py +0 -4
- mlrun/artifacts/dataset.py +2 -2
- mlrun/artifacts/document.py +6 -1
- mlrun/artifacts/llm_prompt.py +21 -15
- mlrun/artifacts/model.py +3 -3
- mlrun/artifacts/plots.py +1 -1
- mlrun/{model_monitoring/db/tsdb/tdengine → auth}/__init__.py +2 -3
- mlrun/auth/nuclio.py +89 -0
- mlrun/auth/providers.py +429 -0
- mlrun/auth/utils.py +415 -0
- mlrun/common/constants.py +14 -0
- mlrun/common/model_monitoring/helpers.py +123 -0
- mlrun/common/runtimes/constants.py +28 -0
- mlrun/common/schemas/__init__.py +14 -3
- mlrun/common/schemas/alert.py +2 -2
- mlrun/common/schemas/api_gateway.py +3 -0
- mlrun/common/schemas/auth.py +12 -10
- mlrun/common/schemas/client_spec.py +4 -0
- mlrun/common/schemas/constants.py +25 -0
- mlrun/common/schemas/frontend_spec.py +1 -8
- mlrun/common/schemas/function.py +34 -0
- mlrun/common/schemas/hub.py +33 -20
- mlrun/common/schemas/model_monitoring/__init__.py +2 -1
- mlrun/common/schemas/model_monitoring/constants.py +12 -15
- mlrun/common/schemas/model_monitoring/functions.py +13 -4
- mlrun/common/schemas/model_monitoring/model_endpoints.py +11 -0
- mlrun/common/schemas/pipeline.py +1 -1
- mlrun/common/schemas/secret.py +17 -2
- mlrun/common/secrets.py +95 -1
- mlrun/common/types.py +10 -10
- mlrun/config.py +69 -19
- mlrun/data_types/infer.py +2 -2
- mlrun/datastore/__init__.py +12 -5
- mlrun/datastore/azure_blob.py +162 -47
- mlrun/datastore/base.py +274 -10
- mlrun/datastore/datastore.py +7 -2
- mlrun/datastore/datastore_profile.py +84 -22
- mlrun/datastore/model_provider/huggingface_provider.py +225 -41
- mlrun/datastore/model_provider/mock_model_provider.py +87 -0
- mlrun/datastore/model_provider/model_provider.py +206 -74
- mlrun/datastore/model_provider/openai_provider.py +226 -66
- mlrun/datastore/s3.py +39 -18
- mlrun/datastore/sources.py +1 -1
- mlrun/datastore/store_resources.py +4 -4
- mlrun/datastore/storeytargets.py +17 -12
- mlrun/datastore/targets.py +1 -1
- mlrun/datastore/utils.py +25 -6
- mlrun/datastore/v3io.py +1 -1
- mlrun/db/base.py +63 -32
- mlrun/db/httpdb.py +373 -153
- mlrun/db/nopdb.py +54 -21
- mlrun/errors.py +4 -2
- mlrun/execution.py +66 -25
- mlrun/feature_store/api.py +1 -1
- mlrun/feature_store/common.py +1 -1
- mlrun/feature_store/feature_vector_utils.py +1 -1
- mlrun/feature_store/steps.py +8 -6
- mlrun/frameworks/_common/utils.py +3 -3
- mlrun/frameworks/_dl_common/loggers/logger.py +1 -1
- mlrun/frameworks/_dl_common/loggers/tensorboard_logger.py +2 -1
- mlrun/frameworks/_ml_common/loggers/mlrun_logger.py +1 -1
- mlrun/frameworks/_ml_common/utils.py +2 -1
- mlrun/frameworks/auto_mlrun/auto_mlrun.py +4 -3
- mlrun/frameworks/lgbm/mlrun_interfaces/mlrun_interface.py +2 -1
- mlrun/frameworks/onnx/dataset.py +2 -1
- mlrun/frameworks/onnx/mlrun_interface.py +2 -1
- mlrun/frameworks/pytorch/callbacks/logging_callback.py +5 -4
- mlrun/frameworks/pytorch/callbacks/mlrun_logging_callback.py +2 -1
- mlrun/frameworks/pytorch/callbacks/tensorboard_logging_callback.py +2 -1
- mlrun/frameworks/pytorch/utils.py +2 -1
- mlrun/frameworks/sklearn/metric.py +2 -1
- mlrun/frameworks/tf_keras/callbacks/logging_callback.py +5 -4
- mlrun/frameworks/tf_keras/callbacks/mlrun_logging_callback.py +2 -1
- mlrun/frameworks/tf_keras/callbacks/tensorboard_logging_callback.py +2 -1
- mlrun/hub/__init__.py +52 -0
- mlrun/hub/base.py +142 -0
- mlrun/hub/module.py +172 -0
- mlrun/hub/step.py +113 -0
- mlrun/k8s_utils.py +105 -16
- mlrun/launcher/base.py +15 -7
- mlrun/launcher/local.py +4 -1
- mlrun/model.py +14 -4
- mlrun/model_monitoring/__init__.py +0 -1
- mlrun/model_monitoring/api.py +65 -28
- mlrun/model_monitoring/applications/__init__.py +1 -1
- mlrun/model_monitoring/applications/base.py +299 -128
- mlrun/model_monitoring/applications/context.py +2 -4
- mlrun/model_monitoring/controller.py +132 -58
- mlrun/model_monitoring/db/_schedules.py +38 -29
- mlrun/model_monitoring/db/_stats.py +6 -16
- mlrun/model_monitoring/db/tsdb/__init__.py +9 -7
- mlrun/model_monitoring/db/tsdb/base.py +29 -9
- mlrun/model_monitoring/db/tsdb/preaggregate.py +234 -0
- mlrun/model_monitoring/db/tsdb/stream_graph_steps.py +63 -0
- mlrun/model_monitoring/db/tsdb/timescaledb/queries/timescaledb_metrics_queries.py +414 -0
- mlrun/model_monitoring/db/tsdb/timescaledb/queries/timescaledb_predictions_queries.py +376 -0
- mlrun/model_monitoring/db/tsdb/timescaledb/queries/timescaledb_results_queries.py +590 -0
- mlrun/model_monitoring/db/tsdb/timescaledb/timescaledb_connection.py +434 -0
- mlrun/model_monitoring/db/tsdb/timescaledb/timescaledb_connector.py +541 -0
- mlrun/model_monitoring/db/tsdb/timescaledb/timescaledb_operations.py +808 -0
- mlrun/model_monitoring/db/tsdb/timescaledb/timescaledb_schema.py +502 -0
- mlrun/model_monitoring/db/tsdb/timescaledb/timescaledb_stream.py +163 -0
- mlrun/model_monitoring/db/tsdb/timescaledb/timescaledb_stream_graph_steps.py +60 -0
- mlrun/model_monitoring/db/tsdb/timescaledb/utils/timescaledb_dataframe_processor.py +141 -0
- mlrun/model_monitoring/db/tsdb/timescaledb/utils/timescaledb_query_builder.py +585 -0
- mlrun/model_monitoring/db/tsdb/timescaledb/writer_graph_steps.py +73 -0
- mlrun/model_monitoring/db/tsdb/v3io/stream_graph_steps.py +20 -9
- mlrun/model_monitoring/db/tsdb/v3io/v3io_connector.py +235 -51
- mlrun/model_monitoring/features_drift_table.py +2 -1
- mlrun/model_monitoring/helpers.py +30 -6
- mlrun/model_monitoring/stream_processing.py +34 -28
- mlrun/model_monitoring/writer.py +224 -4
- mlrun/package/__init__.py +2 -1
- mlrun/platforms/__init__.py +0 -43
- mlrun/platforms/iguazio.py +8 -4
- mlrun/projects/operations.py +17 -11
- mlrun/projects/pipelines.py +2 -2
- mlrun/projects/project.py +187 -123
- mlrun/run.py +95 -21
- mlrun/runtimes/__init__.py +2 -186
- mlrun/runtimes/base.py +103 -25
- mlrun/runtimes/constants.py +225 -0
- mlrun/runtimes/daskjob.py +5 -2
- mlrun/runtimes/databricks_job/databricks_runtime.py +2 -1
- mlrun/runtimes/local.py +5 -2
- mlrun/runtimes/mounts.py +20 -2
- mlrun/runtimes/nuclio/__init__.py +12 -7
- mlrun/runtimes/nuclio/api_gateway.py +36 -6
- mlrun/runtimes/nuclio/application/application.py +339 -40
- mlrun/runtimes/nuclio/function.py +222 -72
- mlrun/runtimes/nuclio/serving.py +132 -42
- mlrun/runtimes/pod.py +213 -21
- mlrun/runtimes/utils.py +49 -9
- mlrun/secrets.py +99 -14
- mlrun/serving/__init__.py +2 -0
- mlrun/serving/remote.py +84 -11
- mlrun/serving/routers.py +26 -44
- mlrun/serving/server.py +138 -51
- mlrun/serving/serving_wrapper.py +6 -2
- mlrun/serving/states.py +997 -283
- mlrun/serving/steps.py +62 -0
- mlrun/serving/system_steps.py +149 -95
- mlrun/serving/v2_serving.py +9 -10
- mlrun/track/trackers/mlflow_tracker.py +29 -31
- mlrun/utils/helpers.py +292 -94
- mlrun/utils/http.py +9 -2
- mlrun/utils/notifications/notification/base.py +18 -0
- mlrun/utils/notifications/notification/git.py +3 -5
- mlrun/utils/notifications/notification/mail.py +39 -16
- mlrun/utils/notifications/notification/slack.py +2 -4
- mlrun/utils/notifications/notification/webhook.py +2 -5
- mlrun/utils/notifications/notification_pusher.py +3 -3
- mlrun/utils/version/version.json +2 -2
- mlrun/utils/version/version.py +3 -4
- {mlrun-1.10.0rc18.dist-info → mlrun-1.11.0rc16.dist-info}/METADATA +63 -74
- {mlrun-1.10.0rc18.dist-info → mlrun-1.11.0rc16.dist-info}/RECORD +161 -143
- mlrun/api/schemas/__init__.py +0 -259
- mlrun/db/auth_utils.py +0 -152
- mlrun/model_monitoring/db/tsdb/tdengine/schemas.py +0 -344
- mlrun/model_monitoring/db/tsdb/tdengine/stream_graph_steps.py +0 -75
- mlrun/model_monitoring/db/tsdb/tdengine/tdengine_connection.py +0 -281
- mlrun/model_monitoring/db/tsdb/tdengine/tdengine_connector.py +0 -1266
- {mlrun-1.10.0rc18.dist-info → mlrun-1.11.0rc16.dist-info}/WHEEL +0 -0
- {mlrun-1.10.0rc18.dist-info → mlrun-1.11.0rc16.dist-info}/entry_points.txt +0 -0
- {mlrun-1.10.0rc18.dist-info → mlrun-1.11.0rc16.dist-info}/licenses/LICENSE +0 -0
- {mlrun-1.10.0rc18.dist-info → mlrun-1.11.0rc16.dist-info}/top_level.txt +0 -0
mlrun/utils/helpers.py
CHANGED
@@ -11,7 +11,6 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-
 import asyncio
 import base64
 import enum
@@ -22,6 +21,7 @@ import inspect
 import itertools
 import json
 import os
+import pathlib
 import re
 import string
 import sys
@@ -30,7 +30,7 @@ import typing
 import uuid
 import warnings
 from copy import deepcopy
-from datetime import datetime, timedelta, timezone
+from datetime import UTC, datetime, timedelta, timezone
 from importlib import import_module, reload
 from os import path
 from types import ModuleType
@@ -46,6 +46,8 @@ import pytz
 import semver
 import yaml
 from dateutil import parser
+from packaging.requirements import Requirement
+from packaging.utils import canonicalize_name
 from pandas import Timedelta, Timestamp
 from yaml.representer import RepresenterError

@@ -62,6 +64,7 @@ import mlrun_pipelines.models
 import mlrun_pipelines.utils
 from mlrun.common.constants import MYSQL_MEDIUMBLOB_SIZE_BYTES
 from mlrun.common.schemas import ArtifactCategories
+from mlrun.common.schemas.hub import HubSourceType
 from mlrun.config import config
 from mlrun_pipelines.models import PipelineRun

@@ -250,6 +253,40 @@ def verify_field_regex(
     return False


+def validate_function_name(name: str) -> None:
+    """
+    Validate that a function name conforms to Kubernetes DNS-1123 label requirements.
+
+    Function names for Kubernetes resources must:
+    - Be lowercase alphanumeric characters or '-'
+    - Start and end with an alphanumeric character
+    - Be at most 63 characters long
+
+    This validation should be called AFTER normalize_name() has been applied.
+
+    Refer to https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#dns-label-names
+
+    :param name: The function name to validate (after normalization)
+    :raises MLRunInvalidArgumentError: If the function name is invalid for Kubernetes
+    """
+    if not name:
+        return
+
+    verify_field_regex(
+        "function.metadata.name",
+        name,
+        mlrun.utils.regex.dns_1123_label,
+        raise_on_failure=True,
+        log_message=(
+            f"Function name '{name}' is invalid. "
+            "Kubernetes function names must be DNS-1123 labels: "
+            "lowercase alphanumeric characters or '-', "
+            "starting and ending with an alphanumeric character, "
+            "and at most 63 characters long."
+        ),
+    )
+
+
 def validate_builder_source(
     source: str, pull_at_runtime: bool = False, workdir: Optional[str] = None
 ):
@@ -425,7 +462,7 @@ def get_pretty_types_names(types)
     return types[0].__name__


-def now_date(tz: timezone =
+def now_date(tz: timezone = UTC) -> datetime:
     return datetime.now(tz=tz)


@@ -440,7 +477,7 @@ def datetime_to_mysql_ts(datetime_object: datetime) -> datetime:
     :return: A MySQL-compatible timestamp string with millisecond precision.
     """
     if not datetime_object.tzinfo:
-        datetime_object = datetime_object.replace(tzinfo=
+        datetime_object = datetime_object.replace(tzinfo=UTC)

     # Round to the nearest millisecond
     ms = round(datetime_object.microsecond / 1000) * 1000
@@ -451,7 +488,7 @@ def datetime_to_mysql_ts(datetime_object: datetime) -> datetime:
     return datetime_object.replace(microsecond=ms)


-def datetime_min(tz: timezone =
+def datetime_min(tz: timezone = UTC) -> datetime:
     return datetime(1970, 1, 1, tzinfo=tz)


@@ -464,21 +501,49 @@ def to_date_str(d):
     return ""


-def normalize_name(name: str
+def normalize_name(name: str):
     # TODO: Must match
     # [a-z0-9]([-a-z0-9]*[a-z0-9])?(\\.[a-z0-9]([-a-z0-9]*[a-z0-9])?
     name = re.sub(r"\s+", "-", name)
     if "_" in name:
-        if verbose:
-            warnings.warn(
-                "Names with underscore '_' are about to be deprecated, use dashes '-' instead. "
-                f"Replacing '{name}' underscores with dashes.",
-                FutureWarning,
-            )
         name = name.replace("_", "-")
     return name.lower()


+def ensure_batch_job_suffix(
+    function_name: typing.Optional[str],
+) -> tuple[typing.Optional[str], bool, str]:
+    """
+    Ensure that a function name has the batch job suffix appended to prevent database collision.
+
+    This helper is used by to_job() methods in runtimes that convert online functions (serving, local)
+    to batch processing jobs. The suffix prevents the job from overwriting the original function in
+    the database when both are stored with the same (project, name) key.
+
+    :param function_name: The original function name (can be None or empty string)
+
+    :return: A tuple of (modified_name, was_renamed, suffix) where:
+        - modified_name: The function name with the batch suffix (if not already present),
+          or empty string if input was empty
+        - was_renamed: True if the suffix was added, False if it was already present or if name was empty/None
+        - suffix: The suffix value that was used (or would have been used)
+
+    """
+    suffix = mlrun_constants.RESERVED_BATCH_JOB_SUFFIX
+
+    # Handle None or empty string
+    if not function_name:
+        return function_name, False, suffix
+
+    if not function_name.endswith(suffix):
+        return (
+            f"{function_name}{suffix}",
+            True,
+            suffix,
+        )
+    return function_name, False, suffix
+
+
 class LogBatchWriter:
     def __init__(self, func, batch=16, maxtime=5):
         self.batch = batch
@@ -508,9 +573,14 @@ def get_in(obj, keys, default=None):
     if isinstance(keys, str):
         keys = keys.split(".")
     for key in keys:
-        if
+        if obj is None:
             return default
-        obj
+        if isinstance(obj, dict):
+            if key not in obj:
+                return default
+            obj = obj[key]
+        else:
+            obj = getattr(obj, key, default)
     return obj


@@ -703,11 +773,11 @@ def dict_to_yaml(struct) -> str:
 # solve numpy json serialization
 class MyEncoder(json.JSONEncoder):
     def default(self, obj):
-        if isinstance(obj,
+        if isinstance(obj, int | str | float | list | dict):
             return obj
-        elif isinstance(obj,
+        elif isinstance(obj, np.integer | np.int64):
             return int(obj)
-        elif isinstance(obj,
+        elif isinstance(obj, np.floating | np.float64):
             return float(obj)
         elif isinstance(obj, np.ndarray):
             return obj.tolist()
@@ -808,17 +878,27 @@ def remove_tag_from_artifact_uri(uri: str) -> Optional[str]:
     return uri if not add_store else DB_SCHEMA + "://" + uri


-def
+def check_if_hub_uri(uri: str) -> bool:
+    return uri.startswith(hub_prefix)
+
+
+def extend_hub_uri_if_needed(
+    uri: str,
+    asset_type: HubSourceType = HubSourceType.functions,
+    file: str = "function.yaml",
+) -> tuple[str, bool]:
     """
-    Retrieve the full uri of
+    Retrieve the full uri of an object in the hub.

     :param uri: structure: "hub://[<source>/]<item-name>[:<tag>]"
+    :param asset_type: The type of the hub item (functions, modules, etc.)
+    :param file: The file name inside the hub item directory (default: function.yaml)

     :return: A tuple of:
              [0] = Extended URI of item
              [1] = Is hub item (bool)
     """
-    is_hub_uri = uri
+    is_hub_uri = check_if_hub_uri(uri)
     if not is_hub_uri:
         return uri, is_hub_uri

@@ -835,10 +915,10 @@ def extend_hub_uri_if_needed(uri) -> tuple[str, bool]:
         raise mlrun.errors.MLRunInvalidArgumentError(
             "Invalid character '/' in function name or source name"
         ) from exc
-    name = normalize_name(name=name
+    name = normalize_name(name=name)
     if not source_name:
         # Searching item in all sources
-        sources = db.list_hub_sources(item_name=name, tag=tag)
+        sources = db.list_hub_sources(item_name=name, tag=tag, item_type=asset_type)
         if not sources:
             raise mlrun.errors.MLRunNotFoundError(
                 f"Item={name}, tag={tag} not found in any hub source"
@@ -848,10 +928,10 @@ def extend_hub_uri_if_needed(uri) -> tuple[str, bool]:
     else:
         # Specific source is given
         indexed_source = db.get_hub_source(source_name)
-    # hub
+    # hub directories name are with underscores instead of hyphens
     name = name.replace("-", "_")
-
-    return indexed_source.source.get_full_uri(
+    suffix = f"{name}/{tag}/src/{file}"
+    return indexed_source.source.get_full_uri(suffix, asset_type), is_hub_uri


 def gen_md_table(header, rows=None):
@@ -919,10 +999,20 @@ def enrich_image_url(
     mlrun_version = config.images_tag or client_version or server_version
     tag = mlrun_version or ""

-    #
-
+    # starting mlrun 1.10.0-rc0 we want to enrich the kfp image with the python version
+    # e.g for 1.9 we have a single mlrun-kfp image that supports only python 3.9
+    enrich_kfp_python_version = (
+        "mlrun-kfp" in image_url
+        and mlrun_version
+        and semver.VersionInfo.is_valid(mlrun_version)
+        and semver.VersionInfo.parse(mlrun_version)
+        >= semver.VersionInfo.parse("1.10.0-rc0")
+    )
+
+    if "mlrun-kfp" not in image_url or enrich_kfp_python_version:
         tag += resolve_image_tag_suffix(
-            mlrun_version=mlrun_version,
+            mlrun_version=mlrun_version,
+            python_version=client_python_version,
         )

     # it's an mlrun image if the repository is mlrun
@@ -935,8 +1025,10 @@ def enrich_image_url(
     # use the tag from image URL if available, else fallback to the given tag
     tag = image_tag or tag
     if tag:
+        # Remove '-pyXY' suffix if present, since the compatibility check expects a valid semver string
+        tag_for_compatibility = re.sub(r"-py\d+$", "", tag)
         if mlrun.utils.helpers.validate_component_version_compatibility(
-            "mlrun-client", "1.10.0-rc0", mlrun_client_version=
+            "mlrun-client", "1.10.0-rc0", mlrun_client_version=tag_for_compatibility
         ):
             warnings.warn(
                 "'mlrun/ml-base' image is deprecated in 1.10.0 and will be removed in 1.12.0, "
@@ -948,8 +1040,15 @@ def enrich_image_url(
     else:
         image_url = "mlrun/mlrun"

-    if is_mlrun_image and tag
-
+    if is_mlrun_image and tag:
+        if ":" not in image_url:
+            image_url = f"{image_url}:{tag}"
+        elif enrich_kfp_python_version:
+            # For mlrun-kfp >= 1.10.0-rc0, append python suffix to existing tag
+            python_suffix = resolve_image_tag_suffix(
+                mlrun_version, client_python_version
+            )
+            image_url = f"{image_url}{python_suffix}" if python_suffix else image_url

     registry = (
         config.images_registry if is_mlrun_image else config.vendor_images_registry
@@ -1219,55 +1318,6 @@ def get_workflow_url(
     return url


-def get_kfp_list_runs_filter(
-    project_name: Optional[str] = None,
-    end_date: Optional[str] = None,
-    start_date: Optional[str] = None,
-) -> str:
-    """
-    Generates a filter for listing Kubeflow Pipelines (KFP) runs.
-
-    :param project_name: The name of the project. If "*", it won't filter by project.
-    :param end_date: The latest creation date for filtering runs (ISO 8601 format).
-    :param start_date: The earliest creation date for filtering runs (ISO 8601 format).
-    :return: A JSON-formatted filter string for KFP.
-    """
-
-    # KFP filter operation codes
-    kfp_less_than_or_equal_op = 7  # '<='
-    kfp_greater_than_or_equal_op = 5  # '>='
-    kfp_substring_op = 9  # Substring match
-
-    filters = {"predicates": []}
-
-    if end_date:
-        filters["predicates"].append(
-            {
-                "key": "created_at",
-                "op": kfp_less_than_or_equal_op,
-                "timestamp_value": end_date,
-            }
-        )
-
-    if project_name and project_name != "*":
-        filters["predicates"].append(
-            {
-                "key": "name",
-                "op": kfp_substring_op,
-                "string_value": project_name,
-            }
-        )
-    if start_date:
-        filters["predicates"].append(
-            {
-                "key": "created_at",
-                "op": kfp_greater_than_or_equal_op,
-                "timestamp_value": start_date,
-            }
-        )
-    return json.dumps(filters)
-
-
 def validate_and_convert_date(date_input: str) -> str:
     """
     Converts any recognizable date string into a standardized RFC 3339 format.
@@ -1483,9 +1533,9 @@ def datetime_from_iso(time_str: str) -> Optional[datetime]:
         return
     dt = parser.isoparse(time_str)
     if dt.tzinfo is None:
-        dt = dt.replace(tzinfo=
+        dt = dt.replace(tzinfo=UTC)
     # ensure the datetime is in UTC, converting if necessary
-    return dt.astimezone(
+    return dt.astimezone(UTC)


 def datetime_to_iso(time_obj: Optional[datetime]) -> Optional[str]:
@@ -1499,7 +1549,7 @@ def enrich_datetime_with_tz_info(timestamp_string) -> Optional[datetime]:
         return timestamp_string

     if timestamp_string and not mlrun.utils.helpers.has_timezone(timestamp_string):
-        timestamp_string += datetime.now(
+        timestamp_string += datetime.now(UTC).astimezone().strftime("%z")

     for _format in [
         # e.g: 2021-08-25 12:00:00.000Z
@@ -1530,7 +1580,7 @@ def format_datetime(dt: datetime, fmt: Optional[str] = None) -> str:

     # If the datetime is naive
     if dt.tzinfo is None:
-        dt = dt.replace(tzinfo=
+        dt = dt.replace(tzinfo=UTC)

     # TODO: Once Python 3.12 is the minimal version, use %:z to format the timezone offset with a colon
     formatted_time = dt.strftime(fmt or "%Y-%m-%d %H:%M:%S.%f%z")
@@ -1692,7 +1742,7 @@ def format_run(run: PipelineRun, with_project=False) -> dict:
     for key, value in run.items():
         if (
             key in time_keys
-            and isinstance(value,
+            and isinstance(value, str | datetime)
             and parser.parse(str(value)).year == 1970
         ):
             run[key] = None
@@ -1865,10 +1915,7 @@ async def run_in_threadpool(func, *args, **kwargs):
     Run a sync-function in the loop default thread pool executor pool and await its result.
     Note that this function is not suitable for CPU-bound tasks, as it will block the event loop.
     """
-
-    if kwargs:
-        func = functools.partial(func, **kwargs)
-    return await loop.run_in_executor(None, func, *args)
+    return await asyncio.to_thread(func, *args, **kwargs)


 def is_explicit_ack_supported(context):
@@ -2102,7 +2149,7 @@ def validate_single_def_handler(function_kind: str, code: str):
     # it would override MLRun's wrapper
     if function_kind == "mlrun":
         # Find all lines that start with "def handler("
-        pattern = re.compile(r"^def handler\(", re.MULTILINE)
+        pattern = re.compile(r"^(?:async\s+)?def handler\(", re.MULTILINE)
         matches = pattern.findall(code)

         # Only MLRun's wrapper handler (footer) can be in the code
@@ -2410,8 +2457,29 @@ def split_path(path: str) -> typing.Union[str, list[str], None]:


 def get_data_from_path(
-    path: typing.Union[str, list[str], None], data: dict
-) ->
+    path: typing.Union[str, list[str], None], data: typing.Union[dict, list]
+) -> Any:
+    if data and isinstance(data, list):
+        output_data = []
+        for item in data:
+            if isinstance(item, dict):
+                output_data.append(get_data_from_dict(path, item))
+            elif path is None:
+                output_data = data
+            else:
+                raise mlrun.errors.MLRunInvalidArgumentError(
+                    "If data is a list of non-dict values, path must be None"
+                )
+        return output_data
+    elif isinstance(data, dict):
+        return get_data_from_dict(path, data)
+    else:
+        raise mlrun.errors.MLRunInvalidArgumentError(
+            "Expected data be of type dict or list"
+        )
+
+
+def get_data_from_dict(path: typing.Union[str, list[str], None], data: dict) -> Any:
     if isinstance(path, str):
         output_data = data.get(path)
     elif isinstance(path, list):
@@ -2424,6 +2492,136 @@ def get_data_from_path(
         raise mlrun.errors.MLRunInvalidArgumentError(
             "Expected path be of type str or list of str or None"
         )
-    if isinstance(output_data, (int, float)):
-        output_data = [output_data]
     return output_data
+
+
+def is_valid_port(port: int, raise_on_error: bool = False) -> bool:
+    if not port:
+        return False
+    if 0 <= port <= 65535:
+        return True
+    if raise_on_error:
+        raise ValueError("Port must be in the range 0–65535")
+    return False
+
+
+def set_data_by_path(
+    path: typing.Union[str, list[str], None], data: dict, value
+) -> None:
+    if path is None:
+        if not isinstance(value, dict):
+            raise ValueError("When path is None, value must be a dictionary.")
+        data.update(value)
+
+    elif isinstance(path, str):
+        data[path] = value
+
+    elif isinstance(path, list):
+        current = data
+        for key in path[:-1]:
+            if key not in current or not isinstance(current[key], dict):
+                current[key] = {}
+            current = current[key]
+        current[path[-1]] = value
+    else:
+        raise mlrun.errors.MLRunInvalidArgumentError(
+            "Expected path to be of type str or list of str"
+        )
+
+
+def _normalize_requirements(reqs: typing.Union[str, list[str], None]) -> list[str]:
+    if reqs is None:
+        return []
+    if isinstance(reqs, str):
+        s = reqs.strip()
+        return [s] if s else []
+    return [s.strip() for s in reqs if s and s.strip()]
+
+
+def merge_requirements(
+    reqs_priority: typing.Union[str, list[str], None],
+    reqs_secondary: typing.Union[str, list[str], None],
+) -> list[str]:
+    """
+    Merge two requirement collections into a union. If the same package
+    appears in both, the specifier from reqs_priority wins.
+
+    Args:
+        reqs_priority: str | list[str] | None (priority input)
+        reqs_secondary: str | list[str] | None
+
+    Returns:
+        list[str]: pip-style requirements.
+    """
+    merged: dict[str, Requirement] = {}
+
+    for r in _normalize_requirements(reqs_secondary) + _normalize_requirements(
+        reqs_priority
+    ):
+        req = Requirement(r)
+        merged[canonicalize_name(req.name)] = req
+
+    return [str(req) for req in merged.values()]
+
+
+def get_source_and_working_dir_paths(source_file_path) -> (pathlib.Path, pathlib.Path):
+    source_file_path_object = pathlib.Path(source_file_path).resolve()
+    working_dir_path_object = pathlib.Path(".").resolve()
+    return source_file_path_object, working_dir_path_object
+
+
+def get_relative_module_name_from_path(
+    source_file_path_object, working_dir_path_object
+) -> str:
+    relative_path_to_source_file = source_file_path_object.relative_to(
+        working_dir_path_object
+    )
+    return ".".join(relative_path_to_source_file.with_suffix("").parts)
+
+
+def iguazio_v4_only(function):
+    @functools.wraps(function)
+    def wrapper(*args, **kwargs):
+        if not config.is_iguazio_v4_mode():
+            raise mlrun.errors.MLRunRuntimeError(
+                "This method is only supported in an Iguazio V4 system."
+            )
+        return function(*args, **kwargs)
+
+    return wrapper
+
+
+def raise_or_log_error(message: str, raise_on_error: bool = True):
+    """
+    Handle errors by either raising an exception or logging a warning.
+
+    :param message: The error message.
+    :param raise_on_error: If True, raises an exception. Otherwise, logs a warning.
+    """
+    if raise_on_error:
+        raise mlrun.errors.MLRunRuntimeError(message)
+    logger.warning(message)
+
+
+def is_running_in_runtime() -> bool:
+    """
+    Check if the code is running inside an MLRun runtime environment.
+    :return: True if running inside an MLRun runtime, False otherwise.
+    """
+    # Check for the presence of the MLRUN_RUNTIME_KIND environment variable
+    return True if os.getenv("MLRUN_RUNTIME_KIND") else False
+
+
+def is_async_serving_graph(function_spec) -> bool:
+    """Check if the serving graph contains any async nodes."""
+    if not function_spec:
+        return False
+
+    if (
+        hasattr(function_spec, "graph")
+        and hasattr(function_spec.graph, "engine")
+        and function_spec.graph.engine == "async"
+    ):
+        return True

     return False
mlrun/utils/http.py
CHANGED
@@ -68,6 +68,7 @@ class HTTPSessionWithRetry(requests.Session):
         retry_on_exception=True,
         retry_on_status=True,
         retry_on_post=False,
+        retry_on_put=True,
         verbose=False,
     ):
         """
@@ -77,6 +78,8 @@ class HTTPSessionWithRetry(requests.Session):
         :param retry_on_exception: Retry on the HTTP_RETRYABLE_EXCEPTIONS. defaults to True.
         :param retry_on_status: Retry on error status codes. defaults to True.
         :param retry_on_post: Retry on POST requests. defaults to False.
+        :param retry_on_put: Whether to allow retries on PUT requests. Actual behavior may exclude specific
+            paths from retrying. defaults to True.
         :param verbose: Print debug messages.
         """
         super().__init__()
@@ -86,7 +89,7 @@ class HTTPSessionWithRetry(requests.Session):
         self.retry_on_exception = retry_on_exception
         self.verbose = verbose
         self._logger = logger.get_child("http-client")
-        self._retry_methods = self._resolve_retry_methods(retry_on_post)
+        self._retry_methods = self._resolve_retry_methods(retry_on_post, retry_on_put)

         if retry_on_status:
             self._http_adapter = requests.adapters.HTTPAdapter(
@@ -200,9 +203,13 @@ class HTTPSessionWithRetry(requests.Session):
     def _method_retryable(self, method: str):
         return method in self._retry_methods

-    def _resolve_retry_methods(
+    def _resolve_retry_methods(
+        self, retry_on_post: bool = False, retry_on_put: bool = True
+    ) -> frozenset[str]:
         methods = urllib3.util.retry.Retry.DEFAULT_ALLOWED_METHODS
         methods = methods.union({"PATCH"})
+        if not retry_on_put:
+            methods = methods.difference({"PUT"})
         if retry_on_post:
             methods = methods.union({"POST"})
         return frozenset(methods)
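
A small sketch of the new retry_on_put flag shown above. It assumes the remaining HTTPSessionWithRetry constructor arguments (not visible in this hunk) keep their defaults, and the URL is a placeholder.

from mlrun.utils.http import HTTPSessionWithRetry

# PUT is retried by default; opt out to drop it from the allowed retry methods.
session = HTTPSessionWithRetry(retry_on_put=False)
response = session.put("https://example.com/api/items/1", json={"state": "ready"})
response.raise_for_status()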
mlrun/utils/notifications/notification/base.py
CHANGED

@@ -15,11 +15,29 @@
 import asyncio
 import typing
 from copy import deepcopy
+from typing import Optional
+
+import aiohttp

 import mlrun.common.schemas
 import mlrun.lists


+class TimedHTTPClient:
+    def __init__(self, timeout: Optional[float] = 30.0):
+        """
+        HTTP client wrapper with built-in timeout.
+
+        Args:
+            timeout: Request timeout in seconds (default: 30.0)
+        """
+        self.timeout = aiohttp.ClientTimeout(total=timeout)
+
+    def session(self, **kwargs) -> aiohttp.ClientSession:
+        """Create a new ClientSession with the configured timeout and additional parameters."""
+        return aiohttp.ClientSession(timeout=self.timeout, **kwargs)
+
+
 class NotificationBase:
     def __init__(
         self,
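
A short sketch of the TimedHTTPClient wrapper introduced above, mirroring how GitNotification uses it in the next file; the URL and payload are placeholders.

import asyncio

from mlrun.utils.notifications.notification.base import TimedHTTPClient


async def post_with_timeout() -> int:
    # Every request on the session shares the configured total timeout (seconds).
    async with TimedHTTPClient(timeout=10.0).session() as session:
        resp = await session.post("https://example.com/hook", json={"status": "ok"})
        return resp.status


asyncio.run(post_with_timeout())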
mlrun/utils/notifications/notification/git.py
CHANGED

@@ -16,13 +16,11 @@ import json
 import os
 import typing

-import aiohttp
-
 import mlrun.common.schemas
 import mlrun.errors
 import mlrun.lists

-from .base import NotificationBase
+from .base import NotificationBase, TimedHTTPClient


 class GitNotification(NotificationBase):
@@ -144,11 +142,11 @@
         issue = event["number"]
         headers = {
             "Accept": "application/vnd.github.v3+json",
-
+            mlrun.common.schemas.HeaderNames.authorization: f"token {token}",
         }
         url = f"https://{server}/repos/{repo}/issues/{issue}/comments"

-        async with
+        async with TimedHTTPClient().session() as session:
            resp = await session.post(url, headers=headers, json={"body": message})
            if not resp.ok:
                resp_text = await resp.text()