mlrun 1.6.0rc31__py3-none-any.whl → 1.6.0rc33__py3-none-any.whl
- mlrun/datastore/datastore.py +1 -1
- mlrun/datastore/datastore_profile.py +2 -2
- mlrun/feature_store/api.py +56 -56
- mlrun/feature_store/feature_vector.py +120 -0
- mlrun/projects/project.py +5 -6
- mlrun/serving/routers.py +14 -6
- mlrun/utils/helpers.py +18 -0
- mlrun/utils/version/version.json +2 -2
- {mlrun-1.6.0rc31.dist-info → mlrun-1.6.0rc33.dist-info}/METADATA +1 -1
- {mlrun-1.6.0rc31.dist-info → mlrun-1.6.0rc33.dist-info}/RECORD +14 -14
- {mlrun-1.6.0rc31.dist-info → mlrun-1.6.0rc33.dist-info}/LICENSE +0 -0
- {mlrun-1.6.0rc31.dist-info → mlrun-1.6.0rc33.dist-info}/WHEEL +0 -0
- {mlrun-1.6.0rc31.dist-info → mlrun-1.6.0rc33.dist-info}/entry_points.txt +0 -0
- {mlrun-1.6.0rc31.dist-info → mlrun-1.6.0rc33.dist-info}/top_level.txt +0 -0
mlrun/datastore/datastore.py
CHANGED

@@ -195,7 +195,7 @@ class StoreManager:
         store_key = f"{schema}://{endpoint}"
 
         if schema == "ds":
-            datastore_profile = datastore_profile_read(url, project_name)
+            datastore_profile = datastore_profile_read(url, project_name, secrets)
             if secrets and datastore_profile.secrets():
                 secrets = merge(secrets, datastore_profile.secrets())
             else:
mlrun/datastore/datastore_profile.py
CHANGED

@@ -367,7 +367,7 @@ class DatastoreProfile2Json(pydantic.BaseModel):
         )
 
 
-def datastore_profile_read(url, project_name=""):
+def datastore_profile_read(url, project_name="", secrets: dict = None):
     parsed_url = urlparse(url)
     if parsed_url.scheme.lower() != "ds":
         raise mlrun.errors.MLRunInvalidArgumentError(

@@ -385,7 +385,7 @@ def datastore_profile_read(url, project_name=""):
     project_ds_name_private = DatastoreProfile.generate_secret_key(
         profile_name, project_name
    )
-    private_body = get_secret_or_env(project_ds_name_private)
+    private_body = get_secret_or_env(project_ds_name_private, secret_provider=secrets)
    if not public_profile or not private_body:
        raise mlrun.errors.MLRunInvalidArgumentError(
            f"Unable to retrieve the datastore profile '{url}' from either the server or local environment. "
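Taken together, the datastore.py and datastore_profile.py changes above thread caller-supplied secrets into ds:// profile resolution, so the private profile body can be looked up in that dict before falling back to the environment. A minimal sketch of a call that exercises the new parameter (the profile URL, project name, and secret key/value below are placeholders, not taken from this release):

    from mlrun.datastore.datastore_profile import datastore_profile_read

    # placeholder client-side secrets dict; datastore_profile_read now forwards it
    # to get_secret_or_env(..., secret_provider=secrets) when resolving the profile
    client_secrets = {"some-generated-profile-key": "<private-profile-body>"}  # hypothetical
    profile = datastore_profile_read(
        "ds://my-profile", project_name="my-project", secrets=client_secrets
    )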
mlrun/feature_store/api.py
CHANGED

@@ -115,44 +115,6 @@ def get_offline_features(
     spark_service: str = None,
     timestamp_for_filtering: Union[str, Dict[str, str]] = None,
 ):
-    return _get_offline_features(
-        feature_vector,
-        entity_rows,
-        entity_timestamp_column,
-        target,
-        run_config,
-        drop_columns,
-        start_time,
-        end_time,
-        with_indexes,
-        update_stats,
-        engine,
-        engine_args,
-        query,
-        order_by,
-        spark_service,
-        timestamp_for_filtering,
-    )
-
-
-def _get_offline_features(
-    feature_vector: Union[str, FeatureVector],
-    entity_rows=None,
-    entity_timestamp_column: str = None,
-    target: DataTargetBase = None,
-    run_config: RunConfig = None,
-    drop_columns: List[str] = None,
-    start_time: Union[str, datetime] = None,
-    end_time: Union[str, datetime] = None,
-    with_indexes: bool = False,
-    update_stats: bool = False,
-    engine: str = None,
-    engine_args: dict = None,
-    query: str = None,
-    order_by: Union[str, List[str]] = None,
-    spark_service: str = None,
-    timestamp_for_filtering: Union[str, Dict[str, str]] = None,
-) -> Union[OfflineVectorResponse, RemoteVectorResponse]:
     """retrieve offline feature vector results
 
     specify a feature vector object/uri and retrieve the desired features, their metadata

@@ -213,6 +175,44 @@ def _get_offline_features(
                                     merge process using start_time and end_time params.
 
     """
+    return _get_offline_features(
+        feature_vector,
+        entity_rows,
+        entity_timestamp_column,
+        target,
+        run_config,
+        drop_columns,
+        start_time,
+        end_time,
+        with_indexes,
+        update_stats,
+        engine,
+        engine_args,
+        query,
+        order_by,
+        spark_service,
+        timestamp_for_filtering,
+    )
+
+
+def _get_offline_features(
+    feature_vector: Union[str, FeatureVector],
+    entity_rows=None,
+    entity_timestamp_column: str = None,
+    target: DataTargetBase = None,
+    run_config: RunConfig = None,
+    drop_columns: List[str] = None,
+    start_time: Union[str, datetime] = None,
+    end_time: Union[str, datetime] = None,
+    with_indexes: bool = False,
+    update_stats: bool = False,
+    engine: str = None,
+    engine_args: dict = None,
+    query: str = None,
+    order_by: Union[str, List[str]] = None,
+    spark_service: str = None,
+    timestamp_for_filtering: Union[str, Dict[str, str]] = None,
+) -> Union[OfflineVectorResponse, RemoteVectorResponse]:
     if entity_rows is None and entity_timestamp_column is not None:
         raise mlrun.errors.MLRunInvalidArgumentError(
             "entity_timestamp_column param "

@@ -282,24 +282,6 @@ def get_online_feature_service(
     update_stats: bool = False,
     entity_keys: List[str] = None,
 ):
-    return _get_online_feature_service(
-        feature_vector,
-        run_config,
-        fixed_window_type,
-        impute_policy,
-        update_stats,
-        entity_keys,
-    )
-
-
-def _get_online_feature_service(
-    feature_vector: Union[str, FeatureVector],
-    run_config: RunConfig = None,
-    fixed_window_type: FixedWindowType = FixedWindowType.LastClosedWindow,
-    impute_policy: dict = None,
-    update_stats: bool = False,
-    entity_keys: List[str] = None,
-) -> OnlineVectorService:
     """initialize and return online feature vector service api,
     returns :py:class:`~mlrun.feature_store.OnlineVectorService`
 

@@ -363,6 +345,24 @@ def _get_online_feature_service(
     :return: Initialize the `OnlineVectorService`.
             Will be used in subclasses where `support_online=True`.
     """
+    return _get_online_feature_service(
+        feature_vector,
+        run_config,
+        fixed_window_type,
+        impute_policy,
+        update_stats,
+        entity_keys,
+    )
+
+
+def _get_online_feature_service(
+    feature_vector: Union[str, FeatureVector],
+    run_config: RunConfig = None,
+    fixed_window_type: FixedWindowType = FixedWindowType.LastClosedWindow,
+    impute_policy: dict = None,
+    update_stats: bool = False,
+    entity_keys: List[str] = None,
+) -> OnlineVectorService:
     if isinstance(feature_vector, FeatureVector):
         update_stats = True
         feature_vector = _features_to_vector_and_check_permissions(
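The api.py changes are a pure reshuffle: the docstrings move onto the public get_offline_features / get_online_feature_service wrappers, which still delegate to the private helpers, so existing calls keep working. An illustrative call (the vector URI is a placeholder):

    import mlrun.feature_store as fstore

    # placeholder URI; the public wrapper shown above forwards to _get_offline_features
    resp = fstore.get_offline_features("store://feature-vectors/my-project/my-vector")
    df = resp.to_dataframe()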
mlrun/feature_store/feature_vector.py
CHANGED

@@ -744,6 +744,64 @@ class FeatureVector(ModelObj):
         spark_service: str = None,
         timestamp_for_filtering: Union[str, Dict[str, str]] = None,
     ):
+        """retrieve offline feature vector results
+
+        specify a feature vector object/uri and retrieve the desired features, their metadata
+        and statistics. returns :py:class:`~mlrun.feature_store.OfflineVectorResponse`,
+        results can be returned as a dataframe or written to a target
+
+        The start_time and end_time attributes allow filtering the data to a given time range, they accept
+        string values or pandas `Timestamp` objects, string values can also be relative, for example:
+        "now", "now - 1d2h", "now+5m", where a valid pandas Timedelta string follows the verb "now",
+        for time alignment you can use the verb "floor" e.g. "now -1d floor 1H" will align the time to the last hour
+        (the floor string is passed to pandas.Timestamp.floor(), can use D, H, T, S for day, hour, min, sec alignment).
+        Another option to filter the data is by the `query` argument - can be seen in the example.
+        example::
+
+            features = [
+                "stock-quotes.bid",
+                "stock-quotes.asks_sum_5h",
+                "stock-quotes.ask as mycol",
+                "stocks.*",
+            ]
+            vector = FeatureVector(features=features)
+            vector.get_offline_features(entity_rows=trades, entity_timestamp_column="time", query="ticker in ['GOOG']
+            and bid>100")
+            print(resp.to_dataframe())
+            print(vector.get_stats_table())
+            resp.to_parquet("./out.parquet")
+
+        :param entity_rows: dataframe with entity rows to join with
+        :param target: where to write the results to
+        :param drop_columns: list of columns to drop from the final result
+        :param entity_timestamp_column: timestamp column name in the entity rows dataframe. can be specified
+                                        only if param entity_rows was specified.
+        :param run_config: function and/or run configuration
+                           see :py:class:`~mlrun.feature_store.RunConfig`
+        :param start_time: datetime, low limit of time needed to be filtered. Optional.
+        :param end_time: datetime, high limit of time needed to be filtered. Optional.
+        :param with_indexes: Return vector with/without the entities and the timestamp_key of the feature
+                             sets and with/without entity_timestamp_column and timestamp_for_filtering
+                             columns. This property can be specified also in the feature vector spec
+                             (feature_vector.spec.with_indexes)
+                             (default False)
+        :param update_stats: update features statistics from the requested feature sets on the vector.
+                             (default False).
+        :param engine: processing engine kind ("local", "dask", or "spark")
+        :param engine_args: kwargs for the processing engine
+        :param query: The query string used to filter rows on the output
+        :param spark_service: Name of the spark service to be used (when using a remote-spark runtime)
+        :param order_by: Name or list of names to order by. The name or the names in the list can be the
+                         feature name or the alias of the feature you pass in the feature list.
+        :param timestamp_for_filtering: name of the column to filter by, can be str for all the feature sets or a
+                                        dictionary ({<feature set name>: <timestamp column name>, ...})
+                                        that indicates the timestamp column name for each feature set. Optional.
+                                        By default, the filter executes on the timestamp_key of each feature set.
+                                        Note: the time filtering is performed on each feature set before the
+                                        merge process using start_time and end_time params.
+
+        """
+
         return mlrun.feature_store.api._get_offline_features(
             self,
             entity_rows,

@@ -771,6 +829,68 @@ class FeatureVector(ModelObj):
         update_stats: bool = False,
         entity_keys: List[str] = None,
     ):
+        """initialize and return online feature vector service api,
+        returns :py:class:`~mlrun.feature_store.OnlineVectorService`
+
+        :**usage**:
+            There are two ways to use the function:
+
+            1. As context manager
+
+               Example::
+
+                   with vector_uri.get_online_feature_service() as svc:
+                       resp = svc.get([{"ticker": "GOOG"}, {"ticker": "MSFT"}])
+                       print(resp)
+                       resp = svc.get([{"ticker": "AAPL"}], as_list=True)
+                       print(resp)
+
+               Example with imputing::
+
+                   with vector_uri.get_online_feature_service(entity_keys=['id'],
+                                                              impute_policy={"*": "$mean", "amount": 0)) as svc:
+                       resp = svc.get([{"id": "C123487"}])
+
+            2. as simple function, note that in that option you need to close the session.
+
+               Example::
+
+                   svc = vector_uri.get_online_feature_service(entity_keys=['ticker'])
+                   try:
+                       resp = svc.get([{"ticker": "GOOG"}, {"ticker": "MSFT"}])
+                       print(resp)
+                       resp = svc.get([{"ticker": "AAPL"}], as_list=True)
+                       print(resp)
+
+                   finally:
+                       svc.close()
+
+               Example with imputing::
+
+                   svc = vector_uri.get_online_feature_service(entity_keys=['id'],
+                                                               impute_policy={"*": "$mean", "amount": 0))
+                   try:
+                       resp = svc.get([{"id": "C123487"}])
+                   except Exception as e:
+                       handling exception...
+                   finally:
+                       svc.close()
+
+        :param run_config: function and/or run configuration for remote jobs/services
+        :param impute_policy: a dict with `impute_policy` per feature, the dict key is the feature name and the
+                              dict value indicate which value will be used in case the feature is NaN/empty, the
+                              replaced value can be fixed number for constants or $mean, $max, $min, $std, $count
+                              for statistical values.
+                              "*" is used to specify the default for all features, example: `{"*": "$mean"}`
+        :param fixed_window_type: determines how to query the fixed window values which were previously inserted by
+                                  ingest
+        :param update_stats: update features statistics from the requested feature sets on the vector.
+                             Default: False.
+        :param entity_keys: Entity list of the first feature_set in the vector.
+                            The indexes that are used to query the online service.
+        :return: Initialize the `OnlineVectorService`.
+                 Will be used in subclasses where `support_online=True`.
+        """
         return mlrun.feature_store.api._get_online_feature_service(
             self,
             run_config,
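The relative-time strings documented in the new docstrings map onto plain pandas calls; a small illustrative sketch of what "now - 1d2h floor 1H" amounts to (this is not mlrun's internal parser, just the pandas operations the docstring refers to):

    import pandas as pd

    # "now - 1d2h": subtract a pandas Timedelta from the current timestamp
    ts = pd.Timestamp.now() - pd.Timedelta("1d2h")
    # "floor 1H": align to the last full hour via Timestamp.floor()
    print(ts.floor("1H"))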
mlrun/projects/project.py
CHANGED

@@ -30,7 +30,6 @@ from typing import Callable, Dict, List, Optional, Union
 import dotenv
 import git
 import git.exc
-import inflection
 import kfp
 import nuclio
 import requests

@@ -171,7 +170,7 @@ def new_project(
     :param name: project name
     :param context: project local directory path (default value = "./")
     :param init_git: if True, will git init the context dir
-    :param user_project: add the current
+    :param user_project: add the current username to the provided project name (making it unique per user)
     :param remote: remote Git url
     :param from_template: path to project YAML/zip file that will be used as a template
     :param secrets: key:secret dict or SecretsStore used to download sources

@@ -319,7 +318,7 @@ def load_project(
     :param init_git: if True, will git init the context dir
     :param subpath: project subpath (within the archive)
     :param clone: if True, always clone (delete any existing content)
-    :param user_project: add the current
+    :param user_project: add the current username to the project name (for db:// prefixes)
     :param save: whether to save the created project and artifact in the DB
     :param sync_functions: sync the project's functions into the project object (will be saved to the DB if save=True)
     :param parameters: key/value pairs to add to the project.spec.params

@@ -420,7 +419,7 @@ def get_or_create_project(
     save: bool = True,
     parameters: dict = None,
 ) -> "MlrunProject":
-    """Load a project from MLRun DB, or create/import if
+    """Load a project from MLRun DB, or create/import if it does not exist
 
     MLRun looks for a project.yaml file with project definition and objects in the project root path
     and use it to initialize the project, in addition it runs the project_setup.py file (if it exists)

@@ -620,9 +619,9 @@ def _add_username_to_project_name_if_needed(name, user_project):
         if not name:
             raise ValueError("user_project must be specified together with name")
         username = environ.get("V3IO_USERNAME") or getpass.getuser()
-        normalized_username =
+        normalized_username = mlrun.utils.normalize_project_username(username.lower())
         if username != normalized_username:
-            logger.
+            logger.debug(
                 "Username was normalized to match the required pattern for project name",
                 username=username,
                 normalized_username=normalized_username,
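The clarified user_project docstrings above are easiest to read next to a call; an illustrative sketch (the project name and resulting suffix are hypothetical):

    import mlrun

    # with user_project=True the normalized username is appended to the name,
    # e.g. "fraud-demo" -> "fraud-demo-jane" (actual suffix depends on the user)
    project = mlrun.get_or_create_project("fraud-demo", context="./", user_project=True)
    print(project.name)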
mlrun/serving/routers.py
CHANGED

@@ -1111,7 +1111,7 @@ class EnrichmentModelRouter(ModelRouter):
         url_prefix: str = None,
         health_prefix: str = None,
         feature_vector_uri: str = "",
-        impute_policy: dict =
+        impute_policy: dict = None,
         **kwargs,
     ):
         """Model router with feature enrichment (from the feature store)

@@ -1156,13 +1156,17 @@ class EnrichmentModelRouter(ModelRouter):
         )
 
         self.feature_vector_uri = feature_vector_uri
-        self.impute_policy = impute_policy
+        self.impute_policy = impute_policy or {}
 
         self._feature_service = None
 
     def post_init(self, mode="sync"):
+        from ..feature_store import get_feature_vector
+
         super().post_init(mode)
-        self._feature_service =
+        self._feature_service = get_feature_vector(
+            self.feature_vector_uri
+        ).get_online_feature_service(
             impute_policy=self.impute_policy,
         )
 

@@ -1191,7 +1195,7 @@ class EnrichmentVotingEnsemble(VotingEnsemble):
         executor_type: Union[ParallelRunnerModes, str] = ParallelRunnerModes.thread,
         prediction_col_name: str = None,
         feature_vector_uri: str = "",
-        impute_policy: dict =
+        impute_policy: dict = None,
         **kwargs,
     ):
         """Voting Ensemble with feature enrichment (from the feature store)

@@ -1298,13 +1302,17 @@ class EnrichmentVotingEnsemble(VotingEnsemble):
         )
 
         self.feature_vector_uri = feature_vector_uri
-        self.impute_policy = impute_policy
+        self.impute_policy = impute_policy or {}
 
         self._feature_service = None
 
     def post_init(self, mode="sync"):
+        from ..feature_store import get_feature_vector
+
         super().post_init(mode)
-        self._feature_service =
+        self._feature_service = get_feature_vector(
+            self.feature_vector_uri
+        ).get_online_feature_service(
             impute_policy=self.impute_policy,
         )
 
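Both routers now default impute_policy to None and normalize it with `or {}` in __init__, the usual way to avoid sharing a mutable default between instances. A minimal standalone sketch of the pattern (generic class, not mlrun's):

    class Router:
        def __init__(self, impute_policy: dict = None):
            # fall back to a fresh dict per instance instead of a shared default
            self.impute_policy = impute_policy or {}

    a, b = Router(), Router()
    a.impute_policy["x"] = 1
    print(b.impute_policy)  # {} - instances do not share state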
mlrun/utils/helpers.py
CHANGED

@@ -1525,6 +1525,24 @@ def normalize_workflow_name(name, project_name):
     return name.removeprefix(project_name + "-")
 
 
+def normalize_project_username(username: str):
+    username = username.lower()
+
+    # remove domain if exists
+    username = username.split("@")[0]
+
+    # replace non r'a-z0-9\-_' chars with empty string
+    username = inflection.parameterize(username, separator="")
+
+    # replace underscore with dashes
+    username = inflection.dasherize(username)
+
+    # ensure ends with alphanumeric
+    username = username.rstrip("-_")
+
+    return username
+
+
 # run_in threadpool is taken from fastapi to allow us to run sync functions in a threadpool
 # without importing fastapi in the client
 async def run_in_threadpool(func, *args, **kwargs):
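A quick illustrative call against the new helper (the input is made up; the expected output follows the steps above: lowercase, strip the e-mail domain, drop disallowed characters, turn underscores into dashes, trim trailing separators):

    from mlrun.utils import normalize_project_username

    # expected to print "john-doe": the domain is stripped, the underscore
    # becomes a dash, and everything is lowercased
    print(normalize_project_username("John_Doe@example.com"))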
mlrun/utils/version/version.json
CHANGED

{mlrun-1.6.0rc31.dist-info → mlrun-1.6.0rc33.dist-info}/RECORD
CHANGED

@@ -66,8 +66,8 @@ mlrun/data_types/to_pandas.py,sha256=uq7y1svEzaDaPg92YP3p3k3BDI48XWZ2bDdH6aJSvso
 mlrun/datastore/__init__.py,sha256=bsRzu39UOocQAAl_nOKCbhxrZhWUEXrAc8WV3zs0VyI,4118
 mlrun/datastore/azure_blob.py,sha256=zYHUN5WDvWje4f06GzLDlwJ__ePnjsckgSYbYJt8NF4,8728
 mlrun/datastore/base.py,sha256=dNpBct2pcLtD2cqSCNCY3tnN-9qyyVeHoulzlcQiQlE,25614
-mlrun/datastore/datastore.py,sha256=
-mlrun/datastore/datastore_profile.py,sha256=
+mlrun/datastore/datastore.py,sha256=xnK-zKrDwTkiZQgzLpcz8d629avpjYtU9UN3WZpdjww,8810
+mlrun/datastore/datastore_profile.py,sha256=YK8VcBXm9wKIlHDQtyj4wx0icaIJnhL28EuO8aOoXC8,14108
 mlrun/datastore/dbfs_store.py,sha256=5IkxnFQXkW0fdx-ca5jjQnUdTsTfNdJzMvV31ZpDNrM,6634
 mlrun/datastore/filestore.py,sha256=cI_YvQqY5J3kEvdyPelfWofxKfBitoNHJvABBkpCGRc,3788
 mlrun/datastore/google_cloud_storage.py,sha256=aLquHqEULqYv4xnR4ELW_82fsVLWeecJiUMxsCVXfvE,6034

@@ -90,10 +90,10 @@ mlrun/db/factory.py,sha256=wTEKHEmdDkylM6IkTYvmEYVF8gn2HdjLoLoWICCyatI,2403
 mlrun/db/httpdb.py,sha256=Db3975lpID_zg8NAHebsKWQL51TCy_ux0NuuLaUVGj4,155910
 mlrun/db/nopdb.py,sha256=rpZy5cpW-8--4OvMzlVoKNYjbhWJ3cn_z-JFwfuPqnI,14520
 mlrun/feature_store/__init__.py,sha256=n1F5m1svFW2chbE2dJdWzZJJiYS4E-y8PQsG9Q-F0lU,1584
-mlrun/feature_store/api.py,sha256=
+mlrun/feature_store/api.py,sha256=ehEwKlmE07pq1FUwh-ehA8Jm9LTkQofl5MQpEiMwVqM,49520
 mlrun/feature_store/common.py,sha256=jA7Flrv7iJx2Ug1-4BsOxPCQpVKeaPDcJPupBhu8MgI,12860
 mlrun/feature_store/feature_set.py,sha256=qe8lAOr-519Jw7e8mGJocpvoXhhlHeeRAOqMcmaGAOs,55469
-mlrun/feature_store/feature_vector.py,sha256=
+mlrun/feature_store/feature_vector.py,sha256=2OmXYjdV8_ZcEpxy4yI_EaT__tnXwnCkNGlKFupPM5w,43634
 mlrun/feature_store/ingestion.py,sha256=GZkrke5_JJfA_PGOFc6ekbHKujHgMgqr6t4vop5n_bg,11210
 mlrun/feature_store/steps.py,sha256=aSLOt71dMLzBdsQExyN8T8NkhWPd6q1N7DmHT6L_4vc,29155
 mlrun/feature_store/retrieval/__init__.py,sha256=bwA4copPpLQi8fyoUAYtOyrlw0-6f3-Knct8GbJSvRg,1282

@@ -239,7 +239,7 @@ mlrun/platforms/other.py,sha256=z4pWqxXkVVuMLk-MbNb0Y_ZR5pmIsUm0R8vHnqpEnew,1185
 mlrun/projects/__init__.py,sha256=Lv5rfxyXJrw6WGOWJKhBz66M6t3_zsNMCfUD6waPwx4,1153
 mlrun/projects/operations.py,sha256=CJRGKEFhqKXlg0VOKhcfjOUVAmWHA9WwAFNiXtUqBhg,18550
 mlrun/projects/pipelines.py,sha256=9H5X_XyeoMxYe25HtXA4gdXcXW65auX8CoR36czy7sI,39577
-mlrun/projects/project.py,sha256=
+mlrun/projects/project.py,sha256=A8URLzajysKp6vUzKZq5DjBfTsLtzudn5Y8iVSWaWdw,151075
 mlrun/runtimes/__init__.py,sha256=f5cdEg4raKNXQawJE-AuWzK6AqIsLfDODREeMnI2Ies,7062
 mlrun/runtimes/base.py,sha256=saYKzFVh3phfA3ARHinla-JR8MJq9SBnGnj9yU66XwU,35699
 mlrun/runtimes/constants.py,sha256=tB7nIlHob3yF0K9Uf9BUZ8yxjZNSzlzrd3K32K_vV7w,9550

@@ -268,7 +268,7 @@ mlrun/runtimes/sparkjob/spark3job.py,sha256=kvzZUd9oGCJZoVc0VP57qvs05JVFvnVPwGnw
 mlrun/serving/__init__.py,sha256=_6HRAOuS2Ehjo3vwx5h1aI_-JppxEAsl4VfEERAbGFE,1078
 mlrun/serving/merger.py,sha256=PXLn3A21FiLteJHaDSLm5xKNT-80eTTjfHUJnBX1gKY,6116
 mlrun/serving/remote.py,sha256=XtCgEY-azxcP0VUG1TupZXQ_dttPkAKIAtszW-GfGpQ,18038
-mlrun/serving/routers.py,sha256=
+mlrun/serving/routers.py,sha256=VEeDhcQUeAeyREfaUN-ws7ZkxRw2wf9CKNWj-RUVemY,54988
 mlrun/serving/server.py,sha256=8iLMgRm-W61-_mTueQ0q2vt6blpnpl5-aTQa6dQ6zEA,21357
 mlrun/serving/serving_wrapper.py,sha256=R670-S6PX_d5ER6jiHtRvacuPyFzQH0mEf2K0sBIIOM,836
 mlrun/serving/states.py,sha256=LORqEyNR6Rxq-rH0VfVvJ_aff3ws_KoT83UqXNccjyY,54821

@@ -286,7 +286,7 @@ mlrun/utils/azure_vault.py,sha256=IbPAZh-7mp0j4PcCy1L079LuEA6ENrkWhKZvkD4lcTY,34
 mlrun/utils/clones.py,sha256=QG2ka65-ysfrOaoziudEjJqGgAxJvFKZOXkiD9WZGN4,7386
 mlrun/utils/condition_evaluator.py,sha256=KFZC-apM7RU5TIlRszAzMFc0NqPj3W1rgP0Zv17Ud-A,1918
 mlrun/utils/db.py,sha256=fp9p2_z7XW3DhsceJEObWKh-e5zKjPiCM55kSGNkZD8,1658
-mlrun/utils/helpers.py,sha256=
+mlrun/utils/helpers.py,sha256=3v3lnfQ--PzxEjpdn5tS3MuvZqVc7Ph-hiOlNYBClJI,52941
 mlrun/utils/http.py,sha256=_3pJPuDPz7M9pU4uRN-NPUmCyaANCQsAWAIrlVLZPiY,8733
 mlrun/utils/logger.py,sha256=3-oh9GMDCegObSo84rMYVb9W4nRBl5emZsVQTIZCM2I,7160
 mlrun/utils/regex.py,sha256=V0kaw1-zuehkN20g_Pq6SgkJTBLRdBqNkXOGN_2TJEw,4430

@@ -303,11 +303,11 @@ mlrun/utils/notifications/notification/ipython.py,sha256=qrBmtECiRG6sZpCIVMg7RZc
 mlrun/utils/notifications/notification/slack.py,sha256=5JysqIpUYUZKXPSeeZtbl7qb2L9dj7p2NvnEBcEsZkA,3898
 mlrun/utils/notifications/notification/webhook.py,sha256=QHezCuN5uXkLcroAGxGrhGHaxAdUvkDLIsp27_Yrfd4,2390
 mlrun/utils/version/__init__.py,sha256=7kkrB7hEZ3cLXoWj1kPoDwo4MaswsI2JVOBpbKgPAgc,614
-mlrun/utils/version/version.json,sha256=
+mlrun/utils/version/version.json,sha256=zIxur7NtstOWZtj-oCAOPYxgp2ymbNSyEX2H3ohrOXw,89
 mlrun/utils/version/version.py,sha256=HMwseV8xjTQ__6T6yUWojx_z6yUj7Io7O4NcCCH_sz8,1970
-mlrun-1.6.
-mlrun-1.6.
-mlrun-1.6.
-mlrun-1.6.
-mlrun-1.6.
-mlrun-1.6.
+mlrun-1.6.0rc33.dist-info/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
+mlrun-1.6.0rc33.dist-info/METADATA,sha256=LEUvgNpiHczpT4fgPDUYug0zEqt5BwjLCpdbeMg7oKg,18392
+mlrun-1.6.0rc33.dist-info/WHEEL,sha256=oiQVh_5PnQM0E3gPdiz09WCNmwiHDMaGer_elqB3coM,92
+mlrun-1.6.0rc33.dist-info/entry_points.txt,sha256=1Owd16eAclD5pfRCoJpYC2ZJSyGNTtUr0nCELMioMmU,46
+mlrun-1.6.0rc33.dist-info/top_level.txt,sha256=NObLzw3maSF9wVrgSeYBv-fgnHkAJ1kEkh12DLdd5KM,6
+mlrun-1.6.0rc33.dist-info/RECORD,,

{mlrun-1.6.0rc31.dist-info → mlrun-1.6.0rc33.dist-info}/LICENSE
File without changes

{mlrun-1.6.0rc31.dist-info → mlrun-1.6.0rc33.dist-info}/WHEEL
File without changes

{mlrun-1.6.0rc31.dist-info → mlrun-1.6.0rc33.dist-info}/entry_points.txt
File without changes

{mlrun-1.6.0rc31.dist-info → mlrun-1.6.0rc33.dist-info}/top_level.txt
File without changes