acryl-datahub 1.0.0.3rc4__py3-none-any.whl → 1.0.0.3rc6__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of acryl-datahub might be problematic.
- {acryl_datahub-1.0.0.3rc4.dist-info → acryl_datahub-1.0.0.3rc6.dist-info}/METADATA +2405 -2405
- {acryl_datahub-1.0.0.3rc4.dist-info → acryl_datahub-1.0.0.3rc6.dist-info}/RECORD +14 -12
- datahub/_version.py +1 -1
- datahub/ingestion/source/looker/looker_common.py +17 -2
- datahub/ingestion/source/looker/looker_source.py +34 -5
- datahub/sdk/_all_entities.py +4 -0
- datahub/sdk/_shared.py +140 -0
- datahub/sdk/entity_client.py +8 -0
- datahub/sdk/mlmodel.py +301 -0
- datahub/sdk/mlmodelgroup.py +233 -0
- {acryl_datahub-1.0.0.3rc4.dist-info → acryl_datahub-1.0.0.3rc6.dist-info}/WHEEL +0 -0
- {acryl_datahub-1.0.0.3rc4.dist-info → acryl_datahub-1.0.0.3rc6.dist-info}/entry_points.txt +0 -0
- {acryl_datahub-1.0.0.3rc4.dist-info → acryl_datahub-1.0.0.3rc6.dist-info}/licenses/LICENSE +0 -0
- {acryl_datahub-1.0.0.3rc4.dist-info → acryl_datahub-1.0.0.3rc6.dist-info}/top_level.txt +0 -0
{acryl_datahub-1.0.0.3rc4.dist-info → acryl_datahub-1.0.0.3rc6.dist-info}/RECORD CHANGED

@@ -1,7 +1,7 @@
-acryl_datahub-1.0.0.
+acryl_datahub-1.0.0.3rc6.dist-info/licenses/LICENSE,sha256=9xNHpsD0uYF5ONzXsKDCuHHB-xbiCrSbueWXqrTNsxk,11365
 datahub/__init__.py,sha256=aq_i5lVREmoLfYIqcx_pEQicO855YlhD19tWc1eZZNI,59
 datahub/__main__.py,sha256=pegIvQ9hzK7IhqVeUi1MeADSZ2QlP-D3K0OQdEg55RU,106
-datahub/_version.py,sha256=
+datahub/_version.py,sha256=VrjEJq0-eU07ihy36V-x_6VzO_ae3ZtXi7aMCbdTSZw,323
 datahub/entrypoints.py,sha256=2TYgHhs3sCxJlojIHjqfxzt3_ImPwPzq4vBtsUuMqu4,8885
 datahub/errors.py,sha256=BzKdcmYseHOt36zfjJXc17WNutFhp9Y23cU_L6cIkxc,612
 datahub/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -350,7 +350,7 @@ datahub/ingestion/source/kafka_connect/sink_connectors.py,sha256=kEnxOXTik5HSDLj
 datahub/ingestion/source/kafka_connect/source_connectors.py,sha256=OQ0vjz9xF0T30pRln_gDvelmaOE5jTAxwsCtm1K4SWM,21080
 datahub/ingestion/source/looker/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 datahub/ingestion/source/looker/lkml_patched.py,sha256=XShEU7Wbz0DubDhYMjKf9wjKZrBJa2XPg9MIjp8rPhk,733
-datahub/ingestion/source/looker/looker_common.py,sha256=
+datahub/ingestion/source/looker/looker_common.py,sha256=h1DLGyPCYbFM2QUjgJ8ZK_LDLzu-Q7F5X4VIRZMLweg,62817
 datahub/ingestion/source/looker/looker_config.py,sha256=eVKw1nn9D8hUFdRfNyT3MtzL8w-zWhFeokiwSnNKQuc,13607
 datahub/ingestion/source/looker/looker_connection.py,sha256=yDmC6lDsHmL2e_Pw8ULylwOIHPWPp_6gT1iyLvD0fTw,2075
 datahub/ingestion/source/looker/looker_constant.py,sha256=GMKYtNXlpojPxa9azridKfcGLSJwKdUCTesp7U8dIrQ,402
@@ -359,7 +359,7 @@ datahub/ingestion/source/looker/looker_file_loader.py,sha256=gb2Z97_w28MsybYe01J
 datahub/ingestion/source/looker/looker_lib_wrapper.py,sha256=2aEorQ3WjBzYVQIm-5QR2qDGAhpKflstwO5X9oboXS8,11553
 datahub/ingestion/source/looker/looker_liquid_tag.py,sha256=mO4G4MNA4YZFvZaDBpdiJ2vP3irC82kY34RdaK4Pbfs,3100
 datahub/ingestion/source/looker/looker_query_model.py,sha256=N0jBbFruiCIIGT6sJn6tNeppeQ78KGTkOwTLirhxFNc,2144
-datahub/ingestion/source/looker/looker_source.py,sha256=
+datahub/ingestion/source/looker/looker_source.py,sha256=TOOht_7c5PM77DhR-4hP7cPQZfvSuGU4F_xUeXDPfI8,67803
 datahub/ingestion/source/looker/looker_template_language.py,sha256=5fZFPKFP3IYbJg3jLifjaji4wWg8wRy-1XDvc8Qucus,17949
 datahub/ingestion/source/looker/looker_usage.py,sha256=qFBX7OHtIcarYIqFe0jQMrDV8MMPV_nN4PZrZRUznTw,23029
 datahub/ingestion/source/looker/looker_view_id_cache.py,sha256=92gDy6NONhJYBp92z_IBzDVZvezmUIkaBCZY1bdk6mE,4392
@@ -899,15 +899,17 @@ datahub/metadata/schemas/__init__.py,sha256=uvLNC3VyCkWA_v8e9FdA1leFf46NFKDD0Aaj
 datahub/pydantic/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 datahub/pydantic/compat.py,sha256=TUEo4kSEeOWVAhV6LQtst1phrpVgGtK4uif4OI5vQ2M,1937
 datahub/sdk/__init__.py,sha256=QeutS6Th8K4E4ZxXuoGrmvahN6zA9Oh9asKk5mw9AIk,1670
-datahub/sdk/_all_entities.py,sha256=
+datahub/sdk/_all_entities.py,sha256=inbLFv2T7dhZpGfBY5FPhCWbyE0P0G8umOt0Bc7V4XA,520
 datahub/sdk/_attribution.py,sha256=0Trh8steVd27GOr9MKCZeawbuDD2_q3GIsZlCtHqEUg,1321
-datahub/sdk/_shared.py,sha256=
+datahub/sdk/_shared.py,sha256=5L1IkihLc7Pd2x0ypDs96kZ8ecm6o0-UZEn0J1Sffqw,24808
 datahub/sdk/_utils.py,sha256=aGE665Su8SGtj2CRDiTaXNYrJ8ADBsS0m4DmaXw79b8,1027
 datahub/sdk/container.py,sha256=yw_vw9Jl1wOYNwMHxQHLz5ZvVQVDWWHi9CWBR3hOCd8,7547
 datahub/sdk/dataset.py,sha256=5LG4c_8bHeSPYrW88KNXRgiPD8frBjR0OBVrrwdquU4,29152
 datahub/sdk/entity.py,sha256=Q29AbpS58L4gD8ETwoNIwG-ouytz4c0MSSFi6-jLl_4,6742
-datahub/sdk/entity_client.py,sha256=
+datahub/sdk/entity_client.py,sha256=1AC9J7-jv3rD-MFEPz2PnFrT8nFkj_WO0M-4nyVOtQk,5319
 datahub/sdk/main_client.py,sha256=h2MKRhR-BO0zGCMhF7z2bTncX4hagKrAYwR3wTNTtzA,3666
+datahub/sdk/mlmodel.py,sha256=amS-hHg5tT7zAqEHG17kSA60Q7td2DFtO-W2rEfb2rY,10206
+datahub/sdk/mlmodelgroup.py,sha256=_7IkqkLVeyqYVEUHTVePSDLQyESsnwht5ca1lcMODAg,7842
 datahub/sdk/resolver_client.py,sha256=nKMAZJt2tRSGfKSzoREIh43PXqjM3umLiYkYHJjo1io,3243
 datahub/sdk/search_client.py,sha256=BJR5t7Ff2oDNOGLcSCp9YHzrGKbgOQr7T8XQKGEpucw,3437
 datahub/sdk/search_filters.py,sha256=BcMhvG5hGYAATtLPLz4WLRjKApX2oLYrrcGn-CG__ek,12901
@@ -1046,8 +1048,8 @@ datahub_provider/operators/datahub_assertion_operator.py,sha256=uvTQ-jk2F0sbqqxp
 datahub_provider/operators/datahub_assertion_sensor.py,sha256=lCBj_3x1cf5GMNpHdfkpHuyHfVxsm6ff5x2Z5iizcAo,140
 datahub_provider/operators/datahub_operation_operator.py,sha256=aevDp2FzX7FxGlXrR0khoHNbxbhKR2qPEX5e8O2Jyzw,174
 datahub_provider/operators/datahub_operation_sensor.py,sha256=8fcdVBCEPgqy1etTXgLoiHoJrRt_nzFZQMdSzHqSG7M,168
-acryl_datahub-1.0.0.
-acryl_datahub-1.0.0.
-acryl_datahub-1.0.0.
-acryl_datahub-1.0.0.
-acryl_datahub-1.0.0.
+acryl_datahub-1.0.0.3rc6.dist-info/METADATA,sha256=ZOld7fZ4VTL1L7DczapOhGc9llaIYX4yscDrNUACf-g,176989
+acryl_datahub-1.0.0.3rc6.dist-info/WHEEL,sha256=CmyFI0kx5cdEMTLiONQRbGQwjIoR1aIYB7eCAQ4KPJ0,91
+acryl_datahub-1.0.0.3rc6.dist-info/entry_points.txt,sha256=o3mDeJXSKhsy7XLkuogihraiabBdLn9HaizYXPrxmk0,9710
+acryl_datahub-1.0.0.3rc6.dist-info/top_level.txt,sha256=iLjSrLK5ox1YVYcglRUkcvfZPvKlobBWx7CTUXx8_GI,25
+acryl_datahub-1.0.0.3rc6.dist-info/RECORD,,
datahub/_version.py CHANGED

datahub/ingestion/source/looker/looker_common.py CHANGED

@@ -471,7 +471,10 @@ def get_view_file_path(
     logger.debug("Entered")

     for field in lkml_fields:
-        if
+        if (
+            LookerUtil.extract_view_name_from_lookml_model_explore_field(field)
+            == view_name
+        ):
             # This path is relative to git clone directory
             logger.debug(f"Found view({view_name}) file-path {field.source_file}")
             return field.source_file
@@ -1103,7 +1106,7 @@ class LookerExplore:
                [column_ref] if column_ref is not None else []
            )

-
+            looker_explore = cls(
                name=explore_name,
                model_name=model,
                project_name=explore.project_name,
@@ -1121,6 +1124,8 @@ class LookerExplore:
                source_file=explore.source_file,
                tags=list(explore.tags) if explore.tags is not None else [],
            )
+            logger.debug(f"Created LookerExplore from API: {looker_explore}")
+            return looker_explore
        except SDKError as e:
            if "<title>Looker Not Found (404)</title>" in str(e):
                logger.info(
@@ -1161,6 +1166,9 @@ class LookerExplore:
        dataset_name = config.explore_naming_pattern.replace_variables(
            self.get_mapping(config)
        )
+        logger.debug(
+            f"Generated dataset_name={dataset_name} for explore with model_name={self.model_name}, name={self.name}"
+        )

        return builder.make_dataset_urn_with_platform_instance(
            platform=config.platform_name,
@@ -1362,6 +1370,7 @@ class LookerExploreRegistry:

    @lru_cache(maxsize=200)
    def get_explore(self, model: str, explore: str) -> Optional[LookerExplore]:
+        logger.debug(f"Retrieving explore: model={model}, explore={explore}")
        looker_explore = LookerExplore.from_api(
            model,
            explore,
@@ -1369,6 +1378,12 @@ class LookerExploreRegistry:
            self.report,
            self.source_config,
        )
+        if looker_explore is not None:
+            logger.debug(
+                f"Found explore with model_name={looker_explore.model_name}, name={looker_explore.name}"
+            )
+        else:
+            logger.debug(f"No explore found for model={model}, explore={explore}")
        return looker_explore

    def compute_stats(self) -> Dict:

datahub/ingestion/source/looker/looker_source.py CHANGED

@@ -279,6 +279,11 @@ class LookerDashboardSource(TestableSource, StatefulIngestionSourceBase):
            return []
        result = []

+        if query is not None:
+            logger.debug(
+                f"Processing query: model={query.model}, view={query.view}, input_fields_count={len(query.fields) if query.fields else 0}"
+            )
+
        # query.dynamic_fields can contain:
        # - looker table calculations: https://docs.looker.com/exploring-data/using-table-calculations
        # - looker custom measures: https://docs.looker.com/de/exploring-data/adding-fields/custom-measure
@@ -399,9 +404,12 @@ class LookerDashboardSource(TestableSource, StatefulIngestionSourceBase):
            # Get the explore from the view directly
            explores = [element.query.view] if element.query.view is not None else []
            logger.debug(
-                f"
+                f"Dashboard element {element.title} (ID: {element.id}): Upstream explores added via query={explores} with model={element.query.model}, explore={element.query.view}"
            )
            for exp in explores:
+                logger.debug(
+                    f"Adding reachable explore: model={element.query.model}, explore={exp}, element_id={element.id}, title={element.title}"
+                )
                self.add_reachable_explore(
                    model=element.query.model,
                    explore=exp,
@@ -477,12 +485,10 @@ class LookerDashboardSource(TestableSource, StatefulIngestionSourceBase):

        # Failing the above two approaches, pick out details from result_maker
        elif element.result_maker is not None:
-            model: str = ""
            input_fields = []

            explores = []
            if element.result_maker.query is not None:
-                model = element.result_maker.query.model
                if element.result_maker.query.view is not None:
                    explores.append(element.result_maker.query.view)
                input_fields = self._get_input_fields_from_query(
@@ -502,9 +508,15 @@ class LookerDashboardSource(TestableSource, StatefulIngestionSourceBase):

            # In addition to the query, filters can point to fields as well
            assert element.result_maker.filterables is not None
+
+            # Different dashboard elements may reference explores from different models
+            # so we need to create a mapping of explore names to their models to maintain correct associations
+            explore_to_model_map = {}
+
            for filterable in element.result_maker.filterables:
                if filterable.view is not None and filterable.model is not None:
-                    model
+                    # Store the model for this view/explore in our mapping
+                    explore_to_model_map[filterable.view] = filterable.model
                    explores.append(filterable.view)
                self.add_reachable_explore(
                    model=filterable.model,
@@ -527,6 +539,18 @@ class LookerDashboardSource(TestableSource, StatefulIngestionSourceBase):

            explores = sorted(list(set(explores)))  # dedup the list of views

+            logger.debug(
+                f"Dashboard element {element.id} and their explores with the corresponding model: {explore_to_model_map}"
+            )
+
+            # If we have a query, use its model as the default for any explores that don't have a model in our mapping
+            default_model = ""
+            if (
+                element.result_maker.query is not None
+                and element.result_maker.query.model is not None
+            ):
+                default_model = element.result_maker.query.model
+
            return LookerDashboardElement(
                id=element.id,
                title=element.title if element.title is not None else "",
@@ -540,7 +564,11 @@ class LookerDashboardSource(TestableSource, StatefulIngestionSourceBase):
                    else ""
                ),
                upstream_explores=[
-                    LookerExplore(
+                    LookerExplore(
+                        model_name=explore_to_model_map.get(exp, default_model),
+                        name=exp,
+                    )
+                    for exp in explores
                ],
                input_fields=input_fields,
                owner=None,
@@ -1270,6 +1298,7 @@ class LookerDashboardSource(TestableSource, StatefulIngestionSourceBase):
        chart_urn = self._make_chart_urn(
            element_id=dashboard_element.get_urn_element_id()
        )
+
        input_fields_aspect = InputFieldsClass(
            fields=self._input_fields_from_dashboard_element(dashboard_element)
        )
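The looker_source.py change above replaces the single shared model variable with a per-explore mapping that falls back to the query's model. A minimal standalone sketch of that resolution logic, separate from the DataHub classes; the function name and tuple-based inputs are illustrative only:

from typing import Dict, List, Tuple


def resolve_explore_models(
    filterables: List[Tuple[str, str]],  # (view, model) pairs, like result_maker.filterables
    query_model: str,  # model of result_maker.query, or "" when absent
    explores: List[str],
) -> Dict[str, str]:
    # Remember which model each filter-referenced explore belongs to.
    explore_to_model_map = {view: model for view, model in filterables}
    # Fall back to the query's model for explores without an entry in the map.
    return {exp: explore_to_model_map.get(exp, query_model) for exp in explores}


# A filter references "orders" from model "ecommerce" while the query itself uses model "core".
print(resolve_explore_models([("orders", "ecommerce")], "core", ["orders", "users"]))
# {'orders': 'ecommerce', 'users': 'core'}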
datahub/sdk/_all_entities.py CHANGED

@@ -3,11 +3,15 @@ from typing import Dict, List, Type
 from datahub.sdk.container import Container
 from datahub.sdk.dataset import Dataset
 from datahub.sdk.entity import Entity
+from datahub.sdk.mlmodel import MLModel
+from datahub.sdk.mlmodelgroup import MLModelGroup

 # TODO: Is there a better way to declare this?
 ENTITY_CLASSES_LIST: List[Type[Entity]] = [
     Container,
     Dataset,
+    MLModel,
+    MLModelGroup,
 ]

 ENTITY_CLASSES: Dict[str, Type[Entity]] = {

datahub/sdk/_shared.py CHANGED

@@ -5,6 +5,7 @@ from datetime import datetime
 from typing import (
     TYPE_CHECKING,
     Callable,
+    Dict,
     List,
     Optional,
     Sequence,
@@ -14,6 +15,7 @@ from typing import (

 from typing_extensions import TypeAlias, assert_never

+import datahub.emitter.mce_builder as builder
 import datahub.metadata.schema_classes as models
 from datahub.emitter.mce_builder import (
     make_ts_millis,
@@ -30,12 +32,14 @@ from datahub.metadata.urns import (
     DataJobUrn,
     DataPlatformInstanceUrn,
     DataPlatformUrn,
+    DataProcessInstanceUrn,
     DatasetUrn,
     DomainUrn,
     GlossaryTermUrn,
     OwnershipTypeUrn,
     TagUrn,
     Urn,
+    VersionSetUrn,
 )
 from datahub.sdk._utils import add_list_unique, remove_list_unique
 from datahub.sdk.entity import Entity
@@ -52,6 +56,36 @@ ActorUrn: TypeAlias = Union[CorpUserUrn, CorpGroupUrn]

 _DEFAULT_ACTOR_URN = CorpUserUrn("__ingestion").urn()

+TrainingMetricsInputType: TypeAlias = Union[
+    List[models.MLMetricClass], Dict[str, Optional[str]]
+]
+HyperParamsInputType: TypeAlias = Union[
+    List[models.MLHyperParamClass], Dict[str, Optional[str]]
+]
+MLTrainingJobInputType: TypeAlias = Union[Sequence[Union[str, DataProcessInstanceUrn]]]
+
+
+def convert_training_metrics(
+    metrics: TrainingMetricsInputType,
+) -> List[models.MLMetricClass]:
+    if isinstance(metrics, dict):
+        return [
+            models.MLMetricClass(name=name, value=str(value))
+            for name, value in metrics.items()
+        ]
+    return metrics
+
+
+def convert_hyper_params(
+    params: HyperParamsInputType,
+) -> List[models.MLHyperParamClass]:
+    if isinstance(params, dict):
+        return [
+            models.MLHyperParamClass(name=name, value=str(value))
+            for name, value in params.items()
+        ]
+    return params
+

 def make_time_stamp(ts: Optional[datetime]) -> Optional[models.TimeStampClass]:
     if ts is None:
@@ -578,3 +612,109 @@ class HasInstitutionalMemory(Entity):
             self._link_key,
             self._parse_link_association_class(link),
         )
+
+
+class HasVersion(Entity):
+    """Mixin for entities that have version properties."""
+
+    def _get_version_props(self) -> Optional[models.VersionPropertiesClass]:
+        return self._get_aspect(models.VersionPropertiesClass)
+
+    def _ensure_version_props(self) -> models.VersionPropertiesClass:
+        version_props = self._get_version_props()
+        if version_props is None:
+            guid_dict = {"urn": str(self.urn)}
+            version_set_urn = VersionSetUrn(
+                id=builder.datahub_guid(guid_dict), entity_type=self.urn.ENTITY_TYPE
+            )
+
+            version_props = models.VersionPropertiesClass(
+                versionSet=str(version_set_urn),
+                version=models.VersionTagClass(versionTag="0.1.0"),
+                sortId="0000000.1.0",
+            )
+            self._set_aspect(version_props)
+        return version_props
+
+    @property
+    def version(self) -> Optional[str]:
+        version_props = self._get_version_props()
+        if version_props and version_props.version:
+            return version_props.version.versionTag
+        return None
+
+    def set_version(self, version: str) -> None:
+        """Set the version of the entity."""
+        guid_dict = {"urn": str(self.urn)}
+        version_set_urn = VersionSetUrn(
+            id=builder.datahub_guid(guid_dict), entity_type=self.urn.ENTITY_TYPE
+        )
+
+        version_props = self._get_version_props()
+        if version_props is None:
+            # If no version properties exist, create a new one
+            version_props = models.VersionPropertiesClass(
+                version=models.VersionTagClass(versionTag=version),
+                versionSet=str(version_set_urn),
+                sortId=version.zfill(10),  # Pad with zeros for sorting
+            )
+        else:
+            # Update existing version properties
+            version_props.version = models.VersionTagClass(versionTag=version)
+            version_props.versionSet = str(version_set_urn)
+            version_props.sortId = version.zfill(10)
+
+        self._set_aspect(version_props)
+
+    @property
+    def version_aliases(self) -> List[str]:
+        version_props = self._get_version_props()
+        if version_props and version_props.aliases:
+            return [
+                alias.versionTag
+                for alias in version_props.aliases
+                if alias.versionTag is not None
+            ]
+        return []  # Return empty list instead of None
+
+    def set_version_aliases(self, aliases: List[str]) -> None:
+        version_props = self._get_aspect(models.VersionPropertiesClass)
+        if version_props:
+            version_props.aliases = [
+                models.VersionTagClass(versionTag=alias) for alias in aliases
+            ]
+        else:
+            # If no version properties exist, we need to create one with a default version
+            guid_dict = {"urn": str(self.urn)}
+            version_set_urn = VersionSetUrn(
+                id=builder.datahub_guid(guid_dict), entity_type=self.urn.ENTITY_TYPE
+            )
+            self._set_aspect(
+                models.VersionPropertiesClass(
+                    version=models.VersionTagClass(
+                        versionTag="0.1.0"
+                    ),  # Default version
+                    versionSet=str(version_set_urn),
+                    sortId="0000000.1.0",
+                    aliases=[
+                        models.VersionTagClass(versionTag=alias) for alias in aliases
+                    ],
+                )
+            )
+
+    def add_version_alias(self, alias: str) -> None:
+        if not alias:
+            raise ValueError("Alias cannot be empty")
+        version_props = self._ensure_version_props()
+        if version_props.aliases is None:
+            version_props.aliases = []
+        version_props.aliases.append(models.VersionTagClass(versionTag=alias))
+        self._set_aspect(version_props)
+
+    def remove_version_alias(self, alias: str) -> None:
+        version_props = self._get_version_props()
+        if version_props and version_props.aliases:
+            version_props.aliases = [
+                a for a in version_props.aliases if a.versionTag != alias
+            ]
+            self._set_aspect(version_props)
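A rough usage sketch of the new HasVersion mixin. It assumes that the MLModel entity added in this release mixes in HasVersion and accepts the constructor arguments shown, neither of which is confirmed by this diff:

from datahub.sdk.mlmodel import MLModel

# Constructor arguments are assumptions for illustration; see datahub/sdk/mlmodel.py.
model = MLModel(id="my-recommender", platform="mlflow")

model.set_version("2.0.1")  # stores a VersionTagClass plus a zero-padded sortId ("000002.0.1")
model.add_version_alias("latest")  # appends an alias tag to the version properties aspect
print(model.version)  # "2.0.1"
print(model.version_aliases)  # ["latest"]
model.remove_version_alias("latest")

Note that set_version derives the VersionSetUrn from a GUID of the entity's own urn, so repeated calls keep the entity in the same version set.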
datahub/sdk/entity_client.py CHANGED

@@ -11,6 +11,8 @@ from datahub.ingestion.graph.client import DataHubGraph
 from datahub.metadata.urns import (
     ContainerUrn,
     DatasetUrn,
+    MlModelGroupUrn,
+    MlModelUrn,
     Urn,
 )
 from datahub.sdk._all_entities import ENTITY_CLASSES
@@ -18,6 +20,8 @@ from datahub.sdk._shared import UrnOrStr
 from datahub.sdk.container import Container
 from datahub.sdk.dataset import Dataset
 from datahub.sdk.entity import Entity
+from datahub.sdk.mlmodel import MLModel
+from datahub.sdk.mlmodelgroup import MLModelGroup

 if TYPE_CHECKING:
     from datahub.sdk.main_client import DataHubClient
@@ -49,6 +53,10 @@ class EntityClient:
     @overload
     def get(self, urn: DatasetUrn) -> Dataset: ...
     @overload
+    def get(self, urn: MlModelUrn) -> MLModel: ...
+    @overload
+    def get(self, urn: MlModelGroupUrn) -> MLModelGroup: ...
+    @overload
     def get(self, urn: Union[Urn, str]) -> Entity: ...
     def get(self, urn: UrnOrStr) -> Entity:
         """Retrieve an entity by its urn.
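With the new overloads, passing a typed urn to EntityClient.get resolves to the matching SDK class. A rough usage sketch; the client construction, the entities accessor, and the MlModelUrn/MlModelGroupUrn argument order are assumptions rather than something this diff shows:

from datahub.metadata.urns import MlModelGroupUrn, MlModelUrn
from datahub.sdk.main_client import DataHubClient

client = DataHubClient(server="http://localhost:8080", token="<token>")  # assumed setup

# Type checkers can now infer MLModel / MLModelGroup from these calls.
model = client.entities.get(MlModelUrn("mlflow", "my-recommender", "PROD"))
group = client.entities.get(MlModelGroupUrn("mlflow", "recommenders", "PROD"))
print(model.urn, group.urn)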