acryl-datahub-cloud 0.3.8.2rc8__py3-none-any.whl → 0.3.9rc2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of acryl-datahub-cloud might be problematic.
- acryl_datahub_cloud/_codegen_config.json +1 -1
- acryl_datahub_cloud/acryl_cs_issues/acryl_customer.py +1 -1
- acryl_datahub_cloud/action_request/__init__.py +0 -0
- acryl_datahub_cloud/action_request/action_request_owner_source.py +174 -0
- acryl_datahub_cloud/api/__init__.py +1 -1
- acryl_datahub_cloud/api/client.py +2 -2
- acryl_datahub_cloud/datahub_reporting/datahub_dataset.py +6 -6
- acryl_datahub_cloud/datahub_reporting/datahub_form_reporting.py +67 -33
- acryl_datahub_cloud/datahub_reporting/extract_sql.py +4 -4
- acryl_datahub_cloud/datahub_usage_reporting/usage_feature_patch_builder.py +21 -21
- acryl_datahub_cloud/datahub_usage_reporting/usage_feature_reporter.py +14 -13
- acryl_datahub_cloud/metadata/_urns/urn_defs.py +1064 -418
- acryl_datahub_cloud/metadata/com/linkedin/pegasus2avro/actionrequest/__init__.py +6 -0
- acryl_datahub_cloud/metadata/com/linkedin/pegasus2avro/dataplatforminstance/__init__.py +2 -0
- acryl_datahub_cloud/metadata/com/linkedin/pegasus2avro/dataset/__init__.py +2 -0
- acryl_datahub_cloud/metadata/com/linkedin/pegasus2avro/executorglobalconfig/__init__.py +15 -0
- acryl_datahub_cloud/metadata/com/linkedin/pegasus2avro/executorpool/__init__.py +4 -0
- acryl_datahub_cloud/metadata/com/linkedin/pegasus2avro/metadata/key/__init__.py +4 -0
- acryl_datahub_cloud/metadata/com/linkedin/pegasus2avro/metric/__init__.py +29 -0
- acryl_datahub_cloud/metadata/schema.avsc +786 -48
- acryl_datahub_cloud/metadata/schema_classes.py +1113 -62
- acryl_datahub_cloud/metadata/schemas/ActionRequestInfo.avsc +422 -12
- acryl_datahub_cloud/metadata/schemas/ActionRequestStatus.avsc +12 -0
- acryl_datahub_cloud/metadata/schemas/AssertionAnalyticsRunEvent.avsc +5 -3
- acryl_datahub_cloud/metadata/schemas/AssertionInfo.avsc +5 -3
- acryl_datahub_cloud/metadata/schemas/AssertionRunEvent.avsc +5 -3
- acryl_datahub_cloud/metadata/schemas/BusinessAttributeInfo.avsc +6 -2
- acryl_datahub_cloud/metadata/schemas/BusinessAttributes.avsc +6 -0
- acryl_datahub_cloud/metadata/schemas/ChartInfo.avsc +1 -0
- acryl_datahub_cloud/metadata/schemas/ChartKey.avsc +3 -3
- acryl_datahub_cloud/metadata/schemas/ContainerKey.avsc +1 -1
- acryl_datahub_cloud/metadata/schemas/CorpGroupKey.avsc +1 -1
- acryl_datahub_cloud/metadata/schemas/DashboardKey.avsc +3 -3
- acryl_datahub_cloud/metadata/schemas/DataFlowKey.avsc +1 -1
- acryl_datahub_cloud/metadata/schemas/DataHubActionInfo.avsc +1 -1
- acryl_datahub_cloud/metadata/schemas/DataHubConnectionKey.avsc +2 -1
- acryl_datahub_cloud/metadata/schemas/DataHubIngestionSourceInfo.avsc +9 -4
- acryl_datahub_cloud/metadata/schemas/DataHubMetricCubeDefinition.avsc +185 -0
- acryl_datahub_cloud/metadata/schemas/DataHubMetricCubeEvent.avsc +184 -0
- acryl_datahub_cloud/metadata/schemas/DataHubMetricCubeKey.avsc +22 -0
- acryl_datahub_cloud/metadata/schemas/DataJobKey.avsc +4 -4
- acryl_datahub_cloud/metadata/schemas/DataPlatformInstanceKey.avsc +2 -1
- acryl_datahub_cloud/metadata/schemas/DataProcessInstanceInput.avsc +4 -2
- acryl_datahub_cloud/metadata/schemas/DataProcessInstanceOutput.avsc +2 -0
- acryl_datahub_cloud/metadata/schemas/DatasetKey.avsc +14 -13
- acryl_datahub_cloud/metadata/schemas/EditableSchemaMetadata.avsc +6 -2
- acryl_datahub_cloud/metadata/schemas/ExecutionRequestInput.avsc +6 -1
- acryl_datahub_cloud/metadata/schemas/ExecutionRequestSignal.avsc +1 -1
- acryl_datahub_cloud/metadata/schemas/FormInfo.avsc +5 -0
- acryl_datahub_cloud/metadata/schemas/GlossaryTerms.avsc +3 -1
- acryl_datahub_cloud/metadata/schemas/IcebergCatalogInfo.avsc +28 -0
- acryl_datahub_cloud/metadata/schemas/IcebergWarehouseInfo.avsc +96 -0
- acryl_datahub_cloud/metadata/schemas/IncidentActivityEvent.avsc +4 -1
- acryl_datahub_cloud/metadata/schemas/IncidentInfo.avsc +4 -1
- acryl_datahub_cloud/metadata/schemas/InputFields.avsc +3 -1
- acryl_datahub_cloud/metadata/schemas/MLFeatureKey.avsc +1 -1
- acryl_datahub_cloud/metadata/schemas/MLFeatureTableKey.avsc +1 -1
- acryl_datahub_cloud/metadata/schemas/MLModelGroupKey.avsc +1 -1
- acryl_datahub_cloud/metadata/schemas/MLModelKey.avsc +3 -3
- acryl_datahub_cloud/metadata/schemas/MLPrimaryKeyKey.avsc +1 -1
- acryl_datahub_cloud/metadata/schemas/MetadataChangeEvent.avsc +399 -176
- acryl_datahub_cloud/metadata/schemas/MonitorInfo.avsc +6 -4
- acryl_datahub_cloud/metadata/schemas/NotebookKey.avsc +1 -1
- acryl_datahub_cloud/metadata/schemas/Operation.avsc +4 -2
- acryl_datahub_cloud/metadata/schemas/RemoteExecutorGlobalConfigKey.avsc +21 -0
- acryl_datahub_cloud/metadata/schemas/RemoteExecutorPoolGlobalConfig.avsc +16 -0
- acryl_datahub_cloud/metadata/schemas/RemoteExecutorPoolInfo.avsc +85 -0
- acryl_datahub_cloud/metadata/schemas/RemoteExecutorPoolKey.avsc +1 -1
- acryl_datahub_cloud/metadata/schemas/RemoteExecutorStatus.avsc +5 -5
- acryl_datahub_cloud/metadata/schemas/SchemaFieldKey.avsc +2 -2
- acryl_datahub_cloud/metadata/schemas/SchemaMetadata.avsc +3 -1
- acryl_datahub_cloud/metadata/schemas/VersionProperties.avsc +18 -0
- acryl_datahub_cloud/metadata/schemas/VersionSetProperties.avsc +5 -0
- {acryl_datahub_cloud-0.3.8.2rc8.dist-info → acryl_datahub_cloud-0.3.9rc2.dist-info}/METADATA +43 -43
- {acryl_datahub_cloud-0.3.8.2rc8.dist-info → acryl_datahub_cloud-0.3.9rc2.dist-info}/RECORD +78 -68
- {acryl_datahub_cloud-0.3.8.2rc8.dist-info → acryl_datahub_cloud-0.3.9rc2.dist-info}/entry_points.txt +1 -0
- acryl_datahub_cloud/api/entity_versioning.py +0 -167
- {acryl_datahub_cloud-0.3.8.2rc8.dist-info → acryl_datahub_cloud-0.3.9rc2.dist-info}/WHEEL +0 -0
- {acryl_datahub_cloud-0.3.8.2rc8.dist-info → acryl_datahub_cloud-0.3.9rc2.dist-info}/top_level.txt +0 -0
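
Before reading the hunks below, it can help to confirm which of the two wheels is actually installed. A minimal sketch, assuming the wheel is installed in the active environment and that the regenerated classes are importable from acryl_datahub_cloud.metadata.schema_classes as the diff below suggests:

```python
from importlib.metadata import version

# Distribution name as it appears in the wheel filenames above.
print(version("acryl-datahub-cloud"))  # expect 0.3.9rc2 after the upgrade

# These proposal classes only exist on the 0.3.9rc2 side of the diff,
# so a successful import is a quick sanity check of the upgrade.
from acryl_datahub_cloud.metadata.schema_classes import (  # noqa: E402
    DomainProposalClass,
    OwnerProposalClass,
    StructuredPropertyProposalClass,
)
```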
@@ -301,7 +301,7 @@ class DataHubActionConfigClass(DictWrapper):
 
     @property
     def executorId(self) -> Union[None, str]:
-        """The id of the executor to use to execute the automation. Defaults to 'default' (runs locally)"""
+        """The id of the executor pool to use to execute the automation. Defaults to 'default' (runs locally embedded)"""
         return self._inner_dict.get('executorId') # type: ignore
 
     @executorId.setter
@@ -720,6 +720,7 @@ class ActionRequestInfoClass(_Aspect):
         subResourceType: Union[None, str]=None,
         subResource: Union[None, str]=None,
         params: Union[None, "ActionRequestParamsClass"]=None,
+        description: Union[None, str]=None,
         dueDate: Union[None, int]=None,
         origin: Optional[Union[Union[str, "ActionRequestOriginClass"], None]]=None,
         inferenceMetadata: Union[None, "InferenceMetadataClass"]=None,
@@ -737,6 +738,7 @@ class ActionRequestInfoClass(_Aspect):
         self.params = params
         self.created = created
         self.createdBy = createdBy
+        self.description = description
         self.dueDate = dueDate
         if origin is None:
             # default: 'MANUAL'
@@ -757,6 +759,7 @@ class ActionRequestInfoClass(_Aspect):
         self.params = self.RECORD_SCHEMA.fields_dict["params"].default
         self.created = int()
         self.createdBy = str()
+        self.description = self.RECORD_SCHEMA.fields_dict["description"].default
         self.dueDate = self.RECORD_SCHEMA.fields_dict["dueDate"].default
         self.origin = self.RECORD_SCHEMA.fields_dict["origin"].default
         self.inferenceMetadata = self.RECORD_SCHEMA.fields_dict["inferenceMetadata"].default
@@ -774,7 +777,9 @@ class ActionRequestInfoClass(_Aspect):
 
     @property
     def assignedUsers(self) -> List[str]:
-        """The users this action request is assigned to
+        """The users this action request is assigned to.
+        By default, action requests are assigned to Dataset Owners
+        and with anyone who has the View Proposals platform privilege."""
         return self._inner_dict.get('assignedUsers') # type: ignore
 
     @assignedUsers.setter
@@ -784,7 +789,9 @@ class ActionRequestInfoClass(_Aspect):
 
     @property
    def assignedGroups(self) -> List[str]:
-        """The groups this action request is assigned to
+        """The groups this action request is assigned to
+        By default, action requests are assigned to Dataset Owners
+        and with anyone who has the View Proposals platform privilege."""
         return self._inner_dict.get('assignedGroups') # type: ignore
 
     @assignedGroups.setter
@@ -794,7 +801,8 @@ class ActionRequestInfoClass(_Aspect):
 
     @property
     def assignedRoles(self) -> Union[None, List[str]]:
-        """The roles this action request is assigned to
+        """The roles this action request is assigned to
+        By default, action requests are assigned any roles that have the View Proposals platform privilege."""
         return self._inner_dict.get('assignedRoles') # type: ignore
 
     @assignedRoles.setter
@@ -834,7 +842,8 @@ class ActionRequestInfoClass(_Aspect):
 
     @property
     def subResource(self) -> Union[None, str]:
-        """The sub-resource identifier that the action is associated with, for example 'fieldName'
+        """The sub-resource identifier that the action is associated with, for example 'fieldName'.
+        Currently, this is only used for Field Paths & schema fields."""
         return self._inner_dict.get('subResource') # type: ignore
 
     @subResource.setter
@@ -872,6 +881,17 @@ class ActionRequestInfoClass(_Aspect):
         self._inner_dict['createdBy'] = value
 
 
+    @property
+    def description(self) -> Union[None, str]:
+        """An optional description that can be added to the action request
+        to explain the intention behind it."""
+        return self._inner_dict.get('description') # type: ignore
+
+    @description.setter
+    def description(self, value: Union[None, str]) -> None:
+        self._inner_dict['description'] = value
+
+
     @property
     def dueDate(self) -> Union[None, int]:
         """The time at which the request is due"""
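
The hunks above add an optional description field to ActionRequestInfoClass, wired through the constructor, _restore_defaults, and a property backed by the aspect's inner dict. A minimal sketch of using it; building the instance with _construct_with_defaults() (the generated helper used elsewhere in this file, e.g. for AuditStampClass) is an assumption that sidesteps the constructor arguments these hunks do not show:

```python
from acryl_datahub_cloud.metadata.schema_classes import ActionRequestInfoClass

# Build an instance from schema defaults rather than spelling out the full
# constructor signature, which these hunks do not show.
info = ActionRequestInfoClass._construct_with_defaults()

# New in 0.3.9rc2: free-text context for the proposal.
info.description = "Proposing the PII tag because this column stores customer emails."
print(info.description)
```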
@@ -921,6 +941,9 @@ class ActionRequestParamsClass(DictWrapper):
     def __init__(self,
         glossaryTermProposal: Union[None, "GlossaryTermProposalClass"]=None,
         tagProposal: Union[None, "TagProposalClass"]=None,
+        domainProposal: Union[None, "DomainProposalClass"]=None,
+        ownerProposal: Union[None, "OwnerProposalClass"]=None,
+        structuredPropertyProposal: Union[None, "StructuredPropertyProposalClass"]=None,
         createGlossaryTermProposal: Union[None, "CreateGlossaryTermProposalClass"]=None,
         createGlossaryNodeProposal: Union[None, "CreateGlossaryNodeProposalClass"]=None,
         updateDescriptionProposal: Union[None, "DescriptionProposalClass"]=None,
@@ -930,6 +953,9 @@ class ActionRequestParamsClass(DictWrapper):
 
         self.glossaryTermProposal = glossaryTermProposal
         self.tagProposal = tagProposal
+        self.domainProposal = domainProposal
+        self.ownerProposal = ownerProposal
+        self.structuredPropertyProposal = structuredPropertyProposal
         self.createGlossaryTermProposal = createGlossaryTermProposal
         self.createGlossaryNodeProposal = createGlossaryNodeProposal
         self.updateDescriptionProposal = updateDescriptionProposal
@@ -938,6 +964,9 @@ class ActionRequestParamsClass(DictWrapper):
     def _restore_defaults(self) -> None:
         self.glossaryTermProposal = self.RECORD_SCHEMA.fields_dict["glossaryTermProposal"].default
         self.tagProposal = self.RECORD_SCHEMA.fields_dict["tagProposal"].default
+        self.domainProposal = self.RECORD_SCHEMA.fields_dict["domainProposal"].default
+        self.ownerProposal = self.RECORD_SCHEMA.fields_dict["ownerProposal"].default
+        self.structuredPropertyProposal = self.RECORD_SCHEMA.fields_dict["structuredPropertyProposal"].default
         self.createGlossaryTermProposal = self.RECORD_SCHEMA.fields_dict["createGlossaryTermProposal"].default
         self.createGlossaryNodeProposal = self.RECORD_SCHEMA.fields_dict["createGlossaryNodeProposal"].default
         self.updateDescriptionProposal = self.RECORD_SCHEMA.fields_dict["updateDescriptionProposal"].default
@@ -946,7 +975,8 @@ class ActionRequestParamsClass(DictWrapper):
 
     @property
     def glossaryTermProposal(self) -> Union[None, "GlossaryTermProposalClass"]:
-        """An optional set of information specific to term proposals.
+        """An optional set of information specific to term proposals.
+        TODO: Add validation that ensures that glossaryTerm or glossaryTerms field is provided, but not both."""
         return self._inner_dict.get('glossaryTermProposal') # type: ignore
 
     @glossaryTermProposal.setter
@@ -956,7 +986,8 @@ class ActionRequestParamsClass(DictWrapper):
 
     @property
     def tagProposal(self) -> Union[None, "TagProposalClass"]:
-        """An optional set of information specific to tag proposals.
+        """An optional set of information specific to tag proposals.
+        TODO: Add validation that ensures that tag or tagUrns field is provided, but not both."""
         return self._inner_dict.get('tagProposal') # type: ignore
 
     @tagProposal.setter
@@ -964,6 +995,36 @@ class ActionRequestParamsClass(DictWrapper):
         self._inner_dict['tagProposal'] = value
 
 
+    @property
+    def domainProposal(self) -> Union[None, "DomainProposalClass"]:
+        """An optional set of information specific to domain proposals."""
+        return self._inner_dict.get('domainProposal') # type: ignore
+
+    @domainProposal.setter
+    def domainProposal(self, value: Union[None, "DomainProposalClass"]) -> None:
+        self._inner_dict['domainProposal'] = value
+
+
+    @property
+    def ownerProposal(self) -> Union[None, "OwnerProposalClass"]:
+        """An optional set of information specific to ownership proposals."""
+        return self._inner_dict.get('ownerProposal') # type: ignore
+
+    @ownerProposal.setter
+    def ownerProposal(self, value: Union[None, "OwnerProposalClass"]) -> None:
+        self._inner_dict['ownerProposal'] = value
+
+
+    @property
+    def structuredPropertyProposal(self) -> Union[None, "StructuredPropertyProposalClass"]:
+        """An optional set of information specific to structured property proposals."""
+        return self._inner_dict.get('structuredPropertyProposal') # type: ignore
+
+    @structuredPropertyProposal.setter
+    def structuredPropertyProposal(self, value: Union[None, "StructuredPropertyProposalClass"]) -> None:
+        self._inner_dict['structuredPropertyProposal'] = value
+
+
     @property
     def createGlossaryTermProposal(self) -> Union[None, "CreateGlossaryTermProposalClass"]:
         """An optional set of information specific to proposals for creating new Glossary Terms."""
@@ -1016,16 +1077,19 @@ class ActionRequestStatusClass(_Aspect):
         status: str,
         lastModified: "AuditStampClass",
         result: Union[None, str]=None,
+        note: Union[None, str]=None,
     ):
         super().__init__()
 
         self.status = status
         self.result = result
+        self.note = note
         self.lastModified = lastModified
 
     def _restore_defaults(self) -> None:
         self.status = str()
         self.result = self.RECORD_SCHEMA.fields_dict["result"].default
+        self.note = self.RECORD_SCHEMA.fields_dict["note"].default
         self.lastModified = AuditStampClass._construct_with_defaults()
 
 
@@ -1049,6 +1113,17 @@ class ActionRequestStatusClass(_Aspect):
         self._inner_dict['result'] = value
 
 
+    @property
+    def note(self) -> Union[None, str]:
+        """Optional note associated with the status.
+        E.g. if the request is rejected, the reason for rejection. If the request is approved, the reason for approval."""
+        return self._inner_dict.get('note') # type: ignore
+
+    @note.setter
+    def note(self, value: Union[None, str]) -> None:
+        self._inner_dict['note'] = value
+
+
     @property
     def lastModified(self) -> "AuditStampClass":
         """Audit stamp containing who last modified the status and when."""
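
ActionRequestStatusClass gains an optional note, intended to carry the reason for an approval or rejection. A sketch of populating it; the AuditStampClass(time=..., actor=...) signature and the literal status value come from the base DataHub model rather than this diff, so treat them as assumptions:

```python
from acryl_datahub_cloud.metadata.schema_classes import (
    ActionRequestStatusClass,
    AuditStampClass,
)

status = ActionRequestStatusClass(
    status="COMPLETED",  # illustrative status value, not listed in this diff
    lastModified=AuditStampClass(time=1735689600000, actor="urn:li:corpuser:jdoe"),
    note="Approved after review by the data governance team.",
)
```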
@@ -1265,56 +1340,164 @@ class DescriptionProposalClass(DictWrapper):
         self._inner_dict['description'] = value
 
 
+class DomainProposalClass(DictWrapper):
+    # No docs available.
+
+    RECORD_SCHEMA = get_schema_type("com.linkedin.pegasus2avro.actionrequest.DomainProposal")
+    def __init__(self,
+        domains: List[str],
+    ):
+        super().__init__()
+
+        self.domains = domains
+
+    def _restore_defaults(self) -> None:
+        self.domains = list()
+
+
+    @property
+    def domains(self) -> List[str]:
+        """The urns of the domain(s) being proposed. Currently, only 1 domain is supported per asset.
+        If this changes in the future, this data modeling will suffice."""
+        return self._inner_dict.get('domains') # type: ignore
+
+    @domains.setter
+    def domains(self, value: List[str]) -> None:
+        self._inner_dict['domains'] = value
+
+
 class GlossaryTermProposalClass(DictWrapper):
     # No docs available.
 
     RECORD_SCHEMA = get_schema_type("com.linkedin.pegasus2avro.actionrequest.GlossaryTermProposal")
     def __init__(self,
-        glossaryTerm: str,
+        glossaryTerm: Union[None, str]=None,
+        glossaryTerms: Union[None, List[str]]=None,
     ):
         super().__init__()
 
         self.glossaryTerm = glossaryTerm
+        self.glossaryTerms = glossaryTerms
 
     def _restore_defaults(self) -> None:
-        self.glossaryTerm =
+        self.glossaryTerm = self.RECORD_SCHEMA.fields_dict["glossaryTerm"].default
+        self.glossaryTerms = self.RECORD_SCHEMA.fields_dict["glossaryTerms"].default
 
 
     @property
-    def glossaryTerm(self) -> str:
-        """
+    def glossaryTerm(self) -> Union[None, str]:
+        """This field is deprecated and will be removed in a future version. Use glossaryTerms instead.
+        The urn of the glossary term being proposed."""
         return self._inner_dict.get('glossaryTerm') # type: ignore
 
     @glossaryTerm.setter
-    def glossaryTerm(self, value: str) -> None:
+    def glossaryTerm(self, value: Union[None, str]) -> None:
         self._inner_dict['glossaryTerm'] = value
 
 
+    @property
+    def glossaryTerms(self) -> Union[None, List[str]]:
+        """The urns of the glossary terms being proposed.
+        Use this field over glossaryTerm."""
+        return self._inner_dict.get('glossaryTerms') # type: ignore
+
+    @glossaryTerms.setter
+    def glossaryTerms(self, value: Union[None, List[str]]) -> None:
+        self._inner_dict['glossaryTerms'] = value
+
+
+class OwnerProposalClass(DictWrapper):
+    # No docs available.
+
+    RECORD_SCHEMA = get_schema_type("com.linkedin.pegasus2avro.actionrequest.OwnerProposal")
+    def __init__(self,
+        owners: Union[None, List["OwnerClass"]]=None,
+    ):
+        super().__init__()
+
+        self.owners = owners
+
+    def _restore_defaults(self) -> None:
+        self.owners = self.RECORD_SCHEMA.fields_dict["owners"].default
+
+
+    @property
+    def owners(self) -> Union[None, List["OwnerClass"]]:
+        """The urns of the owner(s) being proposed."""
+        return self._inner_dict.get('owners') # type: ignore
+
+    @owners.setter
+    def owners(self, value: Union[None, List["OwnerClass"]]) -> None:
+        self._inner_dict['owners'] = value
+
+
+class StructuredPropertyProposalClass(DictWrapper):
+    # No docs available.
+
+    RECORD_SCHEMA = get_schema_type("com.linkedin.pegasus2avro.actionrequest.StructuredPropertyProposal")
+    def __init__(self,
+        structuredPropertyValues: Union[None, List["StructuredPropertyValueAssignmentClass"]]=None,
+    ):
+        super().__init__()
+
+        self.structuredPropertyValues = structuredPropertyValues
+
+    def _restore_defaults(self) -> None:
+        self.structuredPropertyValues = self.RECORD_SCHEMA.fields_dict["structuredPropertyValues"].default
+
+
+    @property
+    def structuredPropertyValues(self) -> Union[None, List["StructuredPropertyValueAssignmentClass"]]:
+        """The urns of the domain(s) being proposed. Currently, only 1 domain is supported per asset.
+        If this changes in the future, the data model will be ready.
+        TODO: Decide if indexing the value would also be useful."""
+        return self._inner_dict.get('structuredPropertyValues') # type: ignore
+
+    @structuredPropertyValues.setter
+    def structuredPropertyValues(self, value: Union[None, List["StructuredPropertyValueAssignmentClass"]]) -> None:
+        self._inner_dict['structuredPropertyValues'] = value
+
+
 class TagProposalClass(DictWrapper):
     # No docs available.
 
     RECORD_SCHEMA = get_schema_type("com.linkedin.pegasus2avro.actionrequest.TagProposal")
     def __init__(self,
-        tag: str,
+        tag: Union[None, str]=None,
+        tags: Union[None, List[str]]=None,
     ):
         super().__init__()
 
         self.tag = tag
+        self.tags = tags
 
     def _restore_defaults(self) -> None:
-        self.tag =
+        self.tag = self.RECORD_SCHEMA.fields_dict["tag"].default
+        self.tags = self.RECORD_SCHEMA.fields_dict["tags"].default
 
 
     @property
-    def tag(self) -> str:
-        """
+    def tag(self) -> Union[None, str]:
+        """This field is deprecated and will be removed in a future version. Use tags instead.
+        The urn of the tag being proposed."""
         return self._inner_dict.get('tag') # type: ignore
 
     @tag.setter
-    def tag(self, value: str) -> None:
+    def tag(self, value: Union[None, str]) -> None:
         self._inner_dict['tag'] = value
 
 
+    @property
+    def tags(self) -> Union[None, List[str]]:
+        """The urns of the glossary terms being proposed.
+        Use this field over glossaryTerm."""
+        return self._inner_dict.get('tags') # type: ignore
+
+    @tags.setter
+    def tags(self, value: Union[None, List[str]]) -> None:
+        self._inner_dict['tags'] = value
+
+
 class AiInferenceMetadataClass(_Aspect):
     """AI Inference Metadata of various types."""
 
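
The block above introduces DomainProposalClass, OwnerProposalClass, and StructuredPropertyProposalClass, and relaxes GlossaryTermProposalClass/TagProposalClass so the singular glossaryTerm/tag fields become optional and deprecated in favor of the plural glossaryTerms/tags. A small sketch combining them through ActionRequestParamsClass; the urns are placeholders:

```python
from acryl_datahub_cloud.metadata.schema_classes import (
    ActionRequestParamsClass,
    DomainProposalClass,
    GlossaryTermProposalClass,
    TagProposalClass,
)

# Prefer the plural fields; the singular `tag` / `glossaryTerm` fields are
# documented as deprecated in this release.
params = ActionRequestParamsClass(
    tagProposal=TagProposalClass(tags=["urn:li:tag:pii"]),
    glossaryTermProposal=GlossaryTermProposalClass(
        glossaryTerms=["urn:li:glossaryTerm:EmailAddress"],
    ),
    domainProposal=DomainProposalClass(domains=["urn:li:domain:finance"]),
)
print(params.domainProposal.domains)
```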
@@ -3296,13 +3479,15 @@ class AssertionSourceTypeClass(object):
     # No docs available.
 
     NATIVE = "NATIVE"
-    """The assertion was defined natively on DataHub by a user.
+    """The assertion was defined natively on DataHub by a user.
+    DataHub Cloud only"""
 
     EXTERNAL = "EXTERNAL"
     """The assertion was defined and managed externally of DataHub."""
 
     INFERRED = "INFERRED"
-    """The assertion was inferred, e.g. from offline AI / ML models.
+    """The assertion was inferred, e.g. from offline AI / ML models.
+    DataHub Cloud only"""
 
 
 
@@ -8759,7 +8944,7 @@ class OperationTypeClass(object):
     """Asset was dropped"""
 
     CUSTOM = "CUSTOM"
-    """Custom asset operation"""
+    """Custom asset operation. If this is set, ensure customOperationType is filled out."""
 
     UNKNOWN = "UNKNOWN"
 
@@ -9866,6 +10051,7 @@ class VersionPropertiesClass(_Aspect):
         sortId: str,
         aliases: Optional[List["VersionTagClass"]]=None,
         comment: Union[None, str]=None,
+        versioningScheme: Optional[Union[str, "VersioningSchemeClass"]]=None,
         sourceCreatedTimestamp: Union[None, "AuditStampClass"]=None,
         metadataCreatedTimestamp: Union[None, "AuditStampClass"]=None,
         isLatest: Union[None, bool]=None,
@@ -9881,6 +10067,11 @@ class VersionPropertiesClass(_Aspect):
         self.aliases = aliases
         self.comment = comment
         self.sortId = sortId
+        if versioningScheme is None:
+            # default: 'LEXICOGRAPHIC_STRING'
+            self.versioningScheme = self.RECORD_SCHEMA.fields_dict["versioningScheme"].default
+        else:
+            self.versioningScheme = versioningScheme
         self.sourceCreatedTimestamp = sourceCreatedTimestamp
         self.metadataCreatedTimestamp = metadataCreatedTimestamp
         self.isLatest = isLatest
@@ -9891,6 +10082,7 @@ class VersionPropertiesClass(_Aspect):
         self.aliases = list()
         self.comment = self.RECORD_SCHEMA.fields_dict["comment"].default
         self.sortId = str()
+        self.versioningScheme = self.RECORD_SCHEMA.fields_dict["versioningScheme"].default
         self.sourceCreatedTimestamp = self.RECORD_SCHEMA.fields_dict["sourceCreatedTimestamp"].default
         self.metadataCreatedTimestamp = self.RECORD_SCHEMA.fields_dict["metadataCreatedTimestamp"].default
         self.isLatest = self.RECORD_SCHEMA.fields_dict["isLatest"].default
@@ -9947,6 +10139,17 @@ class VersionPropertiesClass(_Aspect):
         self._inner_dict['sortId'] = value
 
 
+    @property
+    def versioningScheme(self) -> Union[str, "VersioningSchemeClass"]:
+        """What versioning scheme `sortId` belongs to.
+        Defaults to a plain string that is lexicographically sorted."""
+        return self._inner_dict.get('versioningScheme') # type: ignore
+
+    @versioningScheme.setter
+    def versioningScheme(self, value: Union[str, "VersioningSchemeClass"]) -> None:
+        self._inner_dict['versioningScheme'] = value
+
+
     @property
     def sourceCreatedTimestamp(self) -> Union[None, "AuditStampClass"]:
         """Timestamp reflecting when this asset version was created in the source system."""
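
VersionPropertiesClass now carries a versioningScheme alongside sortId, defaulting to the lexicographic string scheme named in the generated comment. A sketch of reading and overriding it; using _construct_with_defaults() (the generated helper seen elsewhere in this file) is an assumption that avoids the required constructor arguments these hunks do not show:

```python
from acryl_datahub_cloud.metadata.schema_classes import VersionPropertiesClass

props = VersionPropertiesClass._construct_with_defaults()

# 'LEXICOGRAPHIC_STRING' is the default named in the generated comment above;
# other VersioningSchemeClass values are not listed in this diff.
props.versioningScheme = "LEXICOGRAPHIC_STRING"
print(props.versioningScheme)
```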
@@ -13201,6 +13404,113 @@ class DataPlatformInstancePropertiesClass(_Aspect):
         self._inner_dict['description'] = value
 
 
+class IcebergWarehouseInfoClass(_Aspect):
+    """An Iceberg warehouse location and credentails whose read/writes are governed by datahub catalog."""
+
+
+    ASPECT_NAME = 'icebergWarehouseInfo'
+    ASPECT_INFO = {}
+    RECORD_SCHEMA = get_schema_type("com.linkedin.pegasus2avro.dataplatforminstance.IcebergWarehouseInfo")
+
+    def __init__(self,
+        dataRoot: str,
+        clientId: str,
+        clientSecret: str,
+        region: str,
+        env: Union[str, "FabricTypeClass"],
+        role: Union[None, str]=None,
+        tempCredentialExpirationSeconds: Union[None, int]=None,
+    ):
+        super().__init__()
+
+        self.dataRoot = dataRoot
+        self.clientId = clientId
+        self.clientSecret = clientSecret
+        self.region = region
+        self.role = role
+        self.tempCredentialExpirationSeconds = tempCredentialExpirationSeconds
+        self.env = env
+
+    def _restore_defaults(self) -> None:
+        self.dataRoot = str()
+        self.clientId = str()
+        self.clientSecret = str()
+        self.region = str()
+        self.role = self.RECORD_SCHEMA.fields_dict["role"].default
+        self.tempCredentialExpirationSeconds = self.RECORD_SCHEMA.fields_dict["tempCredentialExpirationSeconds"].default
+        self.env = FabricTypeClass.DEV
+
+
+    @property
+    def dataRoot(self) -> str:
+        """Path of the root for the backing store of the tables in the warehouse."""
+        return self._inner_dict.get('dataRoot') # type: ignore
+
+    @dataRoot.setter
+    def dataRoot(self, value: str) -> None:
+        self._inner_dict['dataRoot'] = value
+
+
+    @property
+    def clientId(self) -> str:
+        """clientId to be used to authenticate with storage hosting this warehouse"""
+        return self._inner_dict.get('clientId') # type: ignore
+
+    @clientId.setter
+    def clientId(self, value: str) -> None:
+        self._inner_dict['clientId'] = value
+
+
+    @property
+    def clientSecret(self) -> str:
+        """client secret to authenticate with storage hosting this warehouse"""
+        return self._inner_dict.get('clientSecret') # type: ignore
+
+    @clientSecret.setter
+    def clientSecret(self, value: str) -> None:
+        self._inner_dict['clientSecret'] = value
+
+
+    @property
+    def region(self) -> str:
+        """region where the warehouse is located."""
+        return self._inner_dict.get('region') # type: ignore
+
+    @region.setter
+    def region(self, value: str) -> None:
+        self._inner_dict['region'] = value
+
+
+    @property
+    def role(self) -> Union[None, str]:
+        # No docs available.
+        return self._inner_dict.get('role') # type: ignore
+
+    @role.setter
+    def role(self, value: Union[None, str]) -> None:
+        self._inner_dict['role'] = value
+
+
+    @property
+    def tempCredentialExpirationSeconds(self) -> Union[None, int]:
+        # No docs available.
+        return self._inner_dict.get('tempCredentialExpirationSeconds') # type: ignore
+
+    @tempCredentialExpirationSeconds.setter
+    def tempCredentialExpirationSeconds(self, value: Union[None, int]) -> None:
+        self._inner_dict['tempCredentialExpirationSeconds'] = value
+
+
+    @property
+    def env(self) -> Union[str, "FabricTypeClass"]:
+        # No docs available.
+        return self._inner_dict.get('env') # type: ignore
+
+    @env.setter
+    def env(self, value: Union[str, "FabricTypeClass"]) -> None:
+        self._inner_dict['env'] = value
+
+
 class DataProcessInfoClass(_Aspect):
     """The inputs and outputs of this data process"""
 
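
IcebergWarehouseInfoClass is a new aspect on data platform instances describing the storage root and credentials DataHub uses when acting as the Iceberg catalog. A sketch using only the fields shown above; the bucket path, region, role, and credential values are placeholders:

```python
from acryl_datahub_cloud.metadata.schema_classes import (
    FabricTypeClass,
    IcebergWarehouseInfoClass,
)

warehouse = IcebergWarehouseInfoClass(
    dataRoot="s3://example-bucket/iceberg/",
    clientId="EXAMPLE_CLIENT_ID",
    clientSecret="EXAMPLE_CLIENT_SECRET",
    region="us-west-2",
    env=FabricTypeClass.DEV,  # DEV is the generated default shown above
    role="arn:aws:iam::123456789012:role/example-warehouse-role",
    tempCredentialExpirationSeconds=3600,
)
```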
@@ -13264,7 +13574,7 @@ class DataProcessInstanceInputClass(_Aspect):
 
     @property
     def inputs(self) -> List[str]:
-        """Input
+        """Input assets consumed"""
         return self._inner_dict.get('inputs') # type: ignore
 
     @inputs.setter
@@ -15186,6 +15496,49 @@ class HistogramClass(DictWrapper):
         self._inner_dict['heights'] = value
 
 
+class IcebergCatalogInfoClass(_Aspect):
+    """Iceberg Catalog metadata associated with an Iceberg table/view"""
+
+
+    ASPECT_NAME = 'icebergCatalogInfo'
+    ASPECT_INFO = {}
+    RECORD_SCHEMA = get_schema_type("com.linkedin.pegasus2avro.dataset.IcebergCatalogInfo")
+
+    def __init__(self,
+        metadataPointer: Union[None, str]=None,
+        view: Union[None, bool]=None,
+    ):
+        super().__init__()
+
+        self.metadataPointer = metadataPointer
+        self.view = view
+
+    def _restore_defaults(self) -> None:
+        self.metadataPointer = self.RECORD_SCHEMA.fields_dict["metadataPointer"].default
+        self.view = self.RECORD_SCHEMA.fields_dict["view"].default
+
+
+    @property
+    def metadataPointer(self) -> Union[None, str]:
+        """When Datahub is the REST Catalog for an Iceberg Table, stores the current metadata pointer.
+        If the Iceberg table is managed by an external catalog, the metadata pointer is not set."""
+        return self._inner_dict.get('metadataPointer') # type: ignore
+
+    @metadataPointer.setter
+    def metadataPointer(self, value: Union[None, str]) -> None:
+        self._inner_dict['metadataPointer'] = value
+
+
+    @property
+    def view(self) -> Union[None, bool]:
+        # No docs available.
+        return self._inner_dict.get('view') # type: ignore
+
+    @view.setter
+    def view(self, value: Union[None, bool]) -> None:
+        self._inner_dict['view'] = value
+
+
 class PartitionSummaryClass(DictWrapper):
     """Defines how the data is partitioned"""
 
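
IcebergCatalogInfoClass is the dataset-side counterpart: it records the current metadata pointer when DataHub itself is the Iceberg REST catalog, and whether the entity is a view. A sketch with placeholder values:

```python
from acryl_datahub_cloud.metadata.schema_classes import IcebergCatalogInfoClass

catalog_info = IcebergCatalogInfoClass(
    metadataPointer="s3://example-bucket/iceberg/db/table/metadata/00002-example.metadata.json",
    view=False,
)
```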
@@ -16969,7 +17322,7 @@ class ExecutionRequestInputClass(_Aspect):
 
     @property
     def executorId(self) -> str:
-        """Advanced: specify a specific executor to route the request to. If none is provided, a "default" executor is used."""
+        """Advanced: specify a specific executor pool to route the request to. If none is provided, a "default" embedded executor is used."""
         return self._inner_dict.get('executorId') # type: ignore
 
     @executorId.setter
@@ -17148,7 +17501,7 @@ class ExecutionRequestSignalClass(_Aspect):
 
     @property
     def executorId(self) -> Union[None, str]:
-        """Advanced: specify a specific executor to route the request to. If none is provided, a
+        """Advanced: specify a specific executor pool to route the request to. If none is provided, a default embedded executor is used."""
         return self._inner_dict.get('executorId') # type: ignore
 
     @executorId.setter
@@ -17279,7 +17632,7 @@ class RemoteExecutorStatusClass(_Aspect):
     RECORD_SCHEMA = get_schema_type("com.linkedin.pegasus2avro.executor.RemoteExecutorStatus")
 
     def __init__(self,
-
+        executorPoolId: str,
         executorReleaseVersion: str,
         executorAddress: str,
         executorHostname: str,
@@ -17293,7 +17646,7 @@ class RemoteExecutorStatusClass(_Aspect):
     ):
         super().__init__()
 
-        self.
+        self.executorPoolId = executorPoolId
         self.executorReleaseVersion = executorReleaseVersion
         self.executorAddress = executorAddress
         self.executorHostname = executorHostname
@@ -17326,7 +17679,7 @@ class RemoteExecutorStatusClass(_Aspect):
         self.reportedAt = reportedAt
 
     def _restore_defaults(self) -> None:
-        self.
+        self.executorPoolId = str()
         self.executorReleaseVersion = str()
         self.executorAddress = str()
         self.executorHostname = str()
@@ -17340,13 +17693,13 @@ class RemoteExecutorStatusClass(_Aspect):
 
 
     @property
-    def
-        """References the '
-        return self._inner_dict.get('
+    def executorPoolId(self) -> str:
+        """References the 'id' defined in RemoteExecutorPoolKey"""
+        return self._inner_dict.get('executorPoolId') # type: ignore
 
-    @
-    def
-        self._inner_dict['
+    @executorPoolId.setter
+    def executorPoolId(self, value: str) -> None:
+        self._inner_dict['executorPoolId'] = value
 
 
     @property
@@ -17391,7 +17744,8 @@ class RemoteExecutorStatusClass(_Aspect):
 
     @property
     def executorExpired(self) -> bool:
-        """Flag indicating whether remote executor status record is stale.
+        """Flag indicating whether remote executor status record is stale.
+        This means executor no longer sends heartbeats and considered dead."""
         return self._inner_dict.get('executorExpired') # type: ignore
 
     @executorExpired.setter
@@ -17401,7 +17755,7 @@ class RemoteExecutorStatusClass(_Aspect):
 
     @property
     def executorStopped(self) -> bool:
-        """Flag indicating whether remote executor is stopped."""
+        """Flag indicating whether remote executor is stopped, and properly reported its termination."""
         return self._inner_dict.get('executorStopped') # type: ignore
 
     @executorStopped.setter
@@ -17411,7 +17765,7 @@ class RemoteExecutorStatusClass(_Aspect):
 
     @property
     def executorEmbedded(self) -> bool:
-        """Flag indicating whether remote executor is embedded
+        """Flag indicating whether remote executor is embedded into the coordinator pod"""
         return self._inner_dict.get('executorEmbedded') # type: ignore
 
     @executorEmbedded.setter
@@ -17450,6 +17804,36 @@ class RemoteExecutorStatusClass(_Aspect):
         self._inner_dict['reportedAt'] = value
 
 
+class RemoteExecutorPoolGlobalConfigClass(_Aspect):
+    """Global singleton storing configs for remote executor pools."""
+
+
+    ASPECT_NAME = 'dataHubRemoteExecutorPoolGlobalConfig'
+    ASPECT_INFO = {}
+    RECORD_SCHEMA = get_schema_type("com.linkedin.pegasus2avro.executorglobalconfig.RemoteExecutorPoolGlobalConfig")
+
+    def __init__(self,
+        defaultExecutorPoolId: str,
+    ):
+        super().__init__()
+
+        self.defaultExecutorPoolId = defaultExecutorPoolId
+
+    def _restore_defaults(self) -> None:
+        self.defaultExecutorPoolId = str()
+
+
+    @property
+    def defaultExecutorPoolId(self) -> str:
+        """The default pool to use for tasks that require remote executors.
+        References 'id' in com.linkedin.pegasus2avro.metadata.key.RemoteExecutorPoolKey"""
+        return self._inner_dict.get('defaultExecutorPoolId') # type: ignore
+
+    @defaultExecutorPoolId.setter
+    def defaultExecutorPoolId(self, value: str) -> None:
+        self._inner_dict['defaultExecutorPoolId'] = value
+
+
 class RemoteExecutorPoolInfoClass(_Aspect):
     # No docs available.
 
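
RemoteExecutorPoolGlobalConfigClass is a singleton aspect that names the default executor pool. A one-line sketch; 'default' mirrors the pool id referenced in the executorId docstrings earlier in this diff, and a real deployment would point this at an existing RemoteExecutorPoolKey id:

```python
from acryl_datahub_cloud.metadata.schema_classes import RemoteExecutorPoolGlobalConfigClass

global_config = RemoteExecutorPoolGlobalConfigClass(defaultExecutorPoolId="default")
```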
@@ -17460,13 +17844,28 @@ class RemoteExecutorPoolInfoClass(_Aspect):
 
     def __init__(self,
         createdAt: int,
+        creator: Union[None, str]=None,
+        description: Union[None, str]=None,
+        queueUrl: Union[None, str]=None,
+        isEmbedded: Union[None, bool]=None,
+        state: Union[None, "RemoteExecutorPoolStateClass"]=None,
     ):
         super().__init__()
 
         self.createdAt = createdAt
+        self.creator = creator
+        self.description = description
+        self.queueUrl = queueUrl
+        self.isEmbedded = isEmbedded
+        self.state = state
 
     def _restore_defaults(self) -> None:
         self.createdAt = int()
+        self.creator = self.RECORD_SCHEMA.fields_dict["creator"].default
+        self.description = self.RECORD_SCHEMA.fields_dict["description"].default
+        self.queueUrl = self.RECORD_SCHEMA.fields_dict["queueUrl"].default
+        self.isEmbedded = self.RECORD_SCHEMA.fields_dict["isEmbedded"].default
+        self.state = self.RECORD_SCHEMA.fields_dict["state"].default
 
 
     @property
@@ -17479,6 +17878,111 @@ class RemoteExecutorPoolInfoClass(_Aspect):
         self._inner_dict['createdAt'] = value
 
 
+    @property
+    def creator(self) -> Union[None, str]:
+        """The creator of this pool"""
+        return self._inner_dict.get('creator') # type: ignore
+
+    @creator.setter
+    def creator(self, value: Union[None, str]) -> None:
+        self._inner_dict['creator'] = value
+
+
+    @property
+    def description(self) -> Union[None, str]:
+        """A description for this pool"""
+        return self._inner_dict.get('description') # type: ignore
+
+    @description.setter
+    def description(self, value: Union[None, str]) -> None:
+        self._inner_dict['description'] = value
+
+
+    @property
+    def queueUrl(self) -> Union[None, str]:
+        """The url to the task queue for this pool. I.e., SQS queue url."""
+        return self._inner_dict.get('queueUrl') # type: ignore
+
+    @queueUrl.setter
+    def queueUrl(self, value: Union[None, str]) -> None:
+        self._inner_dict['queueUrl'] = value
+
+
+    @property
+    def isEmbedded(self) -> Union[None, bool]:
+        """Only set true if this is the pool embedded within the DataHub Cloud deployment"""
+        return self._inner_dict.get('isEmbedded') # type: ignore
+
+    @isEmbedded.setter
+    def isEmbedded(self, value: Union[None, bool]) -> None:
+        self._inner_dict['isEmbedded'] = value
+
+
+    @property
+    def state(self) -> Union[None, "RemoteExecutorPoolStateClass"]:
+        """The status of the remote executor pool"""
+        return self._inner_dict.get('state') # type: ignore
+
+    @state.setter
+    def state(self, value: Union[None, "RemoteExecutorPoolStateClass"]) -> None:
+        self._inner_dict['state'] = value
+
+
+class RemoteExecutorPoolStateClass(DictWrapper):
+    # No docs available.
+
+    RECORD_SCHEMA = get_schema_type("com.linkedin.pegasus2avro.executorpool.RemoteExecutorPoolState")
+    def __init__(self,
+        status: Union[str, "RemoteExecutorPoolStatusClass"],
+        message: Union[None, str]=None,
+    ):
+        super().__init__()
+
+        self.status = status
+        self.message = message
+
+    def _restore_defaults(self) -> None:
+        self.status = RemoteExecutorPoolStatusClass.PROVISIONING_PENDING
+        self.message = self.RECORD_SCHEMA.fields_dict["message"].default
+
+
+    @property
+    def status(self) -> Union[str, "RemoteExecutorPoolStatusClass"]:
+        """The status of the remote executor pool"""
+        return self._inner_dict.get('status') # type: ignore
+
+    @status.setter
+    def status(self, value: Union[str, "RemoteExecutorPoolStatusClass"]) -> None:
+        self._inner_dict['status'] = value
+
+
+    @property
+    def message(self) -> Union[None, str]:
+        """The message associated with the status. I.e., an error message explaining failure."""
+        return self._inner_dict.get('message') # type: ignore
+
+    @message.setter
+    def message(self, value: Union[None, str]) -> None:
+        self._inner_dict['message'] = value
+
+
+class RemoteExecutorPoolStatusClass(object):
+    # No docs available.
+
+    PROVISIONING_PENDING = "PROVISIONING_PENDING"
+    """The pool is pending provisioning. Default state on creation."""
+
+    PROVISIONING_IN_PROGRESS = "PROVISIONING_IN_PROGRESS"
+    """The pool has been picked up by DataHub and is in the process of being provisioned."""
+
+    PROVISIONING_FAILED = "PROVISIONING_FAILED"
+    """The pool has failed to be provisioned."""
+
+    READY = "READY"
+    """The pool has been successfully provisioned and is ready to accept tasks."""
+
+
+
 class DomainParamsClass(DictWrapper):
     """Information specific to glossary terms prompts."""
 
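
Together, RemoteExecutorPoolInfoClass, RemoteExecutorPoolStateClass, and RemoteExecutorPoolStatusClass describe a pool, its queue, and its provisioning lifecycle (PROVISIONING_PENDING → PROVISIONING_IN_PROGRESS → READY, or PROVISIONING_FAILED). A sketch using only fields from the hunk above; the creator urn and queue URL are placeholders:

```python
import time

from acryl_datahub_cloud.metadata.schema_classes import (
    RemoteExecutorPoolInfoClass,
    RemoteExecutorPoolStateClass,
    RemoteExecutorPoolStatusClass,
)

pool_info = RemoteExecutorPoolInfoClass(
    createdAt=int(time.time() * 1000),  # epoch timestamp; the aspect only declares an int
    creator="urn:li:corpuser:jdoe",
    description="Remote executors for the analytics VPC",
    queueUrl="https://sqs.us-west-2.amazonaws.com/123456789012/example-queue",
    isEmbedded=False,
    state=RemoteExecutorPoolStateClass(
        status=RemoteExecutorPoolStatusClass.PROVISIONING_PENDING,
    ),
)
```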
@@ -20662,7 +21166,7 @@ class DataHubIngestionSourceConfigClass(DictWrapper):
 
     @property
     def executorId(self) -> Union[None, str]:
-        """The id of the executor to use to execute the ingestion run"""
+        """The id of the executor pool to use to execute the ingestion run"""
         return self._inner_dict.get('executorId') # type: ignore
 
     @executorId.setter
@@ -21015,7 +21519,7 @@ class ChartKeyClass(_Aspect):
 
 
     ASPECT_NAME = 'chartKey'
-    ASPECT_INFO = {'keyForEntity': 'chart', 'entityCategory': 'core', 'entityAspects': ['chartInfo', 'editableChartProperties', 'chartQuery', 'inputFields', '
+    ASPECT_INFO = {'keyForEntity': 'chart', 'entityCategory': 'core', 'entityAspects': ['chartInfo', 'editableChartProperties', 'chartQuery', 'inputFields', 'chartUsageStatistics', 'embed', 'browsePaths', 'domains', 'container', 'deprecation', 'ownership', 'status', 'institutionalMemory', 'dataPlatformInstance', 'globalTags', 'glossaryTerms', 'browsePathsV2', 'subTypes', 'structuredProperties', 'incidentsSummary', 'forms', 'testResults', 'usageFeatures', 'lineageFeatures', 'proposals', 'share', 'origin', 'documentation']}
     RECORD_SCHEMA = get_schema_type("com.linkedin.pegasus2avro.metadata.key.ChartKey")
 
     def __init__(self,
@@ -21086,7 +21590,7 @@ class ContainerKeyClass(_Aspect):
 
 
     ASPECT_NAME = 'containerKey'
-    ASPECT_INFO = {'keyForEntity': 'container', 'entityCategory': 'core', 'entityAspects': ['containerProperties', 'editableContainerProperties', 'dataPlatformInstance', 'subTypes', 'ownership', 'deprecation', 'container', 'globalTags', 'glossaryTerms', 'institutionalMemory', 'browsePaths', 'status', 'domains', '
+    ASPECT_INFO = {'keyForEntity': 'container', 'entityCategory': 'core', 'entityAspects': ['containerProperties', 'editableContainerProperties', 'dataPlatformInstance', 'subTypes', 'ownership', 'deprecation', 'container', 'globalTags', 'glossaryTerms', 'institutionalMemory', 'browsePaths', 'status', 'domains', 'browsePathsV2', 'structuredProperties', 'forms', 'testResults', 'access', 'proposals', 'share', 'origin', 'documentation'], 'entityDoc': 'A container of related data assets.'}
     RECORD_SCHEMA = get_schema_type("com.linkedin.pegasus2avro.metadata.key.ContainerKey")
 
     def __init__(self,
@@ -21115,7 +21619,7 @@ class CorpGroupKeyClass(_Aspect):
 
 
     ASPECT_NAME = 'corpGroupKey'
-    ASPECT_INFO = {'keyForEntity': 'corpGroup', 'entityCategory': '_unset_', 'entityAspects': ['corpGroupInfo', 'corpGroupEditableInfo', 'globalTags', 'ownership', 'status', 'origin', '
+    ASPECT_INFO = {'keyForEntity': 'corpGroup', 'entityCategory': '_unset_', 'entityAspects': ['corpGroupInfo', 'corpGroupEditableInfo', 'globalTags', 'ownership', 'status', 'origin', 'roleMembership', 'structuredProperties', 'forms', 'testResults', 'corpGroupSettings', 'share'], 'entityDoc': 'CorpGroup represents an identity of a group of users in the enterprise.'}
     RECORD_SCHEMA = get_schema_type("com.linkedin.pegasus2avro.metadata.key.CorpGroupKey")
 
     def __init__(self,
@@ -21173,7 +21677,7 @@ class DashboardKeyClass(_Aspect):
 
 
     ASPECT_NAME = 'dashboardKey'
-    ASPECT_INFO = {'keyForEntity': 'dashboard', 'entityCategory': '_unset_', 'entityAspects': ['domains', 'container', 'deprecation', 'dashboardUsageStatistics', 'inputFields', '
+    ASPECT_INFO = {'keyForEntity': 'dashboard', 'entityCategory': '_unset_', 'entityAspects': ['domains', 'container', 'deprecation', 'dashboardUsageStatistics', 'inputFields', 'subTypes', 'embed', 'dashboardInfo', 'editableDashboardProperties', 'ownership', 'status', 'globalTags', 'browsePaths', 'glossaryTerms', 'institutionalMemory', 'dataPlatformInstance', 'browsePathsV2', 'structuredProperties', 'incidentsSummary', 'forms', 'testResults', 'usageFeatures', 'lineageFeatures', 'proposals', 'share', 'origin', 'documentation']}
     RECORD_SCHEMA = get_schema_type("com.linkedin.pegasus2avro.metadata.key.DashboardKey")
 
     def __init__(self,
@@ -21244,7 +21748,7 @@ class DataFlowKeyClass(_Aspect):
 
 
     ASPECT_NAME = 'dataFlowKey'
-    ASPECT_INFO = {'keyForEntity': 'dataFlow', 'entityCategory': 'core', 'entityAspects': ['domains', 'deprecation', 'versionInfo', '
+    ASPECT_INFO = {'keyForEntity': 'dataFlow', 'entityCategory': 'core', 'entityAspects': ['domains', 'deprecation', 'versionInfo', 'dataFlowInfo', 'editableDataFlowProperties', 'ownership', 'status', 'globalTags', 'browsePaths', 'glossaryTerms', 'institutionalMemory', 'dataPlatformInstance', 'container', 'browsePathsV2', 'structuredProperties', 'incidentsSummary', 'forms', 'subTypes', 'testResults', 'proposals', 'share', 'origin', 'lineageFeatures', 'documentation']}
     RECORD_SCHEMA = get_schema_type("com.linkedin.pegasus2avro.metadata.key.DataFlowKey")
 
     def __init__(self,
@@ -21357,7 +21861,7 @@ class DataHubConnectionKeyClass(_Aspect):
 
 
     ASPECT_NAME = 'dataHubConnectionKey'
-    ASPECT_INFO = {'keyForEntity': 'dataHubConnection', 'entityCategory': 'internal', 'entityAspects': ['dataHubConnectionDetails', 'dataPlatformInstance']}
+    ASPECT_INFO = {'keyForEntity': 'dataHubConnection', 'entityCategory': 'internal', 'entityAspects': ['dataHubConnectionDetails', 'dataPlatformInstance', 'status']}
     RECORD_SCHEMA = get_schema_type("com.linkedin.pegasus2avro.metadata.key.DataHubConnectionKey")
 
     def __init__(self,
@@ -21410,6 +21914,35 @@ class DataHubIngestionSourceKeyClass(_Aspect):
         self._inner_dict['id'] = value
 
 
+class DataHubMetricCubeKeyClass(_Aspect):
+    """Key for a DataHub Metric Cube, e.g. an internal metric."""
+
+
+    ASPECT_NAME = 'dataHubMetricCubeKey'
+    ASPECT_INFO = {'keyForEntity': 'dataHubMetricCube', 'entityCategory': 'internal', 'entityAspects': ['dataHubMetricCubeDefinition', 'dataHubMetricCubeEvent']}
+    RECORD_SCHEMA = get_schema_type("com.linkedin.pegasus2avro.metadata.key.DataHubMetricCubeKey")
+
+    def __init__(self,
+        id: str,
+    ):
+        super().__init__()
+
+        self.id = id
+
+    def _restore_defaults(self) -> None:
+        self.id = str()
+
+
+    @property
+    def id(self) -> str:
+        """Unique id for the cube type."""
+        return self._inner_dict.get('id') # type: ignore
+
+    @id.setter
+    def id(self, value: str) -> None:
+        self._inner_dict['id'] = value
+
+
 class DataHubPersonaKeyClass(_Aspect):
     """Key for a persona type"""
 
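
DataHubMetricCubeKeyClass keys the new internal dataHubMetricCube entity, whose definition and event aspects appear in the schema files listed at the top of this diff. A minimal sketch with a placeholder id:

```python
from acryl_datahub_cloud.metadata.schema_classes import DataHubMetricCubeKeyClass

cube_key = DataHubMetricCubeKeyClass(id="example-usage-cube")
print(cube_key.ASPECT_NAME)  # 'dataHubMetricCubeKey'
```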
@@ -21660,7 +22193,7 @@ class DataJobKeyClass(_Aspect):
 
 
     ASPECT_NAME = 'dataJobKey'
-    ASPECT_INFO = {'keyForEntity': 'dataJob', 'entityCategory': '_unset_', 'entityAspects': ['datahubIngestionRunSummary', 'datahubIngestionCheckpoint', 'domains', 'deprecation', 'versionInfo', '
+    ASPECT_INFO = {'keyForEntity': 'dataJob', 'entityCategory': '_unset_', 'entityAspects': ['datahubIngestionRunSummary', 'datahubIngestionCheckpoint', 'domains', 'deprecation', 'versionInfo', 'dataJobInfo', 'dataJobInputOutput', 'editableDataJobProperties', 'ownership', 'status', 'globalTags', 'browsePaths', 'glossaryTerms', 'institutionalMemory', 'dataPlatformInstance', 'container', 'browsePathsV2', 'structuredProperties', 'forms', 'subTypes', 'incidentsSummary', 'testResults', 'dataTransformLogic', 'proposals', 'anomaliesSummary', 'share', 'origin', 'lineageFeatures', 'documentation']}
     RECORD_SCHEMA = get_schema_type("com.linkedin.pegasus2avro.metadata.key.DataJobKey")
 
     def __init__(self,
@@ -21702,7 +22235,7 @@ class DataPlatformInstanceKeyClass(_Aspect):
 
 
     ASPECT_NAME = 'dataPlatformInstanceKey'
-    ASPECT_INFO = {'keyForEntity': 'dataPlatformInstance', 'entityCategory': 'internal', 'entityAspects': ['dataPlatformInstanceProperties', 'ownership', 'globalTags', 'institutionalMemory', 'deprecation', 'status']}
+    ASPECT_INFO = {'keyForEntity': 'dataPlatformInstance', 'entityCategory': 'internal', 'entityAspects': ['dataPlatformInstanceProperties', 'ownership', 'globalTags', 'institutionalMemory', 'deprecation', 'status', 'icebergWarehouseInfo']}
     RECORD_SCHEMA = get_schema_type("com.linkedin.pegasus2avro.metadata.key.DataPlatformInstanceKey")
 
     def __init__(self,
@@ -21858,7 +22391,7 @@ class DatasetKeyClass(_Aspect):
 
 
     ASPECT_NAME = 'datasetKey'
-    ASPECT_INFO = {'keyForEntity': 'dataset', 'entityCategory': 'core', 'entityAspects': ['viewProperties', 'subTypes', 'datasetProfile', 'datasetUsageStatistics', 'operation', 'domains', '
+    ASPECT_INFO = {'keyForEntity': 'dataset', 'entityCategory': 'core', 'entityAspects': ['viewProperties', 'subTypes', 'datasetProfile', 'datasetUsageStatistics', 'operation', 'domains', 'schemaMetadata', 'status', 'container', 'deprecation', 'testResults', 'siblings', 'embed', 'incidentsSummary', 'datasetProperties', 'editableDatasetProperties', 'datasetDeprecation', 'datasetUpstreamLineage', 'upstreamLineage', 'institutionalMemory', 'ownership', 'editableSchemaMetadata', 'globalTags', 'glossaryTerms', 'browsePaths', 'dataPlatformInstance', 'browsePathsV2', 'access', 'structuredProperties', 'forms', 'partitionsSummary', 'versionProperties', 'icebergCatalogInfo', 'inferredNeighbors', 'inferredMetadata', 'schemaFieldsInferredMetadata', 'schemaFieldsInferredNeighbors', 'assertionsSummary', 'usageFeatures', 'storageFeatures', 'lineageFeatures', 'proposals', 'schemaProposals', 'anomaliesSummary', 'share', 'origin', 'documentation', 'entityInferenceMetadata'], 'entityDoc': 'Datasets represent logical or physical data assets stored or represented in various data platforms. Tables, Views, Streams are all instances of datasets.'}
     RECORD_SCHEMA = get_schema_type("com.linkedin.pegasus2avro.metadata.key.DatasetKey")
 
     def __init__(self,
@@ -22235,7 +22768,7 @@ class MLFeatureKeyClass(_Aspect):
 
 
     ASPECT_NAME = 'mlFeatureKey'
-    ASPECT_INFO = {'keyForEntity': 'mlFeature', 'entityCategory': 'core', 'entityAspects': ['glossaryTerms', 'editableMlFeatureProperties', 'domains', '
+    ASPECT_INFO = {'keyForEntity': 'mlFeature', 'entityCategory': 'core', 'entityAspects': ['glossaryTerms', 'editableMlFeatureProperties', 'domains', 'mlFeatureProperties', 'ownership', 'institutionalMemory', 'status', 'deprecation', 'browsePaths', 'globalTags', 'dataPlatformInstance', 'browsePathsV2', 'structuredProperties', 'forms', 'testResults', 'proposals', 'share', 'origin', 'lineageFeatures', 'documentation', 'incidentsSummary']}
     RECORD_SCHEMA = get_schema_type("com.linkedin.pegasus2avro.metadata.key.MLFeatureKey")
 
     def __init__(self,
@@ -22277,7 +22810,7 @@ class MLFeatureTableKeyClass(_Aspect):
 
 
     ASPECT_NAME = 'mlFeatureTableKey'
-    ASPECT_INFO = {'keyForEntity': 'mlFeatureTable', 'entityCategory': 'core', 'entityAspects': ['glossaryTerms', 'editableMlFeatureTableProperties', 'domains', '
+    ASPECT_INFO = {'keyForEntity': 'mlFeatureTable', 'entityCategory': 'core', 'entityAspects': ['glossaryTerms', 'editableMlFeatureTableProperties', 'domains', 'mlFeatureTableProperties', 'ownership', 'institutionalMemory', 'status', 'deprecation', 'browsePaths', 'globalTags', 'dataPlatformInstance', 'browsePathsV2', 'structuredProperties', 'forms', 'testResults', 'proposals', 'share', 'origin', 'lineageFeatures', 'documentation']}
     RECORD_SCHEMA = get_schema_type("com.linkedin.pegasus2avro.metadata.key.MLFeatureTableKey")
 
     def __init__(self,
@@ -22374,7 +22907,7 @@ class MLModelGroupKeyClass(_Aspect):
 
 
     ASPECT_NAME = 'mlModelGroupKey'
-    ASPECT_INFO = {'keyForEntity': 'mlModelGroup', 'entityCategory': 'core', 'entityAspects': ['glossaryTerms', 'editableMlModelGroupProperties', 'domains', '
+    ASPECT_INFO = {'keyForEntity': 'mlModelGroup', 'entityCategory': 'core', 'entityAspects': ['glossaryTerms', 'editableMlModelGroupProperties', 'domains', 'mlModelGroupProperties', 'ownership', 'status', 'deprecation', 'browsePaths', 'globalTags', 'dataPlatformInstance', 'browsePathsV2', 'structuredProperties', 'forms', 'testResults', 'proposals', 'share', 'origin', 'lineageFeatures', 'documentation']}
     RECORD_SCHEMA = get_schema_type("com.linkedin.pegasus2avro.metadata.key.MLModelGroupKey")
 
     def __init__(self,
@@ -22429,7 +22962,7 @@ class MLModelKeyClass(_Aspect):
 
 
     ASPECT_NAME = 'mlModelKey'
-    ASPECT_INFO = {'keyForEntity': 'mlModel', 'entityCategory': 'core', 'entityAspects': ['glossaryTerms', 'editableMlModelProperties', 'domains', '
+    ASPECT_INFO = {'keyForEntity': 'mlModel', 'entityCategory': 'core', 'entityAspects': ['glossaryTerms', 'editableMlModelProperties', 'domains', 'ownership', 'mlModelProperties', 'intendedUse', 'mlModelFactorPrompts', 'mlModelMetrics', 'mlModelEvaluationData', 'mlModelTrainingData', 'mlModelQuantitativeAnalyses', 'mlModelEthicalConsiderations', 'mlModelCaveatsAndRecommendations', 'institutionalMemory', 'sourceCode', 'status', 'cost', 'deprecation', 'browsePaths', 'globalTags', 'dataPlatformInstance', 'browsePathsV2', 'structuredProperties', 'forms', 'testResults', 'versionProperties', 'proposals', 'share', 'origin', 'lineageFeatures', 'documentation', 'incidentsSummary']}
     RECORD_SCHEMA = get_schema_type("com.linkedin.pegasus2avro.metadata.key.MLModelKey")
 
     def __init__(self,
@@ -22484,7 +23017,7 @@ class MLPrimaryKeyKeyClass(_Aspect):
 
 
     ASPECT_NAME = 'mlPrimaryKeyKey'
-    ASPECT_INFO = {'keyForEntity': 'mlPrimaryKey', 'entityCategory': 'core', 'entityAspects': ['glossaryTerms', 'editableMlPrimaryKeyProperties', 'domains', '
+    ASPECT_INFO = {'keyForEntity': 'mlPrimaryKey', 'entityCategory': 'core', 'entityAspects': ['glossaryTerms', 'editableMlPrimaryKeyProperties', 'domains', 'mlPrimaryKeyProperties', 'ownership', 'institutionalMemory', 'status', 'deprecation', 'globalTags', 'dataPlatformInstance', 'structuredProperties', 'forms', 'testResults', 'proposals', 'share', 'origin', 'lineageFeatures']}
     RECORD_SCHEMA = get_schema_type("com.linkedin.pegasus2avro.metadata.key.MLPrimaryKeyKey")
 
     def __init__(self,
@@ -22568,7 +23101,7 @@ class NotebookKeyClass(_Aspect):
 
 
     ASPECT_NAME = 'notebookKey'
-    ASPECT_INFO = {'keyForEntity': 'notebook', 'entityCategory': '_unset_', 'entityAspects': ['notebookInfo', 'notebookContent', 'editableNotebookProperties', 'ownership', 'status', 'globalTags', 'glossaryTerms', 'browsePaths', 'institutionalMemory', 'domains', 'subTypes', 'dataPlatformInstance', '
+    ASPECT_INFO = {'keyForEntity': 'notebook', 'entityCategory': '_unset_', 'entityAspects': ['notebookInfo', 'notebookContent', 'editableNotebookProperties', 'ownership', 'status', 'globalTags', 'glossaryTerms', 'browsePaths', 'institutionalMemory', 'domains', 'subTypes', 'dataPlatformInstance', 'browsePathsV2', 'testResults', 'proposals', 'share', 'origin', 'documentation'], 'entityDoc': 'Notebook represents a combination of query, text, chart and etc. This is in BETA version'}
     RECORD_SCHEMA = get_schema_type("com.linkedin.pegasus2avro.metadata.key.NotebookKey")
 
     def __init__(self,
@@ -22735,6 +23268,36 @@ class RecommendationModuleKeyClass(_Aspect):
         self._inner_dict['identifier'] = value
 
 
+class RemoteExecutorGlobalConfigKeyClass(_Aspect):
+    """Key for the *Singleton* DataHub Remote Executor Global Config"""
+
+
+    ASPECT_NAME = 'dataHubRemoteExecutorGlobalConfigKey'
+    ASPECT_INFO = {'keyForEntity': 'dataHubRemoteExecutorGlobalConfig', 'entityCategory': 'internal', 'entityAspects': ['dataHubRemoteExecutorPoolGlobalConfig']}
+    RECORD_SCHEMA = get_schema_type("com.linkedin.pegasus2avro.metadata.key.RemoteExecutorGlobalConfigKey")
+
+    def __init__(self,
+        id: str,
+    ):
+        super().__init__()
+
+        self.id = id
+
+    def _restore_defaults(self) -> None:
+        self.id = str()
+
+
+    @property
+    def id(self) -> str:
+        """The unique identifier for the remote executor global config
+        NOTE: since this a singleton, there should be a hardcoded key in the AcrylConstants file"""
+        return self._inner_dict.get('id') # type: ignore
+
+    @id.setter
+    def id(self, value: str) -> None:
+        self._inner_dict['id'] = value
+
+
 class RemoteExecutorKeyClass(_Aspect):
     """Key for an DataHub Remote Executor"""
 
@@ -22773,24 +23336,24 @@ class RemoteExecutorPoolKeyClass(_Aspect):
     RECORD_SCHEMA = get_schema_type("com.linkedin.pegasus2avro.metadata.key.RemoteExecutorPoolKey")
 
     def __init__(self,
-
+        id: str,
     ):
         super().__init__()
 
-        self.
+        self.id = id
 
     def _restore_defaults(self) -> None:
-        self.
+        self.id = str()
 
 
     @property
-    def 
+    def id(self) -> str:
         """The unique identifier for the remote executor pool"""
-        return self._inner_dict.get('
+        return self._inner_dict.get('id') # type: ignore
 
-    @
-    def 
-        self._inner_dict['
+    @id.setter
+    def id(self, value: str) -> None:
+        self._inner_dict['id'] = value
 
 
 class RoleKeyClass(_Aspect):
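The two hunks above add a singleton global-config key entity and give RemoteExecutorPoolKeyClass an explicit id field. A minimal construction sketch, illustrative only and not part of the diff; the import path assumes the generated module acryl_datahub_cloud.metadata.schema_classes, and both id strings are made up:

    from acryl_datahub_cloud.metadata.schema_classes import (
        RemoteExecutorGlobalConfigKeyClass,
        RemoteExecutorPoolKeyClass,
    )

    # Pool keys are addressed by an explicit id string.
    pool_key = RemoteExecutorPoolKeyClass(id="default-pool")

    # The global config entity is a singleton; per the generated docstring the real key
    # is expected to be a hardcoded constant, so "global" here is purely hypothetical.
    global_config_key = RemoteExecutorGlobalConfigKeyClass(id="global")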
@@ -22827,7 +23390,7 @@ class SchemaFieldKeyClass(_Aspect):
 
 
     ASPECT_NAME = 'schemaFieldKey'
-    ASPECT_INFO = {'keyForEntity': 'schemaField', 'entityCategory': 'core', 'entityAspects': ['schemafieldInfo', 'structuredProperties', 'forms', 'businessAttributes', 'status', 'schemaFieldAliases', 'documentation', 'testResults', '
+    ASPECT_INFO = {'keyForEntity': 'schemaField', 'entityCategory': 'core', 'entityAspects': ['schemafieldInfo', 'structuredProperties', 'forms', 'businessAttributes', 'status', 'schemaFieldAliases', 'documentation', 'testResults', 'deprecation', 'schemaFieldProfile', 'lineageFeatures']}
     RECORD_SCHEMA = get_schema_type("com.linkedin.pegasus2avro.metadata.key.SchemaFieldKey")
 
     def __init__(self,
@@ -24859,6 +25422,437 @@ class TagSnapshotClass(DictWrapper):
         self._inner_dict['aspects'] = value
 
 
+class DataHubMetricCubeDefinitionClass(_Aspect):
+    """The structure of an individual metric cube in DataHub."""
+
+
+    ASPECT_NAME = 'dataHubMetricCubeDefinition'
+    ASPECT_INFO = {}
+    RECORD_SCHEMA = get_schema_type("com.linkedin.pegasus2avro.metric.DataHubMetricCubeDefinition")
+
+    def __init__(self,
+        name: str,
+        origin: "DataHubMetricCubeOriginClass",
+        type: Optional[str]=None,
+        description: Union[None, str]=None,
+        entity: Union[None, str]=None,
+        measures: Union[None, "DataHubMetricCubeMeasuresClass"]=None,
+        dimensions: Union[None, "DataHubMetricCubeDimensionsClass"]=None,
+    ):
+        super().__init__()
+
+        self.name = name
+        if type is None:
+            # default: 'custom'
+            self.type = self.RECORD_SCHEMA.fields_dict["type"].default
+        else:
+            self.type = type
+        self.description = description
+        self.entity = entity
+        self.origin = origin
+        self.measures = measures
+        self.dimensions = dimensions
+
+    def _restore_defaults(self) -> None:
+        self.name = str()
+        self.type = self.RECORD_SCHEMA.fields_dict["type"].default
+        self.description = self.RECORD_SCHEMA.fields_dict["description"].default
+        self.entity = self.RECORD_SCHEMA.fields_dict["entity"].default
+        self.origin = DataHubMetricCubeOriginClass._construct_with_defaults()
+        self.measures = self.RECORD_SCHEMA.fields_dict["measures"].default
+        self.dimensions = self.RECORD_SCHEMA.fields_dict["dimensions"].default
+
+
+    @property
+    def name(self) -> str:
+        """ Display name of the metric cube"""
+        return self._inner_dict.get('name') # type: ignore
+
+    @name.setter
+    def name(self, value: str) -> None:
+        self._inner_dict['name'] = value
+
+
+    @property
+    def type(self) -> str:
+        """A type or category for the metric cube. This is used to categorize the metric cube & for filtering.
+
+        This may be used to group similar types of metrics for a given entity, e.g. 'row_count', 'error_count', etc.
+        that originated in different places."""
+        return self._inner_dict.get('type') # type: ignore
+
+    @type.setter
+    def type(self, value: str) -> None:
+        self._inner_dict['type'] = value
+
+
+    @property
+    def description(self) -> Union[None, str]:
+        """ Optional description for the metric"""
+        return self._inner_dict.get('description') # type: ignore
+
+    @description.setter
+    def description(self, value: Union[None, str]) -> None:
+        self._inner_dict['description'] = value
+
+
+    @property
+    def entity(self) -> Union[None, str]:
+        """ An optional URN for the entity that this metric cube is associated with."""
+        return self._inner_dict.get('entity') # type: ignore
+
+    @entity.setter
+    def entity(self, value: Union[None, str]) -> None:
+        self._inner_dict['entity'] = value
+
+
+    @property
+    def origin(self) -> "DataHubMetricCubeOriginClass":
+        """The origin of the metric cube."""
+        return self._inner_dict.get('origin') # type: ignore
+
+    @origin.setter
+    def origin(self, value: "DataHubMetricCubeOriginClass") -> None:
+        self._inner_dict['origin'] = value
+
+
+    @property
+    def measures(self) -> Union[None, "DataHubMetricCubeMeasuresClass"]:
+        """ Optional - The measures of the cube for display purposes."""
+        return self._inner_dict.get('measures') # type: ignore
+
+    @measures.setter
+    def measures(self, value: Union[None, "DataHubMetricCubeMeasuresClass"]) -> None:
+        self._inner_dict['measures'] = value
+
+
+    @property
+    def dimensions(self) -> Union[None, "DataHubMetricCubeDimensionsClass"]:
+        """Optional - The dimensions of the cube for display purposes."""
+        return self._inner_dict.get('dimensions') # type: ignore
+
+    @dimensions.setter
+    def dimensions(self, value: Union[None, "DataHubMetricCubeDimensionsClass"]) -> None:
+        self._inner_dict['dimensions'] = value
+
+
+class DataHubMetricCubeDimensionClass(DictWrapper):
+    """The definition of a metric cube dimension."""
+
+    RECORD_SCHEMA = get_schema_type("com.linkedin.pegasus2avro.metric.DataHubMetricCubeDimension")
+    def __init__(self,
+        name: str,
+    ):
+        super().__init__()
+
+        self.name = name
+
+    def _restore_defaults(self) -> None:
+        self.name = str()
+
+
+    @property
+    def name(self) -> str:
+        """ The name of the dimension"""
+        return self._inner_dict.get('name') # type: ignore
+
+    @name.setter
+    def name(self, value: str) -> None:
+        self._inner_dict['name'] = value
+
+
+class DataHubMetricCubeDimensionsClass(DictWrapper):
+    """The dimensions of the cube. This is what you filter and group by.
+    This is a record to allow for future expansion of the dimensions."""
+
+    RECORD_SCHEMA = get_schema_type("com.linkedin.pegasus2avro.metric.DataHubMetricCubeDimensions")
+    def __init__(self,
+        dim1: Union[None, "DataHubMetricCubeDimensionClass"]=None,
+        dim2: Union[None, "DataHubMetricCubeDimensionClass"]=None,
+        dim3: Union[None, "DataHubMetricCubeDimensionClass"]=None,
+    ):
+        super().__init__()
+
+        self.dim1 = dim1
+        self.dim2 = dim2
+        self.dim3 = dim3
+
+    def _restore_defaults(self) -> None:
+        self.dim1 = self.RECORD_SCHEMA.fields_dict["dim1"].default
+        self.dim2 = self.RECORD_SCHEMA.fields_dict["dim2"].default
+        self.dim3 = self.RECORD_SCHEMA.fields_dict["dim3"].default
+
+
+    @property
+    def dim1(self) -> Union[None, "DataHubMetricCubeDimensionClass"]:
+        """ The first measure being tracked in the cube."""
+        return self._inner_dict.get('dim1') # type: ignore
+
+    @dim1.setter
+    def dim1(self, value: Union[None, "DataHubMetricCubeDimensionClass"]) -> None:
+        self._inner_dict['dim1'] = value
+
+
+    @property
+    def dim2(self) -> Union[None, "DataHubMetricCubeDimensionClass"]:
+        """Optional: A second measure being tracked in the cube."""
+        return self._inner_dict.get('dim2') # type: ignore
+
+    @dim2.setter
+    def dim2(self, value: Union[None, "DataHubMetricCubeDimensionClass"]) -> None:
+        self._inner_dict['dim2'] = value
+
+
+    @property
+    def dim3(self) -> Union[None, "DataHubMetricCubeDimensionClass"]:
+        """Optional: A third measure being tracked in the cube."""
+        return self._inner_dict.get('dim3') # type: ignore
+
+    @dim3.setter
+    def dim3(self, value: Union[None, "DataHubMetricCubeDimensionClass"]) -> None:
+        self._inner_dict['dim3'] = value
+
+
+class DataHubMetricCubeEventClass(_Aspect):
+    """A timeseries measure event, e.g. a single observation."""
+
+
+    ASPECT_NAME = 'dataHubMetricCubeEvent'
+    ASPECT_TYPE = 'timeseries'
+    ASPECT_INFO = {}
+    RECORD_SCHEMA = get_schema_type("com.linkedin.pegasus2avro.metric.DataHubMetricCubeEvent")
+
+    def __init__(self,
+        reportedTimeMillis: int,
+        measure: float,
+        timestampMillis: int,
+        dim1: Union[None, List[str]]=None,
+        dim2: Union[None, List[str]]=None,
+        dim3: Union[None, List[str]]=None,
+        eventGranularity: Union[None, "TimeWindowSizeClass"]=None,
+        partitionSpec: Optional[Union["PartitionSpecClass", None]]=None,
+        messageId: Union[None, str]=None,
+    ):
+        super().__init__()
+
+        self.reportedTimeMillis = reportedTimeMillis
+        self.measure = measure
+        self.dim1 = dim1
+        self.dim2 = dim2
+        self.dim3 = dim3
+        self.timestampMillis = timestampMillis
+        self.eventGranularity = eventGranularity
+        if partitionSpec is None:
+            # default: {'partition': 'FULL_TABLE_SNAPSHOT', 'type': 'FULL_TABLE', 'timePartition': None}
+            self.partitionSpec = _json_converter.from_json_object(self.RECORD_SCHEMA.fields_dict["partitionSpec"].default, writers_schema=self.RECORD_SCHEMA.fields_dict["partitionSpec"].type)
+        else:
+            self.partitionSpec = partitionSpec
+        self.messageId = messageId
+
+    def _restore_defaults(self) -> None:
+        self.reportedTimeMillis = int()
+        self.measure = float()
+        self.dim1 = self.RECORD_SCHEMA.fields_dict["dim1"].default
+        self.dim2 = self.RECORD_SCHEMA.fields_dict["dim2"].default
+        self.dim3 = self.RECORD_SCHEMA.fields_dict["dim3"].default
+        self.timestampMillis = int()
+        self.eventGranularity = self.RECORD_SCHEMA.fields_dict["eventGranularity"].default
+        self.partitionSpec = _json_converter.from_json_object(self.RECORD_SCHEMA.fields_dict["partitionSpec"].default, writers_schema=self.RECORD_SCHEMA.fields_dict["partitionSpec"].type)
+        self.messageId = self.RECORD_SCHEMA.fields_dict["messageId"].default
+
+
+    @property
+    def reportedTimeMillis(self) -> int:
+        """The event or bucket reported time field as epoch at UTC in milli seconds.
+        This must be provided in order to pass validation."""
+        return self._inner_dict.get('reportedTimeMillis') # type: ignore
+
+    @reportedTimeMillis.setter
+    def reportedTimeMillis(self, value: int) -> None:
+        self._inner_dict['reportedTimeMillis'] = value
+
+
+    @property
+    def measure(self) -> float:
+        """The first measure value - Typically this is the primary metric."""
+        return self._inner_dict.get('measure') # type: ignore
+
+    @measure.setter
+    def measure(self, value: float) -> None:
+        self._inner_dict['measure'] = value
+
+
+    @property
+    def dim1(self) -> Union[None, List[str]]:
+        """The first dimension value(s). Array type to support multi-dimensionality."""
+        return self._inner_dict.get('dim1') # type: ignore
+
+    @dim1.setter
+    def dim1(self, value: Union[None, List[str]]) -> None:
+        self._inner_dict['dim1'] = value
+
+
+    @property
+    def dim2(self) -> Union[None, List[str]]:
+        """The second dimension value(s). Array type to support multi-dimensionality."""
+        return self._inner_dict.get('dim2') # type: ignore
+
+    @dim2.setter
+    def dim2(self, value: Union[None, List[str]]) -> None:
+        self._inner_dict['dim2'] = value
+
+
+    @property
+    def dim3(self) -> Union[None, List[str]]:
+        """The third dimension value(s). Array type to support multi-dimensionality."""
+        return self._inner_dict.get('dim3') # type: ignore
+
+    @dim3.setter
+    def dim3(self, value: Union[None, List[str]]) -> None:
+        self._inner_dict['dim3'] = value
+
+
+    @property
+    def timestampMillis(self) -> int:
+        """The event timestamp field as epoch at UTC in milli seconds."""
+        return self._inner_dict.get('timestampMillis') # type: ignore
+
+    @timestampMillis.setter
+    def timestampMillis(self, value: int) -> None:
+        self._inner_dict['timestampMillis'] = value
+
+
+    @property
+    def eventGranularity(self) -> Union[None, "TimeWindowSizeClass"]:
+        """Granularity of the event if applicable"""
+        return self._inner_dict.get('eventGranularity') # type: ignore
+
+    @eventGranularity.setter
+    def eventGranularity(self, value: Union[None, "TimeWindowSizeClass"]) -> None:
+        self._inner_dict['eventGranularity'] = value
+
+
+    @property
+    def partitionSpec(self) -> Union["PartitionSpecClass", None]:
+        """The optional partition specification."""
+        return self._inner_dict.get('partitionSpec') # type: ignore
+
+    @partitionSpec.setter
+    def partitionSpec(self, value: Union["PartitionSpecClass", None]) -> None:
+        self._inner_dict['partitionSpec'] = value
+
+
+    @property
+    def messageId(self) -> Union[None, str]:
+        """The optional messageId, if provided serves as a custom user-defined unique identifier for an aspect value."""
+        return self._inner_dict.get('messageId') # type: ignore
+
+    @messageId.setter
+    def messageId(self, value: Union[None, str]) -> None:
+        self._inner_dict['messageId'] = value
+
+
+class DataHubMetricCubeMeasureClass(DictWrapper):
+    """The definition of a metric cube measure."""
+
+    RECORD_SCHEMA = get_schema_type("com.linkedin.pegasus2avro.metric.DataHubMetricCubeMeasure")
+    def __init__(self,
+        name: str,
+    ):
+        super().__init__()
+
+        self.name = name
+
+    def _restore_defaults(self) -> None:
+        self.name = str()
+
+
+    @property
+    def name(self) -> str:
+        """ The name of the measure"""
+        return self._inner_dict.get('name') # type: ignore
+
+    @name.setter
+    def name(self, value: str) -> None:
+        self._inner_dict['name'] = value
+
+
+class DataHubMetricCubeMeasuresClass(DictWrapper):
+    """The definition of the measures of a metric cube.
+    A measure is a metric that is being tracked in the cube."""
+
+    RECORD_SCHEMA = get_schema_type("com.linkedin.pegasus2avro.metric.DataHubMetricCubeMeasures")
+    def __init__(self,
+        measure: "DataHubMetricCubeMeasureClass",
+    ):
+        super().__init__()
+
+        self.measure = measure
+
+    def _restore_defaults(self) -> None:
+        self.measure = DataHubMetricCubeMeasureClass._construct_with_defaults()
+
+
+    @property
+    def measure(self) -> "DataHubMetricCubeMeasureClass":
+        """ The first measure being tracked in the cube."""
+        return self._inner_dict.get('measure') # type: ignore
+
+    @measure.setter
+    def measure(self, value: "DataHubMetricCubeMeasureClass") -> None:
+        self._inner_dict['measure'] = value
+
+
+class DataHubMetricCubeOriginClass(DictWrapper):
+    """Information about the origin of the metric cube"""
+
+    RECORD_SCHEMA = get_schema_type("com.linkedin.pegasus2avro.metric.DataHubMetricCubeOrigin")
+    def __init__(self,
+        type: Union[str, "DataHubMetricSourceTypeClass"],
+        originUrn: Union[None, str]=None,
+    ):
+        super().__init__()
+
+        self.type = type
+        self.originUrn = originUrn
+
+    def _restore_defaults(self) -> None:
+        self.type = DataHubMetricSourceTypeClass.MANUAL
+        self.originUrn = self.RECORD_SCHEMA.fields_dict["originUrn"].default
+
+
+    @property
+    def type(self) -> Union[str, "DataHubMetricSourceTypeClass"]:
+        """Message associated with the incident"""
+        return self._inner_dict.get('type') # type: ignore
+
+    @type.setter
+    def type(self, value: Union[str, "DataHubMetricSourceTypeClass"]) -> None:
+        self._inner_dict['type'] = value
+
+
+    @property
+    def originUrn(self) -> Union[None, str]:
+        """Reference to the source that created the metric.
+        In the case of assertion monitor, this is the URN of the assertion monitor."""
+        return self._inner_dict.get('originUrn') # type: ignore
+
+    @originUrn.setter
+    def originUrn(self, value: Union[None, str]) -> None:
+        self._inner_dict['originUrn'] = value
+
+
+class DataHubMetricSourceTypeClass(object):
+    # No docs available.
+
+    MANUAL = "MANUAL"
+    """Manually created metric, via UI or API."""
+
+    ASSERTION_MONITOR = "ASSERTION_MONITOR"
+    """Assertion monitor created the metric."""
+
+
+
 class BaseDataClass(DictWrapper):
     """BaseData record"""
 
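The block above introduces the metric cube model: a definition aspect plus a timeseries event aspect, with up to three dimensions and a primary measure. A minimal sketch of how these generated classes could be populated, illustrative only; the metric name, URN, dimension values, and measure value are invented, and the import path assumes the generated module acryl_datahub_cloud.metadata.schema_classes:

    import time
    from acryl_datahub_cloud.metadata.schema_classes import (
        DataHubMetricCubeDefinitionClass,
        DataHubMetricCubeDimensionClass,
        DataHubMetricCubeDimensionsClass,
        DataHubMetricCubeEventClass,
        DataHubMetricCubeMeasureClass,
        DataHubMetricCubeMeasuresClass,
        DataHubMetricCubeOriginClass,
        DataHubMetricSourceTypeClass,
    )

    # Describe the cube: what it measures and how observations can be sliced.
    definition = DataHubMetricCubeDefinitionClass(
        name="Dataset row count",
        origin=DataHubMetricCubeOriginClass(type=DataHubMetricSourceTypeClass.MANUAL),
        type="row_count",
        entity="urn:li:dataset:(urn:li:dataPlatform:snowflake,db.schema.table,PROD)",
        measures=DataHubMetricCubeMeasuresClass(measure=DataHubMetricCubeMeasureClass(name="rows")),
        dimensions=DataHubMetricCubeDimensionsClass(dim1=DataHubMetricCubeDimensionClass(name="region")),
    )

    # Record one observation of the measure for the dim1 value "EMEA".
    now_ms = int(time.time() * 1000)
    event = DataHubMetricCubeEventClass(
        reportedTimeMillis=now_ms,
        measure=1234567.0,
        timestampMillis=now_ms,
        dim1=["EMEA"],
    )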
@@ -27339,7 +28333,7 @@ class MonitorInfoClass(_Aspect):
 
     @property
     def executorId(self) -> Union[None, str]:
-        """Advanced: The executor
+        """Advanced: The executor pool id of the remote monitor service, if any."""
         return self._inner_dict.get('executorId') # type: ignore
 
     @executorId.setter
@@ -34992,7 +35986,7 @@ class VersionSetPropertiesClass(_Aspect):
     def _restore_defaults(self) -> None:
         self.customProperties = dict()
         self.latest = str()
-        self.versioningScheme = VersioningSchemeClass.
+        self.versioningScheme = VersioningSchemeClass.LEXICOGRAPHIC_STRING
 
 
     @property
@@ -35028,7 +36022,12 @@ class VersionSetPropertiesClass(_Aspect):
 class VersioningSchemeClass(object):
     # No docs available.
 
+    LEXICOGRAPHIC_STRING = "LEXICOGRAPHIC_STRING"
+    """String sorted lexicographically."""
+
     ALPHANUMERIC_GENERATED_BY_DATAHUB = "ALPHANUMERIC_GENERATED_BY_DATAHUB"
+    """String managed by DataHub. Currently, an 8 character alphabetical string."""
+
 
 
 class DataHubViewDefinitionClass(DictWrapper):
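The two hunks above add LEXICOGRAPHIC_STRING as a versioning scheme constant and make it the restore-time default for VersionSetPropertiesClass. A small illustrative sketch of referencing the new constant (only the enum-style constant itself is shown in this diff, so any surrounding usage should be treated as hypothetical):

    from acryl_datahub_cloud.metadata.schema_classes import VersioningSchemeClass

    # Both constants are plain strings on the generated enum-style class.
    assert VersioningSchemeClass.LEXICOGRAPHIC_STRING == "LEXICOGRAPHIC_STRING"
    scheme = VersioningSchemeClass.LEXICOGRAPHIC_STRING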
@@ -35195,7 +36194,10 @@ __SCHEMA_TYPES = {
     'com.linkedin.pegasus2avro.actionrequest.DataContractProposal': DataContractProposalClass,
     'com.linkedin.pegasus2avro.actionrequest.DataContractProposalOperationType': DataContractProposalOperationTypeClass,
     'com.linkedin.pegasus2avro.actionrequest.DescriptionProposal': DescriptionProposalClass,
+    'com.linkedin.pegasus2avro.actionrequest.DomainProposal': DomainProposalClass,
     'com.linkedin.pegasus2avro.actionrequest.GlossaryTermProposal': GlossaryTermProposalClass,
+    'com.linkedin.pegasus2avro.actionrequest.OwnerProposal': OwnerProposalClass,
+    'com.linkedin.pegasus2avro.actionrequest.StructuredPropertyProposal': StructuredPropertyProposalClass,
     'com.linkedin.pegasus2avro.actionrequest.TagProposal': TagProposalClass,
     'com.linkedin.pegasus2avro.ai.AiInferenceMetadata': AiInferenceMetadataClass,
     'com.linkedin.pegasus2avro.ai.EntityInferenceMetadata': EntityInferenceMetadataClass,
@@ -35407,6 +36409,7 @@ __SCHEMA_TYPES = {
     'com.linkedin.pegasus2avro.dataplatform.PlatformType': PlatformTypeClass,
     'com.linkedin.pegasus2avro.dataplatform.slack.SlackUserInfo': SlackUserInfoClass,
     'com.linkedin.pegasus2avro.dataplatforminstance.DataPlatformInstanceProperties': DataPlatformInstancePropertiesClass,
+    'com.linkedin.pegasus2avro.dataplatforminstance.IcebergWarehouseInfo': IcebergWarehouseInfoClass,
     'com.linkedin.pegasus2avro.dataprocess.DataProcessInfo': DataProcessInfoClass,
     'com.linkedin.pegasus2avro.dataprocess.DataProcessInstanceInput': DataProcessInstanceInputClass,
     'com.linkedin.pegasus2avro.dataprocess.DataProcessInstanceOutput': DataProcessInstanceOutputClass,
@@ -35439,6 +36442,7 @@ __SCHEMA_TYPES = {
     'com.linkedin.pegasus2avro.dataset.FineGrainedLineageDownstreamType': FineGrainedLineageDownstreamTypeClass,
     'com.linkedin.pegasus2avro.dataset.FineGrainedLineageUpstreamType': FineGrainedLineageUpstreamTypeClass,
     'com.linkedin.pegasus2avro.dataset.Histogram': HistogramClass,
+    'com.linkedin.pegasus2avro.dataset.IcebergCatalogInfo': IcebergCatalogInfoClass,
     'com.linkedin.pegasus2avro.dataset.PartitionSummary': PartitionSummaryClass,
     'com.linkedin.pegasus2avro.dataset.PartitionsSummary': PartitionsSummaryClass,
     'com.linkedin.pegasus2avro.dataset.Quantile': QuantileClass,
@@ -35477,7 +36481,10 @@ __SCHEMA_TYPES = {
     'com.linkedin.pegasus2avro.execution.ExecutionRequestSource': ExecutionRequestSourceClass,
     'com.linkedin.pegasus2avro.execution.StructuredExecutionReport': StructuredExecutionReportClass,
     'com.linkedin.pegasus2avro.executor.RemoteExecutorStatus': RemoteExecutorStatusClass,
+    'com.linkedin.pegasus2avro.executorglobalconfig.RemoteExecutorPoolGlobalConfig': RemoteExecutorPoolGlobalConfigClass,
     'com.linkedin.pegasus2avro.executorpool.RemoteExecutorPoolInfo': RemoteExecutorPoolInfoClass,
+    'com.linkedin.pegasus2avro.executorpool.RemoteExecutorPoolState': RemoteExecutorPoolStateClass,
+    'com.linkedin.pegasus2avro.executorpool.RemoteExecutorPoolStatus': RemoteExecutorPoolStatusClass,
     'com.linkedin.pegasus2avro.form.DomainParams': DomainParamsClass,
     'com.linkedin.pegasus2avro.form.DynamicFormAssignment': DynamicFormAssignmentClass,
     'com.linkedin.pegasus2avro.form.FormActorAssignment': FormActorAssignmentClass,
@@ -35557,6 +36564,7 @@ __SCHEMA_TYPES = {
     'com.linkedin.pegasus2avro.metadata.key.DataHubActionKey': DataHubActionKeyClass,
     'com.linkedin.pegasus2avro.metadata.key.DataHubConnectionKey': DataHubConnectionKeyClass,
     'com.linkedin.pegasus2avro.metadata.key.DataHubIngestionSourceKey': DataHubIngestionSourceKeyClass,
+    'com.linkedin.pegasus2avro.metadata.key.DataHubMetricCubeKey': DataHubMetricCubeKeyClass,
     'com.linkedin.pegasus2avro.metadata.key.DataHubPersonaKey': DataHubPersonaKeyClass,
     'com.linkedin.pegasus2avro.metadata.key.DataHubPolicyKey': DataHubPolicyKeyClass,
     'com.linkedin.pegasus2avro.metadata.key.DataHubRetentionKey': DataHubRetentionKeyClass,
@@ -35594,6 +36602,7 @@ __SCHEMA_TYPES = {
     'com.linkedin.pegasus2avro.metadata.key.PostKey': PostKeyClass,
     'com.linkedin.pegasus2avro.metadata.key.QueryKey': QueryKeyClass,
     'com.linkedin.pegasus2avro.metadata.key.RecommendationModuleKey': RecommendationModuleKeyClass,
+    'com.linkedin.pegasus2avro.metadata.key.RemoteExecutorGlobalConfigKey': RemoteExecutorGlobalConfigKeyClass,
     'com.linkedin.pegasus2avro.metadata.key.RemoteExecutorKey': RemoteExecutorKeyClass,
     'com.linkedin.pegasus2avro.metadata.key.RemoteExecutorPoolKey': RemoteExecutorPoolKeyClass,
     'com.linkedin.pegasus2avro.metadata.key.RoleKey': RoleKeyClass,
@@ -35641,6 +36650,14 @@ __SCHEMA_TYPES = {
     'com.linkedin.pegasus2avro.metadata.snapshot.MLPrimaryKeySnapshot': MLPrimaryKeySnapshotClass,
     'com.linkedin.pegasus2avro.metadata.snapshot.SchemaFieldSnapshot': SchemaFieldSnapshotClass,
     'com.linkedin.pegasus2avro.metadata.snapshot.TagSnapshot': TagSnapshotClass,
+    'com.linkedin.pegasus2avro.metric.DataHubMetricCubeDefinition': DataHubMetricCubeDefinitionClass,
+    'com.linkedin.pegasus2avro.metric.DataHubMetricCubeDimension': DataHubMetricCubeDimensionClass,
+    'com.linkedin.pegasus2avro.metric.DataHubMetricCubeDimensions': DataHubMetricCubeDimensionsClass,
+    'com.linkedin.pegasus2avro.metric.DataHubMetricCubeEvent': DataHubMetricCubeEventClass,
+    'com.linkedin.pegasus2avro.metric.DataHubMetricCubeMeasure': DataHubMetricCubeMeasureClass,
+    'com.linkedin.pegasus2avro.metric.DataHubMetricCubeMeasures': DataHubMetricCubeMeasuresClass,
+    'com.linkedin.pegasus2avro.metric.DataHubMetricCubeOrigin': DataHubMetricCubeOriginClass,
+    'com.linkedin.pegasus2avro.metric.DataHubMetricSourceType': DataHubMetricSourceTypeClass,
     'com.linkedin.pegasus2avro.ml.metadata.BaseData': BaseDataClass,
     'com.linkedin.pegasus2avro.ml.metadata.CaveatDetails': CaveatDetailsClass,
     'com.linkedin.pegasus2avro.ml.metadata.CaveatsAndRecommendations': CaveatsAndRecommendationsClass,
@@ -35866,7 +36883,10 @@ __SCHEMA_TYPES = {
     'DataContractProposal': DataContractProposalClass,
     'DataContractProposalOperationType': DataContractProposalOperationTypeClass,
     'DescriptionProposal': DescriptionProposalClass,
+    'DomainProposal': DomainProposalClass,
     'GlossaryTermProposal': GlossaryTermProposalClass,
+    'OwnerProposal': OwnerProposalClass,
+    'StructuredPropertyProposal': StructuredPropertyProposalClass,
     'TagProposal': TagProposalClass,
     'AiInferenceMetadata': AiInferenceMetadataClass,
     'EntityInferenceMetadata': EntityInferenceMetadataClass,
@@ -36078,6 +37098,7 @@ __SCHEMA_TYPES = {
     'PlatformType': PlatformTypeClass,
     'SlackUserInfo': SlackUserInfoClass,
     'DataPlatformInstanceProperties': DataPlatformInstancePropertiesClass,
+    'IcebergWarehouseInfo': IcebergWarehouseInfoClass,
     'DataProcessInfo': DataProcessInfoClass,
     'DataProcessInstanceInput': DataProcessInstanceInputClass,
     'DataProcessInstanceOutput': DataProcessInstanceOutputClass,
@@ -36110,6 +37131,7 @@ __SCHEMA_TYPES = {
     'FineGrainedLineageDownstreamType': FineGrainedLineageDownstreamTypeClass,
     'FineGrainedLineageUpstreamType': FineGrainedLineageUpstreamTypeClass,
     'Histogram': HistogramClass,
+    'IcebergCatalogInfo': IcebergCatalogInfoClass,
     'PartitionSummary': PartitionSummaryClass,
     'PartitionsSummary': PartitionsSummaryClass,
     'Quantile': QuantileClass,
@@ -36148,7 +37170,10 @@ __SCHEMA_TYPES = {
     'ExecutionRequestSource': ExecutionRequestSourceClass,
     'StructuredExecutionReport': StructuredExecutionReportClass,
     'RemoteExecutorStatus': RemoteExecutorStatusClass,
+    'RemoteExecutorPoolGlobalConfig': RemoteExecutorPoolGlobalConfigClass,
     'RemoteExecutorPoolInfo': RemoteExecutorPoolInfoClass,
+    'RemoteExecutorPoolState': RemoteExecutorPoolStateClass,
+    'RemoteExecutorPoolStatus': RemoteExecutorPoolStatusClass,
     'DomainParams': DomainParamsClass,
     'DynamicFormAssignment': DynamicFormAssignmentClass,
     'FormActorAssignment': FormActorAssignmentClass,
@@ -36228,6 +37253,7 @@ __SCHEMA_TYPES = {
     'DataHubActionKey': DataHubActionKeyClass,
     'DataHubConnectionKey': DataHubConnectionKeyClass,
     'DataHubIngestionSourceKey': DataHubIngestionSourceKeyClass,
+    'DataHubMetricCubeKey': DataHubMetricCubeKeyClass,
     'DataHubPersonaKey': DataHubPersonaKeyClass,
     'DataHubPolicyKey': DataHubPolicyKeyClass,
     'DataHubRetentionKey': DataHubRetentionKeyClass,
@@ -36265,6 +37291,7 @@ __SCHEMA_TYPES = {
     'PostKey': PostKeyClass,
     'QueryKey': QueryKeyClass,
     'RecommendationModuleKey': RecommendationModuleKeyClass,
+    'RemoteExecutorGlobalConfigKey': RemoteExecutorGlobalConfigKeyClass,
     'RemoteExecutorKey': RemoteExecutorKeyClass,
     'RemoteExecutorPoolKey': RemoteExecutorPoolKeyClass,
     'RoleKey': RoleKeyClass,
@@ -36312,6 +37339,14 @@ __SCHEMA_TYPES = {
     'MLPrimaryKeySnapshot': MLPrimaryKeySnapshotClass,
     'SchemaFieldSnapshot': SchemaFieldSnapshotClass,
     'TagSnapshot': TagSnapshotClass,
+    'DataHubMetricCubeDefinition': DataHubMetricCubeDefinitionClass,
+    'DataHubMetricCubeDimension': DataHubMetricCubeDimensionClass,
+    'DataHubMetricCubeDimensions': DataHubMetricCubeDimensionsClass,
+    'DataHubMetricCubeEvent': DataHubMetricCubeEventClass,
+    'DataHubMetricCubeMeasure': DataHubMetricCubeMeasureClass,
+    'DataHubMetricCubeMeasures': DataHubMetricCubeMeasuresClass,
+    'DataHubMetricCubeOrigin': DataHubMetricCubeOriginClass,
+    'DataHubMetricSourceType': DataHubMetricSourceTypeClass,
     'BaseData': BaseDataClass,
     'CaveatDetails': CaveatDetailsClass,
     'CaveatsAndRecommendations': CaveatsAndRecommendationsClass,
@@ -36566,6 +37601,7 @@ ASPECT_CLASSES: List[Type[_Aspect]] = [
     EditableContainerPropertiesClass,
     ContainerPropertiesClass,
     ContainerClass,
+    RemoteExecutorPoolGlobalConfigClass,
     DataHubRetentionConfigClass,
     TelemetryClientIdClass,
     DataHubAccessTokenInfoClass,
@@ -36609,6 +37645,7 @@ ASPECT_CLASSES: List[Type[_Aspect]] = [
     TestInfoClass,
     BatchTestRunEventClass,
     DataPlatformInstancePropertiesClass,
+    IcebergWarehouseInfoClass,
     EditableERModelRelationshipPropertiesClass,
     ERModelRelationshipPropertiesClass,
     EntityTypeInfoClass,
@@ -36618,6 +37655,8 @@ ASPECT_CLASSES: List[Type[_Aspect]] = [
     EditableSchemaMetadataClass,
     SchemaProposalsClass,
     SchemaMetadataClass,
+    DataHubMetricCubeEventClass,
+    DataHubMetricCubeDefinitionClass,
     AccessClass,
     AnomaliesSummaryClass,
     ProposalsClass,
@@ -36666,6 +37705,7 @@ ASPECT_CLASSES: List[Type[_Aspect]] = [
     EditableDatasetPropertiesClass,
     DatasetProfileClass,
     DatasetDeprecationClass,
+    IcebergCatalogInfoClass,
     DatasetPropertiesClass,
     PartitionsSummaryClass,
     DatasetUpstreamLineageClass,
@@ -36710,6 +37750,7 @@ ASPECT_CLASSES: List[Type[_Aspect]] = [
     PostKeyClass,
     MLPrimaryKeyKeyClass,
     DataPlatformInstanceKeyClass,
+    DataHubMetricCubeKeyClass,
     QueryKeyClass,
     DatasetKeyClass,
     ExecutionRequestKeyClass,
@@ -36744,6 +37785,7 @@ ASPECT_CLASSES: List[Type[_Aspect]] = [
     DataHubRetentionKeyClass,
     OwnershipTypeKeyClass,
     ActionRequestKeyClass,
+    RemoteExecutorGlobalConfigKeyClass,
     DataFlowKeyClass,
     GenericEntityKeyClass,
     DataContractKeyClass,
@@ -36835,6 +37877,7 @@ class AspectBag(TypedDict, total=False):
     editableContainerProperties: EditableContainerPropertiesClass
     containerProperties: ContainerPropertiesClass
     container: ContainerClass
+    dataHubRemoteExecutorPoolGlobalConfig: RemoteExecutorPoolGlobalConfigClass
     dataHubRetentionConfig: DataHubRetentionConfigClass
     telemetryClientId: TelemetryClientIdClass
     dataHubAccessTokenInfo: DataHubAccessTokenInfoClass
@@ -36878,6 +37921,7 @@ class AspectBag(TypedDict, total=False):
     testInfo: TestInfoClass
     batchTestRunEvent: BatchTestRunEventClass
     dataPlatformInstanceProperties: DataPlatformInstancePropertiesClass
+    icebergWarehouseInfo: IcebergWarehouseInfoClass
     editableERModelRelationshipProperties: EditableERModelRelationshipPropertiesClass
     erModelRelationshipProperties: ERModelRelationshipPropertiesClass
     entityTypeInfo: EntityTypeInfoClass
@@ -36887,6 +37931,8 @@ class AspectBag(TypedDict, total=False):
     editableSchemaMetadata: EditableSchemaMetadataClass
     schemaProposals: SchemaProposalsClass
     schemaMetadata: SchemaMetadataClass
+    dataHubMetricCubeEvent: DataHubMetricCubeEventClass
+    dataHubMetricCubeDefinition: DataHubMetricCubeDefinitionClass
     access: AccessClass
     anomaliesSummary: AnomaliesSummaryClass
     proposals: ProposalsClass
@@ -36935,6 +37981,7 @@ class AspectBag(TypedDict, total=False):
     editableDatasetProperties: EditableDatasetPropertiesClass
     datasetProfile: DatasetProfileClass
     datasetDeprecation: DatasetDeprecationClass
+    icebergCatalogInfo: IcebergCatalogInfoClass
     datasetProperties: DatasetPropertiesClass
     partitionsSummary: PartitionsSummaryClass
     datasetUpstreamLineage: DatasetUpstreamLineageClass
@@ -36979,6 +38026,7 @@ class AspectBag(TypedDict, total=False):
     postKey: PostKeyClass
     mlPrimaryKeyKey: MLPrimaryKeyKeyClass
     dataPlatformInstanceKey: DataPlatformInstanceKeyClass
+    dataHubMetricCubeKey: DataHubMetricCubeKeyClass
     queryKey: QueryKeyClass
     datasetKey: DatasetKeyClass
     dataHubExecutionRequestKey: ExecutionRequestKeyClass
@@ -37013,6 +38061,7 @@ class AspectBag(TypedDict, total=False):
     dataHubRetentionKey: DataHubRetentionKeyClass
     ownershipTypeKey: OwnershipTypeKeyClass
     actionRequestKey: ActionRequestKeyClass
+    dataHubRemoteExecutorGlobalConfigKey: RemoteExecutorGlobalConfigKeyClass
     dataFlowKey: DataFlowKeyClass
     genericEntityKey: GenericEntityKeyClass
     dataContractKey: DataContractKeyClass
@@ -37077,6 +38126,7 @@ KEY_ASPECTS: Dict[str, Type[_Aspect]] = {
     'post': PostKeyClass,
     'mlPrimaryKey': MLPrimaryKeyKeyClass,
     'dataPlatformInstance': DataPlatformInstanceKeyClass,
+    'dataHubMetricCube': DataHubMetricCubeKeyClass,
     'query': QueryKeyClass,
     'dataset': DatasetKeyClass,
     'dataHubExecutionRequest': ExecutionRequestKeyClass,
@@ -37111,6 +38161,7 @@ KEY_ASPECTS: Dict[str, Type[_Aspect]] = {
     'dataHubRetention': DataHubRetentionKeyClass,
     'ownershipType': OwnershipTypeKeyClass,
     'actionRequest': ActionRequestKeyClass,
+    'dataHubRemoteExecutorGlobalConfig': RemoteExecutorGlobalConfigKeyClass,
     'dataFlow': DataFlowKeyClass,
     'dataContract': DataContractKeyClass,
     'dataHubConnection': DataHubConnectionKeyClass,
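The registry hunks above wire the new entities into the generated lookup tables. A small illustrative check, assuming the tables are importable from the generated module acryl_datahub_cloud.metadata.schema_classes as the diff shows:

    from acryl_datahub_cloud.metadata.schema_classes import (
        KEY_ASPECTS,
        DataHubMetricCubeKeyClass,
        RemoteExecutorGlobalConfigKeyClass,
    )

    # The new entity names now resolve to their key aspect classes.
    assert KEY_ASPECTS["dataHubMetricCube"] is DataHubMetricCubeKeyClass
    assert KEY_ASPECTS["dataHubRemoteExecutorGlobalConfig"] is RemoteExecutorGlobalConfigKeyClass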